diff --git a/AMDiS/src/DOFMatrix.cc b/AMDiS/src/DOFMatrix.cc
index af9d35ae1e0a3719d722efc4f359b525e399cb1c..3fbae0974050a6ba3e6d2345034104d93fd38d83 100644
--- a/AMDiS/src/DOFMatrix.cc
+++ b/AMDiS/src/DOFMatrix.cc
@@ -534,12 +534,10 @@ namespace AMDiS {
 
-    // Do the following only in sequential code. In parallel mode, the specific
-    // solver method must care about dirichlet boundary conditions.
+    // Insert a unit diagonal entry for every Dirichlet row. This is now done in
+    // sequential and parallel mode alike; the parallel solver is responsible
+    // for skipping Dirichlet rows that are not owned by the rank.
-#ifndef HAVE_PARALLEL_DOMAIN_AMDIS
     inserter_type &ins = *inserter;  
     for (std::set<int>::iterator it = dirichletDofs.begin(); 
 	 it != dirichletDofs.end(); ++it)
 	ins[*it][*it] = 1.0;
-#endif
   }
 
 
diff --git a/AMDiS/src/Element.h b/AMDiS/src/Element.h
index eb32b83657638428d7bc1d0570bec07c72dcec0c..57024e4474e6b0a7071525f70bb22aa4a13c6165 100644
--- a/AMDiS/src/Element.h
+++ b/AMDiS/src/Element.h
@@ -438,9 +438,6 @@ namespace AMDiS {
 		       bool baseDofPtr = false,
 		       vector<GeoIndex>* dofGeoIndex = NULL) const = 0;
 
-    virtual void getSubBoundary(BoundaryObject bound, 
-				vector<BoundaryObject> &subBound) const = 0;
-
     /// Combines \ref getNodeDofs and \ref getHigherOrderDofs to one function. 
     /// See parameter description there.
     void getAllDofs(const FiniteElemSpace* feSpace, 
@@ -449,6 +446,8 @@ namespace AMDiS {
 		    bool baseDofPtr = false,
 		    vector<GeoIndex>* dofGeoIndex = NULL);
 
+    virtual void getSubBoundary(BoundaryObject bound, 
+				vector<BoundaryObject> &subBound) const = 0;
 
     /** \} */
 
diff --git a/AMDiS/src/Tetrahedron.cc b/AMDiS/src/Tetrahedron.cc
index ce8e11ee8b52c5e47e8e9fd9c395380f86665f22..d8510e88721473595a438d37fb6fe66580f654f0 100644
--- a/AMDiS/src/Tetrahedron.cc
+++ b/AMDiS/src/Tetrahedron.cc
@@ -345,8 +345,6 @@ namespace AMDiS {
   {
     FUNCNAME("Tetrahedron::getHigherOrderDofs()");
 
-    TEST_EXIT(dofGeoIndex != NULL)("Not yet implemented!\n");
-
     switch (bound.subObj) {
     case VERTEX:
       return;
@@ -394,14 +392,20 @@ namespace AMDiS {
 	  if (baseDofPtr) {
 	    do {
 	      if (elDofIter.getCurrentPos() == 1 && 
-		  elDofIter.getCurrentElementPos() == bound.ithObj)
+		  elDofIter.getCurrentElementPos() == bound.ithObj) {
 		dofs.push_back(elDofIter.getBaseDof());	
+		if (dofGeoIndex != NULL)
+		  dofGeoIndex->push_back(EDGE);
+	      }
 	    } while (elDofIter.nextStrict());
 	  } else {
 	    do {
 	      if (elDofIter.getCurrentPos() == 1 && 
-		  elDofIter.getCurrentElementPos() == bound.ithObj)
+		  elDofIter.getCurrentElementPos() == bound.ithObj) {
 		dofs.push_back(elDofIter.getDofPtr());	
+		if (dofGeoIndex != NULL)
+		  dofGeoIndex->push_back(EDGE);
+	      }
 	    } while (elDofIter.next());
 	  }
 	}
@@ -432,21 +436,29 @@
 					 baseDofPtr, dofGeoIndex);
 	  }
 	} else { 
+
 	  ElementDofIterator elDofIter(feSpace, true);
 	  elDofIter.reset(this);
 	  if (baseDofPtr) {
 	    do {
-	      if (elDofIter.getCurrentPos() == 2 && 
-		  elDofIter.getCurrentElementPos() == bound.ithObj)
+	      if (elDofIter.getCurrentPos() >= 1 && 
+		  elDofIter.getCurrentElementPos() == bound.ithObj) {
 		dofs.push_back(elDofIter.getBaseDof());	
+		if (dofGeoIndex != NULL)
+		  dofGeoIndex->push_back(elDofIter.getPosIndex());
+	      }
 	    } while (elDofIter.nextStrict());
 	  } else {
 	    do {
-	      if (elDofIter.getCurrentPos() == 2 && 
-		  elDofIter.getCurrentElementPos() == bound.ithObj)
+	      if (elDofIter.getCurrentPos() >= 1 && 
+		  elDofIter.getCurrentElementPos() == bound.ithObj) {
 		dofs.push_back(elDofIter.getDofPtr());	
+		if (dofGeoIndex != NULL)
+		  dofGeoIndex->push_back(elDofIter.getPosIndex());
+	      }
 	    } while (elDofIter.next());      
 	  }
+
 	}
       }
       break;
diff --git a/AMDiS/src/parallel/PetscSolverGlobalMatrix.cc b/AMDiS/src/parallel/PetscSolverGlobalMatrix.cc
index a7b916a4c92d22cbf37a2978800610d1db43019d..f2a02ccc625858f5711f08f4841db3c8e4bd4ce3 100644
--- a/AMDiS/src/parallel/PetscSolverGlobalMatrix.cc
+++ b/AMDiS/src/parallel/PetscSolverGlobalMatrix.cc
@@ -65,7 +65,7 @@ namespace AMDiS {
 
     matAssembly();
 
-    removeDirichletRows(seqMat);
+    //    removeDirichletRows(seqMat);
     
     if (printMatInfo) {
       MatInfo matInfo;
@@ -143,6 +143,8 @@ namespace AMDiS {
 	ParallelDofMapping *rowCoarseSpace = coarseSpaceMap[rowComponent];
 	ParallelDofMapping *colCoarseSpace = coarseSpaceMap[colComponent];
 
+	std::set<DegreeOfFreedom> &dirichletRows = dofMat->getDirichletRows();
+
 	traits::col<Matrix>::type col(dofMat->getBaseMatrix());
 	traits::const_value<Matrix>::type value(dofMat->getBaseMatrix());
 	
@@ -151,6 +153,14 @@ namespace AMDiS {
 	       cend = end<row>(dofMat->getBaseMatrix()); cursor != cend; ++cursor) {
 
 	  bool isRowCoarse = isCoarseSpace(rowComponent, *cursor);
+
+	  // In case this is a Dirichlet row, we have to check whether the
+	  // rank is also the owner of this row DOF.
+	  if (dirichletRows.count(*cursor)) {
+	    if ((!isRowCoarse && !(*interiorMap)[rowComponent].isRankDof(*cursor)) ||
+		(isRowCoarse && !(*rowCoarseSpace)[rowComponent].isRankDof(*cursor)))
+	      continue;
+	  }
   
 	  cols.clear();
 	  colsOther.clear();
@@ -236,7 +246,7 @@ namespace AMDiS {
 
     matAssembly();
 
-    removeDirichletRows(seqMat);
+    //    removeDirichletRows(seqMat);
 
     // === Create solver for the non primal (thus local) variables. ===
 
@@ -279,7 +289,7 @@ namespace AMDiS {
 
     vecRhsAssembly();
 
-    removeDirichletRows(vec);
+    //    removeDirichletRows(vec);
 
     // === For debugging allow to write the rhs vector to a file. ===
 
@@ -575,6 +585,15 @@ namespace AMDiS {
     }
 
     if (hasCoarseSpace()) {
+      MatZeroRows(getMatInteriorCoarse(0), 
+		  dRowsInterior.size(), &(dRowsInterior[0]), 0.0, PETSC_NULL, PETSC_NULL);
+      MatZeroRows(getMatInteriorCoarse(1), 
+		  dRowsInterior.size(), &(dRowsInterior[0]), 0.0, PETSC_NULL, PETSC_NULL);
+
+
+      MatZeroRows(getMatCoarseInterior(0), 
+		  dRowsCoarse.size(), &(dRowsCoarse[0]), 0.0, PETSC_NULL, PETSC_NULL);
+
       Mat mpiMat = getMatCoarse();
       MatZeroRows(mpiMat, dRowsCoarse.size(), &(dRowsCoarse[0]), 0.0,
 		  PETSC_NULL, PETSC_NULL);
@@ -745,7 +764,7 @@ namespace AMDiS {
 
 
   void PetscSolverGlobalMatrix::setDofMatrix(DOFMatrix* seqMat,
-					     int nRowMat, int nColMat)
+					     int rowComp, int colComp)
   {
     FUNCNAME("PetscSolverGlobalMatrix::setDofMatrix()");
 
@@ -770,7 +789,8 @@ namespace AMDiS {
 
     // Get periodic mapping object
     PeriodicMap &perMap = meshDistributor->getPeriodicMap();
-
+    std::set<DegreeOfFreedom> &dirichletRows = seqMat->getDirichletRows();
+      
     const FiniteElemSpace *rowFe = seqMat->getRowFeSpace();
     const FiniteElemSpace *colFe = seqMat->getColFeSpace();
 
@@ -781,7 +801,7 @@ namespace AMDiS {
 	   cend = end<row>(seqMat->getBaseMatrix()); cursor != cend; ++cursor) {
       // Global index of the current row DOF.
       MultiIndex rowMultiIndex;
-      if ((*interiorMap)[nRowMat].find(*cursor, rowMultiIndex) == false)
+      if ((*interiorMap)[rowComp].find(*cursor, rowMultiIndex) == false)
 	continue;
 
       int globalRowDof = rowMultiIndex.global;
@@ -789,11 +809,15 @@ namespace AMDiS {
       // Test if the current row DOF is a periodic DOF.
       bool periodicRow = perMap.isPeriodic(rowFe, globalRowDof);
 
+      // Dirichlet rows can be set only by the owner ranks.
+      if (dirichletRows.count(*cursor) && !((*interiorMap)[rowComp].isRankDof(*cursor)))
+	continue;
+
       if (!periodicRow) {
 	// === Row DOF index is not periodic. ===
 
 	// Get PETSc's mat row index.
-	int rowIndex = interiorMap->getMatIndex(nRowMat, globalRowDof);
+	int rowIndex = interiorMap->getMatIndex(rowComp, globalRowDof);
 
 	cols.clear();
 	values.clear();
@@ -803,14 +827,14 @@ namespace AMDiS {
 
 	  // Global index of the current column index.
 	  MultiIndex colMultiIndex;
-	  if ((*interiorMap)[nColMat].find(col(*icursor), colMultiIndex) == false)
+	  if ((*interiorMap)[colComp].find(col(*icursor), colMultiIndex) == false)
 	    continue;
 
 	  int globalColDof = colMultiIndex.global;
 	  // Test if the current col dof is a periodic dof.
 	  bool periodicCol = perMap.isPeriodic(colFe, globalColDof);
 	  // Get PETSc's mat col index.
-	  int colIndex = interiorMap->getMatIndex(nColMat, globalColDof);
+	  int colIndex = interiorMap->getMatIndex(colComp, globalColDof);
 
 	  // Ignore all zero entries, expect it is a diagonal entry.
  	  if (value(*icursor) == 0.0 && rowIndex != colIndex)
@@ -839,7 +863,7 @@ namespace AMDiS {
 	    vector<int> newCols;
 	    perMap.mapDof(colFe, globalColDof, perAsc, newCols);
 	    for (unsigned int i = 0; i < newCols.size(); i++) {
-	      cols.push_back(interiorMap->getMatIndex(nColMat, newCols[i]));
+	      cols.push_back(interiorMap->getMatIndex(colComp, newCols[i]));
 	      values.push_back(scaledValue);	      
 	    }
 	  }
@@ -861,7 +885,7 @@ namespace AMDiS {
 	for (icursor_type icursor = begin<nz>(cursor), icend = end<nz>(cursor); 
 	     icursor != icend; ++icursor) {
 	  // Global index of the current column index.
-	  int globalColDof = (*interiorMap)[nColMat][col(*icursor)].global;
+	  int globalColDof = (*interiorMap)[colComp][col(*icursor)].global;
 
 	  // Ignore all zero entries, expect it is a diagonal entry.
  	  if (value(*icursor) == 0.0 && globalRowDof != globalColDof)
@@ -891,8 +915,8 @@ namespace AMDiS {
 	  // === Translate the matrix entries to PETSc's matrix.
 
 	  for (unsigned int i = 0; i < entry.size(); i++) {
-	    int rowIdx = interiorMap->getMatIndex(nRowMat, entry[i].first);
-	    int colIdx = interiorMap->getMatIndex(nColMat, entry[i].second);
+	    int rowIdx = interiorMap->getMatIndex(rowComp, entry[i].first);
+	    int colIdx = interiorMap->getMatIndex(colComp, entry[i].second);
 
 	    colsMap[rowIdx].push_back(colIdx);
 	    valsMap[rowIdx].push_back(scaledValue);
@@ -920,7 +944,7 @@ namespace AMDiS {
   void PetscSolverGlobalMatrix::setDofVector(Vec vecInterior, 
 					     Vec vecCoarse,
 					     DOFVector<double>* vec, 
-					     int nRowVec, 
+					     int rowComp, 
 					     bool rankOnly)
   {
     FUNCNAME("PetscSolverGlobalMatrix::setDofVector()");
@@ -929,36 +953,43 @@ namespace AMDiS {
     PeriodicMap &perMap = meshDistributor->getPeriodicMap();
 
     ParallelDofMapping *rowCoarseSpace = 
-      (coarseSpaceMap.size() ? coarseSpaceMap[nRowVec] : NULL);
+      (coarseSpaceMap.size() ? coarseSpaceMap[rowComp] : NULL);
+
+    map<DegreeOfFreedom, double> &dirichletValues = vec->getDirichletValues();
 
     // Traverse all used DOFs in the dof vector.
     DOFVector<double>::Iterator dofIt(vec, USED_DOFS);
     for (dofIt.reset(); !dofIt.end(); ++dofIt) {
+      
+      DegreeOfFreedom dof = dofIt.getDOFIndex();
+      
+      if (rankOnly && !(*interiorMap)[rowComp].isRankDof(dof))
+	continue;
 
-      if (rankOnly && !(*interiorMap)[nRowVec].isRankDof(dofIt.getDOFIndex()))
+      // Dirichlet rows can be set only by the owner ranks.
+      if (dirichletValues.count(dof) && !((*interiorMap)[rowComp].isRankDof(dof)))
 	continue;
 
-      if (isCoarseSpace(nRowVec, dofIt.getDOFIndex())) {
+      if (isCoarseSpace(rowComp, dof)) {
 	TEST_EXIT_DBG(vecCoarse != PETSC_NULL)("Should not happen!\n");
 
-	int index = rowCoarseSpace->getMatIndex(nRowVec, dofIt.getDOFIndex());
+	int index = rowCoarseSpace->getMatIndex(rowComp, dof);
 	VecSetValue(vecCoarse, index, *dofIt, ADD_VALUES);
       } else {
-	if ((*interiorMap)[nRowVec].isSet(dofIt.getDOFIndex()) == false)
+	if ((*interiorMap)[rowComp].isSet(dof) == false)
 	  continue;
 
 	// Calculate global row index of the DOF.
-	DegreeOfFreedom globalRowDof = 
-	  (*interiorMap)[nRowVec][dofIt.getDOFIndex()].global;
+	DegreeOfFreedom globalRowDof = (*interiorMap)[rowComp][dof].global;
 	
 	// Get PETSc's mat index of the row DOF.
 	int index = 0;
 	if (interiorMap->isMatIndexFromGlobal())
 	  index = 
-	    interiorMap->getMatIndex(nRowVec, globalRowDof) + rStartInterior;
+	    interiorMap->getMatIndex(rowComp, globalRowDof) + rStartInterior;
 	else
 	  index =
-	    interiorMap->getMatIndex(nRowVec, dofIt.getDOFIndex()) + rStartInterior;
+	    interiorMap->getMatIndex(rowComp, dof) + rStartInterior;
 
 	if (perMap.isPeriodic(feSpace, globalRowDof)) {
 	  std::set<int>& perAsc = perMap.getAssociations(feSpace, globalRowDof);
@@ -968,7 +999,7 @@ namespace AMDiS {
 	  for (std::set<int>::iterator perIt = perAsc.begin(); 
 	       perIt != perAsc.end(); ++perIt) {
 	    int mappedDof = perMap.map(feSpace, *perIt, globalRowDof);
-	    int mappedIndex = interiorMap->getMatIndex(nRowVec, mappedDof);
+	    int mappedIndex = interiorMap->getMatIndex(rowComp, mappedDof);
 
 	    VecSetValue(vecInterior, mappedIndex, value, ADD_VALUES);
 	  }	  
diff --git a/AMDiS/src/parallel/PetscSolverGlobalMatrix.h b/AMDiS/src/parallel/PetscSolverGlobalMatrix.h
index 6ae8a3343eef3101ffd465cdceaca25a7c69782f..5566f7de3dcbe27d3f886b8caceacf920b23d2f1 100644
--- a/AMDiS/src/parallel/PetscSolverGlobalMatrix.h
+++ b/AMDiS/src/parallel/PetscSolverGlobalMatrix.h
@@ -94,19 +94,21 @@ namespace AMDiS {
     virtual void exitPreconditioner(PC pc);
 
     /// Takes a DOF matrix and sends the values to the global PETSc matrix.
-    void setDofMatrix(DOFMatrix* mat, int nRowMat = 0, int nColMat = 0);
+    void setDofMatrix(DOFMatrix* mat, int rowComp = 0, int colComp = 0);
 
     /// Takes a DOF vector and sends its values to a given PETSc vector.
     void setDofVector(Vec vecInterior,
 		      Vec vecCoarse,
 		      DOFVector<double>* vec, 
-		      int nRowVec, bool rankOnly = false);
+		      int rowComp, 
+		      bool rankOnly = false);
 
     inline void setDofVector(Vec vecInterior,
 			     DOFVector<double>* vec, 
-			     int nRowVec, bool rankOnly = false)
+			     int rowComp, 
+			     bool rankOnly = false)
     {
-      setDofVector(vecInterior, PETSC_NULL, vec, nRowVec, rankOnly);
+      setDofVector(vecInterior, PETSC_NULL, vec, rowComp, rankOnly);
     }
 
     inline void setDofVector(Vec vecInterior,