diff --git a/AMDiS/src/parallel/ParallelCoarseSpaceMatVec.cc b/AMDiS/src/parallel/ParallelCoarseSpaceMatVec.cc
index 1c703b38de364933dba0d07962d95651b21b3f4f..3fbc4cc0d7635a264cefd8638663c11ca7435cc0 100644
--- a/AMDiS/src/parallel/ParallelCoarseSpaceMatVec.cc
+++ b/AMDiS/src/parallel/ParallelCoarseSpaceMatVec.cc
@@ -114,11 +114,7 @@ namespace AMDiS {
     bool localMatrix = (coarseSpaceMap.size() && subdomainLevel == 0);
 
     if (checkMeshChange()) {
-      // Mesh has been changed, recompute interior DOF mapping.
-      interiorMap->setComputeMatIndex(!localMatrix);
-      interiorMap->update();
-
-      int nMat = uniqueCoarseMap.size() + 1;
+      int nMat = uniqueCoarseMap.size() + 1;
       nnz.resize(nMat);
       for (int i = 0; i < nMat; i++) {
 	nnz[i].resize(nMat);
diff --git a/AMDiS/src/parallel/ParallelDofMapping.cc b/AMDiS/src/parallel/ParallelDofMapping.cc
index d0c65bfaf50786b4afd0394818036f2ae678ffa9..eb40555a8467c8e03c835bddaf7c142651d2b1ba 100644
--- a/AMDiS/src/parallel/ParallelDofMapping.cc
+++ b/AMDiS/src/parallel/ParallelDofMapping.cc
@@ -215,7 +215,8 @@ namespace AMDiS {
       nRankDofs(1),
       nLocalDofs(1),
       nOverallDofs(1),
-      rStartDofs(1)
+      rStartDofs(1),
+      mode(mode)
   {
     switch (mode) {
     case COMPONENT_WISE:
@@ -535,4 +536,15 @@ namespace AMDiS {
     ISCreateStride(mpiComm, nRankRows, firstMatIndex, 1, &is);
   }
 
+
+  void ParallelDofMapping::printInfo()
+  {
+    FUNCNAME("ParallelDofMapping::printInfo()");
+
+    if (mode == COMPONENT_WISE) {
+      MSG("Mapping is defined by component numbers!\n");
+    } else {
+      MSG("Mapping is defined by FE spaces!\n");
+    }
+  }
 }
diff --git a/AMDiS/src/parallel/ParallelDofMapping.h b/AMDiS/src/parallel/ParallelDofMapping.h
index 9ed7f481f689dc2220a16ea5e5f2570db46815ee..0db33235cb3b5fa913beb235f240337a2373f56b 100644
--- a/AMDiS/src/parallel/ParallelDofMapping.h
+++ b/AMDiS/src/parallel/ParallelDofMapping.h
@@ -820,6 +820,10 @@ namespace AMDiS {
       VecCreateSeq(PETSC_COMM_SELF, getRankDofs(), &vec);
     }
 
+    /// Prints whether the mapping is defined component-wise or via FE spaces.
+    /// May be used for debugging or during parallel solver creation.
+    void printInfo();
+
   protected:
     /// Compute \ref nRankDofs.
     int computeRankDofs();
@@ -883,6 +887,10 @@ namespace AMDiS {
 
     /// Set of unique FE spaces.
     vector<const FiniteElemSpace*> feSpaces;
+    
+    /// Defines the mode of the DOF mapping. The mapping may be defined either
+    /// for FE spaces or for component numbers.
+    DofMappingMode mode;
   };
 }
 
diff --git a/AMDiS/src/parallel/PetscSolver.cc b/AMDiS/src/parallel/PetscSolver.cc
index 58bdacd9cb900032ff56cae481e332724c34a401..bbb2f45c2d1866f6bd430ac2299f465d9776a34c 100644
--- a/AMDiS/src/parallel/PetscSolver.cc
+++ b/AMDiS/src/parallel/PetscSolver.cc
@@ -45,7 +45,8 @@ namespace AMDiS {
 
 
   void PetscSolver::init(vector<const FiniteElemSpace*> &fe0,
-			 vector<const FiniteElemSpace*> &fe1)
+			 vector<const FiniteElemSpace*> &fe1,
+			 bool createGlobalMapping)
   {
     FUNCNAME("PetscSolver::init()");
 
@@ -60,18 +61,20 @@ namespace AMDiS {
     int nLevels = levelData.getLevelNumber();
     TEST_EXIT_DBG(nLevels >= 1)("Should not happen!\n");
 
-    dofMap.init(levelData, componentSpaces, feSpaces);
-    dofMap.setMpiComm(levelData.getMpiComm(0), 0);
-    dofMap.setDofComm(meshDistributor->getDofComm());
-    dofMap.clear();
-    meshDistributor->registerDofMap(dofMap);
-
-    if (nLevels > 1) {
-      dofMapSd.init(levelData, componentSpaces, feSpaces);
-      dofMapSd.setMpiComm(levelData.getMpiComm(1), 1);
-      dofMapSd.setDofComm(meshDistributor->getDofCommSd());
-      dofMapSd.clear();
-      meshDistributor->registerDofMap(dofMapSd);
+    if (createGlobalMapping) {
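+      // Set up the interior DOF mapping on level 0 and register it at the
+      // mesh distributor.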
+      dofMap.init(levelData, componentSpaces, feSpaces);
+      dofMap.setMpiComm(levelData.getMpiComm(0), 0);
+      dofMap.setDofComm(meshDistributor->getDofComm());
+      dofMap.clear();
+      meshDistributor->registerDofMap(dofMap);
+      
+      if (nLevels > 1) {
+	dofMapSd.init(levelData, componentSpaces, feSpaces);
+	dofMapSd.setMpiComm(levelData.getMpiComm(1), 1);
+	dofMapSd.setDofComm(meshDistributor->getDofCommSd());
+	dofMapSd.clear();
+	meshDistributor->registerDofMap(dofMapSd);
+      }
     }
   }
 
diff --git a/AMDiS/src/parallel/PetscSolver.h b/AMDiS/src/parallel/PetscSolver.h
index 31d0267476d797766b33faa708b01644e90043b5..a227fbf65d92ab4a23b1b20b645d157c7beedf41 100644
--- a/AMDiS/src/parallel/PetscSolver.h
+++ b/AMDiS/src/parallel/PetscSolver.h
@@ -54,8 +54,9 @@ namespace AMDiS {
 
     virtual ~PetscSolver() {}
 
-    virtual void init(vector<const FiniteElemSpace*> &componentSpaces,
-		      vector<const FiniteElemSpace*> &feSpaces);
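+    /// Initializes the solver. If \p createGlobalMapping is false, the
+    /// interior DOF mappings \ref dofMap and \ref dofMapSd are neither
+    /// created nor registered at the mesh distributor.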
+    void init(vector<const FiniteElemSpace*> &componentSpaces,
+	      vector<const FiniteElemSpace*> &feSpaces,
+	      bool createGlobalMapping = true);
 
     /** \brief
      * Create a PETSc matrix. The given DOF matrices are used to create the nnz 
diff --git a/AMDiS/src/parallel/PetscSolverFeti.cc b/AMDiS/src/parallel/PetscSolverFeti.cc
index 253ecd46855b054c767a57feb9455e0ac3c7f58b..2cd657f6d40300e0d06e05c4a291562e1b715acf 100644
--- a/AMDiS/src/parallel/PetscSolverFeti.cc
+++ b/AMDiS/src/parallel/PetscSolverFeti.cc
@@ -1282,20 +1282,28 @@ namespace AMDiS {
 	  
 	  if (!massMatrixSolver) {
 	    MSG("START CREATE MASS MATRIX!\n");
+	    ParallelDofMapping *massMapping = new ParallelDofMapping(COMPONENT_WISE);
+	    massMapping->init(meshDistributor->getMeshLevelData(), 
+			      pressureFeSpace, pressureFeSpace);
+	    massMapping->setDofComm(meshDistributor->getDofComm());
+	    (*massMapping)[0] = interfaceDofMap[pressureComponent];
+	    massMapping->update();
+
 	    DOFMatrix massMatrix(pressureFeSpace, pressureFeSpace);
 	    Operator op(pressureFeSpace, pressureFeSpace);
 	    Simple_ZOT zot;
 	    op.addTerm(&zot);
 	    massMatrix.assembleOperator(op);
 
-	    ParallelDofMapping massMatMapping = interfaceDofMap;
 	    massMatrixSolver = new PetscSolverGlobalMatrix;
 	    massMatrixSolver->setKspPrefix("mass_");
 	    massMatrixSolver->setMeshDistributor(meshDistributor,
 						 mpiCommGlobal,
 						 mpiCommLocal);
-	    massMatrixSolver->setDofMapping(&massMatMapping);
+	    massMatrixSolver->setDofMapping(massMapping);
+	    MSG("START ASM ===\n");
 	    massMatrixSolver->fillPetscMatrix(&massMatrix);
+	    MSG("END ASM ===\n");
 
 	    int r, c;
 	    MatGetSize(massMatrixSolver->getMatInterior(), &r, &c);