diff --git a/AMDiS/src/ProblemStat.cc b/AMDiS/src/ProblemStat.cc
index 25a3e2a8ed1440208cd8befeebb06669ad513831..63e657844474e3207d1259381a4f1233d8072dbc 100644
--- a/AMDiS/src/ProblemStat.cc
+++ b/AMDiS/src/ProblemStat.cc
@@ -513,7 +513,8 @@ namespace AMDiS {
     Parameters::get(initFileStr, solverType);
     OEMSolverCreator *solverCreator = 
       dynamic_cast<OEMSolverCreator*>(CreatorMap<OEMSolver>::getCreator(solverType, initFileStr));
-    TEST_EXIT(solverCreator)("no solver type\n");
+    TEST_EXIT(solverCreator)
+      ("No valid solver type found in parameter \"%s\"\n", initFileStr.c_str());
     solverCreator->setName(initFileStr);
     solver = solverCreator->create();
     solver->initParameters();
diff --git a/AMDiS/src/parallel/MeshDistributor.cc b/AMDiS/src/parallel/MeshDistributor.cc
index 617a3d07f8b4d70c42a01e149e060b182222f235..c82c2884e233fcf515413c3823095b53f498d390 100644
--- a/AMDiS/src/parallel/MeshDistributor.cc
+++ b/AMDiS/src/parallel/MeshDistributor.cc
@@ -228,7 +228,7 @@ namespace AMDiS {
 
       if (writePartMesh > 0) {
 	debug::writeElementIndexMesh(mesh , debugOutputDir + "elementIndex");
-	ParallelDebug::writePartitioning(*this, debugOutputDir + "part.vtu");
+	ParallelDebug::writePartitioning(*this, debugOutputDir + "part");
       }
     }
 
@@ -901,7 +901,7 @@ namespace AMDiS {
 
   void MeshDistributor::createMeshLevelStructure()
   {
-    FUNCNAME("MeshDistributor::createMeshLevelStructure()");
+    FUNCNAME("MeshDistributor::createMeshLevelStructure()");  
 
     if (mpiSize != 16)
       return;
@@ -1663,10 +1663,14 @@ namespace AMDiS {
     // MSG("Memory usage of element object database = %5.f KByte\n", 
     // 	static_cast<double>(memsize / 1024.0));
 
+    MSG("Creating interior boundaries\n");
     intBoundary.create(levelData, 0, elObjDb);
     ParallelDebug::printBoundaryInfo(intBoundary);
 
+    MSG("Number of mesh levels: %d\n", levelData.getLevelNumber());
+
     if (levelData.getLevelNumber() > 1) {
+      MSG("Creating interior boundaries for the subdomain level\n");
       intBoundarySd.create(levelData, 1, elObjDb);
       ParallelDebug::printBoundaryInfo(intBoundarySd, 0, true);
     }
@@ -1847,18 +1851,11 @@ namespace AMDiS {
 
     int test = 0;
     Parameters::get("parallel->remove periodic boundary", test);
-
-//    if (test == 0) {
-//121106 replaced if statement
-
-	if(true){
-		//write latest mesh properties to vtu file
-		debug::writeElementIndexMesh(mesh , debugOutputDir + "elementIndex");
-		ParallelDebug::writePartitioning(*this, debugOutputDir + "part.vtu");
-
- 	    ParallelDebug::testCommonDofs(*this, true);
-    	ParallelDebug::testGlobalIndexByCoords(*this);   
+    if (test == 0) {
+      ParallelDebug::testCommonDofs(*this, true);
+      ParallelDebug::testGlobalIndexByCoords(*this);   
     }
+
 #else
 
     for (int i = 0; i < static_cast<int>(dofMaps.size()); i++) {
diff --git a/AMDiS/src/parallel/MeshDistributor.h b/AMDiS/src/parallel/MeshDistributor.h
index 474b1d654b701b3f63854da326856b1cb159235f..e9882c9c2aa1c69a9de4bf5468f07a9a1d1c75e6 100644
--- a/AMDiS/src/parallel/MeshDistributor.h
+++ b/AMDiS/src/parallel/MeshDistributor.h
@@ -191,6 +191,16 @@ namespace AMDiS {
       return mpiComm;
     }
 
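+    /// Returns the local MPI communicator: PETSC_COMM_SELF if there is only
+    /// one mesh level, otherwise the communicator of mesh level 1 (only the
+    /// two-level case is implemented so far).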
+    inline MPI::Intracomm getLocalMpiComm()
+    {
+      if (levelData.getLevelNumber() == 1)
+	return PETSC_COMM_SELF;
+
+      TEST_EXIT(levelData.getLevelNumber() == 2)("Only the two-level case is implemented!\n");
+
+      return levelData.getMpiComm(1);
+    }
+
     inline bool isInitialized()
     {
       return initialized;
diff --git a/AMDiS/src/parallel/ParallelCoarseSpaceSolver.cc b/AMDiS/src/parallel/ParallelCoarseSpaceSolver.cc
index b3e29ca10d1e5a80ee379cec199ded8f6725e3fe..ed1d1607f579c7e559ede4dd63e7021f92bd6cfd 100644
--- a/AMDiS/src/parallel/ParallelCoarseSpaceSolver.cc
+++ b/AMDiS/src/parallel/ParallelCoarseSpaceSolver.cc
@@ -61,6 +61,28 @@ namespace AMDiS {
   }
 
 
+  void ParallelCoarseSpaceSolver::setMeshDistributor(MeshDistributor *md)
+  {
+    FUNCNAME("ParallelCoarseSpaceSolver::setMeshDistributor()");
+
+    meshDistributor = md;
+    mpiCommGlobal = meshDistributor->getMpiComm();
+    mpiCommLocal = meshDistributor->getLocalMpiComm();
+  }
+
+
+  void ParallelCoarseSpaceSolver::setMeshDistributor(MeshDistributor *md,
+						     MPI::Intracomm mpiComm0,
+						     MPI::Intracomm mpiComm1)
+  {
+    FUNCNAME("ParallelCoarseSpaceSolver::setMeshDistributor()");
+
+    meshDistributor = md;
+    mpiCommGlobal = mpiComm0;
+    mpiCommLocal = mpiComm1; 
+  }
+
+
   void ParallelCoarseSpaceSolver::prepare()
   {
     FUNCNAME("ParallelCoarseSpaceSolver:prepare()");
@@ -116,7 +138,7 @@ namespace AMDiS {
     bool localMatrix = (coarseSpaceMap.size() && subdomainLevel == 0);
 
     if (checkMeshChange()) {
-       int nMat = uniqueCoarseMap.size() + 1;
+      int nMat = uniqueCoarseMap.size() + 1;
       nnz.resize(nMat);
       for (int i = 0; i < nMat; i++) {
 	nnz[i].resize(nMat);
@@ -151,21 +173,52 @@ namespace AMDiS {
     
     int nRankInteriorRows = interiorMap->getRankDofs();
     int nOverallInteriorRows = interiorMap->getOverallDofs();
+
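+    // MPI communicator of the subdomain's mesh level.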
+    MPI::Intracomm mpiGlobal = meshDistributor->getMeshLevelData().getMpiComm(subdomainLevel);
     
     if (localMatrix) {
       MatCreateSeqAIJ(mpiCommLocal, nRankInteriorRows, nRankInteriorRows,
 		      0, nnz[0][0].dnnz,
 		      &mat[0][0]);
     } else {
-      MatCreateAIJ(mpiCommGlobal, nRankInteriorRows, nRankInteriorRows, 
-		   nOverallInteriorRows, nOverallInteriorRows,
-		   0, nnz[0][0].dnnz, 0, nnz[0][0].onnz,		   
-		   &mat[0][0]);    
+      if (subdomainLevel == 0) {
+	MatCreateAIJ(mpiGlobal, nRankInteriorRows, nRankInteriorRows, 
+		     nOverallInteriorRows, nOverallInteriorRows,
+		     0, nnz[0][0].dnnz, 0, nnz[0][0].onnz,		   
+		     &mat[0][0]);    
+      } else {
+	MSG("WARNING: Multi-level nnz preallocation is not yet general, a fixed estimate of 300 nonzeros per row is used!\n");
+	// TODO: MAKE NNZ CALCULATION GENERAL (see also creation of coarse space
+	// matrices some lines below)
+
+	MatCreateAIJ(mpiGlobal, nRankInteriorRows, nRankInteriorRows, 
+		     nOverallInteriorRows, nOverallInteriorRows,
+		     300, PETSC_NULL, 300, PETSC_NULL, &mat[0][0]);    
+      }
     }
     MatSetOption(mat[0][0], MAT_NEW_NONZERO_ALLOCATION_ERR, PETSC_FALSE);
 
-    VecCreateMPI(mpiCommGlobal, nRankInteriorRows, nOverallInteriorRows, &vecSol[0]);
-    VecCreateMPI(mpiCommGlobal, nRankInteriorRows, nOverallInteriorRows, &vecRhs[0]);
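+    // Create the interior solution and rhs vectors. In the multi-level case
+    // they are created on the communicator of mesh level subdomainLevel - 1
+    // and the global size is obtained by summing the rank-local sizes.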
+    if (subdomainLevel == 0) {
+      VecCreateMPI(mpiGlobal, nRankInteriorRows, nOverallInteriorRows, 
+		   &vecSol[0]);
+      VecCreateMPI(mpiGlobal, nRankInteriorRows, nOverallInteriorRows, 
+		   &vecRhs[0]);
+    } else {
+      MSG("WARNING: Multi-level creation of the interior vectors is a temporary workaround, make this general!\n");
+
+      MPI::Intracomm mpiComm = 
+	meshDistributor->getMeshLevelData().getMpiComm(subdomainLevel - 1);
+      
+      int nAllRows = nRankInteriorRows;
+      mpi::globalAdd(nAllRows);
+      VecCreateMPI(mpiComm, nRankInteriorRows, nAllRows, 
+		   &vecSol[0]);
+      VecCreateMPI(mpiComm, nRankInteriorRows, nAllRows, 
+		   &vecRhs[0]);
+    }
 
     int nCoarseMap = uniqueCoarseMap.size();
     for (int i = 0; i < nCoarseMap; i++) {
@@ -174,16 +227,30 @@ namespace AMDiS {
       int nRankCoarseRows = cMap->getRankDofs();
       int nOverallCoarseRows = cMap->getOverallDofs();
       
-      MatCreateAIJ(mpiCommGlobal,
-		   nRankCoarseRows, nRankCoarseRows,
-		   nOverallCoarseRows, nOverallCoarseRows,
-		   0, nnz[i + 1][i + 1].dnnz, 0, nnz[i + 1][i + 1].onnz,
-		   &mat[i + 1][i + 1]);
+      if (subdomainLevel == 0) {
+	MatCreateAIJ(mpiCommGlobal,
+		     nRankCoarseRows, nRankCoarseRows,
+		     nOverallCoarseRows, nOverallCoarseRows,
+		     0, nnz[i + 1][i + 1].dnnz, 0, nnz[i + 1][i + 1].onnz,
+		     &mat[i + 1][i + 1]);
+      } else {
+	MSG("WARNING: Multi-level coarse space matrices use a hard-coded nnz preallocation, make this general!\n");
+	
+	MPI::Intracomm mpiComm = 
+	  meshDistributor->getMeshLevelData().getMpiComm(subdomainLevel - 1);
+
+	MatCreateAIJ(mpiComm,
+		     nRankCoarseRows, nRankCoarseRows,
+		     nOverallCoarseRows, nOverallCoarseRows,
+		     300, PETSC_NULL, 300, PETSC_NULL, 
+		     &mat[i + 1][i + 1]);
+      }
       MatSetOption(mat[i + 1][i + 1], MAT_NEW_NONZERO_ALLOCATION_ERR, PETSC_FALSE);
       cMap->createVec(vecSol[i + 1]);
       cMap->createVec(vecRhs[i + 1]);
     }
 
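+    // Create the off-diagonal matrices that couple the interior block with
+    // the coarse space blocks (and the coarse space blocks with each other).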
     for (int i = 0; i < nCoarseMap + 1; i++) {
       for (int j = 0; j < nCoarseMap + 1; j++) {
 	if (i == j)
@@ -195,18 +262,43 @@ namespace AMDiS {
 	int nColsRankMat = (j == 0 ? nRankInteriorRows : uniqueCoarseMap[j - 1]->getRankDofs());
 	int nColsOverallMat = (j == 0 ? nOverallInteriorRows : uniqueCoarseMap[j - 1]->getOverallDofs());
 
-	MatCreateAIJ(mpiCommGlobal,
-		     nRowsRankMat, nColsRankMat,
-		     nRowsOverallMat, nColsOverallMat,
-		     0, nnz[i][j].dnnz, 0, nnz[i][j].onnz,
-		     &mat[i][j]);	  
-	MatCreateAIJ(mpiCommGlobal,
-		     nColsRankMat, nRowsRankMat,
-		     nColsOverallMat, nRowsOverallMat,
-		     0, nnz[j][i].dnnz, 0, nnz[j][i].onnz,
-		     &mat[j][i]);
+	if (subdomainLevel == 0) {  
+	  MatCreateAIJ(mpiCommGlobal,
+		       nRowsRankMat, nColsRankMat,
+		       nRowsOverallMat, nColsOverallMat,
+		       0, nnz[i][j].dnnz, 0, nnz[i][j].onnz,
+		       &mat[i][j]);	  
+	  MatCreateAIJ(mpiCommGlobal,
+		       nColsRankMat, nRowsRankMat,
+		       nColsOverallMat, nRowsOverallMat,
+		       0, nnz[j][i].dnnz, 0, nnz[j][i].onnz,
+		       &mat[j][i]);
+	} else {
+	  MSG("WARNING: Multi-level coupling matrices use a hard-coded nnz preallocation, make this general!\n");
+
+	  if (i == 0) {
+	    nRowsOverallMat = nRowsRankMat;
+	    mpi::globalAdd(nRowsOverallMat);
+	  }
+
+	  if (j == 0) {
+	    nColsOverallMat = nColsRankMat;
+	    mpi::globalAdd(nColsOverallMat);
+	  }
+
+	  MatCreateAIJ(mpiCommGlobal,
+		       nRowsRankMat, nColsRankMat,
+		       nRowsOverallMat, nColsOverallMat,
+		       300, PETSC_NULL, 300, PETSC_NULL,
+		       &mat[i][j]);	  
+	  MatCreateAIJ(mpiCommGlobal,
+		       nColsRankMat, nRowsRankMat,
+		       nColsOverallMat, nRowsOverallMat,
+		       300, PETSC_NULL, 300, PETSC_NULL,
+		       &mat[j][i]);
+	}
       }
-    }    
+    }
   }
 
 
diff --git a/AMDiS/src/parallel/ParallelCoarseSpaceSolver.h b/AMDiS/src/parallel/ParallelCoarseSpaceSolver.h
index 5f3f9d5a310e3e7289c2fd023ee27081b1c690da..f30724a27a80b354cdc9c43a4ebc8168e7781edf 100644
--- a/AMDiS/src/parallel/ParallelCoarseSpaceSolver.h
+++ b/AMDiS/src/parallel/ParallelCoarseSpaceSolver.h
@@ -78,15 +78,13 @@ namespace AMDiS {
     void setCoarseSpaceDofMapping(ParallelDofMapping *coarseDofs, 
 				  int component = -1);
 
-    /// Set mesh distributor object and MPI communicators.
-    void setMeshDistributor(MeshDistributor *m,
+    /// Set the mesh distributor object; the MPI communicators are derived from it.
+    void setMeshDistributor(MeshDistributor *m);
+
+    /// Set the mesh distributor object together with explicitly given global and local MPI communicators.
+    void setMeshDistributor(MeshDistributor *md,
 			    MPI::Intracomm mpiComm0,
-			    MPI::Intracomm mpiComm1)
-    {
-      meshDistributor = m;
-      mpiCommGlobal = mpiComm0;
-      mpiCommLocal = mpiComm1; 
-    }
+			    MPI::Intracomm mpiComm1);
 
     /// Set level of the interior discretization. Is only used for
     /// multi-level methods.
diff --git a/AMDiS/src/parallel/PetscProblemStat.cc b/AMDiS/src/parallel/PetscProblemStat.cc
index 2495a68e7aa9ab72dee744979cd76c8b630de33e..7faaa078bda8882f25ada2de9e239355b2c6c587 100644
--- a/AMDiS/src/parallel/PetscProblemStat.cc
+++ b/AMDiS/src/parallel/PetscProblemStat.cc
@@ -55,9 +55,7 @@ namespace AMDiS {
     
     meshDistributor->setBoundaryDofRequirement(petscSolver->getBoundaryDofRequirement());
 
-    petscSolver->setMeshDistributor(meshDistributor, 
-				    meshDistributor->getMpiComm(),
-				    PETSC_COMM_SELF);   
+    petscSolver->setMeshDistributor(meshDistributor);
     petscSolver->init(this->getComponentSpaces(), this->getFeSpaces());
   }
   
diff --git a/AMDiS/src/parallel/PetscSolverFeti.cc b/AMDiS/src/parallel/PetscSolverFeti.cc
index 2c0087c9ad9db1df03afe341b53471120dade5c7..465c8674196972b01999b7eade0f76561ad26437 100644
--- a/AMDiS/src/parallel/PetscSolverFeti.cc
+++ b/AMDiS/src/parallel/PetscSolverFeti.cc
@@ -101,13 +101,25 @@ namespace AMDiS {
   {
     FUNCNAME("PetscSolverFeti::initialize()");
 
+    MSG("Initialize FETI-DP solver on mesh level %d\n", meshLevel);
+
     TEST_EXIT_DBG(meshLevel + 1 == meshDistributor->getMeshLevelData().getLevelNumber())
       ("Mesh hierarchy does not contain %d levels!\n", meshLevel + 1);
 
     MeshLevelData& levelData = meshDistributor->getMeshLevelData();
 
     if (subdomain == NULL) {
-      subdomain = new PetscSolverGlobalMatrix("");
+      string subSolverInitStr = initFileStr + "->subsolver";
+      string solverType = "petsc";
+      Parameters::get(subSolverInitStr, solverType);
+      OEMSolverCreator *solverCreator =
+	dynamic_cast<OEMSolverCreator*>(CreatorMap<OEMSolver>::getCreator(solverType, initFileStr));
+      TEST_EXIT(solverCreator)
+	("No valid solver type found in parameter \"%s\"\n",
+	 subSolverInitStr.c_str());
+      solverCreator->setName(subSolverInitStr);
+      subdomain = dynamic_cast<PetscSolver*>(solverCreator->create());
+      TEST_EXIT(subdomain)("Subsolver type \"%s\" is not a PETSc solver!\n", solverType.c_str());
+      subdomain->initParameters();
       subdomain->setSymmetric(isSymmetric);
       if (dirichletMode == 0)
 	subdomain->setHandleDirichletRows(true);
@@ -115,14 +127,15 @@ namespace AMDiS {
 	subdomain->setHandleDirichletRows(false);
 
       if (meshLevel == 0) {
-	subdomain->setMeshDistributor(meshDistributor, 
-				      mpiCommGlobal, mpiCommLocal);
+	subdomain->setMeshDistributor(meshDistributor);
       } else {
 	subdomain->setMeshDistributor(meshDistributor, 
 				      levelData.getMpiComm(meshLevel - 1),
 				      levelData.getMpiComm(meshLevel));
 	subdomain->setLevel(meshLevel);
       }
+
+      delete solverCreator;
     }
 
     primalDofMap.init(levelData, componentSpaces, feSpaces);   
@@ -318,8 +331,10 @@ namespace AMDiS {
 	WorldVector<double> c;
 	feSpace->getMesh()->getDofIndexCoords(*it, feSpace, c);
 
-	if (fabs(c[0]) < e || fabs(c[1]) < e ||
-	    fabs(c[0] - 25.0) < e || fabs(c[1] - 25.0) < e ||
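+	// Hard-coded selection of primal nodes: the domain center and the four
+	// boundary edge midpoints of a 25 x 25 test domain.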
+	if ((fabs(c[0]) < e && fabs(c[1] - 12.5) < e) ||
+	    (fabs(c[0] - 25.0) < e && fabs(c[1] - 12.5) < e) ||
+	    (fabs(c[0] - 12.5) < e && fabs(c[1]) < e) ||
+	    (fabs(c[0] - 12.5) < e && fabs(c[1] - 25.0) < e) ||
 	    (fabs(c[0] - 12.5) < e && fabs(c[1] - 12.5) < e)) {
 	  MSG("PRIMAL COORD %f %f\n", c[0], c[1]);
 	  primals.insert(**it);
@@ -552,7 +567,6 @@ namespace AMDiS {
   {
     FUNCNAME("PetscSolverFeti::createIndexB()");
 
-
     const FiniteElemSpace *feSpace = componentSpaces[component];
     DOFAdmin* admin = feSpace->getAdmin();
 
@@ -937,8 +951,17 @@ namespace AMDiS {
       if (augmentedLagrange == false) {
 	schurPrimalData.subSolver = subdomain;
 	
-	localDofMap.createVec(schurPrimalData.tmp_vec_b, nGlobalOverallInterior);
+	if (meshLevel == 0) {
+	  localDofMap.createVec(schurPrimalData.tmp_vec_b, nGlobalOverallInterior);
+	} else {
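+	  // In the multi-level case the temporary interior vector is created
+	  // directly on the communicator of mesh level meshLevel - 1.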
+	  MSG("WARNING: Multi-level vector creation is a temporary workaround, make this general!\n");
+	  VecCreateMPI(meshDistributor->getMeshLevelData().getMpiComm(meshLevel - 1),
+		       localDofMap.getRankDofs(),
+		       nGlobalOverallInterior, &(schurPrimalData.tmp_vec_b));
+	}
+
 	primalDofMap.createVec(schurPrimalData.tmp_vec_primal);
 	
 	MatCreateShell(mpiCommGlobal,
 		       primalDofMap.getRankDofs(), 
@@ -1221,7 +1244,15 @@ namespace AMDiS {
     fetiData.subSolver = subdomain;
     fetiData.ksp_schur_primal = &ksp_schur_primal;
 
-    localDofMap.createVec(fetiData.tmp_vec_b0, nGlobalOverallInterior);
+    if (meshLevel == 0) {
+      localDofMap.createVec(fetiData.tmp_vec_b0, nGlobalOverallInterior);      
+    } else {
+      MSG("WARNING: Multi-level vector creation is a temporary workaround, make this general!\n");
+      VecCreateMPI(meshDistributor->getMeshLevelData().getMpiComm(meshLevel - 1),
+		   localDofMap.getRankDofs(),
+		   nGlobalOverallInterior, &(fetiData.tmp_vec_b0));
+    }
+
     lagrangeMap.createVec(fetiData.tmp_vec_lagrange);
     primalDofMap.createVec(fetiData.tmp_vec_primal0);
 
@@ -1596,7 +1627,7 @@ namespace AMDiS {
 
 #if (DEBUG != 0)
     PetscInt nRow, nCol;
-    MatGetSize(subdomain->getMatInterior(), &nRow, &nCol);
+    MatGetLocalSize(subdomain->getMatInterior(), &nRow, &nCol);
     mpi::globalAdd(nRow);
     mpi::globalAdd(nCol);
 
@@ -1893,7 +1924,9 @@ namespace AMDiS {
     createDirichletData(*mat);
 
     createFetiData();
-  
+
+    TEST_EXIT(coarseSpaceMap.size() == 0)
+      ("FETI-DP with an additional coarse space is not yet supported!\n");
+
     // === Create matrices for the FETI-DP method. ===
 
     if (printTimings)
@@ -1944,6 +1977,8 @@ namespace AMDiS {
   {
     FUNCNAME("PetscSolverFeti::fillPetscRhs()");
 
+    TEST_EXIT(coarseSpaceMap.size() == 0)
+      ("FETI-DP with an additional coarse space is not yet supported!\n");
+
     subdomain->fillPetscRhs(vec);
   }
 
@@ -2193,8 +2228,18 @@ namespace AMDiS {
     // === Some temporary vectors. ===
 
     Vec tmp_b0, tmp_b1;
-    localDofMap.createVec(tmp_b0, nGlobalOverallInterior);
-    localDofMap.createVec(tmp_b1, nGlobalOverallInterior);
+    if (meshLevel == 0) {
+      localDofMap.createVec(tmp_b0, nGlobalOverallInterior);
+      localDofMap.createVec(tmp_b1, nGlobalOverallInterior);
+    } else {
+      MSG("WARNING: Multi-level vector creation is a temporary workaround, make this general!\n");
+      VecCreateMPI(meshDistributor->getMeshLevelData().getMpiComm(meshLevel - 1),
+		   localDofMap.getRankDofs(),
+		   nGlobalOverallInterior, &tmp_b0);
+      VecCreateMPI(meshDistributor->getMeshLevelData().getMpiComm(meshLevel - 1),
+		   localDofMap.getRankDofs(),
+		   nGlobalOverallInterior, &tmp_b1);
+    }
 
     Vec tmp_primal0, tmp_primal1;
     primalDofMap.createVec(tmp_primal0);
diff --git a/AMDiS/src/parallel/PetscSolverGlobalMatrix.cc b/AMDiS/src/parallel/PetscSolverGlobalMatrix.cc
index 806f79c744d8bbc00dfd3fb0a6a35e3a59a6090e..a5e3f7da6c24b7a1d9d27771294d5c1f2214718e 100644
--- a/AMDiS/src/parallel/PetscSolverGlobalMatrix.cc
+++ b/AMDiS/src/parallel/PetscSolverGlobalMatrix.cc
@@ -825,9 +825,7 @@ namespace AMDiS {
 
     PetscSolver* subSolver = new PetscSolverGlobalMatrix("");
     subSolver->setKspPrefix(kspPrefix);
-    subSolver->setMeshDistributor(meshDistributor,
-				  mpiCommGlobal,
-				  mpiCommLocal);
+    subSolver->setMeshDistributor(meshDistributor);
     subSolver->init(fe, fe);
 
     ParallelDofMapping &subDofMap = subSolver->getDofMap();