diff --git a/AMDiS/src/Global.cc b/AMDiS/src/Global.cc
index c18e9013b9dc4519910a21ae317ccc7846190a32..941b12eb6baf9ca18d062faf112c3ca6294e777a 100644
--- a/AMDiS/src/Global.cc
+++ b/AMDiS/src/Global.cc
@@ -332,7 +332,7 @@ namespace AMDiS {
   }
 
 
-  void processMemUsage(double& vm_usage, double& resident_set)
+  void processMemUsage(double& vm_usage, double& resident_set, bool inMegaByte)
   {
     using std::ios_base;
     using std::ifstream;
@@ -361,8 +361,13 @@ namespace AMDiS {
 
     // in case x86-64 is configured to use 2MB pages    
     long page_size_kb = sysconf(_SC_PAGE_SIZE) / 1024;
-    vm_usage     = vsize / 1024.0;
+    vm_usage = vsize / 1024.0;
     resident_set = rss * page_size_kb;
+
+    if (inMegaByte) {
+      vm_usage /= 1024.0;
+      resident_set /= 1024.0;
+    }
   }
 
 
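Note on units: /proc/self/stat reports vsize in bytes and rss in pages, which
is why the function first normalizes both values to kilobytes before the new
megabyte scaling. A commented sketch of the conversion chain (vsize and rss as
parsed in the function itself):

    long page_size_kb = sysconf(_SC_PAGE_SIZE) / 1024; // KB per page
    vm_usage     = vsize / 1024.0;                     // bytes -> KB
    resident_set = rss * page_size_kb;                 // pages -> KB
    if (inMegaByte) {                                  // new default
      vm_usage /= 1024.0;                              // KB -> MB
      resident_set /= 1024.0;
    }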
diff --git a/AMDiS/src/Global.h b/AMDiS/src/Global.h
index 557600fa8c5e76c7a87d44a6900c44a8d2d59d51..0be17a4b2c6b1c4bd843ce7e1f7079809b0b452c 100644
--- a/AMDiS/src/Global.h
+++ b/AMDiS/src/Global.h
@@ -109,7 +109,7 @@ namespace AMDiS {
 
   void waitSec(int seconds);
 
-  void processMemUsage(double& vm_usage, double& resident_set);
+  void processMemUsage(double& vm_usage, double& resident_set, bool inMegaByte = true);
 
   /// Content comparision of two pointers. Used e.g. for find_if
   template<typename T>
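A minimal caller-side sketch of the changed interface (hypothetical snippet;
the MSG pattern follows the calls in PetscProblemStat.cc below):

    double vm = 0.0, rss = 0.0;
    processMemUsage(vm, rss);        // default inMegaByte = true: values in MB
    MSG("My memory usage is VM = %.1f MB    RSS = %.1f MB\n", vm, rss);

    processMemUsage(vm, rss, false); // old behavior: values stay in KB
    MSG("My memory usage is VM = %.1f KB    RSS = %.1f KB\n", vm, rss);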
diff --git a/AMDiS/src/Mesh.cc b/AMDiS/src/Mesh.cc
index 0b15134351fb767ad239b64581073b23267a79c6..e3c7615acd510f484b6334a195c20b11882c32a6 100644
--- a/AMDiS/src/Mesh.cc
+++ b/AMDiS/src/Mesh.cc
@@ -1213,18 +1213,19 @@ namespace AMDiS {
     Parameters::get(name + "->check", check);
     Parameters::get(name + "->preserve coarse dofs", preserveCoarseDOFs);
 
-    if (macroFilename.length()) {
-      // In parallel computations, check if a finer macro mesh is required.
+    TEST_EXIT(macroFilename.length())
+      ("No mesh defined for parameter %s->macro file name!\n", name.c_str());
+
+    // In parallel computations, check if a finer macro mesh is required.
 #ifdef HAVE_PARALLEL_DOMAIN_AMDIS
-      checkParallelMacroFile(macroFilename, periodicFilename, check);
+    checkParallelMacroFile(macroFilename, periodicFilename, check);
 #endif
       
-      macroFileInfo = 
-	MacroReader::readMacro(macroFilename, this, periodicFilename, check);
+    macroFileInfo = 
+      MacroReader::readMacro(macroFilename, this, periodicFilename, check);
 
-      if (!valueFilename.length())
-       	clearMacroFileInfo();
-    }
+    if (!valueFilename.length())
+      clearMacroFileInfo();
 
     initialized = true;
   }
@@ -1251,11 +1252,18 @@ namespace AMDiS {
     localAdmin->setNumberOfDofs(admin[0]->getNumberOfDofs());
     testMesh.addDOFAdmin(localAdmin);
     
+    MSG("START READING FILE %s\n", macroFilename.c_str());
+
     MacroInfo *testMacroInfo = 
       MacroReader::readMacro(macroFilename, &testMesh, periodicFilename, check);
     testMacroInfo->clear();
     delete testMacroInfo;
 
+    MSG("TEST MESH HAS %d MACRO ELEMENTS\n", testMesh.getNumberOfMacros());
+
+    MPI::Finalize();
+    exit(0);    // debug stop: end the run right after the macro-file test read
+
 
     // === Check the mesh structure. ===
     
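With the TEST_EXIT guard, a missing "->macro file name" entry now aborts the
run instead of being silently skipped. The expected init-file entry looks like
this (mesh name and path are hypothetical; AMDiS init files use "key: value"
syntax):

    mesh->macro file name: ./macro/macro.stand.2d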
diff --git a/AMDiS/src/parallel/CheckerPartitioner.cc b/AMDiS/src/parallel/CheckerPartitioner.cc
index 7ba10fea339d37925720edacf695080cad5eb2e2..4dfa87a7f01092e273d301e4a094c768e07d2a83 100644
--- a/AMDiS/src/parallel/CheckerPartitioner.cc
+++ b/AMDiS/src/parallel/CheckerPartitioner.cc
@@ -25,6 +25,8 @@ namespace AMDiS {
 
     int nElementsPerBlock = (mesh->getDim() == 2 ? 2 : 6);
 
+    MSG("ELS_PER_BLOCK = %d\n", nElementsPerBlock);
+
     TraverseStack stack;
     ElInfo *elInfo = stack.traverseFirst(mesh, 0, Mesh::CALL_EL_LEVEL);
     while (elInfo) {
@@ -33,6 +35,12 @@ namespace AMDiS {
       int elInRank = elIndex / nElementsPerBlock;
       TEST_EXIT_DBG(elInRank < mpiSize)("Should not happen!\n");
 
+      if (elInRank == mpiRank) {
+	MSG("EL %d IS MINE!\n", elIndex);
+      } else {
+	MSG("EL %d IS IN RANK %d\n", elIndex, elInRank);
+      }
+
       elementInRank[elIndex] = (elInRank == mpiRank);
       partitionMap[elIndex] = elInRank;	
 	
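The block mapping assigns consecutive element indices to ranks in groups of
nElementsPerBlock (2 in 2D, 6 in 3D). A standalone sketch of the arithmetic
with made-up sizes:

    // 2D example: nElementsPerBlock = 2, mpiSize = 4, 8 macro elements.
    // elIndex / nElementsPerBlock yields:
    //   elements 0,1 -> rank 0    elements 2,3 -> rank 1
    //   elements 4,5 -> rank 2    elements 6,7 -> rank 3
    for (int elIndex = 0; elIndex < 8; elIndex++)
      partitionMap[elIndex] = elIndex / 2;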
diff --git a/AMDiS/src/parallel/PetscProblemStat.cc b/AMDiS/src/parallel/PetscProblemStat.cc
index 5a3f624b9809ff49ba439a501138254dabcf045d..747a10b77d72cd3c28f66f0f902e90df80a6177c 100644
--- a/AMDiS/src/parallel/PetscProblemStat.cc
+++ b/AMDiS/src/parallel/PetscProblemStat.cc
@@ -70,16 +70,11 @@ namespace AMDiS {
     double wtime = MPI::Wtime();
 
     double vm, rss;
-    processMemUsage(vm, rss);   
-    vm /= 1024.0;
-    rss /= 1024.0;
-    
+    processMemUsage(vm, rss);
     MSG("STAGE 1\n");
-    MSG("My memory usage is VM = %.1f MB    RSS = %.1f MB\n", vm, rss);
-    
+    MSG("My memory usage is VM = %.1f MB    RSS = %.1f MB\n", vm, rss);
     mpi::globalAdd(vm);
     mpi::globalAdd(rss);
-
     MSG("Overall memory usage is VM = %.1f MB    RSS = %.1f MB\n", vm, rss);
 
     petscSolver->setMeshDistributor(meshDistributor);
@@ -87,28 +82,20 @@ namespace AMDiS {
 
 
     processMemUsage(vm, rss);   
-    vm /= 1024.0;
-    rss /= 1024.0;
     MSG("STAGE 2\n");
-    MSG("My memory usage is VM = %.1f MB    RSS = %.1f MB\n", vm, rss);
-    
+    MSG("My memory usage is VM = %.1f MB    RSS = %.1f MB\n", vm, rss);
     mpi::globalAdd(vm);
     mpi::globalAdd(rss);
-
     MSG("Overall memory usage is VM = %.1f MB    RSS = %.1f MB\n", vm, rss);
 
 
     petscSolver->solvePetscMatrix(*solution, adaptInfo);   
 
     processMemUsage(vm, rss);   
-    vm /= 1024.0;
-    rss /= 1024.0;
     MSG("STAGE 3\n");
-    MSG("My memory usage is VM = %.1f MB    RSS = %.1f MB\n", vm, rss);
-    
+    MSG("My memory usage is VM = %.1f MB    RSS = %.1f MB\n", vm, rss);
     mpi::globalAdd(vm);
     mpi::globalAdd(rss);
-
     MSG("Overall memory usage is VM = %.1f MB    RSS = %.1f MB\n", vm, rss);
 
 
diff --git a/AMDiS/src/parallel/PetscSolverFeti.cc b/AMDiS/src/parallel/PetscSolverFeti.cc
index d02741604e0a5d08ed8d56de02bcb36e93678cfc..423c1427ad4710c6d675a12a790834cb9d939c31 100644
--- a/AMDiS/src/parallel/PetscSolverFeti.cc
+++ b/AMDiS/src/parallel/PetscSolverFeti.cc
@@ -856,6 +856,10 @@ namespace AMDiS {
   {
     FUNCNAME("PetscSolverFeti::fillPetscMatrix()");   
 
+//     double vm, mem;
+//     processMemUsage(vm, mem);       
+//     MSG("MEM INFO 1 = %f\n", mem);
+
     nComponents = vec->getSize();
 
     // === Create all sets and indices. ===
@@ -874,45 +878,48 @@ namespace AMDiS {
 
     MatCreateMPIAIJ(PETSC_COMM_WORLD,
 		    nRowsRankB, nRowsRankB, nRowsOverallB, nRowsOverallB,
-		    100, PETSC_NULL, 100, PETSC_NULL, &mat_b_b);
+		    30, PETSC_NULL, 0, PETSC_NULL, &mat_b_b);
 
     MatCreateMPIAIJ(PETSC_COMM_WORLD,
 		    nRowsRankPrimal, nRowsRankPrimal, 
 		    nRowsOverallPrimal, nRowsOverallPrimal,
-		    10, PETSC_NULL, 10, PETSC_NULL, &mat_primal_primal);
+		    30, PETSC_NULL, 30, PETSC_NULL, &mat_primal_primal);
 
     MatCreateMPIAIJ(PETSC_COMM_WORLD,
 		    nRowsRankB, nRowsRankPrimal, 
 		    nRowsOverallB, nRowsOverallPrimal,
-		    100, PETSC_NULL, 100, PETSC_NULL, &mat_b_primal);
+		    30, PETSC_NULL, 30, PETSC_NULL, &mat_b_primal);
 
     MatCreateMPIAIJ(PETSC_COMM_WORLD,
 		    nRowsRankPrimal, nRowsRankB,
 		    nRowsOverallPrimal, nRowsOverallB,
-		    100, PETSC_NULL, 100, PETSC_NULL, &mat_primal_b);
+		    30, PETSC_NULL, 30, PETSC_NULL, &mat_primal_b);
 
 
     // === Create matrices for FETI-DP preconditioner. ===
 
     if (fetiPreconditioner != FETI_NONE)
       MatCreateSeqAIJ(PETSC_COMM_SELF,
-		      nRowsDual, nRowsDual, 100, PETSC_NULL,
+		      nRowsDual, nRowsDual, 30, PETSC_NULL,
 		      &mat_duals_duals);
 
     if (fetiPreconditioner == FETI_DIRICHLET) {
       MatCreateSeqAIJ(PETSC_COMM_SELF,
-		      nRowsInterior, nRowsInterior, 100, PETSC_NULL,
+		      nRowsInterior, nRowsInterior, 30, PETSC_NULL,
 		      &mat_interior_interior);
       
       MatCreateSeqAIJ(PETSC_COMM_SELF,
-		      nRowsInterior, nRowsDual, 100, PETSC_NULL,
+		      nRowsInterior, nRowsDual, 30, PETSC_NULL,
 		      &mat_interior_duals);
       
       MatCreateSeqAIJ(PETSC_COMM_SELF,
-		      nRowsDual, nRowsInterior, 100, PETSC_NULL,
+		      nRowsDual, nRowsInterior, 30, PETSC_NULL,
 		      &mat_duals_interior);
     }
 
+//     processMemUsage(vm, mem);       
+//     MSG("MEM INFO 2 = %f\n", mem);
+
     
     // === Prepare traverse of sequentially created matrices. ===
 
@@ -1121,7 +1128,9 @@ namespace AMDiS {
 	} 
       }
     }
-    
+
+//     processMemUsage(vm, mem);       
+//     MSG("MEM INFO 3 = %f\n", mem);
 
     // === Start global assembly procedure. ===
 
@@ -1137,6 +1146,8 @@ namespace AMDiS {
     MatAssemblyBegin(mat_primal_b, MAT_FINAL_ASSEMBLY);
     MatAssemblyEnd(mat_primal_b, MAT_FINAL_ASSEMBLY);
 
+//     processMemUsage(vm, mem);       
+//     MSG("MEM INFO 4 = %f\n", mem);
 
     // === Start global assembly procedure for preconditioner matrices. ===
 
@@ -1157,6 +1168,9 @@ namespace AMDiS {
     }
 
 
+//     processMemUsage(vm, mem);       
+//     MSG("MEM INFO 5 = %f\n", mem);
+
     // === Create and fill PETSc's right hand side vectors. ===
 
     VecCreate(PETSC_COMM_WORLD, &f_b);
@@ -1196,20 +1210,31 @@ namespace AMDiS {
     VecAssemblyBegin(f_primal);
     VecAssemblyEnd(f_primal);
 
+//     processMemUsage(vm, mem);       
+//     MSG("MEM INFO 6 = %f\n", mem);
+
 
     // === Create and fill PETSc matrix for Lagrange constraints. ===
 
     createMatLagrange();
 
+//     processMemUsage(vm, mem);       
+//     MSG("MEM INFO 7 = %f\n", mem);
     
     // === Create PETSc solver for the Schur complement on primal variables. ===
     
     createSchurPrimalKsp();
 
+//     processMemUsage(vm, mem);       
+//     MSG("MEM INFO 8 = %f\n", mem);
+
 
     // === Create PETSc solver for the FETI-DP operator. ===
 
     createFetiKsp();
+
+//     processMemUsage(vm, mem);       
+//     MSG("MEM INFO 9 = %f\n", mem);
   }
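On the lowered preallocation values: in MatCreateMPIAIJ(comm, m, n, M, N, d_nz,
d_nnz, o_nz, o_nnz, &A), d_nz and o_nz give the preallocated nonzeros per row
for the diagonal and off-diagonal blocks, so the memory reserved at creation
time scales linearly with them, while rows that exceed the estimate force PETSc
to reallocate during MatSetValues. A sketch for validating an estimate after
assembly (MatGetInfo/MatInfo are standard PETSc API; mat_b_b as above):

    MatInfo info;
    MatGetInfo(mat_b_b, MAT_GLOBAL_SUM, &info);
    MSG("nz allocated = %f, nz used = %f, mallocs = %f\n",
        info.nz_allocated, info.nz_used, info.mallocs);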