diff --git a/AMDiS/CMakeLists.txt b/AMDiS/CMakeLists.txt
index 125339dac9d75e9d11cdabca8c0bd4516a39af1f..65676ce32b1aed83c32892efd37cfd2c15852401 100644
--- a/AMDiS/CMakeLists.txt
+++ b/AMDiS/CMakeLists.txt
@@ -261,6 +261,8 @@ if(ENABLE_PARALLEL_DOMAIN)
 			${SOURCE_DIR}/parallel/PetscSolver.cc
 			${SOURCE_DIR}/parallel/PetscProblemStat.cc
 			${SOURCE_DIR}/parallel/PetscSolverFeti.cc
+			${SOURCE_DIR}/parallel/PetscSolverFetiDebug.cc
+			${SOURCE_DIR}/parallel/PetscSolverFetiMonitor.cc
 			${SOURCE_DIR}/parallel/PetscSolverFetiOperators.cc
 			${SOURCE_DIR}/parallel/PetscSolverFetiTimings.cc
 			${SOURCE_DIR}/parallel/PetscSolverGlobalMatrix.cc
diff --git a/AMDiS/src/AMDiS_fwd.h b/AMDiS/src/AMDiS_fwd.h
index 7afcc8a0efa2c3cd02154a092a63c24909b657a6..a3ecfb0e8dcd4b7118aeecd7f15284b531a503e0 100644
--- a/AMDiS/src/AMDiS_fwd.h
+++ b/AMDiS/src/AMDiS_fwd.h
@@ -103,6 +103,8 @@ namespace AMDiS {
   class FeSpaceDofMap;
   class MatrixNnzStructure;
   class MeshLevelData;
+  class PetscSolverFeti;
+  class PetscSolverFetiDebug;
 #endif
 
   struct BoundaryObject;
diff --git a/AMDiS/src/parallel/PetscSolverFeti.cc b/AMDiS/src/parallel/PetscSolverFeti.cc
index da0f522c19c349211eae99d1a3535716b0628845..0ab01df8e22e9fcb524c4d16f03a2c8114ef1bd5 100644
--- a/AMDiS/src/parallel/PetscSolverFeti.cc
+++ b/AMDiS/src/parallel/PetscSolverFeti.cc
@@ -13,6 +13,8 @@
 #include "AMDiS.h"
 #include "MatrixVector.h"
 #include "parallel/PetscSolverFeti.h"
+#include "parallel/PetscSolverFetiDebug.h"
+#include "parallel/PetscSolverFetiMonitor.h"
 #include "parallel/PetscSolverFetiStructs.h"
 #include "parallel/PetscSolverFetiOperators.h"
 #include "parallel/PetscSolverFetiTimings.h"
@@ -25,32 +27,6 @@ namespace AMDiS {
 
   using namespace std;
 
-  PetscErrorCode KSPMonitorFeti(KSP ksp, PetscInt n, PetscReal rnorm, void *data)
-  {    
-    Vec Br,v,w;
-    VecDuplicate(static_cast<FetiKspData*>(data)->draft, &v);
-    VecDuplicate(static_cast<FetiKspData*>(data)->draft, &w);
-
-    KSPBuildResidual(ksp, v, w, &Br);
-
-    Vec nest0, nest1;
-    VecNestGetSubVec(Br, 0, &nest0);
-    VecNestGetSubVec(Br, 1, &nest1);
-
-    PetscScalar norm, norm0, norm1;
-    VecNorm(Br, NORM_2, &norm);
-    VecNorm(nest0, NORM_2, &norm0);
-    VecNorm(nest1, NORM_2, &norm1);
-
-    VecDestroy(&v);
-    VecDestroy(&w);
-
-    PetscPrintf(PETSC_COMM_WORLD, "%3D KSP residual norm %1.12e [%1.12e %1.12e] and preconditioned norm [%1.12e]\n",
-		n, norm, norm0, norm1, rnorm);
-
-    return 0;
-  }
-
   PetscSolverFeti::PetscSolverFeti()
     : PetscSolver(),
       schurPrimalSolver(0),
@@ -1162,22 +1138,9 @@ namespace AMDiS {
     Vec nullSpaceBasis;
     VecCreateNest(mpiCommGlobal, 2, PETSC_NULL, vecArray, &nullSpaceBasis); 
 
-    int writeFetiSystem = 0;
-    Parameters::get("parallel->debug->write feti system",
-		    writeFetiSystem);
-    if (writeFetiSystem) {
-      Vec vecTmp;
-      createExplicitVec(nullSpaceBasis, vecTmp);
-      PetscViewer petscView;
-      PetscViewerBinaryOpen(PETSC_COMM_WORLD, "nullspace.vec",
-			    FILE_MODE_WRITE, &petscView);
-      VecView(vecTmp, petscView);
-      PetscViewerDestroy(&petscView);
-
-      VecDestroy(&vecTmp);
-      MSG("Written FETI-DP null space basis vector!\n");
-    }
-
+#if (DEBUG != 0)
+    PetscSolverFetiDebug::writeNullSpace(*this, nullSpaceBasis);
+#endif
 
     MatNullSpace matNullSpace;
     MatNullSpaceCreate(mpiCommGlobal, PETSC_FALSE, 1, &nullSpaceBasis, 
@@ -1186,27 +1149,6 @@ namespace AMDiS {
     KSPSetNullSpace(ksp_feti, matNullSpace);
     MatNullSpaceDestroy(&matNullSpace);
 
-    //#if (DEBUG != 0)
-    Vec vecSol;
-    VecDuplicate(nullSpaceBasis, &vecSol);    
-    MatMult(mat_feti, nullSpaceBasis, vecSol);
-
-    Vec vecSol0, vecSol1;
-    VecNestGetSubVec(vecSol, 0, &vecSol0);
-    VecNestGetSubVec(vecSol, 1, &vecSol1);
-
-    PetscReal norm, norm0, norm1;
-    VecNorm(vecSol, NORM_2, &norm);
-    VecNorm(vecSol0, NORM_2, &norm0);
-    VecNorm(vecSol1, NORM_2, &norm1);
-
-    MSG("NORM: %e (%e %e)\n", norm, norm0, norm1);  
-    VecDestroy(&vecSol);    
-    //#endif
-
-    // AMDiS::finalize();
-    // exit(0);
-
     VecDestroy(&ktest0);
     VecDestroy(&ktest1);
     VecDestroy(&ktest2);
@@ -1772,7 +1714,10 @@ namespace AMDiS {
   {
     FUNCNAME("PetscSolverFeti::solveReducedFetiMatrix()");
 
-    debugNullSpace(vec);
+#if (DEBUG != 0)
+    if (stokesMode)
+      PetscSolverFetiDebug::debugNullSpace(*this, vec);
+#endif
 
     // === Some temporary vectors. ===
 
@@ -1883,7 +1828,7 @@ namespace AMDiS {
     }
 
     // === Optionally run some debug procedures on the FETI-DP system. ===
-    debugFeti(vecRhs);
+    PetscSolverFetiDebug::debugFeti(*this, vecRhs);
 
     // === Solve with FETI-DP operator. ===
     KSPSolve(ksp_feti, vecRhs, vecSol);
@@ -2067,447 +2012,4 @@ namespace AMDiS {
     }
   }
 
-
-  void PetscSolverFeti::createInteriorMat(Mat &mat)
-  {
-    FUNCNAME("PetscSolverFeti::createInteriorMat()");
-
-    Mat &localMat = subdomain->getMatInterior();
-    int nRow, nCol;
-    MatGetSize(localMat, &nRow, &nCol);
-
-    TEST_EXIT_DBG(nRow == nCol)("Should not happen!\n");
-
-    int *dnnz = new int[nRow];
-    for (int i = 0; i < nRow; i++) {
-      MatGetRow(localMat, i, &nCol, PETSC_NULL, PETSC_NULL);
-      dnnz[i] = nCol;
-      MatRestoreRow(localMat, i, &nCol, PETSC_NULL, PETSC_NULL);
-    }
-    
-    MatCreateAIJ(PETSC_COMM_WORLD, 
-		 nRow, nRow, 
-		 nGlobalOverallInterior, nGlobalOverallInterior,
-		 0, dnnz, 0, PETSC_NULL, &mat);
-
-    PetscInt rStart, rEnd;
-    MatGetOwnershipRange(mat, &rStart, &rEnd);
-
-    for (int i = 0; i < nRow; i++) {
-      const PetscInt *cols;
-      const PetscScalar *vals;
-	
-      MatGetRow(localMat, i, &nCol, &cols, &vals);
-      for (int j = 0; j < nCol; j++)
-	MatSetValue(mat, i + rStart, cols[j] + rStart, vals[j], INSERT_VALUES);
-    }
-
-    MatAssemblyBegin(mat, MAT_FINAL_ASSEMBLY);
-    MatAssemblyEnd(mat, MAT_FINAL_ASSEMBLY);
-
-    delete [] dnnz;
-  }
-
-
-  void PetscSolverFeti::createNestedFetiMat(Mat &mat)
-  {
-    FUNCNAME("PetscSolverFeti::createNestedFetiMat()");
-
-    if (stokesMode) {
-      vector<Mat> nestMat;
-      nestMat.resize(16);
-      
-      createInteriorMat(nestMat[0]);
-      nestMat[1] = subdomain->getMatInteriorCoarse(0);
-      nestMat[2] = subdomain->getMatInteriorCoarse(1);
-      MatTranspose(mat_lagrange, MAT_INITIAL_MATRIX, &nestMat[3]);
-      nestMat[4] = subdomain->getMatCoarseInterior(0);
-      
-      nestMat[5] = subdomain->getMatCoarse(0, 0);
-      nestMat[6] = subdomain->getMatCoarse(0, 1);
-      nestMat[7] = PETSC_NULL;
-      nestMat[8] = subdomain->getMatCoarseInterior(1);
-      nestMat[9] = subdomain->getMatCoarse(1, 0);
-      nestMat[10] = PETSC_NULL;
-      nestMat[11] = PETSC_NULL;
-      nestMat[12] = mat_lagrange;
-      nestMat[13] = PETSC_NULL;
-      nestMat[14] = PETSC_NULL;
-      nestMat[15] = PETSC_NULL;
-      
-      Mat nestFetiMat;
-      MatCreateNest(mpiCommGlobal, 4, PETSC_NULL, 4, PETSC_NULL,
-		    &(nestMat[0]), &mat);
-    } else {
-      vector<Mat> nestMat;
-      nestMat.resize(9);
-      
-      createInteriorMat(nestMat[0]);
-      nestMat[1] = subdomain->getMatInteriorCoarse(0);
-      MatTranspose(mat_lagrange, MAT_INITIAL_MATRIX, &nestMat[2]);
-      nestMat[3] = subdomain->getMatCoarseInterior(0);      
-      nestMat[4] = subdomain->getMatCoarse(0, 0);
-      nestMat[5] = PETSC_NULL;
-      nestMat[6] = mat_lagrange;
-      nestMat[7] = PETSC_NULL;
-      nestMat[8] = PETSC_NULL;
-      
-      Mat nestFetiMat;
-      MatCreateNest(mpiCommGlobal, 3, PETSC_NULL, 3, PETSC_NULL,
-		    &(nestMat[0]), &mat);
-    }
-  }
-
-
-  void PetscSolverFeti::debugNullSpace(SystemVector &vec)
-  {
-    FUNCNAME("PetscSolverFeti::debugNullSpace()");
-
-    TEST_EXIT(stokesMode)("This function works only for the stokes mode!\n");
-
-    Mat fetiMat;
-    createNestedFetiMat(fetiMat);
-
-    Vec vecArray[4];
-    
-
-    Vec ktest0, ktest1, ktest2;
-    localDofMap.createLocalVec(ktest0);
-    localDofMap.createLocalVec(ktest1);
-    localDofMap.createVec(ktest2, nGlobalOverallInterior);
-
-    DofMap& m = localDofMap[pressureFeSpace].getMap();
-    for (DofMap::iterator it = m.begin(); it != m.end(); ++it) {
-      if (meshDistributor->getDofMap()[pressureFeSpace].isRankDof(it->first)) {
-	int index = localDofMap.getLocalMatIndex(pressureComponent, it->first);
-	VecSetValue(ktest0, index, 1.0, INSERT_VALUES);
-      }
-    }    
-    VecAssemblyBegin(ktest0);
-    VecAssemblyEnd(ktest0);
-    MatMult(subdomain->getMatInterior(), ktest0, ktest1);
-    
-    PetscScalar *valarray;
-    VecGetArray(ktest0, &valarray);
-    VecCreateMPIWithArray(PETSC_COMM_WORLD, 1, 
-			  localDofMap.getRankDofs(), nGlobalOverallInterior, 
-			  valarray, &vecArray[0]);
-
-    Vec ktest3;
-    VecGetArray(ktest1, &valarray);
-    VecCreateMPIWithArray(PETSC_COMM_WORLD, 1, 
-			  localDofMap.getRankDofs(), nGlobalOverallInterior, 
-			  valarray, &ktest3);
-
-    primalDofMap.createVec(vecArray[1]);
-    VecSet(vecArray[1], 0.0);
-    
-    interfaceDofMap.createVec(vecArray[2]);
-    VecSet(vecArray[2], 1.0);
-
-    lagrangeMap.createVec(vecArray[3]);
-    MatMult(subdomain->getMatInteriorCoarse(1), vecArray[2], ktest2);
-    VecAXPY(ktest2, 1.0, ktest3);
-    MatMult(mat_lagrange_scaled, ktest2, vecArray[3]);
-    VecScale(vecArray[3], -1.0);
-
-    
-    Vec nullSpaceBasis;
-    VecCreateNest(mpiCommGlobal, 4, PETSC_NULL, vecArray, &nullSpaceBasis);
-
-    Vec vecSol;
-    VecDuplicate(nullSpaceBasis, &vecSol);
-    
-    MatMult(fetiMat, nullSpaceBasis, vecSol);
-    PetscReal norm;
-    VecNorm(vecSol, NORM_2, &norm);
-    MSG("Null space norm: %e\n", norm);  
-
-    Vec vec0;
-    Vec vec1;
-    Vec vec2;
-    Vec vec3;
-    VecNestGetSubVec(vecSol, 0, &vec0);
-    VecNestGetSubVec(vecSol, 1, &vec1);
-    VecNestGetSubVec(vecSol, 2, &vec2);
-    VecNestGetSubVec(vecSol, 3, &vec3);
-
-    PetscReal norm0, norm1, norm2, norm3;
-    VecNorm(vec0, NORM_2, &norm0);
-    VecNorm(vec1, NORM_2, &norm1);
-    VecNorm(vec2, NORM_2, &norm2);
-    VecNorm(vec3, NORM_2, &norm3);
-    MSG("Splitted null space norm: %e %e %e %e\n", norm0, norm1, norm2, norm3);  
-
-
-    recoverSolution(vec0, vec1, vec);
-    recoverInterfaceSolution(vec2, vec);
-
-    VtkWriter::writeFile(&vec, "nullspace.vtu");
-
-    MatDestroy(&fetiMat);
-    VecDestroy(&nullSpaceBasis);
-    VecDestroy(&vecSol);
-  }
-
-
-  void PetscSolverFeti::createExplicitFetiMat(Mat fetiMat, 
-					      Mat &explicitMat, 
-					      int &nnzCounter)
-  {
-    FUNCNAME("PetscSolverFeti::createExplicitFetiMat()");
-
-    nnzCounter = 0;
-
-    if (stokesMode == false) {     
-      int nRankRows = lagrangeMap.getRankDofs();
-      int nOverallRows = lagrangeMap.getOverallDofs();
-      MatCreateAIJ(mpiCommGlobal, 
-		   nRankRows, nRankRows, nOverallRows, nOverallRows,
-		   nOverallRows, PETSC_NULL, nOverallRows, PETSC_NULL,
-		   &explicitMat);
-
-      Vec unitVector;
-      Vec resultVector;
-      lagrangeMap.createVec(unitVector);
-      lagrangeMap.createVec(resultVector);
-
-      PetscInt low, high;
-      VecGetOwnershipRange(unitVector, &low, &high);
-      int nLocal = high - low;
-      int nnzCounter = 0;
-
-      for (int i = 0; i < lagrangeMap.getOverallDofs(); i++) {
-	VecSet(unitVector, 0.0);
-	if (i >= low && i < high)
-	  VecSetValue(unitVector, i, 1.0, INSERT_VALUES);
-	VecAssemblyBegin(unitVector);
-	VecAssemblyEnd(unitVector);
-
-	MatMult(fetiMat, unitVector, resultVector);
-
-	if (fetiPreconditioner != FETI_NONE) {
-	  PCApply(precon_feti, resultVector, unitVector);
-	  VecCopy(unitVector, resultVector);
-	}
-
-	PetscScalar *vals;
-	VecGetArray(resultVector, &vals);
-	for (int j = 0; j < nLocal; j++) {
-	  if (fabs(vals[j]) > 1e-30) {
-	    MatSetValue(explicitMat, low + j, i, vals[j], INSERT_VALUES);
-	    nnzCounter++;
-	  }
-	}
-	VecRestoreArray(resultVector, &vals);
-      }
-
-      VecDestroy(&unitVector);
-      VecDestroy(&resultVector);      
-    } else {
-      int nRankRows = interfaceDofMap.getRankDofs() + lagrangeMap.getRankDofs();
-      int nOverallRows = 
-	interfaceDofMap.getOverallDofs() + lagrangeMap.getOverallDofs();
-
-      MatCreateAIJ(mpiCommGlobal, 
-		   nRankRows, nRankRows, nOverallRows, nOverallRows,
-		   nOverallRows, PETSC_NULL, nOverallRows, PETSC_NULL,
-		   &explicitMat);
-
-      Vec unitVector[2]; 
-      Vec resultVector[2];
-      interfaceDofMap.createVec(unitVector[0]);
-      interfaceDofMap.createVec(resultVector[0]);
-      lagrangeMap.createVec(unitVector[1]);
-      lagrangeMap.createVec(resultVector[1]);
-
-      Vec unitNestVec, resultNestVec;
-      VecCreateNest(mpiCommGlobal, 2, PETSC_NULL, unitVector, &unitNestVec);
-      VecCreateNest(mpiCommGlobal, 2, PETSC_NULL, resultVector, &resultNestVec);
-
-      PetscInt lowInterface, highInterface;
-      VecGetOwnershipRange(unitVector[0], &lowInterface, &highInterface);
-      int nLocalInterface = highInterface - lowInterface;
-
-      PetscInt lowLagrange, highLagrange;
-      VecGetOwnershipRange(unitVector[1], &lowLagrange, &highLagrange);
-      int nLocalLagrange = highLagrange - lowLagrange;
-
-      VecSet(unitVector[1], 0.0);
-      for (int i = 0; i < interfaceDofMap.getOverallDofs(); i++) {
-	VecSet(unitVector[0], 0.0);
-	if (i >= lowInterface && i < highInterface)
-	  VecSetValue(unitVector[0], i, 1.0, INSERT_VALUES);
-	VecAssemblyBegin(unitVector[0]);
-	VecAssemblyEnd(unitVector[0]);
-
-	VecAssemblyBegin(unitNestVec);
-	VecAssemblyEnd(unitNestVec);
-
-	MatMult(fetiMat, unitNestVec, resultNestVec);
-
-	PetscScalar *vals;
-	VecGetArray(resultVector[0], &vals);
-	for (int j = 0; j < nLocalInterface; j++) {
-	  if (fabs(vals[j]) > 1e-30) {
-	    MatSetValue(explicitMat, lowInterface + j, i, vals[j], INSERT_VALUES);
-	    nnzCounter++;
-	  }	 
-	}
-	VecRestoreArray(resultVector[0], &vals);
-
-	VecGetArray(resultVector[1], &vals);
-	for (int j = 0; j < nLocalLagrange; j++) {
-	  if (fabs(vals[j]) > 1e-30) {
-	    MatSetValue(explicitMat, 
-			interfaceDofMap.getOverallDofs() + lowLagrange + j, i, 
-			vals[j], INSERT_VALUES);
-	    nnzCounter++;
-	  }	 
-	}
-	VecRestoreArray(resultVector[1], &vals);
-      }
-
-
-      VecSet(unitVector[0], 0.0);
-      for (int i = 0; i < lagrangeMap.getOverallDofs(); i++) {
-	VecSet(unitVector[1], 0.0);
-	if (i >= lowLagrange && i < highLagrange)
-	  VecSetValue(unitVector[1], i, 1.0, INSERT_VALUES);
-	VecAssemblyBegin(unitVector[1]);
-	VecAssemblyEnd(unitVector[1]);
-
-	VecAssemblyBegin(unitNestVec);
-	VecAssemblyEnd(unitNestVec);
-
-
-	MatMult(fetiMat, unitNestVec, resultNestVec);
-
-	PetscScalar *vals;
-	VecGetArray(resultVector[0], &vals);
-	for (int j = 0; j < nLocalInterface; j++) {
-	  if (fabs(vals[j]) > 1e-30) {
-	    MatSetValue(explicitMat, 
-			lowInterface + j, 
-			interfaceDofMap.getOverallDofs() + i, 
-			vals[j], INSERT_VALUES);
-	    nnzCounter++;
-	  }	 
-	}
-	VecRestoreArray(resultVector[0], &vals);
-
-	VecGetArray(resultVector[1], &vals);
-	for (int j = 0; j < nLocalLagrange; j++) {
-	  if (fabs(vals[j]) > 1e-30) {
-	    MatSetValue(explicitMat, 
-			interfaceDofMap.getOverallDofs() + lowLagrange + j, 
-			interfaceDofMap.getOverallDofs() + i, 
-			vals[j], INSERT_VALUES);
-	    nnzCounter++;
-	  }	 
-	}
-	VecRestoreArray(resultVector[1], &vals);
-      }
-
-      VecDestroy(&unitVector[0]);
-      VecDestroy(&unitVector[1]);
-      VecDestroy(&resultVector[0]);
-      VecDestroy(&resultVector[1]);
-      VecDestroy(&unitNestVec);
-      VecDestroy(&resultNestVec);
-    }
-
-    MatAssemblyBegin(explicitMat, MAT_FINAL_ASSEMBLY);
-    MatAssemblyEnd(explicitMat, MAT_FINAL_ASSEMBLY);
-    mpi::globalAdd(nnzCounter);
-  }
-
-
-  void PetscSolverFeti::createExplicitVec(Vec nestedVec, Vec &explicitVec)
-  {
-    FUNCNAME("PetscSolverFeti::createExplicitVec()");
-
-    int nNested = 0;
-    VecNestGetSize(nestedVec, &nNested);
-    
-    TEST_EXIT_DBG(nNested == 2)
-      ("Only supported for 2-nest vectors, not for %d-nest vectors!\n", nNested);
-
-    Vec v0, v1;
-    VecNestGetSubVec(nestedVec, 0, &v0);
-    VecNestGetSubVec(nestedVec, 1, &v1);
-
-    int s0, s1;
-    VecGetSize(v0, &s0);
-    VecGetSize(v1, &s1);
-
-    int l0, l1;
-    VecGetLocalSize(v0, &l0);
-    VecGetLocalSize(v1, &l1);
-
-    int rStart0, rStart1;
-    VecGetOwnershipRange(v0, &rStart0, PETSC_NULL);
-    VecGetOwnershipRange(v1, &rStart1, PETSC_NULL);
-    
-    VecCreateMPI(mpiCommGlobal, l0 + l1, s0 + s1, &explicitVec);
-
-    double *val;
-    VecGetArray(v0, &val);
-    for (int i = 0; i < l0; i++)
-      VecSetValue(explicitVec, rStart0 + i, val[i], INSERT_VALUES);
-    VecRestoreArray(v0, &val);
-
-    VecGetArray(v1, &val);
-    for (int i = 0; i < l1; i++)
-      VecSetValue(explicitVec, s0 + rStart1 + i, val[i], INSERT_VALUES);
-    VecRestoreArray(v1, &val);
-
-    VecAssemblyBegin(explicitVec);
-    VecAssemblyEnd(explicitVec);
-  }
-
-      
-  void PetscSolverFeti::debugFeti(Vec dbgRhsVec)
-  {
-    FUNCNAME("PetscSolverFeti:::debugFeti()");
-
-    int writeFetiSystem = 0;
-    Parameters::get("parallel->debug->write feti system",
-		    writeFetiSystem);
-    if (!writeFetiSystem)
-      return;
-
-    MSG("Start creating explicit FETI-DP matrix!\n");
-    
-    Mat fetiMat;
-    int nnzCounter = 0;    
-    createExplicitFetiMat(mat_feti, fetiMat, nnzCounter);
-    
-    PetscViewer petscView;
-    PetscViewerBinaryOpen(PETSC_COMM_WORLD, "feti.mat", 
-			  FILE_MODE_WRITE, &petscView);
-    MatView(fetiMat, petscView);
-    PetscViewerDestroy(&petscView);
-    
-    bool a = testMatrixSymmetric(fetiMat, true);
-    MSG("SYMMETRIC TEST: %d\n", a);
-    
-    int r, c;
-    MatGetSize(fetiMat, &r, &c);
-    
-    MSG("FETI-DP matrix written: %d x %d mat with %d nnz\n", 
-	r, c, nnzCounter);
-
-    MatDestroy(&fetiMat);
-
-    Vec rhsVec;
-    createExplicitVec(dbgRhsVec, rhsVec);
-    
-    PetscViewerBinaryOpen(PETSC_COMM_WORLD, "feti_rhs.vec",
-			  FILE_MODE_WRITE, &petscView);
-    VecView(rhsVec, petscView);
-    PetscViewerDestroy(&petscView);
-
-    VecDestroy(&rhsVec);
-  }
 }
diff --git a/AMDiS/src/parallel/PetscSolverFeti.h b/AMDiS/src/parallel/PetscSolverFeti.h
index 3cf47f3afe4cad189e96c182ff22eec24691b963..a90a1a61e8fceef37310883f80f5b2b81583d497 100644
--- a/AMDiS/src/parallel/PetscSolverFeti.h
+++ b/AMDiS/src/parallel/PetscSolverFeti.h
@@ -21,6 +21,7 @@
 /** \file PetscSolverFeti.h */
 
 #include <map>
+#include "AMDiS_fwd.h"
 #include "parallel/MpiHelper.h"
 #include "parallel/PetscSolver.h"
 #include "parallel/PetscSolverFetiStructs.h"
@@ -223,24 +224,6 @@ namespace AMDiS {
     /// order of: 1/h^2
     void createH2vec(DOFVector<double> &vec);
 
-    /// For debugging only!
-    void createInteriorMat(Mat &mat);
-
-    /// For debugging only!
-    void createNestedFetiMat(Mat &mat);
-
-    /// For debugging only!
-    void debugNullSpace(SystemVector &vec);
-
-    /// For debugging only!
-    void createExplicitFetiMat(Mat fetiMat, Mat &explicitmat, int &nnzCounter);
-
-    /// For debugging only!
-    void createExplicitVec(Vec nestedVec, Vec &explicitVec);
-
-    /// For debugging only!
-    void debugFeti(Vec dbgRhsVec);
-
   protected:
     /// Mapping from primal DOF indices to a global index of primals.
     ParallelDofMapping primalDofMap;
@@ -341,6 +324,8 @@ namespace AMDiS {
     int pressureComponent;
 
     map<const FiniteElemSpace*, std::set<DegreeOfFreedom> > dirichletRows;
+
+    friend class PetscSolverFetiDebug;
   };
 
 }
diff --git a/AMDiS/src/parallel/PetscSolverFetiDebug.cc b/AMDiS/src/parallel/PetscSolverFetiDebug.cc
new file mode 100644
index 0000000000000000000000000000000000000000..b234568d3f922affebfd700e958dfc1aafa8273e
--- /dev/null
+++ b/AMDiS/src/parallel/PetscSolverFetiDebug.cc
@@ -0,0 +1,532 @@
+//
+// Software License for AMDiS
+//
+// Copyright (c) 2010 Dresden University of Technology 
+// All rights reserved.
+// Authors: Simon Vey, Thomas Witkowski et al.
+//
+// This file is part of AMDiS
+//
+// See also license.opensource.txt in the distribution.
+
+#include <mpi.h>
+#include <petsc.h>
+#include "parallel/ParallelDofMapping.h"
+#include "parallel/PetscSolverFeti.h"
+#include "parallel/PetscSolverFetiDebug.h"
+
+namespace AMDiS {
+
+  using namespace std;
+
+  void PetscSolverFetiDebug::debugNullSpace(PetscSolverFeti &feti, 
+					    SystemVector &vec)
+  {
+    FUNCNAME("PetscSolverFetiDebug::debugNullSpace()");
+
+    TEST_EXIT(feti.stokesMode)
+      ("This function works only for the stokes mode!\n");
+
+    // === First, get the null space basis directly from the mat object and ===
+    // === test whether this vector is a member of the null space.          ===
+
+    {
+      MatNullSpace nullSpace;
+      MatGetNullSpace(feti.mat_feti, &nullSpace);
+
+      PetscBool hasConst;
+      PetscInt nBasisVec;
+      const Vec *nullSpaceBasis;
+      MatNullSpaceGetVecs(nullSpace, &hasConst, &nBasisVec, &nullSpaceBasis);
+
+      TEST_EXIT(nBasisVec == 1)("Expected exactly one null space basis vector!\n");
+	  
+      Vec vecSol;
+      VecDuplicate(nullSpaceBasis[0], &vecSol);    
+      MatMult(feti.mat_feti, nullSpaceBasis[0], vecSol);
+      
+      Vec vecSol0, vecSol1;
+      VecNestGetSubVec(vecSol, 0, &vecSol0);
+      VecNestGetSubVec(vecSol, 1, &vecSol1);
+      
+      PetscReal norm, norm0, norm1;
+      VecNorm(vecSol, NORM_2, &norm);
+      VecNorm(vecSol0, NORM_2, &norm0);
+      VecNorm(vecSol1, NORM_2, &norm1);
+      
+      MSG("Null space norm: %e (%e %e)\n", norm, norm0, norm1);  
+      VecDestroy(&vecSol);    
+    }
+
+
+    // === Try to reproduce the null space basis on the non-reduced form of ===
+    // === the FETI-DP system.                                              ===
+
+    Mat fetiMat;
+    createNestedFetiMat(feti, fetiMat);
+
+    Vec vecArray[4];
+    
+    Vec ktest0, ktest1, ktest2;
+    ParallelDofMapping &localDofMap = feti.localDofMap;
+    localDofMap.createLocalVec(ktest0);
+    localDofMap.createLocalVec(ktest1);
+    localDofMap.createVec(ktest2, feti.nGlobalOverallInterior);
+
+    DofMap& m = localDofMap[feti.pressureFeSpace].getMap();
+    for (DofMap::iterator it = m.begin(); it != m.end(); ++it) {
+      if (feti.meshDistributor->getDofMap()[feti.pressureFeSpace].isRankDof(it->first)) {
+	int index = localDofMap.getLocalMatIndex(feti.pressureComponent, it->first);
+	VecSetValue(ktest0, index, 1.0, INSERT_VALUES);
+      }
+    }    
+    VecAssemblyBegin(ktest0);
+    VecAssemblyEnd(ktest0);
+    MatMult(feti.subdomain->getMatInterior(), ktest0, ktest1);
+    
+    PetscScalar *valarray;
+    VecGetArray(ktest0, &valarray);
+    VecCreateMPIWithArray(PETSC_COMM_WORLD, 1, 
+			  localDofMap.getRankDofs(), feti.nGlobalOverallInterior, 
+			  valarray, &vecArray[0]);
+
+    Vec ktest3;
+    VecGetArray(ktest1, &valarray);
+    VecCreateMPIWithArray(PETSC_COMM_WORLD, 1, 
+			  localDofMap.getRankDofs(), feti.nGlobalOverallInterior, 
+			  valarray, &ktest3);
+
+    feti.primalDofMap.createVec(vecArray[1]);
+    VecSet(vecArray[1], 0.0);
+    
+    feti.interfaceDofMap.createVec(vecArray[2]);
+    VecSet(vecArray[2], 1.0);
+
+    feti.lagrangeMap.createVec(vecArray[3]);
+    MatMult(feti.subdomain->getMatInteriorCoarse(1), vecArray[2], ktest2);
+    VecAXPY(ktest2, 1.0, ktest3);
+    MatMult(feti.mat_lagrange_scaled, ktest2, vecArray[3]);
+    VecScale(vecArray[3], -1.0);
+
+    
+    Vec nullSpaceBasis;
+    VecCreateNest(feti.mpiCommGlobal, 4, PETSC_NULL, vecArray, &nullSpaceBasis);
+
+    Vec vecSol;
+    VecDuplicate(nullSpaceBasis, &vecSol);
+    
+    MatMult(fetiMat, nullSpaceBasis, vecSol);
+    PetscReal norm;
+    VecNorm(vecSol, NORM_2, &norm);
+    MSG("Null space norm: %e\n", norm);  
+
+    Vec vec0;
+    Vec vec1;
+    Vec vec2;
+    Vec vec3;
+    VecNestGetSubVec(vecSol, 0, &vec0);
+    VecNestGetSubVec(vecSol, 1, &vec1);
+    VecNestGetSubVec(vecSol, 2, &vec2);
+    VecNestGetSubVec(vecSol, 3, &vec3);
+
+    PetscReal norm0, norm1, norm2, norm3;
+    VecNorm(vec0, NORM_2, &norm0);
+    VecNorm(vec1, NORM_2, &norm1);
+    VecNorm(vec2, NORM_2, &norm2);
+    VecNorm(vec3, NORM_2, &norm3);
+    MSG("Splitted null space norm: %e %e %e %e\n", norm0, norm1, norm2, norm3);  
+
+
+    feti.recoverSolution(vec0, vec1, vec);
+    feti.recoverInterfaceSolution(vec2, vec);
+
+    VtkWriter::writeFile(&vec, "nullspace.vtu");
+
+    MatDestroy(&fetiMat);
+    VecDestroy(&nullSpaceBasis);
+    VecDestroy(&vecSol);
+  }
+
+
+  void PetscSolverFetiDebug::createInteriorMat(PetscSolverFeti &feti, Mat &mat)
+  {
+    FUNCNAME("PetscSolverFetiDebug::createInteriorMat()");
+
+    Mat &localMat = feti.subdomain->getMatInterior();
+    int nRow, nCol;
+    MatGetSize(localMat, &nRow, &nCol);
+
+    TEST_EXIT_DBG(nRow == nCol)("Should not happen!\n");
+
+    int *dnnz = new int[nRow];
+    for (int i = 0; i < nRow; i++) {
+      MatGetRow(localMat, i, &nCol, PETSC_NULL, PETSC_NULL);
+      dnnz[i] = nCol;
+      MatRestoreRow(localMat, i, &nCol, PETSC_NULL, PETSC_NULL);
+    }
+    
+    MatCreateAIJ(PETSC_COMM_WORLD, 
+		 nRow, nRow, 
+		 feti.nGlobalOverallInterior, feti.nGlobalOverallInterior,
+		 0, dnnz, 0, PETSC_NULL, &mat);
+
+    PetscInt rStart, rEnd;
+    MatGetOwnershipRange(mat, &rStart, &rEnd);
+
+    for (int i = 0; i < nRow; i++) {
+      const PetscInt *cols;
+      const PetscScalar *vals;
+	
+      MatGetRow(localMat, i, &nCol, &cols, &vals);
+      for (int j = 0; j < nCol; j++)
+	MatSetValue(mat, i + rStart, cols[j] + rStart, vals[j], INSERT_VALUES);
+    }
+
+    MatAssemblyBegin(mat, MAT_FINAL_ASSEMBLY);
+    MatAssemblyEnd(mat, MAT_FINAL_ASSEMBLY);
+
+    delete [] dnnz;
+  }
+
+   
+  void PetscSolverFetiDebug::createNestedFetiMat(PetscSolverFeti &feti, Mat &mat)
+  {
+    FUNCNAME("PetscSolverFetiDebug::createNestedFetiMat()");
+
+    PetscSolver *subdomain = feti.subdomain;
+    if (feti.stokesMode) {
+      vector<Mat> nestMat;
+      nestMat.resize(16);
+      
+      createInteriorMat(feti, nestMat[0]);
+      nestMat[1] = subdomain->getMatInteriorCoarse(0);
+      nestMat[2] = subdomain->getMatInteriorCoarse(1);
+      MatTranspose(feti.mat_lagrange, MAT_INITIAL_MATRIX, &nestMat[3]);
+      nestMat[4] = subdomain->getMatCoarseInterior(0);
+      
+      nestMat[5] = subdomain->getMatCoarse(0, 0);
+      nestMat[6] = subdomain->getMatCoarse(0, 1);
+      nestMat[7] = PETSC_NULL;
+      nestMat[8] = subdomain->getMatCoarseInterior(1);
+      nestMat[9] = subdomain->getMatCoarse(1, 0);
+      nestMat[10] = PETSC_NULL;
+      nestMat[11] = PETSC_NULL;
+      nestMat[12] = feti.mat_lagrange;
+      nestMat[13] = PETSC_NULL;
+      nestMat[14] = PETSC_NULL;
+      nestMat[15] = PETSC_NULL;
+      
+      Mat nestFetiMat;
+      MatCreateNest(feti.mpiCommGlobal, 4, PETSC_NULL, 4, PETSC_NULL,
+		    &(nestMat[0]), &mat);
+    } else {
+      vector<Mat> nestMat;
+      nestMat.resize(9);
+      
+      createInteriorMat(feti, nestMat[0]);
+      nestMat[1] = subdomain->getMatInteriorCoarse(0);
+      MatTranspose(feti.mat_lagrange, MAT_INITIAL_MATRIX, &nestMat[2]);
+      nestMat[3] = subdomain->getMatCoarseInterior(0);      
+      nestMat[4] = subdomain->getMatCoarse(0, 0);
+      nestMat[5] = PETSC_NULL;
+      nestMat[6] = feti.mat_lagrange;
+      nestMat[7] = PETSC_NULL;
+      nestMat[8] = PETSC_NULL;
+      
+      Mat nestFetiMat;
+      MatCreateNest(feti.mpiCommGlobal, 3, PETSC_NULL, 3, PETSC_NULL,
+		    &(nestMat[0]), &mat);
+    }
+  }
+
+
+  void PetscSolverFetiDebug::createExplicitFetiMat(PetscSolverFeti &feti,
+						   Mat fetiMat, 
+						   Mat &explicitMat, 
+						   int &nnzCounter)
+  {
+    FUNCNAME("PetscSolverFetiDebug::createExplicitFetiMat()");
+
+    ParallelDofMapping &lagrangeMap = feti.lagrangeMap;
+
+    nnzCounter = 0;
+
+    if (feti.stokesMode == false) {     
+      int nRankRows = lagrangeMap.getRankDofs();
+      int nOverallRows = lagrangeMap.getOverallDofs();
+      MatCreateAIJ(feti.mpiCommGlobal, 
+		   nRankRows, nRankRows, nOverallRows, nOverallRows,
+		   nOverallRows, PETSC_NULL, nOverallRows, PETSC_NULL,
+		   &explicitMat);
+
+      Vec unitVector;
+      Vec resultVector;
+      lagrangeMap.createVec(unitVector);
+      lagrangeMap.createVec(resultVector);
+
+      PetscInt low, high;
+      VecGetOwnershipRange(unitVector, &low, &high);
+      int nLocal = high - low;
+      int nnzCounter = 0;
+
+      for (int i = 0; i < lagrangeMap.getOverallDofs(); i++) {
+	VecSet(unitVector, 0.0);
+	if (i >= low && i < high)
+	  VecSetValue(unitVector, i, 1.0, INSERT_VALUES);
+	VecAssemblyBegin(unitVector);
+	VecAssemblyEnd(unitVector);
+
+	MatMult(fetiMat, unitVector, resultVector);
+
+	if (feti.fetiPreconditioner != FETI_NONE) {
+	  PCApply(feti.precon_feti, resultVector, unitVector);
+	  VecCopy(unitVector, resultVector);
+	}
+
+	PetscScalar *vals;
+	VecGetArray(resultVector, &vals);
+	for (int j = 0; j < nLocal; j++) {
+	  if (fabs(vals[j]) > 1e-30) {
+	    MatSetValue(explicitMat, low + j, i, vals[j], INSERT_VALUES);
+	    nnzCounter++;
+	  }
+	}
+	VecRestoreArray(resultVector, &vals);
+      }
+
+      VecDestroy(&unitVector);
+      VecDestroy(&resultVector);      
+    } else {
+      ParallelDofMapping &interfaceDofMap = feti.interfaceDofMap;
+
+      int nRankRows = interfaceDofMap.getRankDofs() + lagrangeMap.getRankDofs();
+      int nOverallRows = 
+	interfaceDofMap.getOverallDofs() + lagrangeMap.getOverallDofs();
+
+      MatCreateAIJ(feti.mpiCommGlobal, 
+		   nRankRows, nRankRows, nOverallRows, nOverallRows,
+		   nOverallRows, PETSC_NULL, nOverallRows, PETSC_NULL,
+		   &explicitMat);
+
+      Vec unitVector[2]; 
+      Vec resultVector[2];
+      interfaceDofMap.createVec(unitVector[0]);
+      interfaceDofMap.createVec(resultVector[0]);
+      lagrangeMap.createVec(unitVector[1]);
+      lagrangeMap.createVec(resultVector[1]);
+
+      Vec unitNestVec, resultNestVec;
+      VecCreateNest(feti.mpiCommGlobal, 2, PETSC_NULL, unitVector, &unitNestVec);
+      VecCreateNest(feti.mpiCommGlobal, 2, PETSC_NULL, resultVector, &resultNestVec);
+
+      PetscInt lowInterface, highInterface;
+      VecGetOwnershipRange(unitVector[0], &lowInterface, &highInterface);
+      int nLocalInterface = highInterface - lowInterface;
+
+      PetscInt lowLagrange, highLagrange;
+      VecGetOwnershipRange(unitVector[1], &lowLagrange, &highLagrange);
+      int nLocalLagrange = highLagrange - lowLagrange;
+
+      VecSet(unitVector[1], 0.0);
+      for (int i = 0; i < interfaceDofMap.getOverallDofs(); i++) {
+	VecSet(unitVector[0], 0.0);
+	if (i >= lowInterface && i < highInterface)
+	  VecSetValue(unitVector[0], i, 1.0, INSERT_VALUES);
+	VecAssemblyBegin(unitVector[0]);
+	VecAssemblyEnd(unitVector[0]);
+
+	VecAssemblyBegin(unitNestVec);
+	VecAssemblyEnd(unitNestVec);
+
+	MatMult(fetiMat, unitNestVec, resultNestVec);
+
+	PetscScalar *vals;
+	VecGetArray(resultVector[0], &vals);
+	for (int j = 0; j < nLocalInterface; j++) {
+	  if (fabs(vals[j]) > 1e-30) {
+	    MatSetValue(explicitMat, lowInterface + j, i, vals[j], INSERT_VALUES);
+	    nnzCounter++;
+	  }	 
+	}
+	VecRestoreArray(resultVector[0], &vals);
+
+	VecGetArray(resultVector[1], &vals);
+	for (int j = 0; j < nLocalLagrange; j++) {
+	  if (fabs(vals[j]) > 1e-30) {
+	    MatSetValue(explicitMat, 
+			interfaceDofMap.getOverallDofs() + lowLagrange + j, i, 
+			vals[j], INSERT_VALUES);
+	    nnzCounter++;
+	  }	 
+	}
+	VecRestoreArray(resultVector[1], &vals);
+      }
+
+
+      VecSet(unitVector[0], 0.0);
+      for (int i = 0; i < lagrangeMap.getOverallDofs(); i++) {
+	VecSet(unitVector[1], 0.0);
+	if (i >= lowLagrange && i < highLagrange)
+	  VecSetValue(unitVector[1], i, 1.0, INSERT_VALUES);
+	VecAssemblyBegin(unitVector[1]);
+	VecAssemblyEnd(unitVector[1]);
+
+	VecAssemblyBegin(unitNestVec);
+	VecAssemblyEnd(unitNestVec);
+
+
+	MatMult(fetiMat, unitNestVec, resultNestVec);
+
+	PetscScalar *vals;
+	VecGetArray(resultVector[0], &vals);
+	for (int j = 0; j < nLocalInterface; j++) {
+	  if (fabs(vals[j]) > 1e-30) {
+	    MatSetValue(explicitMat, 
+			lowInterface + j, 
+			interfaceDofMap.getOverallDofs() + i, 
+			vals[j], INSERT_VALUES);
+	    nnzCounter++;
+	  }	 
+	}
+	VecRestoreArray(resultVector[0], &vals);
+
+	VecGetArray(resultVector[1], &vals);
+	for (int j = 0; j < nLocalLagrange; j++) {
+	  if (fabs(vals[j]) > 1e-30) {
+	    MatSetValue(explicitMat, 
+			interfaceDofMap.getOverallDofs() + lowLagrange + j, 
+			interfaceDofMap.getOverallDofs() + i, 
+			vals[j], INSERT_VALUES);
+	    nnzCounter++;
+	  }	 
+	}
+	VecRestoreArray(resultVector[1], &vals);
+      }
+
+      VecDestroy(&unitVector[0]);
+      VecDestroy(&unitVector[1]);
+      VecDestroy(&resultVector[0]);
+      VecDestroy(&resultVector[1]);
+      VecDestroy(&unitNestVec);
+      VecDestroy(&resultNestVec);
+    }
+
+    MatAssemblyBegin(explicitMat, MAT_FINAL_ASSEMBLY);
+    MatAssemblyEnd(explicitMat, MAT_FINAL_ASSEMBLY);
+    mpi::globalAdd(nnzCounter);
+  }
+
+
+  void PetscSolverFetiDebug::createExplicitVec(PetscSolverFeti &feti, 
+					       Vec nestedVec, 
+					       Vec &explicitVec)
+  {
+    FUNCNAME("PetscSolverFetiDebug::createExplicitVec()");
+
+    int nNested = 0;
+    VecNestGetSize(nestedVec, &nNested);
+    
+    TEST_EXIT_DBG(nNested == 2)
+      ("Only supported for 2-nest vectors, not for %d-nest vectors!\n", nNested);
+
+    Vec v0, v1;
+    VecNestGetSubVec(nestedVec, 0, &v0);
+    VecNestGetSubVec(nestedVec, 1, &v1);
+
+    int s0, s1;
+    VecGetSize(v0, &s0);
+    VecGetSize(v1, &s1);
+
+    int l0, l1;
+    VecGetLocalSize(v0, &l0);
+    VecGetLocalSize(v1, &l1);
+
+    int rStart0, rStart1;
+    VecGetOwnershipRange(v0, &rStart0, PETSC_NULL);
+    VecGetOwnershipRange(v1, &rStart1, PETSC_NULL);
+    
+    VecCreateMPI(feti.mpiCommGlobal, l0 + l1, s0 + s1, &explicitVec);
+
+    double *val;
+    VecGetArray(v0, &val);
+    for (int i = 0; i < l0; i++)
+      VecSetValue(explicitVec, rStart0 + i, val[i], INSERT_VALUES);
+    VecRestoreArray(v0, &val);
+
+    VecGetArray(v1, &val);
+    for (int i = 0; i < l1; i++)
+      VecSetValue(explicitVec, s0 + rStart1 + i, val[i], INSERT_VALUES);
+    VecRestoreArray(v1, &val);
+
+    VecAssemblyBegin(explicitVec);
+    VecAssemblyEnd(explicitVec);
+  }
+
+
+  void PetscSolverFetiDebug::writeNullSpace(PetscSolverFeti& feti,
+					    Vec nullSpaceBasis)
+  {
+    FUNCNAME("PetscSolverFetiDebug::writeNullSpace()");
+
+    int writeFetiSystem = 0;
+    Parameters::get("parallel->debug->write feti system",
+		    writeFetiSystem);
+    if (writeFetiSystem) {
+      Vec vecTmp;
+      createExplicitVec(feti, nullSpaceBasis, vecTmp);
+      PetscViewer petscView;
+      PetscViewerBinaryOpen(PETSC_COMM_WORLD, "nullspace.vec",
+			    FILE_MODE_WRITE, &petscView);
+      VecView(vecTmp, petscView);
+      PetscViewerDestroy(&petscView);
+
+      VecDestroy(&vecTmp);
+      MSG("Written FETI-DP null space basis vector!\n");
+    }
+  }      
+
+
+  void PetscSolverFetiDebug::debugFeti(PetscSolverFeti &feti, Vec dbgRhsVec)
+  {
+    FUNCNAME("PetscSolverFetiDebug:::debugFeti()");
+
+    int writeFetiSystem = 0;
+    Parameters::get("parallel->debug->write feti system",
+		    writeFetiSystem);
+    if (!writeFetiSystem)
+      return;
+
+    MSG("Start creating explicit FETI-DP matrix!\n");
+    
+    Mat fetiMat;
+    int nnzCounter = 0;    
+    createExplicitFetiMat(feti, feti.mat_feti, fetiMat, nnzCounter);
+    
+    PetscViewer petscView;
+    PetscViewerBinaryOpen(PETSC_COMM_WORLD, "feti.mat", 
+			  FILE_MODE_WRITE, &petscView);
+    MatView(fetiMat, petscView);
+    PetscViewerDestroy(&petscView);
+    
+    bool a = feti.testMatrixSymmetric(fetiMat, true);
+    MSG("SYMMETRIC TEST: %d\n", a);
+    
+    int r, c;
+    MatGetSize(fetiMat, &r, &c);
+    
+    MSG("FETI-DP matrix written: %d x %d mat with %d nnz\n", 
+	r, c, nnzCounter);
+
+    MatDestroy(&fetiMat);
+
+    Vec rhsVec;
+    createExplicitVec(feti, dbgRhsVec, rhsVec);
+    
+    PetscViewerBinaryOpen(PETSC_COMM_WORLD, "feti_rhs.vec",
+			  FILE_MODE_WRITE, &petscView);
+    VecView(rhsVec, petscView);
+    PetscViewerDestroy(&petscView);
+
+    VecDestroy(&rhsVec);
+  }
+}
diff --git a/AMDiS/src/parallel/PetscSolverFetiDebug.h b/AMDiS/src/parallel/PetscSolverFetiDebug.h
new file mode 100644
index 0000000000000000000000000000000000000000..a74992c12e66f49e30e062dfc3a0935858837722
--- /dev/null
+++ b/AMDiS/src/parallel/PetscSolverFetiDebug.h
@@ -0,0 +1,134 @@
+// ============================================================================
+// ==                                                                        ==
+// == AMDiS - Adaptive multidimensional simulations                          ==
+// ==                                                                        ==
+// ==  http://www.amdis-fem.org                                              ==
+// ==                                                                        ==
+// ============================================================================
+//
+// Software License for AMDiS
+//
+// Copyright (c) 2010 Dresden University of Technology 
+// All rights reserved.
+// Authors: Simon Vey, Thomas Witkowski et al.
+//
+// This file is part of AMDiS
+//
+// See also license.opensource.txt in the distribution.
+
+
+
+/** \file PetscSolverFetiDebug.h */
+
+#ifndef AMDIS_PETSC_SOLVER_FETI_DEBUG_H
+#define AMDIS_PETSC_SOLVER_FETI_DEBUG_H
+
+#include <mpi.h>
+#include <petsc.h>
+#include "AMDiS_fwd.h"
+
+namespace AMDiS {
+
+  using namespace std;
+
+  /** \brief
+   * This class collects several static functions that may be used to debug
+   * the FETI-DP solver. All functions should be used only in debug mode and
+   * never in production runs, as they may be arbitrarily slow.
+   *
+   * This class must be a friend class of \ref PetscSolverFeti, since it needs
+   * access to all of its internal data.
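+   *
+   * A typical call site inside PetscSolverFeti guards the debug hooks by the
+   * DEBUG macro, as done in PetscSolverFeti.cc in this change:
+   *
+   * \code
+   * #if (DEBUG != 0)
+   *   if (stokesMode)
+   *     PetscSolverFetiDebug::debugNullSpace(*this, vec);
+   * #endif
+   * \endcode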
+   */
+  class PetscSolverFetiDebug
+  {
+  public:
+    /** \brief
+     * Tests the null space in Stokes mode. The function reads the null space
+     * basis from the FETI mat object and checks, by a simple matrix-vector
+     * multiplication, whether the vector is a member of the null space, i.e.,
+     * whether its image under the FETI-DP operator has (numerically) zero
+     * norm. Furthermore, the function reconstructs the null space basis on
+     * the non-reduced form of the FETI-DP system and performs the same test
+     * there. The resulting test vector is also written to the file
+     * "nullspace.vtu".
+     *
+     * \param[in]  feti    FETI-DP solver
+     * \param[in]  vec     Is just used as a temporary template vector.
+     */
+    static void debugNullSpace(PetscSolverFeti &feti, SystemVector &vec);
+
+    /** \brief
+     * Creates a parallel distributed matrix from the local (B-index) matrices.
+     * Note that the result is still a block matrix, i.e., there are no
+     * off-diagonal entries coupling the per-rank blocks.
+     *
+     * \param[in]  feti     FETI-DP solver
+     * \param[out] mat      The parallel distributed matrix is created in
+     *                      this argument.
+     */
+    static void createInteriorMat(PetscSolverFeti &feti, Mat &mat);
+
+    /** \brief
+     * Creates the non-reduced FETI-DP system (also in Stokes mode) as a
+     * nested matrix.
+     *
+     * \param[in]  feti     FETI-DP solver
+     * \param[out] mat      The resulting nested matrix.
+     */
+    static void createNestedFetiMat(PetscSolverFeti &feti, Mat &mat);
+
+    /** \brief
+     * Creates the reduced FETI-DP operator as an explicitly assembled matrix
+     * (also in Stokes mode). The resulting matrix is dense.
+     *
+     * \param[in]  feti         FETI-DP solver
+     * \param[in]  fetiMat      Matrix object representing the implicit
+     *                          FETI-DP operator
+     * \param[out] explicitMat  Explicit matrix formulation
+     * \param[out] nnzCounter   Stores the number of nonzero elements in the
+     *                          explicit matrix formulation
+     */
+    static void createExplicitFetiMat(PetscSolverFeti &feti, 
+				      Mat fetiMat, 
+				      Mat &explicitMat, 
+				      int &nnzCounter);
+
+    /** \brief
+     * Creates a non-nested parallel distributed vector from a nested vector
+     * of two parallel distributed vectors. It is used to change the structure
+     * of solution vectors in Stokes mode, which always have two components.
+     *
+     * \param[in]  feti         FETI-DP solver
+     * \param[in]  nestedVec    Nested PETSc vector with two sub vectors.
+     * \param[out] explicitVec  The output vector.
+     */
+    static void createExplicitVec(PetscSolverFeti &feti, 
+				  Vec nestedVec, 
+				  Vec &explicitVec);
+
+    /** \brief
+     * Checks for the init file parameter "parallel->debug->write feti system"
+     * and if it is true, writes the null space basis vector to the PETSc binary
+     * file "nullspace.vec".
+     *
+     * \param[in] feti            FETI-DP solver
+     * \param[in] nullSpaceBasis  Vector representing the basis of the null
+     *                            space of the FETI-DP system.
+     */
+    static void writeNullSpace(PetscSolverFeti &feti, 
+			       Vec nullSpaceBasis);
+
+    /** \brief
+     * Checks for the init file parameter "parallel->debug->write feti system"
+     * (see the example entry below) and, if it is set, performs some debug
+     * tests on the FETI-DP system:
+     *   - a symmetry test of the FETI-DP operator (by assembling it in an
+     *     explicit formulation)
+     *   - writing the explicit FETI-DP operator to "feti.mat"
+     *   - writing the rhs to "feti_rhs.vec"
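+     *
+     * A minimal sketch of the corresponding init file entry, assuming the
+     * usual AMDiS "key: value" parameter syntax (the parameter file itself
+     * is application specific):
+     *
+     * \code
+     * parallel->debug->write feti system: 1
+     * \endcode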
+     *
+     * \param[in] feti       FETI-DP solver
+     * \param[in] dbgRhsVec  Vector representing the rhs vector of the 
+     *                       FETI-DP system.
+     */
+    static void debugFeti(PetscSolverFeti &feti, 
+			  Vec dbgRhsVec);
+  };
+
+}
+
+#endif
diff --git a/AMDiS/src/parallel/PetscSolverFetiMonitor.cc b/AMDiS/src/parallel/PetscSolverFetiMonitor.cc
new file mode 100644
index 0000000000000000000000000000000000000000..403b52fa7f5c6eb6f7a7b5b58b149bfdd7e5c0c7
--- /dev/null
+++ b/AMDiS/src/parallel/PetscSolverFetiMonitor.cc
@@ -0,0 +1,44 @@
+//
+// Software License for AMDiS
+//
+// Copyright (c) 2010 Dresden University of Technology 
+// All rights reserved.
+// Authors: Simon Vey, Thomas Witkowski et al.
+//
+// This file is part of AMDiS
+//
+// See also license.opensource.txt in the distribution.
+
+
+#include "parallel/PetscSolverFetiMonitor.h"
+#include "parallel/PetscSolverFetiStructs.h"
+
+namespace AMDiS {
+
+  PetscErrorCode KSPMonitorFeti(KSP ksp, PetscInt n, PetscReal rnorm, void *data)
+  {    
+    Vec Br,v,w;
+    VecDuplicate(static_cast<FetiKspData*>(data)->draft, &v);
+    VecDuplicate(static_cast<FetiKspData*>(data)->draft, &w);
+
+    KSPBuildResidual(ksp, v, w, &Br);
+
+    Vec nest0, nest1;
+    VecNestGetSubVec(Br, 0, &nest0);
+    VecNestGetSubVec(Br, 1, &nest1);
+
+    PetscScalar norm, norm0, norm1;
+    VecNorm(Br, NORM_2, &norm);
+    VecNorm(nest0, NORM_2, &norm0);
+    VecNorm(nest1, NORM_2, &norm1);
+
+    VecDestroy(&v);
+    VecDestroy(&w);
+
+    PetscPrintf(PETSC_COMM_WORLD, "%3D KSP residual norm %1.12e [%1.12e %1.12e] and preconditioned norm [%1.12e]\n",
+		n, norm, norm0, norm1, rnorm);
+
+    return 0;
+  }
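+
+  // Registration sketch (not part of this change): the monitor is expected to
+  // be attached to the FETI KSP object roughly as follows, where 'ksp_feti'
+  // and 'kspData' stand for the corresponding PetscSolverFeti members holding
+  // the KSP object and the FetiKspData context:
+  //
+  //   KSPMonitorSet(ksp_feti, KSPMonitorFeti, &kspData, PETSC_NULL);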
+
+}
diff --git a/AMDiS/src/parallel/PetscSolverFetiMonitor.h b/AMDiS/src/parallel/PetscSolverFetiMonitor.h
new file mode 100644
index 0000000000000000000000000000000000000000..d3bb65aaf10356134a993ebfeeaec75fab2ec689
--- /dev/null
+++ b/AMDiS/src/parallel/PetscSolverFetiMonitor.h
@@ -0,0 +1,35 @@
+// ============================================================================
+// ==                                                                        ==
+// == AMDiS - Adaptive multidimensional simulations                          ==
+// ==                                                                        ==
+// ==  http://www.amdis-fem.org                                              ==
+// ==                                                                        ==
+// ============================================================================
+//
+// Software License for AMDiS
+//
+// Copyright (c) 2010 Dresden University of Technology 
+// All rights reserved.
+// Authors: Simon Vey, Thomas Witkowski et al.
+//
+// This file is part of AMDiS
+//
+// See also license.opensource.txt in the distribution.
+
+
+
+/** \file PetscSolverFetiMonitor.h */
+
+#ifndef AMDIS_PETSC_SOLVER_FETI_MONITOR_H
+#define AMDIS_PETSC_SOLVER_FETI_MONITOR_H
+
+#include <mpi.h>
+#include <petsc.h>
+
+namespace AMDiS {
+
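+  /** \brief
+   * KSP monitor for the FETI-DP solver. Besides the preconditioned residual
+   * norm passed in by PETSc, it computes and prints the true residual norm of
+   * the (two-component nested) FETI-DP residual and of both of its sub
+   * vectors. The context pointer must point to a FetiKspData object, whose
+   * "draft" vector is used as a template for the temporary work vectors.
+   */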
+  PetscErrorCode KSPMonitorFeti(KSP ksp, PetscInt n, PetscReal rnorm, void *data);
+  
+}
+
+#endif