diff --git a/AMDiS/src/parallel/PetscSolverFeti.cc b/AMDiS/src/parallel/PetscSolverFeti.cc
index 67d9226a0961a30c8836b694f4d152db7fc5dc39..e7241aaf32085cd31b04a5943eb119612df3ff60 100644
--- a/AMDiS/src/parallel/PetscSolverFeti.cc
+++ b/AMDiS/src/parallel/PetscSolverFeti.cc
@@ -94,6 +94,9 @@ namespace AMDiS {
   {
     FUNCNAME("PetscSolverFeti::createPrimals()");  
 
+    // === Define all vertices on the interior boundaries of the macro mesh ===
+    // === to be primal variables.                                          ===
+
     primals.clear();
     DofContainerSet& vertices = 
       meshDistributor->getBoundaryDofInfo().geoDofs[VERTEX];
@@ -102,6 +105,10 @@ namespace AMDiS {
 	 it != vertices.end(); ++it)
       primals.insert(**it);
 
+
+    // === Calculate the number of primals that are owned by the rank and ===
+    // === create local indices of the primals starting at zero.          ===
+
     globalPrimalIndex.clear();
     nRankPrimals = 0;
     for (DofIndexSet::iterator it = primals.begin(); it != primals.end(); ++it)
@@ -111,11 +118,17 @@ namespace AMDiS {
       }
 
 
+    // === Get overall number of primals and rank's displacement in the ===
+    // === numbering of the primals.                                    ===
+
     nOverallPrimals = 0;
     rStartPrimals = 0;
     mpi::getDofNumbering(meshDistributor->getMpiComm(),
 			 nRankPrimals, rStartPrimals, nOverallPrimals);
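+    // (getDofNumbering presumably performs an MPI prefix sum, so that
+    // rStartPrimals equals the number of primals owned by ranks with a
+    // smaller rank number.)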
 
+
+    // === Create global primal index for all primals. ===
+
     for (DofMapping::iterator it = globalPrimalIndex.begin();
 	 it != globalPrimalIndex.end(); ++it)
       it->second += rStartPrimals;
@@ -123,6 +136,11 @@ namespace AMDiS {
     MSG_DBG("nRankPrimals = %d   nOverallPrimals = %d\n",
 	    nRankPrimals, nOverallPrimals);
 
+
+    // === Communicate the global indices of the primals from the ranks   ===
+    // === that own them to the ranks that contain these primals without  ===
+    // === owning them.                                                   ===
+
     StdMpi<vector<int> > stdMpi(meshDistributor->getMpiComm());
     RankToDofContainer& sendDofs = meshDistributor->getSendDofs();
     for (RankToDofContainer::iterator it = sendDofs.begin();
@@ -175,8 +193,8 @@ namespace AMDiS {
   {
     FUNCNAME("PetscSolverFeti::createDuals()");
     
-    // === Create for each dual node the set of ranks that contain this ===
-    // === node (denoted by W(x_j)).                                    ===
+    // === Create, for each dual node owned by the rank, the set of     ===
+    // === ranks that contain this node (denoted by W(x_j)).            ===
 
     boundaryDofRanks.clear();
 
@@ -193,6 +211,10 @@ namespace AMDiS {
       }
     }
 
+
+    // === Communicate these sets for all rank-owned dual nodes to the  ===
+    // === other ranks that also contain these nodes.                   ===
+
     StdMpi<vector<std::set<int> > > stdMpi(meshDistributor->getMpiComm());
     for (RankToDofContainer::iterator it = sendDofs.begin();
 	 it != sendDofs.end(); ++it)
@@ -266,6 +288,9 @@ namespace AMDiS {
   {
     FUNCNAME("PetscSolverFeti::createLagrange()");
 
+    // === Reserve for each dual node, on the rank that owns this node, the ===
+    // === appropriate number of Lagrange constraints.                      ===
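+    // Note: each rank-owned dual node x_j presumably contributes one
+    // constraint for each pair of ranks in W(x_j), i.e.,
+    // |W(x_j)| * (|W(x_j)| - 1) / 2 constraints in total
+    // (cf. createMatLagrange()).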
+
     nRankLagrange = 0;
     for (DofIndexSet::iterator it = duals.begin(); it != duals.end(); ++it) {
       if (meshDistributor->getIsRankDof(*it)) {
@@ -275,6 +300,11 @@ namespace AMDiS {
       }
     }
 
+
+    // === Get the overall number of Lagrange constraints and create the ===
+    // === mapping dofFirstLagrange, which maps each dual boundary node  ===
+    // === to the global index of its first Lagrange constraint.         ===
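+    // For example, if a dual node is assigned the three Lagrange constraints
+    // with global indices 7, 8 and 9, dofFirstLagrange maps this node to 7.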
+
     nOverallLagrange = 0;
     rStartLagrange = 0;
     mpi::getDofNumbering(meshDistributor->getMpiComm(),
@@ -288,7 +318,7 @@ namespace AMDiS {
 	    nRankLagrange, nOverallLagrange);
 
 
-    // === ===
+    // === Communicate dofFirstLagrange to all other ranks. ===
 
     StdMpi<vector<int> > stdMpi(meshDistributor->getMpiComm());
     RankToDofContainer& sendDofs = meshDistributor->getSendDofs();
@@ -326,8 +356,7 @@ namespace AMDiS {
       for (unsigned int i = 0; i < it->second.size(); i++)
 	if (primals.count(*(it->second[i])) == 0)
 	  dofFirstLagrange[*(it->second[i])] = stdMpi.getRecvData(it->first)[counter++];
-    }
-     
+    }     
   }
 
 
@@ -337,12 +366,19 @@ namespace AMDiS {
 
     globalIndexB.clear();
     DOFAdmin* admin = meshDistributor->getFeSpace()->getAdmin();
+
+    // === To ensure that all interior nodes on each rank are listed first ===
+    // === in the global index of all B nodes, insert all interior nodes   ===
+    // === first, without defining a correct index yet.                    ===
     
     for (int i = 0; i < admin->getUsedSize(); i++)
       if (admin->isDofFree(i) == false && primals.count(i) == 0)
 	if (duals.count(i) == 0 && primals.count(i) == 0)
 	  globalIndexB[i] = -1;
 
+
+    // === Compute the correct global index for all interior nodes. ===
+
     int nInterior = 0;
     for (DofMapping::iterator it = globalIndexB.begin(); 
 	 it != globalIndexB.end(); ++it) {
@@ -354,28 +390,45 @@ namespace AMDiS {
 		  static_cast<unsigned int>(admin->getUsedDofs()))
       ("Should not happen!\n");
 
+
+    // === And finally, add the global indices of all dual nodes. ===
+
     for (DofIndexSet::iterator it = duals.begin();
 	 it != duals.end(); ++it)
       globalIndexB[*it] = globalDualIndex[*it];
   }
 
 
-  void PetscSolverFeti::createMatLagrange(int nComponents)
+  void PetscSolverFeti::createMatLagrange()
   {
     FUNCNAME("PetscSolverFeti::createMatLagrange()");
 
+    // === Create distributed matrix for Lagrange constraints. ===
+
     MatCreateMPIAIJ(PETSC_COMM_WORLD,
-		    nRankLagrange * nComponents, nRankB * nComponents,
-		    nOverallLagrange * nComponents, nOverallB * nComponents,
+		    nRankLagrange * nComponents, 
+		    nRankB * nComponents,
+		    nOverallLagrange * nComponents, 
+		    nOverallB * nComponents,
 		    2, PETSC_NULL, 2, PETSC_NULL,
 		    &mat_lagrange);
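+
+    // Each row of mat_lagrange represents one Lagrange constraint and, per
+    // component, presumably couples two copies of a dual node with the
+    // entries +1.0 and -1.0; this motivates the preallocation of two
+    // nonzeros per row above.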
 
+    // === Create for all duals the corresponding Lagrange constraints. On ===
+    // === each rank we traverse all pairs (n, m) of ranks, with n < m,    ===
+    // === that contain this node. If the current rank number is r and     ===
+    // === n == r, the rank sets 1.0 for the corresponding constraint; if  ===
+    // === m == r, the rank sets -1.0 for the corresponding constraint.    ===
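+    //
+    // For example, a dual node contained in ranks W = {0, 1, 2} yields the
+    // three constraints u_0 - u_1 = 0, u_0 - u_2 = 0 and u_1 - u_2 = 0,
+    // i.e., |W| * (|W| - 1) / 2 constraint rows per component.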
+
     for (DofIndexSet::iterator it = duals.begin(); it != duals.end(); ++it) {
       TEST_EXIT_DBG(dofFirstLagrange.count(*it))("Should not happen!\n");
       TEST_EXIT_DBG(boundaryDofRanks.count(*it))("Should not happen!\n");
 
+      // Global index of the first Lagrange constraint for this node.
       int index = dofFirstLagrange[*it];
+      // Copy set of all ranks that contain this dual node.
       vector<int> W(boundaryDofRanks[*it].begin(), boundaryDofRanks[*it].end());
+      // Number of ranks that contain this dual node.
       int degree = W.size();
 
       TEST_EXIT_DBG(globalDualIndex.count(*it))("Should not happen!\n");
@@ -383,7 +436,8 @@ namespace AMDiS {
 
       for (int i = 0; i < degree; i++) {
 	for (int j = i + 1; j < degree; j++) {
-	  if (W[i] == mpiRank || W[j] == mpiRank) {	      
+	  if (W[i] == mpiRank || W[j] == mpiRank) {
+	    // Set the constraint for all components of the system.
 	    for (int k = 0; k < nComponents; k++) {
 	      int rowIndex = index * nComponents + k;
 	      int colIndex = dualCol * nComponents + k;
@@ -403,7 +457,7 @@ namespace AMDiS {
   }
 
 
-  void PetscSolverFeti::createSchurPrimalKsp(int nComponents)
+  void PetscSolverFeti::createSchurPrimalKsp()
   {
     FUNCNAME("PetscSolverFeti::createSchurPrimal()");
 
@@ -412,10 +466,8 @@ namespace AMDiS {
     petscSchurPrimalData.mat_b_primal = &mat_b_primal;
     petscSchurPrimalData.ksp_b = &ksp_b;
 
-    MatGetVecs(mat_b_b, 
-	       PETSC_NULL, &(petscSchurPrimalData.tmp_vec_b));
-    MatGetVecs(mat_primal_primal, 
-	       PETSC_NULL, &(petscSchurPrimalData.tmp_vec_primal));
+    VecDuplicate(f_b, &(petscSchurPrimalData.tmp_vec_b));
+    VecDuplicate(f_primal, &(petscSchurPrimalData.tmp_vec_primal));
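+
+    // The MatShell created below applies the primal Schur complement,
+    // presumably S_PiPi = K_PiPi - M_PiB * inv(K_BB) * M_BPi, in a
+    // matrix-free fashion (see petscMultMatSchurPrimal).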
 
     MatCreateShell(PETSC_COMM_WORLD,
 		   nRankPrimals * nComponents, nRankPrimals * nComponents,
@@ -461,8 +513,8 @@ namespace AMDiS {
     petscFetiData.ksp_schur_primal = &ksp_schur_primal;
 
 
-    MatGetVecs(mat_b_b, PETSC_NULL, &(petscFetiData.tmp_vec_b));
-    MatGetVecs(mat_primal_primal, PETSC_NULL, &(petscFetiData.tmp_vec_primal));
+    VecDuplicate(f_b, &(petscFetiData.tmp_vec_b));
+    VecDuplicate(f_primal, &(petscFetiData.tmp_vec_primal));
     MatGetVecs(mat_lagrange, PETSC_NULL, &(petscFetiData.tmp_vec_lagrange));
 
 
@@ -491,12 +543,10 @@ namespace AMDiS {
     petscFetiData.ksp_b = PETSC_NULL;
     petscFetiData.ksp_schur_primal = PETSC_NULL;
 
-
     VecDestroy(petscFetiData.tmp_vec_b);
     VecDestroy(petscFetiData.tmp_vec_primal);
     VecDestroy(petscFetiData.tmp_vec_lagrange);
 
-
     MatDestroy(mat_feti);
     KSPDestroy(ksp_feti);
   }
@@ -508,15 +558,14 @@ namespace AMDiS {
   {
     FUNCNAME("PetscSolverFeti::recoverSolution()");
 
-    int nComponents = vec.getSize();
-
-    // === ===
+    // === Get local part of the solution for B variables. ===
 
     PetscScalar *localSolB;
     VecGetArray(vec_sol_b, &localSolB);
 
 
-    // === ===
+    // === Create scatter to get the solutions of all primal nodes that ===
+    // === are contained in the rank's domain.                          ===
     
     vector<PetscInt> globalIsIndex, localIsIndex;
     globalIsIndex.reserve(globalPrimalIndex.size() * nComponents);
@@ -549,7 +598,6 @@ namespace AMDiS {
     Vec local_sol_primal;
     VecCreateSeq(PETSC_COMM_SELF, localIsIndex.size(), &local_sol_primal);
 
-
     VecScatter primalScatter;
     VecScatterCreate(vec_sol_primal, globalIs, local_sol_primal, localIs, &primalScatter);
     VecScatterBegin(primalScatter, vec_sol_primal, local_sol_primal, 
@@ -561,12 +609,11 @@ namespace AMDiS {
     ISDestroy(localIs);    
     VecScatterDestroy(primalScatter);    
 
-
     PetscScalar *localSolPrimal;
     VecGetArray(local_sol_primal, &localSolPrimal);
 
 
-
+    // === And copy from PETSc local vectors to the DOF vectors. ===
 
     for (int i = 0; i < nComponents; i++) {
       DOFVector<double>& dofVec = *(vec.getDOFVector(i));
@@ -588,7 +635,6 @@ namespace AMDiS {
 
 
     VecRestoreArray(vec_sol_b, &localSolB);
-
     VecRestoreArray(local_sol_primal, &localSolPrimal);
     VecDestroy(local_sol_primal);
   }
@@ -599,33 +645,41 @@ namespace AMDiS {
   {
     FUNCNAME("PetscSolverFeti::fillPetscMatrix()");   
 
+    nComponents = vec->getSize();
+
+    // === Create all sets and indices. ===
+
     updateDofData();
 
-    int nComponents = vec->getSize();
+
+    // === Create matrices for the FETI-DP method. ===
+
+    int nRowsRankB = nRankB * nComponents;
+    int nRowsOverallB = nOverallB * nComponents;
+    int nRowsRankPrimal = nRankPrimals * nComponents;
+    int nRowsOverallPrimal = nOverallPrimals * nComponents;
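+
+    // In the notation used by the comments in solveReducedFetiMatrix(), the
+    // four matrices created below form the 2x2 block system
+    //
+    //   [ K_BB    M_BPi  ]   with  K_BB  = mat_b_b,      M_BPi  = mat_b_primal,
+    //   [ M_PiB   K_PiPi ]         M_PiB = mat_primal_b, K_PiPi = mat_primal_primal.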
 
     MatCreateMPIAIJ(PETSC_COMM_WORLD,
-		    nRankB * nComponents, nRankB * nComponents,
-		    nOverallB * nComponents, nOverallB * nComponents,
-		    100, PETSC_NULL, 100, PETSC_NULL, 
-		    &mat_b_b);
+		    nRowsRankB, nRowsRankB, nRowsOverallB, nRowsOverallB,
+		    100, PETSC_NULL, 100, PETSC_NULL, &mat_b_b);
 
     MatCreateMPIAIJ(PETSC_COMM_WORLD,
-		    nRankPrimals * nComponents, nRankPrimals * nComponents,
-		    nOverallPrimals * nComponents, nOverallPrimals * nComponents,
-		    10, PETSC_NULL, 10, PETSC_NULL, 
-		    &mat_primal_primal);
+		    nRowsRankPrimal, nRowsRankPrimal, 
+		    nRowsOverallPrimal, nRowsOverallPrimal,
+		    10, PETSC_NULL, 10, PETSC_NULL, &mat_primal_primal);
 
     MatCreateMPIAIJ(PETSC_COMM_WORLD,
-		    nRankB * nComponents, nRankPrimals * nComponents,
-		    nOverallB * nComponents, nOverallPrimals * nComponents,
-		    100, PETSC_NULL, 100, PETSC_NULL, 
-		    &mat_b_primal);
+		    nRowsRankB, nRowsRankPrimal, 
+		    nRowsOverallB, nRowsOverallPrimal,
+		    100, PETSC_NULL, 100, PETSC_NULL, &mat_b_primal);
 
     MatCreateMPIAIJ(PETSC_COMM_WORLD,
-		    nRankPrimals * nComponents, nRankB * nComponents,
-		    nOverallPrimals * nComponents, nOverallB * nComponents,
-		    100, PETSC_NULL, 100, PETSC_NULL, 
-		    &mat_primal_b);
+		    nRowsRankPrimal, nRowsRankB,
+		    nRowsOverallPrimal, nRowsOverallB,
+		    100, PETSC_NULL, 100, PETSC_NULL, &mat_primal_b);
+
+    
+    // === Prepare the traversal of the sequentially created matrices. ===
 
     using mtl::tag::row; using mtl::tag::nz; using mtl::begin; using mtl::end;
     namespace traits = mtl::traits;
@@ -641,79 +695,95 @@ namespace AMDiS {
     values.reserve(300);
     valuesOther.reserve(300);
 
-    for (int i = 0; i < nComponents; i++)
-      for (int j = 0; j < nComponents; j++)
-	if ((*mat)[i][j]) {
-	  traits::col<Matrix>::type col((*mat)[i][j]->getBaseMatrix());
-	  traits::const_value<Matrix>::type value((*mat)[i][j]->getBaseMatrix());
-
-	  for (cursor_type cursor = begin<row>((*mat)[i][j]->getBaseMatrix()), 
-		 cend = end<row>((*mat)[i][j]->getBaseMatrix()); cursor != cend; ++cursor) {
-	    bool rowPrimal = primals.count(*cursor) != 0;
-
-	    cols.clear();
-	    values.clear();
-
-	    colsOther.clear();
-	    valuesOther.clear();
-
-	    for (icursor_type icursor = begin<nz>(cursor), icend = end<nz>(cursor); 
-		 icursor != icend; ++icursor) {
-	      if (primals.count(col(*icursor)) != 0) {
-		TEST_EXIT_DBG(globalPrimalIndex.count(col(*icursor)))
-		  ("No global primal index for DOF %d!\n", col(*icursor));
-
-		int colIndex = globalPrimalIndex[col(*icursor)] * nComponents + j;
-
-		if (rowPrimal) {
-		  cols.push_back(colIndex);
-		  values.push_back(value(*icursor));
-		} else {
-		  colsOther.push_back(colIndex);
-		  valuesOther.push_back(value(*icursor));
-		}
+
+    // === Traverse all sequentially created matrices and add the values to ===
+    // === the global PETSc matrices.                                       ===
+
+    for (int i = 0; i < nComponents; i++) {
+      for (int j = 0; j < nComponents; j++) {
+	if (!(*mat)[i][j])
+	  continue;
+
+	traits::col<Matrix>::type col((*mat)[i][j]->getBaseMatrix());
+	traits::const_value<Matrix>::type value((*mat)[i][j]->getBaseMatrix());
+	
+	// Traverse all rows.
+	for (cursor_type cursor = begin<row>((*mat)[i][j]->getBaseMatrix()), 
+	       cend = end<row>((*mat)[i][j]->getBaseMatrix()); cursor != cend; ++cursor) {
+	  bool rowPrimal = primals.count(*cursor) != 0;
+	  
+	  cols.clear();
+	  values.clear();
+	  
+	  colsOther.clear();
+	  valuesOther.clear();
+	  
+	  // Traverse all columns.
+	  for (icursor_type icursor = begin<nz>(cursor), icend = end<nz>(cursor); 
+	       icursor != icend; ++icursor) {
+
+	    if (primals.count(col(*icursor)) != 0) {
+	      // Column is a primal variable.
+
+	      TEST_EXIT_DBG(globalPrimalIndex.count(col(*icursor)))
+		("No global primal index for DOF %d!\n", col(*icursor));
+	      
+	      int colIndex = globalPrimalIndex[col(*icursor)] * nComponents + j;
+	      
+	      if (rowPrimal) {
+		cols.push_back(colIndex);
+		values.push_back(value(*icursor));
 	      } else {
-		TEST_EXIT_DBG(globalIndexB.count(col(*icursor)))
-		  ("No global B index for DOF %d!\n", col(*icursor));
+		colsOther.push_back(colIndex);
+		valuesOther.push_back(value(*icursor));
+	      }
+	    } else {
+	      // Column is not a primal variable.
+
+	      TEST_EXIT_DBG(globalIndexB.count(col(*icursor)))
+		("No global B index for DOF %d!\n", col(*icursor));
 	      
-		int colIndex = globalIndexB[col(*icursor)] * nComponents + j;
-
-		if (rowPrimal) {
-		  colsOther.push_back(colIndex);
-		  valuesOther.push_back(value(*icursor));
-		} else {
-		  cols.push_back(colIndex);
-		  values.push_back(value(*icursor));
-		}
+	      int colIndex = globalIndexB[col(*icursor)] * nComponents + j;
+
+	      if (rowPrimal) {
+		colsOther.push_back(colIndex);
+		valuesOther.push_back(value(*icursor));
+	      } else {
+		cols.push_back(colIndex);
+		values.push_back(value(*icursor));
 	      }
 	    }
+	  }
 
-	    if (rowPrimal) {
-	      TEST_EXIT_DBG(globalPrimalIndex.count(*cursor))
-		("Should not happen!\n");
+	  if (rowPrimal) {
+	    TEST_EXIT_DBG(globalPrimalIndex.count(*cursor))
+	      ("Should not happen!\n");
 
- 	      int rowIndex = globalPrimalIndex[*cursor] * nComponents + i;
- 	      MatSetValues(mat_primal_primal, 1, &rowIndex, cols.size(),
- 			   &(cols[0]), &(values[0]), ADD_VALUES);
+	    int rowIndex = globalPrimalIndex[*cursor] * nComponents + i;
+	    MatSetValues(mat_primal_primal, 1, &rowIndex, cols.size(),
+			 &(cols[0]), &(values[0]), ADD_VALUES);
 
-   	      if (colsOther.size())
-  		MatSetValues(mat_primal_b, 1, &rowIndex, colsOther.size(),
-  			     &(colsOther[0]), &(valuesOther[0]), ADD_VALUES);
-	    } else {
-	      TEST_EXIT_DBG(globalIndexB.count(*cursor))
-		("Should not happen!\n");
+	    if (colsOther.size())
+	      MatSetValues(mat_primal_b, 1, &rowIndex, colsOther.size(),
+			   &(colsOther[0]), &(valuesOther[0]), ADD_VALUES);
+	  } else {
+	    TEST_EXIT_DBG(globalIndexB.count(*cursor))
+	      ("Should not happen!\n");
 
-	      int rowIndex = globalIndexB[*cursor] * nComponents + i;
-	      MatSetValues(mat_b_b, 1, &rowIndex, cols.size(),
-			   &(cols[0]), &(values[0]), ADD_VALUES);
+	    int rowIndex = globalIndexB[*cursor] * nComponents + i;
+	    MatSetValues(mat_b_b, 1, &rowIndex, cols.size(),
+			 &(cols[0]), &(values[0]), ADD_VALUES);
 
- 	      if (colsOther.size())
- 		MatSetValues(mat_b_primal, 1, &rowIndex, colsOther.size(),
- 			     &(colsOther[0]), &(valuesOther[0]), ADD_VALUES);
-	    }
-	  } 
-	}
+	    if (colsOther.size())
+	      MatSetValues(mat_b_primal, 1, &rowIndex, colsOther.size(),
+			   &(colsOther[0]), &(valuesOther[0]), ADD_VALUES);
+	  }
+	} 
+      }
+    }
+    
 
+    // === Start global assembly procedure. ===
 
     MatAssemblyBegin(mat_b_b, MAT_FINAL_ASSEMBLY);
     MatAssemblyEnd(mat_b_b, MAT_FINAL_ASSEMBLY);
@@ -728,7 +798,7 @@ namespace AMDiS {
     MatAssemblyEnd(mat_primal_b, MAT_FINAL_ASSEMBLY);
 	  
 
-    // === ===
+    // === Create and fill PETSc's right hand side vectors. ===
 
     VecCreate(PETSC_COMM_WORLD, &f_b);
     VecSetSizes(f_b, nRankB * nComponents, nOverallB * nComponents);
@@ -768,262 +838,287 @@ namespace AMDiS {
     VecAssemblyEnd(f_primal);
 
 
-    createMatLagrange(nComponents);
+    // === Create and fill PETSc matrix for Lagrange constraints. ===
 
+    createMatLagrange();
 
     
-    createSchurPrimalKsp(nComponents);
+    // === Create PETSc solver for the Schur complement on primal variables. ===
+    
+    createSchurPrimalKsp();
+
+
+    // === Create PETSc solver for the FETI-DP operator. ===
 
     createFetiKsp();
   }
 
 
-  void PetscSolverFeti::solvePetscMatrix(SystemVector &vec, AdaptInfo *adaptInfo)
+  void PetscSolverFeti::solveFetiMatrix(SystemVector &vec)
   {
-    FUNCNAME("PetscSolverFeti::solvePetscMatrix()");
+    FUNCNAME("PetscSolverFeti::solveFetiMatrix()");
 
-    int debug = 0;
-    Parameters::get("parallel->debug feti", debug);
+    // Create transpose of Lagrange matrix.
+    Mat mat_lagrange_transpose;
+    MatTranspose(mat_lagrange, MAT_INITIAL_MATRIX, &mat_lagrange_transpose);
 
-    if (debug) {
-      int nComponents = vec.getSize();
-
-      Mat mat_lagrange_transpose;
-      MatTranspose(mat_lagrange, MAT_INITIAL_MATRIX, &mat_lagrange_transpose);
-
-      Mat A;
-      Mat nestedA[3][3];
-      nestedA[0][0] = mat_b_b;
-      nestedA[0][1] = mat_b_primal;
-      nestedA[0][2] = mat_lagrange_transpose;
-      nestedA[1][0] = mat_primal_b;
-      nestedA[1][1] = mat_primal_primal;
-      nestedA[1][2] = PETSC_NULL;
-      nestedA[2][0] = mat_lagrange;
-      nestedA[2][1] = PETSC_NULL;
-      nestedA[2][2] = PETSC_NULL;
-
-      MatCreateNest(PETSC_COMM_WORLD, 3, PETSC_NULL, 3, PETSC_NULL, &(nestedA[0][0]), &A);
-
-      MatAssemblyBegin(A, MAT_FINAL_ASSEMBLY);
-      MatAssemblyEnd(A, MAT_FINAL_ASSEMBLY);
-  
 
+    // === Create a nested matrix that contains the overall FETI system. ===
 
-      int nRankNest = (nRankB + nRankPrimals) * nComponents + nRankLagrange;    
-      int nOverallNest = (nOverallB + nOverallPrimals) * nComponents + nOverallLagrange;
-      int rStartNest = (rStartB + rStartPrimals) * nComponents + rStartLagrange;
+    Mat A;
+    Mat nestedA[3][3];
+    nestedA[0][0] = mat_b_b;
+    nestedA[0][1] = mat_b_primal;
+    nestedA[0][2] = mat_lagrange_transpose;
+    nestedA[1][0] = mat_primal_b;
+    nestedA[1][1] = mat_primal_primal;
+    nestedA[1][2] = PETSC_NULL;
+    nestedA[2][0] = mat_lagrange;
+    nestedA[2][1] = PETSC_NULL;
+    nestedA[2][2] = PETSC_NULL;
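+
+    // In the notation of solveReducedFetiMatrix(), the nested matrix A has
+    // the saddle point structure
+    //
+    //   [ K_BB    M_BPi   trans(L) ]   [ u_b      ]   [ f_b  ]
+    //   [ M_PiB   K_PiPi  0        ] * [ u_primal ] = [ f_Pi ]
+    //   [ L       0       0        ]   [ lambda   ]   [ 0    ]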
 
-      {
-	int matRow, matCol;
-	MatGetLocalSize(A, &matRow, &matCol);
-	TEST_EXIT_DBG(matRow == nRankNest)("Should not happen!\n");
-	mpi::globalAdd(matRow);
-	TEST_EXIT_DBG(matRow == nOverallNest)("Should not happen!\n");
+    MatCreateNest(PETSC_COMM_WORLD, 3, PETSC_NULL, 3, PETSC_NULL, &(nestedA[0][0]), &A);
 
-	MatGetOwnershipRange(A, &matRow, &matCol);
-	TEST_EXIT_DBG(matRow == rStartNest)("Should not happen!\n");
-      }
+    MatAssemblyBegin(A, MAT_FINAL_ASSEMBLY);
+    MatAssemblyEnd(A, MAT_FINAL_ASSEMBLY);
+  
 
-      Vec f;
-      VecCreate(PETSC_COMM_WORLD, &f);
-      VecSetSizes(f, nRankNest, nOverallNest);
-      VecSetType(f, VECMPI);
 
-      Vec b;
-      VecDuplicate(f, &b);
+    int nRankNest = (nRankB + nRankPrimals) * nComponents + nRankLagrange;    
+    int nOverallNest = (nOverallB + nOverallPrimals) * nComponents + nOverallLagrange;
+    int rStartNest = (rStartB + rStartPrimals) * nComponents + rStartLagrange;
 
-      PetscScalar *local_f_b;
-      VecGetArray(f_b, &local_f_b);
+    {
+      // === Test some matrix sizes. ===
 
-      {
-	int size;
-	VecGetLocalSize(f_b, &size);
-	TEST_EXIT_DBG(size == nRankB * nComponents)("Should not happen!\n");
-      }
+      int matRow, matCol;
+      MatGetLocalSize(A, &matRow, &matCol);
+      TEST_EXIT_DBG(matRow == nRankNest)("Should not happen!\n");
+      mpi::globalAdd(matRow);
+      TEST_EXIT_DBG(matRow == nOverallNest)("Should not happen!\n");
 
-      PetscScalar *local_f_primal;
-      VecGetArray(f_primal, &local_f_primal);
+      MatGetOwnershipRange(A, &matRow, &matCol);
+      TEST_EXIT_DBG(matRow == rStartNest)("Should not happen!\n");
+    }
 
-      {
-	int size;
-	VecGetLocalSize(f_primal, &size);
-	TEST_EXIT_DBG(size == nRankPrimals * nComponents)("Should not happen!\n");
-      }
+    // === Create rhs and solution vectors for the overall FETI system. ===
+
+    Vec f;
+    VecCreate(PETSC_COMM_WORLD, &f);
+    VecSetSizes(f, nRankNest, nOverallNest);
+    VecSetType(f, VECMPI);
+
+    Vec b;
+    VecDuplicate(f, &b);
+
+    
+    // === Fill the rhs vector by copying the primal and non-primal PETSc vectors. ===
 
-      PetscScalar *local_f;
-      VecGetArray(f, &local_f);
+    PetscScalar *local_f_b;
+    VecGetArray(f_b, &local_f_b);
 
-      int index = 0;
-      for (int i = 0; i < nRankB * nComponents; i++)
-	local_f[index++] = local_f_b[i];
-      for (int i = 0; i < nRankPrimals * nComponents; i++)
-	local_f[index++] = local_f_primal[i];
+    PetscScalar *local_f_primal;
+    VecGetArray(f_primal, &local_f_primal);
 
-      VecRestoreArray(f, &local_f);  
-      VecRestoreArray(f_b, &local_f_b);
-      VecRestoreArray(f_primal, &local_f_primal);
+    {
+      int size;
+      VecGetLocalSize(f_b, &size);
+      TEST_EXIT_DBG(size == nRankB * nComponents)("Should not happen!\n");
+      VecGetLocalSize(f_primal, &size);
+      TEST_EXIT_DBG(size == nRankPrimals * nComponents)("Should not happen!\n");
+    }
 
+    PetscScalar *local_f;
+    VecGetArray(f, &local_f);
 
-      KSP ksp;
-      KSPCreate(PETSC_COMM_WORLD, &ksp);
-      KSPSetOperators(ksp, A, A, SAME_NONZERO_PATTERN);
-      KSPSetFromOptions(ksp);
+    int index = 0;
+    for (int i = 0; i < nRankB * nComponents; i++)
+      local_f[index++] = local_f_b[i];
+    for (int i = 0; i < nRankPrimals * nComponents; i++)
+      local_f[index++] = local_f_primal[i];
 
+    VecRestoreArray(f, &local_f);  
+    VecRestoreArray(f_b, &local_f_b);
+    VecRestoreArray(f_primal, &local_f_primal);
 
+    
+    // === Create solver and solve the overall FETI system. ===
 
-      KSPSolve(ksp, f, b);
+    KSP ksp;
+    KSPCreate(PETSC_COMM_WORLD, &ksp);
+    KSPSetOperators(ksp, A, A, SAME_NONZERO_PATTERN);
+    KSPSetFromOptions(ksp);
 
 
+    KSPSolve(ksp, f, b);
 
 
-      Vec u_b, u_primal;
-      VecDuplicate(f_b, &u_b);
-      VecDuplicate(f_primal, &u_primal);
+    // === Reconstruct FETI solution vectors. ===
+    
+    Vec u_b, u_primal;
+    VecDuplicate(f_b, &u_b);
+    VecDuplicate(f_primal, &u_primal);
     
 
-      PetscScalar *local_b;
-      VecGetArray(b, &local_b);
+    PetscScalar *local_b;
+    VecGetArray(b, &local_b);
 
-      PetscScalar *local_u_b;
-      VecGetArray(u_b, &local_u_b);
+    PetscScalar *local_u_b;
+    VecGetArray(u_b, &local_u_b);
 
-      PetscScalar *local_u_primal;
-      VecGetArray(u_primal, &local_u_primal);
+    PetscScalar *local_u_primal;
+    VecGetArray(u_primal, &local_u_primal);
 
-      index = 0;
-      for (int i = 0; i < nRankB * nComponents; i++)
-	local_u_b[i] = local_b[index++];
-      for (int i = 0; i < nRankPrimals * nComponents; i++)
-	local_u_primal[i] = local_b[index++];
-      for (int i = 0; i < nRankLagrange; i++) {
-	MSG("MY %d-ith Lagrange: %f\n", i, local_b[index++]);
-      }
+    index = 0;
+    for (int i = 0; i < nRankB * nComponents; i++)
+      local_u_b[i] = local_b[index++];
+    for (int i = 0; i < nRankPrimals * nComponents; i++)
+      local_u_primal[i] = local_b[index++];
 
-      VecRestoreArray(f, &local_b);
-      VecRestoreArray(u_b, &local_u_b);
-      VecRestoreArray(u_primal, &local_u_primal);
+    VecRestoreArray(f, &local_b);
+    VecRestoreArray(u_b, &local_u_b);
+    VecRestoreArray(u_primal, &local_u_primal);
 
+    recoverSolution(u_b, u_primal, vec);
 
-      recoverSolution(u_b, u_primal, vec);
+    VecDestroy(u_b);
+    VecDestroy(u_primal);
+    VecDestroy(b);
+    VecDestroy(f);
 
-      VecDestroy(u_b);
-      VecDestroy(u_primal);
-      VecDestroy(b);
-      VecDestroy(f);
+    KSPDestroy(ksp);
+  }
 
-      KSPDestroy(ksp);
 
-    } else {
+  void PetscSolverFeti::solveReducedFetiMatrix(SystemVector &vec)
+  {
+    FUNCNAME("PetscSolverFeti::solveReducedFetiMatrix()");
 
-      KSPCreate(PETSC_COMM_WORLD, &ksp_b);
-      KSPSetOperators(ksp_b, mat_b_b, mat_b_b, SAME_NONZERO_PATTERN);
-      KSPSetOptionsPrefix(ksp_b, "solver_b_");
-      KSPSetFromOptions(ksp_b);
+    // === Create the solver for the non-primal (thus local) variables. ===
 
-    
-      Vec vec_rhs;
+    KSPCreate(PETSC_COMM_WORLD, &ksp_b);
+    KSPSetOperators(ksp_b, mat_b_b, mat_b_b, SAME_NONZERO_PATTERN);
+    KSPSetOptionsPrefix(ksp_b, "solver_b_");
+    KSPSetFromOptions(ksp_b);
+
+    // RHS and solution vector.
+    Vec vec_rhs;
 
-      Vec tmp_b0, tmp_b1, tmp_lagrange0, tmp_primal0, tmp_primal1;
-      MatGetVecs(mat_lagrange, PETSC_NULL, &tmp_lagrange0);
-      MatGetVecs(mat_lagrange, PETSC_NULL, &vec_rhs);
-      MatGetVecs(mat_b_b, PETSC_NULL, &tmp_b0);
-      MatGetVecs(mat_b_b, PETSC_NULL, &tmp_b1);
-      MatGetVecs(mat_primal_primal, PETSC_NULL, &tmp_primal0);
-      MatGetVecs(mat_primal_primal, PETSC_NULL, &tmp_primal1);
+    // Some temporary vectors.
+    Vec tmp_b0, tmp_b1, tmp_lagrange0, tmp_primal0, tmp_primal1;
+    MatGetVecs(mat_lagrange, PETSC_NULL, &tmp_lagrange0);
+    MatGetVecs(mat_lagrange, PETSC_NULL, &vec_rhs);
+    MatGetVecs(mat_b_b, PETSC_NULL, &tmp_b0);
+    MatGetVecs(mat_b_b, PETSC_NULL, &tmp_b1);
+    MatGetVecs(mat_primal_primal, PETSC_NULL, &tmp_primal0);
+    MatGetVecs(mat_primal_primal, PETSC_NULL, &tmp_primal1);
 
 
-      // === Create new rhs ===
+    // === Create new rhs ===
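+    //
+    // A sketch of the reduced system in the notation of the comments below
+    // (assuming petscMultMatFeti implements the standard FETI-DP operator):
+    // the Lagrange multipliers lambda solve  F * lambda = d  with
+    //
+    //   F = L * inv(K_BB) * trans(L)
+    //       + L * inv(K_BB) * M_BPi * inv(S_PiPi) * M_PiB * inv(K_BB) * trans(L),
+    //   d = L * inv(K_BB) * f_b
+    //       - L * inv(K_BB) * M_BPi * inv(S_PiPi) * (f_Pi - M_PiB * inv(K_BB) * f_b).
+    //
+    // The following lines assemble d in vec_rhs.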
 
-      // vec_rhs = L * inv(K_BB) * f_b
-      KSPSolve(ksp_b, f_b, tmp_b0);
-      MatMult(mat_lagrange, tmp_b0, vec_rhs);
+    // vec_rhs = L * inv(K_BB) * f_b
+    KSPSolve(ksp_b, f_b, tmp_b0);
+    MatMult(mat_lagrange, tmp_b0, vec_rhs);
 
-      // tmp_primal0 = M_PiB * inv(K_BB) * f_b
-      MatMult(mat_primal_b, tmp_b0, tmp_primal0);
+    // tmp_primal0 = M_PiB * inv(K_BB) * f_b
+    MatMult(mat_primal_b, tmp_b0, tmp_primal0);
 
-      // tmp_primal0 = f_Pi - M_PiB * inv(K_BB) * f_b
-      //      VecAXPBY(tmp_primal0, 1.0, -1.0, f_primal);
-      VecAXPBY(tmp_primal0, -1.0, 1.0, f_primal);
+    // tmp_primal0 = M_PiB * inv(K_BB) * f_b - f_Pi
+    // (note the sign: VecAXPBY(y, alpha, beta, x) computes y = alpha*x + beta*y)
+    VecAXPBY(tmp_primal0, -1.0, 1.0, f_primal);
 
-      // tmp_primal0 = inv(S_PiPi) (f_Pi - M_PiB * inv(K_BB) * f_b)
-      KSPSolve(ksp_schur_primal, tmp_primal0, tmp_primal0);
+    // tmp_primal0 = -inv(S_PiPi) * (f_Pi - M_PiB * inv(K_BB) * f_b)
+    KSPSolve(ksp_schur_primal, tmp_primal0, tmp_primal0);
 
-      //
-      MatMult(mat_b_primal, tmp_primal0, tmp_b0);
-      KSPSolve(ksp_b, tmp_b0, tmp_b0);
-      MatMult(mat_lagrange, tmp_b0, tmp_lagrange0);
+    // tmp_lagrange0 = -L * inv(K_BB) * M_BPi * inv(S_PiPi) *
+    //                 (f_Pi - M_PiB * inv(K_BB) * f_b)
+    MatMult(mat_b_primal, tmp_primal0, tmp_b0);
+    KSPSolve(ksp_b, tmp_b0, tmp_b0);
+    MatMult(mat_lagrange, tmp_b0, tmp_lagrange0);
 
-      //
-      VecAXPBY(vec_rhs, 1.0, 1.0, tmp_lagrange0);
+    // vec_rhs = vec_rhs + tmp_lagrange0, i.e., the complete rhs d of the
+    // reduced system.
+    VecAXPBY(vec_rhs, 1.0, 1.0, tmp_lagrange0);
 
 
-      // === Solve with FETI-DP operator. ===
+    // === Solve with FETI-DP operator. ===
 
-      KSPSolve(ksp_feti, vec_rhs, vec_rhs);
+    KSPSolve(ksp_feti, vec_rhs, vec_rhs);
 
    
-      // === Solve for u_primals. ===
+    // === Solve for the primal variables. ===
 
-      VecCopy(f_primal, tmp_primal0);
+    VecCopy(f_primal, tmp_primal0);
 
-      KSPSolve(ksp_b, f_b, tmp_b0);
-      MatMult(mat_primal_b, tmp_b0, tmp_primal1);
+    KSPSolve(ksp_b, f_b, tmp_b0);
+    MatMult(mat_primal_b, tmp_b0, tmp_primal1);
 
-      VecAXPBY(tmp_primal0, -1.0, 1.0, tmp_primal1);
+    VecAXPBY(tmp_primal0, -1.0, 1.0, tmp_primal1);
 
-      MatMultTranspose(mat_lagrange, vec_rhs, tmp_b0);
-      KSPSolve(ksp_b, tmp_b0, tmp_b0);
-      MatMult(mat_primal_b, tmp_b0, tmp_primal1);
+    MatMultTranspose(mat_lagrange, vec_rhs, tmp_b0);
+    KSPSolve(ksp_b, tmp_b0, tmp_b0);
+    MatMult(mat_primal_b, tmp_b0, tmp_primal1);
 
-      VecAXPBY(tmp_primal0, 1.0, 1.0, tmp_primal1);
-      KSPSolve(ksp_schur_primal, tmp_primal0, tmp_primal0);
+    VecAXPBY(tmp_primal0, 1.0, 1.0, tmp_primal1);
+    KSPSolve(ksp_schur_primal, tmp_primal0, tmp_primal0);
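+
+    // Now tmp_primal0 = inv(S_PiPi) *
+    //   (f_Pi - M_PiB * inv(K_BB) * (f_b - trans(L) * lambda)),
+    // i.e., the solution of the primal variables.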
 
     
-      // === Solve for u_b. ===
+    // === Solve for u_b. ===
 
-      VecCopy(f_b, tmp_b0);
-      MatMultTranspose(mat_lagrange, vec_rhs, tmp_b1);
-      VecAXPBY(tmp_b0, -1.0, 1.0, tmp_b1);
+    VecCopy(f_b, tmp_b0);
+    MatMultTranspose(mat_lagrange, vec_rhs, tmp_b1);
+    VecAXPBY(tmp_b0, -1.0, 1.0, tmp_b1);
 
-      MatMult(mat_b_primal, tmp_primal0, tmp_b1);
-      VecAXPBY(tmp_b0, -1.0, 1.0, tmp_b1);
+    MatMult(mat_b_primal, tmp_primal0, tmp_b1);
+    VecAXPBY(tmp_b0, -1.0, 1.0, tmp_b1);
 
-      KSPSolve(ksp_b, tmp_b0, tmp_b0);
+    KSPSolve(ksp_b, tmp_b0, tmp_b0);
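+
+    // Now tmp_b0 = inv(K_BB) * (f_b - trans(L) * lambda - M_BPi * tmp_primal0),
+    // i.e., the solution of the B variables.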
 
 
-      // === And recover AMDiS solution vectors. ===
+    // === And recover AMDiS solution vectors. ===
     
-      recoverSolution(tmp_b0, tmp_primal0, vec);
+    recoverSolution(tmp_b0, tmp_primal0, vec);
 
 
-      // === Destroy all data structures. ===
+    // === Destroy all data structures. ===
     
-
-      VecDestroy(vec_rhs);
-      VecDestroy(tmp_b0);
-      VecDestroy(tmp_b1);
-      VecDestroy(tmp_lagrange0);
-      VecDestroy(tmp_primal0);
-      VecDestroy(tmp_primal1);
+    VecDestroy(vec_rhs);
+    VecDestroy(tmp_b0);
+    VecDestroy(tmp_b1);
+    VecDestroy(tmp_lagrange0);
+    VecDestroy(tmp_primal0);
+    VecDestroy(tmp_primal1);
 	    
 
-      KSPDestroy(ksp_b);
+    KSPDestroy(ksp_b);
 
-      MatDestroy(mat_b_b);
-      MatDestroy(mat_primal_primal);
-      MatDestroy(mat_b_primal);
-      MatDestroy(mat_primal_b);
-      MatDestroy(mat_lagrange);
+    MatDestroy(mat_b_b);
+    MatDestroy(mat_primal_primal);
+    MatDestroy(mat_b_primal);
+    MatDestroy(mat_primal_b);
+    MatDestroy(mat_lagrange);
 
-      VecDestroy(f_b);
-      VecDestroy(f_primal);
+    VecDestroy(f_b);
+    VecDestroy(f_primal);
 
-      destroySchurPrimalKsp();
+    destroySchurPrimalKsp();
 
-      destroyFetiKsp();   
-    }
+    destroyFetiKsp();   
+  }
 
+
+  void PetscSolverFeti::solvePetscMatrix(SystemVector &vec, AdaptInfo *adaptInfo)
+  {
+    FUNCNAME("PetscSolverFeti::solvePetscMatrix()");
+
+    int debug = 0;
+    Parameters::get("parallel->debug feti", debug);
+
+    if (debug) {
+      WARNING("FETI matrix is solved globally, thus without reducing to the lagrange multipliers!\n");
+
+      solveFetiMatrix(vec);
+    } else {
+      solveReducedFetiMatrix(vec);
+    }
   }
 #endif
 
diff --git a/AMDiS/src/parallel/PetscSolverFeti.h b/AMDiS/src/parallel/PetscSolverFeti.h
index a033913b26f0ba2add7c2ebeb7abf9d434308ccf..5f6e96ff37be58ec3ac953ed99fc610dacedd049 100644
--- a/AMDiS/src/parallel/PetscSolverFeti.h
+++ b/AMDiS/src/parallel/PetscSolverFeti.h
@@ -33,47 +33,81 @@ namespace AMDiS {
 
 #ifdef HAVE_PETSC_DEV
 
+  /** \brief
+   * This structure is used when defining the MatShell operation for solving
+   * the primal Schur complement, see \ref petscMultMatSchurPrimal.
+   */
   struct PetscSchurPrimalData {
-    Mat *mat_primal_b, *mat_b_primal, *mat_primal_primal;
+    /// Pointer to the matrix of the primal variables.
+    Mat *mat_primal_primal;
+   
+    /// Coupling matrices between the primal and the B variables.
+    Mat *mat_primal_b, *mat_b_primal;
 
+    /// Temporary vector on the B variables.
     Vec tmp_vec_b;
 
+    /// Temporary vector on the primal variables.
     Vec tmp_vec_primal;
 
+    /// Pointer to the solver for \ref PetscSolverFeti::mat_b_b.
     KSP *ksp_b;
   };
 
 
-
+  /** \brief
+   * This structure is used when defining the FETI-DP operator for solving 
+   * the system matrix reduced to the Lagrange multipliers, see
+   * \ref petscMultMatFeti.
+   */
   struct PetscFetiData {
-    Mat *mat_primal_b, *mat_b_primal, *mat_primal_primal;
+    /// Pointer to the matrix of the primal variables.
+    Mat *mat_primal_primal;
+   
+    /// Coupling matrices between the primal and the B variables.
+    Mat *mat_primal_b, *mat_b_primal;
 
+    /// Matrix of Lagrange variables.
     Mat *mat_lagrange;
 
+    /// Temporary vector on the B variables.
     Vec tmp_vec_b;
 
+    /// Temporary vector on the primal variables.
     Vec tmp_vec_primal;
 
+    /// Temporary vector on the Lagrange variables.
     Vec tmp_vec_lagrange;
 
+    /// Pointer to the solver for \ref PetscSolverFeti::mat_b_b.
     KSP *ksp_b;
 
+    /// Pointer to the solver of the Schur complement on the primal variables.
     KSP *ksp_schur_primal;
   };
 
 
 
+  /** \brief
+   * FETI-DP implementation based on PETSc.
+   */
   class PetscSolverFeti : public PetscSolver
   {
   public:
     PetscSolverFeti() 
-      : PetscSolver()
+      : PetscSolver(),
+	nComponents(-1)
     {}
 
+    /// Assembles the sequentially created matrices and vectors into the
+    /// global matrices and vectors required by the FETI-DP method.
     void fillPetscMatrix(Matrix<DOFMatrix*> *mat, SystemVector *vec);
 
+    /// Solves the system using the FETI-DP method.
     void solvePetscMatrix(SystemVector &vec, AdaptInfo *adaptInfo);
 
+    /// Returns flags that denote which information about the boundary DOFs
+    /// is required by the FETI-DP solver.
     Flag getBoundaryDofRequirement()
     {
       return 
@@ -81,95 +115,157 @@ namespace AMDiS {
 	MeshDistributor::BOUNDARY_FILL_INFO_SEND_DOFS |
 	MeshDistributor::BOUNDARY_FILL_INFO_RECV_DOFS;
     }
+
   protected:
+    /// After mesh changes, or if the solver is called for the first time,
+    /// this function creates all matrix and vector objects with the
+    /// appropriate sizes.
     void updateDofData();
 
+    /// Defines which boundary nodes are primal and creates the global index
+    /// of the primal variables.
     void createPrimals();
 
+    /// Defines the set of dual variables and creates the global index of
+    /// the dual variables.
     void createDuals();
 
+    /// Creates the Lagrange multiplier variables corresponding to the dual
+    /// variables.
     void createLagrange();
 
+    /// Creates a global index of the B variables.
     void createIndexB();
 
-    void createMatLagrange(int nComponents);
-
+    /// Creates the Lagrange multiplier constraints and assembles them 
+    /// to \ref mat_lagrange.
+    void createMatLagrange();
 
+    /// Creates PETSc KSP solver object for solving the Schur complement
+    /// system on the primal variables, \ref ksp_schur_primal
+    void createSchurPrimalKsp();
 
-    void createSchurPrimalKsp(int nComponents);
-
+    /// Destroys PETSc KSP solver object \ref ksp_schur_primal
     void destroySchurPrimalKsp();
 
-
+    /// Creates PETSc KSP solver object for the FETI-DP operator, \ref ksp_feti
     void createFetiKsp();
 
+    /// Destroys FETI-DP operator, \ref ksp_feti
     void destroyFetiKsp();
 
-
+    /** \brief
+     * Recovers the AMDiS solution vectors from PETSc's solution vectors of
+     * the FETI-DP system. First, the B variables can locally be copied to
+     * the corresponding entries in the DOF vectors. The primal variables
+     * must be communicated such that all ranks sharing a primal get a copy
+     * of the corresponding value.
+     *
+     * \param[in]   vec_sol_b        Global PETSc vector of the solution of
+     *                               the B variables.
+     * \param[in]   vec_sol_primal   Global PETSc vector of the solution of
+     *                               the primal variables.
+     * \param[out]  vec              SystemVector containing all solution 
+     *                               DOF vectors.
+     */
     void recoverSolution(Vec &vec_sol_b,
 			 Vec &vec_sol_primal,
 			 SystemVector &vec);
 
+    /** \brief
+     * Solves the FETI-DP system globally, thus without reducing it to the 
+     * Lagrange multipliers. This should be used for debugging only to test
+     * if the FETI-DP system is set up correctly.
+     *
+     * \param[out]  vec    Solution DOF vectors.
+     */
+    void solveFetiMatrix(SystemVector &vec);
+
+    /** \brief
+     * Solves the FETI-DP system by first reducing it to the Lagrange
+     * multipliers. This is what one expects when using the FETI-DP method :)
+     *
+     * \param[out]  vec    Solution DOF vectors.
+     */
+    void solveReducedFetiMatrix(SystemVector &vec);
+
   protected:
+    /// Number of components in the PDE to be solved.
+    int nComponents;
+
+    /// Set of DOF indices that are considered to be primal variables.
     DofIndexSet primals;
 
+    /// Mapping from primal DOF indices to a global index of primals.
     DofMapping globalPrimalIndex;
 
-    int nRankPrimals;
-
-    int nOverallPrimals;
-
-    int rStartPrimals;
+    /// Number of rank-owned primals, overall number of primals, and the
+    /// rank's offset in the global numbering of the primals.
+    int nRankPrimals, nOverallPrimals, rStartPrimals;
 
+    /// Set of DOF indices that are considered to be dual variables.
     DofIndexSet duals;
     
+    /// Mapping from dual DOF indices to a global index of duals.
     DofMapping globalDualIndex;
 
+    /// Stores for each dual boundary DOF the set of ranks in which the DOF
+    /// is contained.
     DofIndexToPartitions boundaryDofRanks;
 
+    /// Stores for each dual DOF index the index of the first Lagrange
+    /// constraint that is assigned to this DOF.
     DofMapping dofFirstLagrange;
 
-    int nRankLagrange;
-
-    int nOverallLagrange;
-
-    int rStartLagrange;
+    /// Number of rank-owned Lagrange variables, overall number of Lagrange
+    /// variables, and the rank's offset in their global numbering.
+    int nRankLagrange, nOverallLagrange, rStartLagrange;
     
+    /// Mapping from each non-primal variable to the global index of the
+    /// B variables.
     DofMapping globalIndexB;
 
-    int nRankB;
-
-    int nOverallB;
-    
-    int rStartB;    
+    /// Number of non-primal (thus B) variables on this rank and globally,
+    /// and the rank's offset in their global numbering.
+    int nRankB, nOverallB, rStartB;    
 
+    /// Global PETSc matrix of the non-primal variables.
     Mat mat_b_b;
 
+    /// Global PETSc matrix of primal variables.
     Mat mat_primal_primal;
 
+    /// Global PETSc matrices that couple the primal with the non-primal
+    /// variables.
     Mat mat_b_primal, mat_primal_b;
 
+    /// Global PETSc matrix of Lagrange variables.
     Mat mat_lagrange;
 
+    /// Right-hand side PETSc vectors for the primal and non-primal variables.
     Vec f_b, f_primal;
 
+    /// PETSc solver object that inverts the matrix of the non-primal
+    /// variables, \ref mat_b_b.
     KSP ksp_b;
 
-
-
-
+    /// PETSc solver object to solve the Schur complement on the 
+    /// primal variables.
     KSP ksp_schur_primal;
 
+    /// Matrix object that defines a matrix-free implementation for the action
+    /// of the Schur complement on the primal variables.
     Mat mat_schur_primal;
 
+    /// Data for MatMult operation in matrix \ref mat_schur_primal
     PetscSchurPrimalData petscSchurPrimalData;
 
-
-
+    /// PETSc solver object to solve a system with FETI-DP.    
     KSP ksp_feti;
 
+    /// Matrix object that defines a matrix-free implementation for the action
+    /// of the FETI-DP operator.
     Mat mat_feti;
 
+    /// Data for MatMult operation in matrix \ref mat_feti
     PetscFetiData petscFetiData;
   };
 #endif