From 6ef1e048b04752071d015c4da142ef294b7ab889 Mon Sep 17 00:00:00 2001
From: Thomas Witkowski <thomas.witkowski@gmx.de>
Date: Tue, 5 Apr 2011 11:39:42 +0000
Subject: [PATCH] Some work on the nested Schur complement approach.
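
The PETSc solver objects no longer take the MeshDistributor in their
constructor; PetscProblemStat hands it over via setMeshDistributor()
before fillPetscMatrix() is called. The Scan/Allreduce pair that computes
the rank displacement and the overall number of DOFs is factored out of
MeshDistributor into the new helper mpi::getDofNumbering() in MpiHelper.h.
PetscSolverSchur now builds separate numberings for interior and boundary
DOFs, assembles the four blocks matA11, matA12, matA21 and matA22 into a
nested matrix (MatCreateNest/VecCreateNest) and registers the
interior/boundary splits for the preconditioner via PCFieldSplitSetFields().
The Schur solver is still work in progress and currently exits after the
first solve.

For reference, a minimal usage sketch of the new numbering helper; the
surrounding function and variable names are made up for illustration and
are not part of this patch:

    #include <mpi.h>
    #include "parallel/MpiHelper.h"

    // Each rank passes its local DOF count. Afterwards rStart holds the
    // prefix sum of the counts of ranks 0..r-1 and nOverall the global
    // total, which is identical on every rank.
    void numberRankDofs(MPI::Intracomm& mpiComm, int nRankDofs)
    {
      int rStart = 0, nOverall = 0;
      AMDiS::mpi::getDofNumbering(mpiComm, nRankDofs, rStart, nOverall);
      // Rank-owned DOF i then gets the global index rStart + i, and the
      // global PETSc objects are sized with nOverall rows.
    }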

---
 AMDiS/src/AMDiS.h                      |   2 +-
 AMDiS/src/ProblemVec.cc                |   5 +
 AMDiS/src/Serializer.h                 |   4 +
 AMDiS/src/parallel/MeshDistributor.cc  |  21 +-
 AMDiS/src/parallel/MeshPartitioner.h   |   2 +
 AMDiS/src/parallel/MpiHelper.h         |  25 ++
 AMDiS/src/parallel/PetscProblemStat.cc |   1 +
 AMDiS/src/parallel/PetscProblemStat.h  |   4 +-
 AMDiS/src/parallel/PetscSolver.cc      | 343 ++++++++++++++++++++++---
 AMDiS/src/parallel/PetscSolver.h       |  76 ++++--
 10 files changed, 410 insertions(+), 73 deletions(-)

diff --git a/AMDiS/src/AMDiS.h b/AMDiS/src/AMDiS.h
index fb99516d..1adea38f 100644
--- a/AMDiS/src/AMDiS.h
+++ b/AMDiS/src/AMDiS.h
@@ -136,7 +136,7 @@
 #if HAVE_PARALLEL_MTL4
 #include "parallel/Mtl4Solver.h"
 #else
-#include "parallel/PetscSolver.h"
+#include "parallel/PetscProblemStat.h"
 #endif
 
 #endif
diff --git a/AMDiS/src/ProblemVec.cc b/AMDiS/src/ProblemVec.cc
index 78a69273..e146b9cc 100644
--- a/AMDiS/src/ProblemVec.cc
+++ b/AMDiS/src/ProblemVec.cc
@@ -1545,6 +1545,11 @@ namespace AMDiS {
 
     BoundaryType *bound = 
       useGetBound ? new BoundaryType[basisFcts->getNumber()] : NULL;
+
+    if (matrix)
+      matrix->startInsertion(matrix->getNnz());
+    if (vector)
+      vector->set(0.0);      
     
     // == Traverse mesh and assemble. ==
         
diff --git a/AMDiS/src/Serializer.h b/AMDiS/src/Serializer.h
index dbc0f30a..4bc6798c 100644
--- a/AMDiS/src/Serializer.h
+++ b/AMDiS/src/Serializer.h
@@ -23,6 +23,10 @@
 #ifndef AMDIS_SERIALIZER_H
 #define AMDIS_SERIALIZER_H
 
+#if HAVE_PARALLEL_DOMAIN_AMDIS
+#include <mpi.h>
+#endif
+
 #include <map>
 #include "boost/lexical_cast.hpp"
 
diff --git a/AMDiS/src/parallel/MeshDistributor.cc b/AMDiS/src/parallel/MeshDistributor.cc
index 07866cfd..22557f1b 100644
--- a/AMDiS/src/parallel/MeshDistributor.cc
+++ b/AMDiS/src/parallel/MeshDistributor.cc
@@ -1702,35 +1702,26 @@ namespace AMDiS {
     }
 
 
-    // === Get starting position for global rank DOF ordering. ====
-
+    // Get displacement for the global rank DOF ordering and the overall number of DOFs.
     nRankDofs = rankDofs.size();
-    mpiComm.Scan(&nRankDofs, &rstart, 1, MPI_INT, MPI_SUM);
-    rstart -= nRankDofs;
+    mpi::getDofNumbering(mpiComm, nRankDofs, rstart, nOverallDofs);
 
 
-    // === Stores for all rank owned DOFs a new global index. ===
+    // Store a new global index for each rank-owned DOF.
     DofIndexMap rankDofsNewGlobalIndex;
     for (int i = 0; i < nRankDofs; i++)
       rankDofsNewGlobalIndex[rankDofs[i]] = i + rstart;
 
 
-    // === Calculate number of overall DOFs of all partitions. ===
-
-    nOverallDofs = 0;
-    mpiComm.Allreduce(&nRankDofs, &nOverallDofs, 1, MPI_INT, MPI_SUM);
-
-
     // === Send and receive new DOF indices. ===
 
 #if (DEBUG != 0)
     ParallelDebug::testDofContainerCommunication(*this, sendDofs, recvDofs);
 #endif
     
-    int i = 0;
     StdMpi<vector<DegreeOfFreedom> > stdMpi(mpiComm, false);
     for (RankToDofContainer::iterator sendIt = sendDofs.begin();
-	 sendIt != sendDofs.end(); ++sendIt, i++) {
+	 sendIt != sendDofs.end(); ++sendIt) {
       stdMpi.getSendData(sendIt->first).resize(0);
       stdMpi.getSendData(sendIt->first).reserve(sendIt->second.size());
       for (DofContainer::iterator dofIt = sendIt->second.begin();
@@ -1751,10 +1742,10 @@ namespace AMDiS {
 
     for (RankToDofContainer::iterator recvIt = recvDofs.begin();
 	 recvIt != recvDofs.end(); ++recvIt) {      
-      int j = 0;
+      int i = 0;
       for (DofContainer::iterator dofIt = recvIt->second.begin();
 	   dofIt != recvIt->second.end(); ++dofIt) {
-	rankDofsNewGlobalIndex[*dofIt] = stdMpi.getRecvData(recvIt->first)[j++];
+	rankDofsNewGlobalIndex[*dofIt] = stdMpi.getRecvData(recvIt->first)[i++];
 	isRankDof[**dofIt] = false;
       }
     }
diff --git a/AMDiS/src/parallel/MeshPartitioner.h b/AMDiS/src/parallel/MeshPartitioner.h
index e5e2f4fc..0b07124d 100644
--- a/AMDiS/src/parallel/MeshPartitioner.h
+++ b/AMDiS/src/parallel/MeshPartitioner.h
@@ -25,6 +25,8 @@
 
 #include <map>
 #include <set>
+#include <mpi.h>
+
 #include "AMDiS_fwd.h"
 #include "Mesh.h"
 
diff --git a/AMDiS/src/parallel/MpiHelper.h b/AMDiS/src/parallel/MpiHelper.h
index e178af3e..9c5e88a1 100644
--- a/AMDiS/src/parallel/MpiHelper.h
+++ b/AMDiS/src/parallel/MpiHelper.h
@@ -48,6 +48,31 @@ namespace AMDiS {
     {
       srand(time(NULL) * (MPI::COMM_WORLD.Get_rank() + 1));
     }
+
+    /** \brief
+     * In many situations each rank computes a number of local DOFs. Then all
+     * ranks want to know the overall number of DOFs and the starting
+     * displacement of the DOF numbering in each rank.
+     *
+     * \param[in]   mpiComm        The MPI communicator.
+     * \param[in]   nRankDofs      The number of local DOFs.
+     * \param[out]  rStartDofs     Displacement of the DOF numbering. On rank n
+     *                             this is the sum of the local DOF counts of
+     *                             ranks 0 to n - 1.
+     * \param[out]  nOverallDofs   Global sum of nRankDofs. It is equal on all
+     *                             ranks.
+     */
+    inline void getDofNumbering(MPI::Intracomm& mpiComm,
+				int nRankDofs, 
+				int& rStartDofs, 
+				int& nOverallDofs)
+    {
+      rStartDofs = 0;
+      nOverallDofs = 0;
+      mpiComm.Scan(&nRankDofs, &rStartDofs, 1, MPI_INT, MPI_SUM);
+      rStartDofs -= nRankDofs;
+      mpiComm.Allreduce(&nRankDofs, &nOverallDofs, 1, MPI_INT, MPI_SUM);
+    }
   }
 
 }
diff --git a/AMDiS/src/parallel/PetscProblemStat.cc b/AMDiS/src/parallel/PetscProblemStat.cc
index 78c6b0eb..2590d4d6 100644
--- a/AMDiS/src/parallel/PetscProblemStat.cc
+++ b/AMDiS/src/parallel/PetscProblemStat.cc
@@ -29,6 +29,7 @@ namespace AMDiS {
 
     double wtime = MPI::Wtime();
 
+    petscSolver->setMeshDistributor(meshDistributor);
     petscSolver->fillPetscMatrix(systemMatrix, rhs);
     petscSolver->solvePetscMatrix(*solution, adaptInfo);   
 
diff --git a/AMDiS/src/parallel/PetscProblemStat.h b/AMDiS/src/parallel/PetscProblemStat.h
index 73eacb29..71d8e9e4 100644
--- a/AMDiS/src/parallel/PetscProblemStat.h
+++ b/AMDiS/src/parallel/PetscProblemStat.h
@@ -42,12 +42,12 @@ namespace AMDiS {
       
       if (name == "petsc-schur") {
 #ifdef HAVE_PETSC_DEV
-	petscSolver = new PetscSolverSchur(meshDistributor);
+	petscSolver = new PetscSolverSchur();
 #else
 	ERROR_EXIT("Petsc schur complement solver is only supported when petsc-dev is used!\n");
 #endif
       } else if (name == "petsc" || name == "") {
-	petscSolver = new PetscSolverGlobalMatrix(meshDistributor);
+	petscSolver = new PetscSolverGlobalMatrix();
       } else {
 	ERROR_EXIT("No parallel solver %s available!\n", name.c_str());
       }
diff --git a/AMDiS/src/parallel/PetscSolver.cc b/AMDiS/src/parallel/PetscSolver.cc
index b3cc59fa..1b8672d0 100644
--- a/AMDiS/src/parallel/PetscSolver.cc
+++ b/AMDiS/src/parallel/PetscSolver.cc
@@ -12,6 +12,7 @@
 
 #include "parallel/PetscSolver.h"
 #include "parallel/StdMpi.h"
+#include "parallel/MpiHelper.h"
 
 namespace AMDiS {
 
@@ -30,6 +31,10 @@ namespace AMDiS {
   {
     FUNCNAME("PetscSolverGlobalMatrix::fillPetscMatrix()");
 
+    TEST_EXIT_DBG(meshDistributor)("No mesh distributor object defined!\n");
+    TEST_EXIT_DBG(mat)("No DOF matrix defined!\n");
+    TEST_EXIT_DBG(vec)("No DOF vector defined!\n");
+
     double wtime = MPI::Wtime();
     int nComponents = mat->getNumRows();
     int nRankRows = meshDistributor->getNumberRankDofs() * nComponents;
@@ -233,9 +238,9 @@ namespace AMDiS {
     for (cursor_type cursor = begin<row>(mat->getBaseMatrix()), 
 	   cend = end<row>(mat->getBaseMatrix()); cursor != cend; ++cursor) {
 
-      // Global index of the current row dof.
+      // Global index of the current row DOF.
       int globalRowDof = meshDistributor->mapLocalToGlobal(*cursor);
-      // Test if the current row dof is a periodic dof.
+      // Test if the current row DOF is a periodic DOF.
       bool periodicRow = meshDistributor->isPeriodicDof(globalRowDof);
 
       if (!periodicRow) {
@@ -264,7 +269,7 @@ namespace AMDiS {
 
 	  if (!periodicCol) {
 	    // Calculate the exact position of the column index in the PETSc matrix.
-	    cols.push_back(globalColDof * dispMult + dispAddCol);
+	    cols.push_back(colIndex);
 	    values.push_back(value(*icursor));
 	  } else {
 	    // === Row index is not periodic, but column index is. ===
@@ -434,10 +439,10 @@ namespace AMDiS {
       if (rankOnly && !meshDistributor->getIsRankDof(dofIt.getDOFIndex()))
 	continue;
 
-      // Calculate global row index of the dof.
+      // Calculate global row index of the DOF.
       DegreeOfFreedom globalRowDof = 
 	meshDistributor->mapLocalToGlobal(dofIt.getDOFIndex());
-      // Calculate PETSc index of the row dof.
+      // Calculate PETSc index of the row DOF.
       int index = globalRowDof * dispMult + dispAdd;
 
       if (meshDistributor->isPeriodicDof(globalRowDof)) {
@@ -451,7 +456,7 @@ namespace AMDiS {
 	  VecSetValues(petscVec, 1, &mappedIndex, &value, ADD_VALUES);
 	}
       } else {
-	// The dof index is not periodic.
+	// The DOF index is not periodic.
 	double value = *dofIt;
 	VecSetValues(petscVec, 1, &index, &value, ADD_VALUES);
       }
@@ -652,33 +657,42 @@ namespace AMDiS {
   }
 
 
-#ifdef HAVE_PETSC_DEV
-  void PetscSolverSchur::fillPetscMatrix(Matrix<DOFMatrix*> *mat, SystemVector *vec)
+#ifdef HAVE_PETSC_DEV 
+  void PetscSolverSchur::updateDofData()
   {
-    FUNCNAME("PetscSolverSchur::fillPetscMatrix()");
+    FUNCNAME("PetscSolverSchur::updateDofData()");
 
-    int nComponents = mat->getNumRows();
+    TEST_EXIT_DBG(meshDistributor)("No mesh distributor object defined!\n");
+
+    MPI::Intracomm& mpiComm = meshDistributor->getMpiComm();
 
     typedef map<int, DofContainer> RankToDofContainer;
     typedef map<DegreeOfFreedom, bool> DofIndexToBool;
-    
-    std::set<DegreeOfFreedom> boundaryDofsSet;
+
+    boundaryDofs.clear();
     std::set<DegreeOfFreedom> boundaryLocalDofs;
     RankToDofContainer& sendDofs = meshDistributor->getSendDofs();
     for (RankToDofContainer::iterator rankIt = sendDofs.begin(); 
-	 rankIt != sendDofs.end(); ++rankIt)
+	 rankIt != sendDofs.end(); ++rankIt) {
       for (DofContainer::iterator dofIt = rankIt->second.begin(); 
 	   dofIt != rankIt->second.end(); ++dofIt) {
 	boundaryLocalDofs.insert(**dofIt);
 	  
-	for (int i = 0; i < nComponents; i++)
-	  boundaryDofsSet.insert(meshDistributor->mapLocalToGlobal(**dofIt) * nComponents + i);
+	boundaryDofs.insert(meshDistributor->mapLocalToGlobal(**dofIt));
       }
+    }
       
-    boundaryDofs.clear();
-    boundaryDofs.insert(boundaryDofs.begin(), 
-			boundaryDofsSet.begin(), boundaryDofsSet.end());
-      
+    nBoundaryDofs = boundaryDofs.size();
+    mpi::getDofNumbering(mpiComm, nBoundaryDofs, 
+			 rStartBoundaryDofs, nOverallBoundaryDofs);
+
+    int counter = rStartBoundaryDofs;
+    for (std::set<DegreeOfFreedom>::iterator it = boundaryDofs.begin();
+	 it != boundaryDofs.end(); ++it)
+      mapGlobalBoundaryDof[*it] = counter++;
+
+
+
     std::set<DegreeOfFreedom> otherBoundaryLocalDofs;
     RankToDofContainer& recvDofs = meshDistributor->getRecvDofs();
     for (RankToDofContainer::iterator rankIt = recvDofs.begin(); 
@@ -687,19 +701,153 @@ namespace AMDiS {
 	   dofIt != rankIt->second.end(); ++dofIt)
 	otherBoundaryLocalDofs.insert(**dofIt);
       
-    std::set<DegreeOfFreedom> interiorDofsSet;
+    interiorDofs.clear();
     DofIndexToBool& isRankDof = meshDistributor->getIsRankDof();
     for (DofIndexToBool::iterator dofIt = isRankDof.begin(); 
-	 dofIt != isRankDof.end(); ++dofIt)
+	 dofIt != isRankDof.end(); ++dofIt) {
       if (dofIt->second && 
 	  boundaryLocalDofs.count(dofIt->first) == 0 && 
 	  otherBoundaryLocalDofs.count(dofIt->first) == 0)
-	for (int i = 0; i < nComponents; i++)	    
-	  interiorDofsSet.insert(meshDistributor->mapLocalToGlobal(dofIt->first) * nComponents + i);
+	interiorDofs.insert(meshDistributor->mapLocalToGlobal(dofIt->first));
+    }
       
-    interiorDofs.clear();
-    interiorDofs.insert(interiorDofs.begin(), 
-			interiorDofsSet.begin(), interiorDofsSet.end());
+    nInteriorDofs = interiorDofs.size();
+    mpi::getDofNumbering(mpiComm, nInteriorDofs, 
+			 rStartInteriorDofs, nOverallInteriorDofs);
+
+    counter = rStartInteriorDofs;
+    for (std::set<DegreeOfFreedom>::iterator it = interiorDofs.begin();
+	 it != interiorDofs.end(); ++it)
+      mapGlobalInteriorDof[*it] = counter++;
+
+
+    TEST_EXIT_DBG(nInteriorDofs > 0)("No interior DOFs on this rank!\n");
+
+
+    StdMpi<vector<DegreeOfFreedom> > stdMpi(mpiComm, false);
+    for (RankToDofContainer::iterator sendIt = meshDistributor->getSendDofs().begin();
+	 sendIt != meshDistributor->getSendDofs().end(); ++sendIt) {
+      stdMpi.getSendData(sendIt->first).resize(0);
+      stdMpi.getSendData(sendIt->first).reserve(sendIt->second.size());
+      for (DofContainer::iterator dofIt = sendIt->second.begin();
+	   dofIt != sendIt->second.end(); ++dofIt) {
+	int globalSendDof = meshDistributor->mapLocalToGlobal(**dofIt);
+
+	TEST_EXIT_DBG(mapGlobalBoundaryDof.count(globalSendDof))
+	  ("No mapping for boundary DOF %d!\n", globalSendDof);
+
+	stdMpi.getSendData(sendIt->first).push_back(mapGlobalBoundaryDof[globalSendDof]);
+      }
+    }     
+
+    stdMpi.updateSendDataSize();
+    stdMpi.recv(meshDistributor->getRecvDofs());
+    stdMpi.startCommunication();
+
+    for (RankToDofContainer::iterator recvIt = meshDistributor->getRecvDofs().begin();
+	 recvIt != meshDistributor->getRecvDofs().end(); ++recvIt) {
+      int i = 0;
+      for (DofContainer::iterator dofIt = recvIt->second.begin();
+	   dofIt != recvIt->second.end(); ++dofIt) {
+	int globalRecvDof = meshDistributor->mapLocalToGlobal(**dofIt);
+	mapGlobalBoundaryDof[globalRecvDof] = stdMpi.getRecvData(recvIt->first)[i++];
+      }
+    }
+  }
+
+
+  void PetscSolverSchur::fillPetscMatrix(Matrix<DOFMatrix*> *mat, SystemVector *vec)
+  {
+    FUNCNAME("PetscSolverSchur::fillPetscMatrix()");
+
+    updateDofData();
+    
+    int nComponents = mat->getNumRows();
+    int nInteriorRows = nInteriorDofs * nComponents;
+    int nOverallInteriorRows = nOverallInteriorDofs * nComponents;
+    int nBoundaryRows = nBoundaryDofs * nComponents;
+    int nOverallBoundaryRows = nOverallBoundaryDofs * nComponents;
+
+    MSG("INTERIOR LOCAL MAT: %d x %d\n", nInteriorRows, nInteriorRows);
+    MSG("INTERIOR GLOBAL MAT: %d x %d\n", nOverallInteriorRows, nOverallInteriorRows);
+    MSG("BOUNDARY LOCAL MAT: %d x %d\n", nBoundaryRows, nBoundaryRows);
+    MSG("BOUNDARY GLOBAL MAT: %d x %d\n", nOverallBoundaryRows, nOverallBoundaryRows);
+
+    MatCreateMPIAIJ(PETSC_COMM_WORLD, 
+		    nInteriorRows, nInteriorRows, 
+		    nOverallInteriorRows, nOverallInteriorRows,
+		    100, PETSC_NULL, 100, PETSC_NULL, &matA11);
+
+    MatCreateMPIAIJ(PETSC_COMM_WORLD, 
+		    nBoundaryRows, nBoundaryRows, 
+		    nOverallBoundaryRows, nOverallBoundaryRows,
+		    100, PETSC_NULL, 100, PETSC_NULL, &matA22);
+
+    MatCreateMPIAIJ(PETSC_COMM_WORLD, 
+		    nInteriorRows, nBoundaryRows, 
+		    nOverallInteriorRows, nOverallBoundaryRows,
+		    100, PETSC_NULL, 100, PETSC_NULL, &matA12);    
+
+    MatCreateMPIAIJ(PETSC_COMM_WORLD, 
+		    nBoundaryRows, nInteriorRows, 
+		    nOverallBoundaryRows, nOverallInteriorRows,
+		    100, PETSC_NULL, 100, PETSC_NULL, &matA21);
+
+
+    for (int i = 0; i < nComponents; i++)
+      for (int j = 0; j < nComponents; j++)
+	if ((*mat)[i][j])
+	  setDofMatrix((*mat)[i][j], nComponents, i, j);
+
+    MatAssemblyBegin(matA11, MAT_FINAL_ASSEMBLY);
+    MatAssemblyEnd(matA11, MAT_FINAL_ASSEMBLY);
+
+    MatAssemblyBegin(matA12, MAT_FINAL_ASSEMBLY);
+    MatAssemblyEnd(matA12, MAT_FINAL_ASSEMBLY);
+
+    MatAssemblyBegin(matA21, MAT_FINAL_ASSEMBLY);
+    MatAssemblyEnd(matA21, MAT_FINAL_ASSEMBLY);
+
+    MatAssemblyBegin(matA22, MAT_FINAL_ASSEMBLY);
+    MatAssemblyEnd(matA22, MAT_FINAL_ASSEMBLY);
+
+
+    Mat tmpMat[2][2];
+    tmpMat[0][0] = matA11;
+    tmpMat[0][1] = matA12;
+    tmpMat[1][0] = matA21;
+    tmpMat[1][1] = matA22;
+
+    MatCreateNest(PETSC_COMM_WORLD, 2, PETSC_NULL, 2, PETSC_NULL, 
+		  &tmpMat[0][0], &petscMatrix);
+    MatNestSetVecType(petscMatrix, VECNEST);
+    MatAssemblyBegin(petscMatrix, MAT_FINAL_ASSEMBLY);
+    MatAssemblyEnd(petscMatrix, MAT_FINAL_ASSEMBLY);
+
+
+    MatGetVecs(matA12, &petscRhsVec2, &petscRhsVec1);
+    MatGetVecs(matA12, &petscSolVec2, &petscSolVec1);
+
+    for (int i = 0; i < nComponents; i++)
+      setDofVector(petscRhsVec, vec->getDOFVector(i), nComponents, i);
+
+    Vec tmpVec[2];
+    tmpVec[0] = petscRhsVec1;
+    tmpVec[1] = petscRhsVec2;
+    VecCreateNest(PETSC_COMM_WORLD, 2, PETSC_NULL, tmpVec, &petscRhsVec);
+
+    tmpVec[0] = petscSolVec1;
+    tmpVec[1] = petscSolVec2;
+    VecCreateNest(PETSC_COMM_WORLD, 2, PETSC_NULL, tmpVec, &petscSolVec);
+
+    VecAssemblyBegin(petscRhsVec1);
+    VecAssemblyEnd(petscRhsVec1);
+      
+    VecAssemblyBegin(petscRhsVec2);
+    VecAssemblyEnd(petscRhsVec2);
+
+    VecAssemblyBegin(petscRhsVec);
+    VecAssemblyEnd(petscRhsVec);
   }
 
 
@@ -711,23 +859,138 @@ namespace AMDiS {
     PC pc;
     KSPCreate(PETSC_COMM_WORLD, &solver);
     KSPGetPC(solver, &pc);
-    PCSetType(pc, PCFIELDSPLIT);
-      
-    IS interiorIs;
-    ISCreateGeneral(PETSC_COMM_WORLD, interiorDofs.size(), &(interiorDofs[0]), 
-		    PETSC_COPY_VALUES, &interiorIs);
-    PCFieldSplitSetIS(pc, "interior", interiorIs);
-      
-    IS boundaryIs;
-    ISCreateGeneral(PETSC_COMM_WORLD, boundaryDofs.size(), &(boundaryDofs[0]), 
-		    PETSC_COPY_VALUES, &boundaryIs);
-    PCFieldSplitSetIS(pc, "boundary", boundaryIs);
-     
-     
-    //    KSPSetOperators(solver, petscMatrix, petscMatrix, SAME_NONZERO_PATTERN); 
+    //    PCSetType(pc, PCFIELDSPLIT);
+    const PetscInt interiorField[1] = {0};
+    const PetscInt boundaryField[1] = {1};
+    PCFieldSplitSetFields(pc, "interior", 1, interiorField);
+    PCFieldSplitSetFields(pc, "boundary", 1, boundaryField);
+
+
+    KSPSetOperators(solver, petscMatrix, petscMatrix, SAME_NONZERO_PATTERN); 
     KSPSetTolerances(solver, 0.0, 1e-8, PETSC_DEFAULT, PETSC_DEFAULT);
     KSPSetFromOptions(solver);
     PCSetFromOptions(pc);
+
+    KSPSolve(solver, petscRhsVec, petscSolVec);
+
+    KSPDestroy(solver);
+
+    VecDestroy(petscRhsVec1);
+    VecDestroy(petscRhsVec2);    
+    VecDestroy(petscRhsVec);
+
+    VecDestroy(petscSolVec1);
+    VecDestroy(petscSolVec2);    
+    VecDestroy(petscSolVec);
+
+    MatDestroy(matA11);
+    MatDestroy(matA12);
+    MatDestroy(matA21);
+    MatDestroy(matA22);
+    MatDestroy(petscMatrix);
+
+    MSG("SOLVED IT!\n");
+    exit(0);
+  }
+  
+
+  void PetscSolverSchur::setDofMatrix(DOFMatrix* mat, int dispMult, 
+				      int dispAddRow, int dispAddCol)
+  {
+    FUNCNAME("PetscSolverSchur::setDofMatrix()");
+
+    TEST_EXIT(mat)("No DOFMatrix!\n");
+
+    using mtl::tag::row; using mtl::tag::nz; using mtl::begin; using mtl::end;
+    namespace traits= mtl::traits;
+    typedef DOFMatrix::base_matrix_type Matrix;
+
+    traits::col<Matrix>::type col(mat->getBaseMatrix());
+    traits::const_value<Matrix>::type value(mat->getBaseMatrix());
+
+    typedef traits::range_generator<row, Matrix>::type cursor_type;
+    typedef traits::range_generator<nz, cursor_type>::type icursor_type;
+
+    vector<int> colsBoundary, colsInterior;
+    vector<double> valuesBoundary, valuesInterior;
+    colsBoundary.reserve(300);
+    colsInterior.reserve(300);
+    valuesBoundary.reserve(300);
+    valuesInterior.reserve(300);
+
+    for (cursor_type cursor = begin<row>(mat->getBaseMatrix()), 
+	   cend = end<row>(mat->getBaseMatrix()); cursor != cend; ++cursor) {
+
+      // Global index of the current row DOF.
+      int globalRowDof = meshDistributor->mapLocalToGlobal(*cursor);
+     
+      colsBoundary.clear();
+      colsInterior.clear();
+      valuesBoundary.clear();
+      valuesInterior.clear();
+      
+      for (icursor_type icursor = begin<nz>(cursor), icend = end<nz>(cursor); 
+	   icursor != icend; ++icursor) {
+	int globalColDof = meshDistributor->mapLocalToGlobal(col(*icursor));
+
+	if (boundaryDofs.count(globalColDof)) {
+	  int colIndex = 
+	    mapGlobalBoundaryDof[globalColDof] * dispMult + dispAddCol;
+	  
+	  colsBoundary.push_back(colIndex);
+	  valuesBoundary.push_back(value(*icursor));
+	} else {
+	  int colIndex = 
+	    mapGlobalInteriorDof[globalColDof] * dispMult + dispAddCol;
+
+	  colsInterior.push_back(colIndex);
+	  valuesInterior.push_back(value(*icursor));
+	}
+      }
+
+      if (boundaryDofs.count(globalRowDof)) {
+	int rowIndex = 
+	  mapGlobalBoundaryDof[globalRowDof] * dispMult + dispAddRow;
+
+ 	MatSetValues(matA22, 1, &rowIndex, colsBoundary.size(), 
+ 		     &(colsBoundary[0]), &(valuesBoundary[0]), ADD_VALUES);
+ 	MatSetValues(matA21, 1, &rowIndex, colsInterior.size(), 
+ 		     &(colsInterior[0]), &(valuesInterior[0]), ADD_VALUES);
+      } else {
+	int rowIndex = 
+	  mapGlobalInteriorDof[globalRowDof] * dispMult + dispAddRow;
+
+  	MatSetValues(matA11, 1, &rowIndex, colsInterior.size(), 
+  		     &(colsInterior[0]), &(valuesInterior[0]), ADD_VALUES);
+  	MatSetValues(matA12, 1, &rowIndex, colsBoundary.size(), 
+  		     &(colsBoundary[0]), &(valuesBoundary[0]), ADD_VALUES);
+      }
+    }
+  }
+
+
+  void PetscSolverSchur::setDofVector(Vec& petscVec, DOFVector<double>* vec,
+				      int dispMult, int dispAdd, bool rankOnly)
+  {
+    DOFVector<double>::Iterator dofIt(vec, USED_DOFS);
+    for (dofIt.reset(); !dofIt.end(); ++dofIt) {
+      if (rankOnly && !meshDistributor->getIsRankDof(dofIt.getDOFIndex()))
+	continue;
+
+      // Calculate global row index of the DOF.
+      DegreeOfFreedom globalRowDof = 
+	meshDistributor->mapLocalToGlobal(dofIt.getDOFIndex());
+
+      double value = *dofIt;
+
+      if (boundaryDofs.count(globalRowDof)) {
+	int index = mapGlobalBoundaryDof[globalRowDof] * dispMult + dispAdd;
+	VecSetValues(petscRhsVec2, 1, &index, &value, ADD_VALUES);
+      } else {
+	int index = mapGlobalInteriorDof[globalRowDof] * dispMult + dispAdd;
+	VecSetValues(petscRhsVec1, 1, &index, &value, ADD_VALUES);
+      }
+    }
   }
 #endif
 
diff --git a/AMDiS/src/parallel/PetscSolver.h b/AMDiS/src/parallel/PetscSolver.h
index e5492b4e..ca0cdec5 100644
--- a/AMDiS/src/parallel/PetscSolver.h
+++ b/AMDiS/src/parallel/PetscSolver.h
@@ -23,15 +23,19 @@
 #ifndef AMDIS_PETSC_SOLVER_H
 #define AMDIS_PETSC_SOLVER_H
 
+#include <set>
+#include <map>
+#include <mpi.h>
+
 #include "AMDiS_fwd.h"
 #include "Global.h"
 #include "Parameters.h"
 #include "DOFMatrix.h"
 #include "parallel/MeshDistributor.h"
 
-#include "petsc.h"
-#include "petscsys.h"
-#include "petscao.h"
+#include <petsc.h>
+#include <petscsys.h>
+#include <petscao.h>
 
 namespace AMDiS {
 
@@ -40,12 +44,17 @@ namespace AMDiS {
   class PetscSolver
   {
   public:
-    PetscSolver(MeshDistributor *m)
-      : meshDistributor(m)
+    PetscSolver()
+      : meshDistributor(NULL)
     {}
 
     virtual ~PetscSolver() {}
 
+    void setMeshDistributor(MeshDistributor *m)
+    {
+      meshDistributor = m;
+    }
+
     /** \brief
      * Create a PETSc matrix and PETSc vectors. The given DOF matrices are used to
      * create the nnz structure of the PETSc matrix and the values are transfered to it.
@@ -67,8 +76,8 @@ namespace AMDiS {
   class PetscSolverGlobalMatrix : public PetscSolver
   {
   public:
-    PetscSolverGlobalMatrix(MeshDistributor *m)
-      : PetscSolver(m),
+    PetscSolverGlobalMatrix()
+      : PetscSolver(),
 	d_nnz(NULL),
 	o_nnz(NULL),
 	lastMeshNnz(0),
@@ -82,14 +91,14 @@ namespace AMDiS {
     void solvePetscMatrix(SystemVector &vec, AdaptInfo *adaptInfo);    
 
   protected:
-    /// Creates a new non zero pattern structure for the Petsc matrix. 
+    /// Creates a new non-zero pattern structure for the PETSc matrix.
     void createPetscNnzStructure(Matrix<DOFMatrix*> *mat);
     
-    /// Takes a dof matrix and sends the values to the global petsc matrix.
+    /// Takes a DOF matrix and sends the values to the global PETSc matrix.
     void setDofMatrix(DOFMatrix* mat, int dispMult = 1, 
 		      int dispAddRow = 0, int dispAddCol = 0);
 
-    /// Takes a dof vector and sends its values to a given petsc vector.
+    /// Takes a DOF vector and sends its values to a given PETSc vector.
     void setDofVector(Vec& petscVec, DOFVector<double>* vec, 
 		      int disMult = 1, int dispAdd = 0, bool rankOnly = false);
 
@@ -98,7 +107,7 @@ namespace AMDiS {
     Mat petscMatrix;
 
     /** \brief
-     * Petsc's vector structures for the rhs vector, the solution vector and a
+     * PETSc's vector structures for the rhs vector, the solution vector and a
      * temporary vector for calculating the final residuum.
      */
     Vec petscRhsVec, petscSolVec, petscTmpVec;
@@ -122,8 +131,8 @@ namespace AMDiS {
   class PetscSolverSchur : public PetscSolver
   {
   public:
-    PetscSolverSchur(MeshDistributor *m)
-      : PetscSolver(m)
+    PetscSolverSchur() 
+      : PetscSolver()
     {}
 
     void fillPetscMatrix(Matrix<DOFMatrix*> *mat, SystemVector *vec);
@@ -131,9 +140,46 @@ namespace AMDiS {
     void solvePetscMatrix(SystemVector &vec, AdaptInfo *adaptInfo);
     
   protected:
-    vector<DegreeOfFreedom> boundaryDofs;
+    void updateDofData();
+
+    /// Takes a DOF matrix and sends the values to the global PETSc matrix.
+    void setDofMatrix(DOFMatrix* mat, int dispMult = 1, 
+		      int dispAddRow = 0, int dispAddCol = 0);
+
+    /// Takes a DOF vector and sends its values to a given PETSc vector.
+    void setDofVector(Vec& petscVec, DOFVector<double>* vec, 
+		      int disMult = 1, int dispAdd = 0, bool rankOnly = false);
+
+  protected:
+    int nBoundaryDofs;
+
+    int rStartBoundaryDofs;
+
+    int nOverallBoundaryDofs;
+
+    std::set<DegreeOfFreedom> boundaryDofs;
+
+    map<DegreeOfFreedom, DegreeOfFreedom> mapGlobalBoundaryDof;
+
+    int nInteriorDofs;
+
+    int rStartInteriorDofs;
+
+    int nOverallInteriorDofs;
+
+    std::set<DegreeOfFreedom> interiorDofs;    
+
+    map<DegreeOfFreedom, DegreeOfFreedom> mapGlobalInteriorDof;
+
+    Mat matA11, matA12, matA21, matA22;
+
+    Mat petscMatrix;
+
+    Vec petscRhsVec1, petscRhsVec2;
+
+    Vec petscSolVec1, petscSolVec2;
 
-    vector<DegreeOfFreedom> interiorDofs;
+    Vec petscRhsVec, petscSolVec;
   };
 #endif
 
-- 
GitLab