From 713cf136f2d380a225ac336df097d88ce4a18431 Mon Sep 17 00:00:00 2001
From: Thomas Witkowski <thomas.witkowski@gmx.de>
Date: Fri, 8 Apr 2011 10:48:04 +0000
Subject: [PATCH] Split PETSc solver.
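
Move the PetscSolverGlobalMatrix and PetscSolverSchur implementations out of
PetscSolver.cc/.h into dedicated source and header files and register them in
the CMake and automake build systems. Factor the creation of the boundary DOF
send/receive lists out into the new method MeshDistributor::createBoundaryDofs(),
which can optionally sort boundary DOFs by sub-object, and let a solver announce
its boundary DOF requirements to the mesh distributor through the new virtual
PetscSolver::getBoundaryDofRequirement().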

---
 AMDiS/CMakeLists.txt                          |    2 +
 AMDiS/libtool                                 |    6 +-
 AMDiS/src/Makefile.am                         |    4 +
 AMDiS/src/Makefile.in                         |   25 +-
 AMDiS/src/parallel/MeshDistributor.cc         |   80 +-
 AMDiS/src/parallel/MeshDistributor.h          |   23 +
 AMDiS/src/parallel/ParallelProblemStatBase.h  |    3 +-
 AMDiS/src/parallel/PetscProblemStat.h         |    9 +
 AMDiS/src/parallel/PetscSolver.cc             | 1006 -----------------
 AMDiS/src/parallel/PetscSolver.h              |   98 +-
 AMDiS/src/parallel/PetscSolverGlobalMatrix.cc |  640 +++++++++++
 AMDiS/src/parallel/PetscSolverGlobalMatrix.h  |   82 ++
 AMDiS/src/parallel/PetscSolverSchur.cc        |  425 +++++++
 AMDiS/src/parallel/PetscSolverSchur.h         |   90 ++
 14 files changed, 1367 insertions(+), 1126 deletions(-)
 create mode 100644 AMDiS/src/parallel/PetscSolverGlobalMatrix.cc
 create mode 100644 AMDiS/src/parallel/PetscSolverGlobalMatrix.h
 create mode 100644 AMDiS/src/parallel/PetscSolverSchur.cc
 create mode 100644 AMDiS/src/parallel/PetscSolverSchur.h

diff --git a/AMDiS/CMakeLists.txt b/AMDiS/CMakeLists.txt
index 73782538..c1c50462 100644
--- a/AMDiS/CMakeLists.txt
+++ b/AMDiS/CMakeLists.txt
@@ -206,6 +206,8 @@ if(ENABLE_PARALLEL_DOMAIN)
                 ${SOURCE_DIR}/parallel/ParMetisPartitioner.cc
 		${SOURCE_DIR}/parallel/PetscProblemStat.cc
 		${SOURCE_DIR}/parallel/PetscSolver.cc
+		${SOURCE_DIR}/parallel/PetscSolverGlobalMatrix.cc
+		${SOURCE_DIR}/parallel/PetscSolverSchur.cc
 		${SOURCE_DIR}/parallel/StdMpi.cc
 		${SOURCE_DIR}/parallel/ZoltanPartitioner.cc)
 	SET(COMPILEFLAGS "${COMPILEFLAGS} -DHAVE_PARALLEL_DOMAIN_AMDIS=1")
diff --git a/AMDiS/libtool b/AMDiS/libtool
index 6817f526..20ca6d75 100755
--- a/AMDiS/libtool
+++ b/AMDiS/libtool
@@ -44,7 +44,7 @@ available_tags=" CXX F77"
 
 # ### BEGIN LIBTOOL CONFIG
 
-# Libtool was configured on host deimos104:
+# Libtool was configured on host deimos102:
 
 # Shell to use when invoking shell scripts.
 SHELL="/bin/sh"
@@ -6760,7 +6760,7 @@ build_old_libs=`case $build_libtool_libs in yes) $echo no;; *) $echo yes;; esac`
 # End:
 # ### BEGIN LIBTOOL TAG CONFIG: CXX
 
-# Libtool was configured on host deimos104:
+# Libtool was configured on host deimos102:
 
 # Shell to use when invoking shell scripts.
 SHELL="/bin/sh"
@@ -7065,7 +7065,7 @@ include_expsyms=""
 
 # ### BEGIN LIBTOOL TAG CONFIG: F77
 
-# Libtool was configured on host deimos104:
+# Libtool was configured on host deimos102:
 
 # Shell to use when invoking shell scripts.
 SHELL="/bin/sh"
diff --git a/AMDiS/src/Makefile.am b/AMDiS/src/Makefile.am
index 71ede176..ef7e2c32 100644
--- a/AMDiS/src/Makefile.am
+++ b/AMDiS/src/Makefile.am
@@ -23,6 +23,8 @@ if USE_PARALLEL_DOMAIN_AMDIS
   parallel/ParMetisPartitioner.cc \
   parallel/PetscProblemStat.cc \
   parallel/PetscSolver.cc \
+  parallel/PetscSolverGlobalMatrix.cc \
+  parallel/PetscSolverSchur.cc \
   parallel/StdMpi.cc
   libamdis_la_CXXFLAGS += -DHAVE_PARALLEL_DOMAIN_AMDIS=1
   AMDIS_INCLUDES += -I$(PARMETIS_DIR)
@@ -257,6 +259,8 @@ parallel/ParallelProblemStatBase.h \
 parallel/ParMetisPartitioner.h \
 parallel/PetscProblemStat.h \
 parallel/PetscSolver.h \
+parallel/PetscSolverGlobalMatrix.h \
+parallel/PetscSolverSchur.h \
 parallel/StdMpi.h \
 parallel/ZoltanPartitioner.h \
 reinit/BoundaryElementDist.h \
diff --git a/AMDiS/src/Makefile.in b/AMDiS/src/Makefile.in
index e72541b6..c0502928 100644
--- a/AMDiS/src/Makefile.in
+++ b/AMDiS/src/Makefile.in
@@ -49,6 +49,8 @@ host_triplet = @host@
 @USE_PARALLEL_DOMAIN_AMDIS_TRUE@  parallel/ParMetisPartitioner.cc \
 @USE_PARALLEL_DOMAIN_AMDIS_TRUE@  parallel/PetscProblemStat.cc \
 @USE_PARALLEL_DOMAIN_AMDIS_TRUE@  parallel/PetscSolver.cc \
+@USE_PARALLEL_DOMAIN_AMDIS_TRUE@  parallel/PetscSolverGlobalMatrix.cc \
+@USE_PARALLEL_DOMAIN_AMDIS_TRUE@  parallel/PetscSolverSchur.cc \
 @USE_PARALLEL_DOMAIN_AMDIS_TRUE@  parallel/StdMpi.cc
 
 @USE_PARALLEL_DOMAIN_AMDIS_TRUE@am__append_2 = -DHAVE_PARALLEL_DOMAIN_AMDIS=1
@@ -95,7 +97,8 @@ am__libamdis_la_SOURCES_DIST = parallel/CheckerPartitioner.cc \
 	parallel/MpiHelper.cc parallel/ParallelDebug.cc \
 	parallel/ParallelProblemStatBase.cc \
 	parallel/ParMetisPartitioner.cc parallel/PetscProblemStat.cc \
-	parallel/PetscSolver.cc parallel/StdMpi.cc \
+	parallel/PetscSolver.cc parallel/PetscSolverGlobalMatrix.cc \
+	parallel/PetscSolverSchur.cc parallel/StdMpi.cc \
 	parallel/ZoltanPartitioner.cc AdaptBase.cc AdaptInfo.cc \
 	AdaptInstationary.cc AdaptStationary.cc Assembler.cc \
 	BasisFunction.cc Boundary.cc BoundaryManager.cc Cholesky.cc \
@@ -142,6 +145,8 @@ am__libamdis_la_SOURCES_DIST = parallel/CheckerPartitioner.cc \
 @USE_PARALLEL_DOMAIN_AMDIS_TRUE@	libamdis_la-ParMetisPartitioner.lo \
 @USE_PARALLEL_DOMAIN_AMDIS_TRUE@	libamdis_la-PetscProblemStat.lo \
 @USE_PARALLEL_DOMAIN_AMDIS_TRUE@	libamdis_la-PetscSolver.lo \
+@USE_PARALLEL_DOMAIN_AMDIS_TRUE@	libamdis_la-PetscSolverGlobalMatrix.lo \
+@USE_PARALLEL_DOMAIN_AMDIS_TRUE@	libamdis_la-PetscSolverSchur.lo \
 @USE_PARALLEL_DOMAIN_AMDIS_TRUE@	libamdis_la-StdMpi.lo
 @USE_ZOLTAN_TRUE@am__objects_2 = libamdis_la-ZoltanPartitioner.lo
 am__objects_3 = $(am__objects_1) $(am__objects_2)
@@ -577,6 +582,8 @@ parallel/ParallelProblemStatBase.h \
 parallel/ParMetisPartitioner.h \
 parallel/PetscProblemStat.h \
 parallel/PetscSolver.h \
+parallel/PetscSolverGlobalMatrix.h \
+parallel/PetscSolverSchur.h \
 parallel/StdMpi.h \
 parallel/ZoltanPartitioner.h \
 reinit/BoundaryElementDist.h \
@@ -881,6 +888,8 @@ distclean-compile:
 @AMDEP_TRUE@@am__include@ @am__quote@./$(DEPDIR)/libamdis_la-PeriodicBC.Plo@am__quote@
 @AMDEP_TRUE@@am__include@ @am__quote@./$(DEPDIR)/libamdis_la-PetscProblemStat.Plo@am__quote@
 @AMDEP_TRUE@@am__include@ @am__quote@./$(DEPDIR)/libamdis_la-PetscSolver.Plo@am__quote@
+@AMDEP_TRUE@@am__include@ @am__quote@./$(DEPDIR)/libamdis_la-PetscSolverGlobalMatrix.Plo@am__quote@
+@AMDEP_TRUE@@am__include@ @am__quote@./$(DEPDIR)/libamdis_la-PetscSolverSchur.Plo@am__quote@
 @AMDEP_TRUE@@am__include@ @am__quote@./$(DEPDIR)/libamdis_la-PngWriter.Plo@am__quote@
 @AMDEP_TRUE@@am__include@ @am__quote@./$(DEPDIR)/libamdis_la-PovrayWriter.Plo@am__quote@
 @AMDEP_TRUE@@am__include@ @am__quote@./$(DEPDIR)/libamdis_la-ProblemImplicit.Plo@am__quote@
@@ -1048,6 +1057,20 @@ libamdis_la-PetscSolver.lo: parallel/PetscSolver.cc
 @AMDEP_TRUE@@am__fastdepCXX_FALSE@	DEPDIR=$(DEPDIR) $(CXXDEPMODE) $(depcomp) @AMDEPBACKSLASH@
 @am__fastdepCXX_FALSE@	$(LIBTOOL) --tag=CXX --mode=compile $(CXX) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(libamdis_la_CXXFLAGS) $(CXXFLAGS) -c -o libamdis_la-PetscSolver.lo `test -f 'parallel/PetscSolver.cc' || echo '$(srcdir)/'`parallel/PetscSolver.cc
 
+libamdis_la-PetscSolverGlobalMatrix.lo: parallel/PetscSolverGlobalMatrix.cc
+@am__fastdepCXX_TRUE@	if $(LIBTOOL) --tag=CXX --mode=compile $(CXX) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(libamdis_la_CXXFLAGS) $(CXXFLAGS) -MT libamdis_la-PetscSolverGlobalMatrix.lo -MD -MP -MF "$(DEPDIR)/libamdis_la-PetscSolverGlobalMatrix.Tpo" -c -o libamdis_la-PetscSolverGlobalMatrix.lo `test -f 'parallel/PetscSolverGlobalMatrix.cc' || echo '$(srcdir)/'`parallel/PetscSolverGlobalMatrix.cc; \
+@am__fastdepCXX_TRUE@	then mv -f "$(DEPDIR)/libamdis_la-PetscSolverGlobalMatrix.Tpo" "$(DEPDIR)/libamdis_la-PetscSolverGlobalMatrix.Plo"; else rm -f "$(DEPDIR)/libamdis_la-PetscSolverGlobalMatrix.Tpo"; exit 1; fi
+@AMDEP_TRUE@@am__fastdepCXX_FALSE@	source='parallel/PetscSolverGlobalMatrix.cc' object='libamdis_la-PetscSolverGlobalMatrix.lo' libtool=yes @AMDEPBACKSLASH@
+@AMDEP_TRUE@@am__fastdepCXX_FALSE@	DEPDIR=$(DEPDIR) $(CXXDEPMODE) $(depcomp) @AMDEPBACKSLASH@
+@am__fastdepCXX_FALSE@	$(LIBTOOL) --tag=CXX --mode=compile $(CXX) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(libamdis_la_CXXFLAGS) $(CXXFLAGS) -c -o libamdis_la-PetscSolverGlobalMatrix.lo `test -f 'parallel/PetscSolverGlobalMatrix.cc' || echo '$(srcdir)/'`parallel/PetscSolverGlobalMatrix.cc
+
+libamdis_la-PetscSolverSchur.lo: parallel/PetscSolverSchur.cc
+@am__fastdepCXX_TRUE@	if $(LIBTOOL) --tag=CXX --mode=compile $(CXX) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(libamdis_la_CXXFLAGS) $(CXXFLAGS) -MT libamdis_la-PetscSolverSchur.lo -MD -MP -MF "$(DEPDIR)/libamdis_la-PetscSolverSchur.Tpo" -c -o libamdis_la-PetscSolverSchur.lo `test -f 'parallel/PetscSolverSchur.cc' || echo '$(srcdir)/'`parallel/PetscSolverSchur.cc; \
+@am__fastdepCXX_TRUE@	then mv -f "$(DEPDIR)/libamdis_la-PetscSolverSchur.Tpo" "$(DEPDIR)/libamdis_la-PetscSolverSchur.Plo"; else rm -f "$(DEPDIR)/libamdis_la-PetscSolverSchur.Tpo"; exit 1; fi
+@AMDEP_TRUE@@am__fastdepCXX_FALSE@	source='parallel/PetscSolverSchur.cc' object='libamdis_la-PetscSolverSchur.lo' libtool=yes @AMDEPBACKSLASH@
+@AMDEP_TRUE@@am__fastdepCXX_FALSE@	DEPDIR=$(DEPDIR) $(CXXDEPMODE) $(depcomp) @AMDEPBACKSLASH@
+@am__fastdepCXX_FALSE@	$(LIBTOOL) --tag=CXX --mode=compile $(CXX) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(libamdis_la_CXXFLAGS) $(CXXFLAGS) -c -o libamdis_la-PetscSolverSchur.lo `test -f 'parallel/PetscSolverSchur.cc' || echo '$(srcdir)/'`parallel/PetscSolverSchur.cc
+
 libamdis_la-StdMpi.lo: parallel/StdMpi.cc
 @am__fastdepCXX_TRUE@	if $(LIBTOOL) --tag=CXX --mode=compile $(CXX) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(libamdis_la_CXXFLAGS) $(CXXFLAGS) -MT libamdis_la-StdMpi.lo -MD -MP -MF "$(DEPDIR)/libamdis_la-StdMpi.Tpo" -c -o libamdis_la-StdMpi.lo `test -f 'parallel/StdMpi.cc' || echo '$(srcdir)/'`parallel/StdMpi.cc; \
 @am__fastdepCXX_TRUE@	then mv -f "$(DEPDIR)/libamdis_la-StdMpi.Tpo" "$(DEPDIR)/libamdis_la-StdMpi.Plo"; else rm -f "$(DEPDIR)/libamdis_la-StdMpi.Tpo"; exit 1; fi
diff --git a/AMDiS/src/parallel/MeshDistributor.cc b/AMDiS/src/parallel/MeshDistributor.cc
index eb1ca3d7..55d132b7 100644
--- a/AMDiS/src/parallel/MeshDistributor.cc
+++ b/AMDiS/src/parallel/MeshDistributor.cc
@@ -55,6 +55,9 @@ namespace AMDiS {
   using namespace boost::filesystem;
   using namespace std;
 
+  const Flag MeshDistributor::BOUNDARY_SUBOBJ_SORTED     = 0X01L;
+  const Flag MeshDistributor::BOUNDARY_EDGE_SCHUR        = 0X02L;
+
   inline bool cmpDofsByValue(const DegreeOfFreedom* dof1, const DegreeOfFreedom* dof2)
   {
     return (*dof1 < *dof2);
@@ -80,7 +83,8 @@ namespace AMDiS {
       nMeshChangesAfterLastRepartitioning(0),
       repartitioningCounter(0),
       debugOutputDir(""),
-      lastMeshChangeIndex(0)
+      lastMeshChangeIndex(0),
+      createBoundaryDofFlag(0)
   {
     FUNCNAME("MeshDistributor::ParalleDomainBase()");
 
@@ -1637,6 +1641,50 @@ namespace AMDiS {
   }
 
 
+  void MeshDistributor::createBoundaryDofs()
+  {
+    FUNCNAME("MeshDistributor::createBoundaryDofs()");
+
+    sendDofs.clear();
+    recvDofs.clear();
+
+    if (createBoundaryDofFlag.isSet(BOUNDARY_SUBOBJ_SORTED)) {
+      MSG("WITH BOUNDARY SUBOBJ SORTED!\n");
+      DofContainer dofs;
+
+      for (int geo = FACE; geo >= VERTEX; geo--) {
+	boundaryDofInfo.nGeoDofs[static_cast<GeoIndex>(geo)] = 0;
+
+	for (InteriorBoundary::iterator it(myIntBoundary); !it.end(); ++it) {
+	  if (it->rankObj.subObj == geo) {
+	    dofs.clear();
+	    it->rankObj.el->getAllDofs(feSpace, it->rankObj, dofs);
+	   
+	    boundaryDofInfo.nGeoDofs[static_cast<GeoIndex>(geo)] += dofs.size();
+	    DofContainer &tmp = sendDofs[it.getRank()];
+	    tmp.insert(tmp.end(), dofs.begin(), dofs.end());
+	  }
+	}
+      }
+
+      for (int geo = FACE; geo >= VERTEX; geo--)
+	for (InteriorBoundary::iterator it(otherIntBoundary); !it.end(); ++it)
+	  if (it->rankObj.subObj == geo) 
+	    it->rankObj.el->getAllDofs(feSpace, it->rankObj, 
+				       recvDofs[it.getRank()]);
+    } else {
+      MSG("GAAAAAAANZ NORMAL!\n");
+      for (InteriorBoundary::iterator it(myIntBoundary); !it.end(); ++it)
+	it->rankObj.el->getAllDofs(feSpace, it->rankObj, 
+				   sendDofs[it.getRank()]);
+      
+      for (InteriorBoundary::iterator it(otherIntBoundary); !it.end(); ++it)
+	it->rankObj.el->getAllDofs(feSpace, it->rankObj, 
+				   recvDofs[it.getRank()]);
+    }
+  }
+
+
   void MeshDistributor::removeMacroElements()
   {
     FUNCNAME("MeshDistributor::removeMacroElements()");
@@ -1662,9 +1710,6 @@ namespace AMDiS {
     debug::createSortedDofs(mesh, elMap);   
 #endif
 
-    sendDofs.clear();
-    recvDofs.clear();
-
 
     // === Get all DOFs in ranks partition. ===
 
@@ -1677,31 +1722,22 @@ namespace AMDiS {
 
 
     // === Traverse interior boundaries and get all DOFs on them. ===
+    createBoundaryDofs();
 
-    for (InteriorBoundary::iterator it(myIntBoundary); !it.end(); ++it) {
-      DofContainer dofs;	
-      it->rankObj.el->getAllDofs(feSpace, it->rankObj, dofs);
-
-      for (unsigned int i = 0; i < dofs.size(); i++)
-	sendDofs[it.getRank()].push_back(dofs[i]);
-    }
-
-
-    for (InteriorBoundary::iterator it(otherIntBoundary); !it.end(); ++it) {
-      DofContainer dofs;
-      it->rankObj.el->getAllDofs(feSpace, it->rankObj, dofs);
-
-      for (unsigned int i = 0; i < dofs.size(); i++) {
+    // All DOFs that must be received are not owned by this rank and must
+    // therefore be removed from the set 'rankDofs'.
+    for (RankToDofContainer::iterator recvIt = recvDofs.begin();
+	 recvIt != recvDofs.end(); ++recvIt) {
+      for (DofContainer::iterator dofIt = recvIt->second.begin();
+	   dofIt != recvIt->second.end(); ++dofIt) {
 	DofContainer::iterator eraseIt = 
-	  find(rankDofs.begin(), rankDofs.end(), dofs[i]);
+	  find(rankDofs.begin(), rankDofs.end(), *dofIt);
 	if (eraseIt != rankDofs.end()) 
 	  rankDofs.erase(eraseIt);
-
-	recvDofs[it.getRank()].push_back(dofs[i]);
       }
     }
 
-
+
     // Get displacment for global rank DOF ordering and global DOF number.
     nRankDofs = rankDofs.size();
     mpi::getDofNumbering(mpiComm, nRankDofs, rstart, nOverallDofs);
diff --git a/AMDiS/src/parallel/MeshDistributor.h b/AMDiS/src/parallel/MeshDistributor.h
index 07b16877..86ec876c 100644
--- a/AMDiS/src/parallel/MeshDistributor.h
+++ b/AMDiS/src/parallel/MeshDistributor.h
@@ -44,7 +44,14 @@
 namespace AMDiS {
 
   using namespace std;
+
+
+  struct BoundaryDofInfo
+  {
+    map<GeoIndex, int> nGeoDofs;
+  };
   
+
   class MeshDistributor
   {
   protected:
@@ -293,6 +300,11 @@ namespace AMDiS {
 
     void check3dValidMesh();
 
+    void setBoundaryDofRequirement(Flag flag)
+    {
+      createBoundaryDofFlag = flag;
+    }
+
   protected:
     /** \brief
      * Determines the interior boundaries, i.e. boundaries between ranks, and stores
@@ -306,6 +318,8 @@ namespace AMDiS {
 
     void createBoundaryData();
 
+    void createBoundaryDofs();
+
     /// Removes all macro elements from the mesh that are not part of ranks partition.
     void removeMacroElements();
 
@@ -599,6 +613,15 @@ namespace AMDiS {
     /// redistributed for the first time.
     vector<MacroElement*> allMacroElements;
 
+    Flag createBoundaryDofFlag;
+
+    BoundaryDofInfo boundaryDofInfo;
+  public:
+    /// If set, createBoundaryDofs() creates the boundary DOFs sorted by their sub-objects (faces, edges, vertices).
+    static const Flag BOUNDARY_SUBOBJ_SORTED;
+
+    static const Flag BOUNDARY_EDGE_SCHUR;
+
     friend class ParallelDebug;
   };
 }
diff --git a/AMDiS/src/parallel/ParallelProblemStatBase.h b/AMDiS/src/parallel/ParallelProblemStatBase.h
index aaac9bb2..a2d62868 100644
--- a/AMDiS/src/parallel/ParallelProblemStatBase.h
+++ b/AMDiS/src/parallel/ParallelProblemStatBase.h
@@ -37,12 +37,13 @@ namespace AMDiS {
 	meshDistributor(NULL)
     {}
 
+    virtual ~ParallelProblemStatBase() {}
 
     void buildAfterCoarsen(AdaptInfo *adaptInfo, Flag flag,
 			   bool assembleMatrix,
 			   bool assembleVector);
 
-    void addToMeshDistributor(MeshDistributor& m);
+    virtual void addToMeshDistributor(MeshDistributor& m);
 
   protected:
     void processMemUsage(double& vm_usage, double& resident_set);
diff --git a/AMDiS/src/parallel/PetscProblemStat.h b/AMDiS/src/parallel/PetscProblemStat.h
index 71d8e9e4..e799d12c 100644
--- a/AMDiS/src/parallel/PetscProblemStat.h
+++ b/AMDiS/src/parallel/PetscProblemStat.h
@@ -27,6 +27,8 @@
 #include "Global.h"
 #include "parallel/ParallelProblemStatBase.h"
 #include "parallel/PetscSolver.h"
+#include "parallel/PetscSolverGlobalMatrix.h"
+#include "parallel/PetscSolverSchur.h"
 
 namespace AMDiS {
   
@@ -58,6 +60,13 @@ namespace AMDiS {
       delete petscSolver;
     }
 
+    void addToMeshDistributor(MeshDistributor& m)
+    {
+      ParallelProblemStatBase::addToMeshDistributor(m);
+
+      meshDistributor->setBoundaryDofRequirement(petscSolver->getBoundaryDofRequirement());
+    }
+
     void solve(AdaptInfo *adaptInfo, bool fixedMatrix = false);
 
   protected:
diff --git a/AMDiS/src/parallel/PetscSolver.cc b/AMDiS/src/parallel/PetscSolver.cc
index f56b300b..6751f1c9 100644
--- a/AMDiS/src/parallel/PetscSolver.cc
+++ b/AMDiS/src/parallel/PetscSolver.cc
@@ -18,15 +18,6 @@ namespace AMDiS {
 
   using namespace std;
 
-  PetscErrorCode myKSPMonitor(KSP ksp, PetscInt iter, PetscReal rnorm, void *)
-  {    
-    if (iter % 100 == 0 && MPI::COMM_WORLD.Get_rank() == 0)
-      cout << "[0]  Petsc-Iteration " << iter << ": " << rnorm << endl;
-
-    return 0;
-  }
-
-
   void PetscSolver::printSolutionInfo(AdaptInfo *adaptInfo,
 				      bool iterationCounter, 
 				      bool residual)
@@ -49,1001 +40,4 @@ namespace AMDiS {
     }
   }
 
-
-  void PetscSolverGlobalMatrix::fillPetscMatrix(Matrix<DOFMatrix*> *mat, SystemVector *vec)
-  {
-    FUNCNAME("PetscSolverGlobalMatrix::fillPetscMatrix()");
-
-    TEST_EXIT_DBG(meshDistributor)("No mesh distributor object defined!\n");
-    TEST_EXIT_DBG(mat)("No DOF matrix defined!\n");
-    TEST_EXIT_DBG(vec)("NO DOF vector defined!\n");
-
-    double wtime = MPI::Wtime();
-    int nComponents = mat->getNumRows();
-    int nRankRows = meshDistributor->getNumberRankDofs() * nComponents;
-    int nOverallRows = meshDistributor->getNumberOverallDofs() * nComponents;
-
-    // === Create PETSc vector (rhs, solution and a temporary vector). ===
-
-    VecCreate(PETSC_COMM_WORLD, &petscRhsVec);
-    VecSetSizes(petscRhsVec, nRankRows, nOverallRows);
-    VecSetType(petscRhsVec, VECMPI);
-
-    VecCreate(PETSC_COMM_WORLD, &petscSolVec);
-    VecSetSizes(petscSolVec, nRankRows, nOverallRows);
-    VecSetType(petscSolVec, VECMPI);
-
-    VecCreate(PETSC_COMM_WORLD, &petscTmpVec);
-    VecSetSizes(petscTmpVec, nRankRows, nOverallRows);
-    VecSetType(petscTmpVec, VECMPI);
-
-    int recvAllValues = 0;
-    int sendValue = static_cast<int>(meshDistributor->getLastMeshChangeIndex() != lastMeshNnz);
-    meshDistributor->getMpiComm().Allreduce(&sendValue, &recvAllValues, 1, MPI_INT, MPI_SUM);
-
-    if (!d_nnz || recvAllValues != 0) {
-      if (d_nnz) {
-	delete [] d_nnz;
-	d_nnz = NULL;
-	delete [] o_nnz;
-	o_nnz = NULL;
-      }
-
-      createPetscNnzStructure(mat);
-      lastMeshNnz = meshDistributor->getLastMeshChangeIndex();
-    }
-
-
-    // === Create PETSc matrix with the computed nnz data structure. ===
-
-    MatCreateMPIAIJ(PETSC_COMM_WORLD, nRankRows, nRankRows, nOverallRows, nOverallRows,
-		    0, d_nnz, 0, o_nnz, &petscMatrix);
-    
-#if (DEBUG != 0)
-    MSG("Fill petsc matrix 1 needed %.5f seconds\n", MPI::Wtime() - wtime);
-#endif
-
-#if (DEBUG != 0)
-    int a, b;
-    MatGetOwnershipRange(petscMatrix, &a, &b);
-    TEST_EXIT(a == meshDistributor->getRstart() * nComponents)
-      ("Wrong matrix ownership range!\n");
-    TEST_EXIT(b == meshDistributor->getRstart() * nComponents + nRankRows)
-      ("Wrong matrix ownership range!\n");
-#endif
-
-
-    // === Transfer values from DOF matrices to the PETSc matrix. === 
-
-    for (int i = 0; i < nComponents; i++)
-      for (int j = 0; j < nComponents; j++)
-	if ((*mat)[i][j])
-	  setDofMatrix((*mat)[i][j], nComponents, i, j);
-
-#if (DEBUG != 0)
-    MSG("Fill petsc matrix 2 needed %.5f seconds\n", MPI::Wtime() - wtime);
-#endif
-
-    MatAssemblyBegin(petscMatrix, MAT_FINAL_ASSEMBLY);
-    MatAssemblyEnd(petscMatrix, MAT_FINAL_ASSEMBLY);
-
-#if (DEBUG != 0)
-    MSG("Fill petsc matrix 3 needed %.5f seconds\n", 
-	TIME_USED(MPI::Wtime(), wtime));
-#endif
-
-    // === Transfer values from DOF vector to the PETSc vector. === 
-
-    for (int i = 0; i < nComponents; i++)
-      setDofVector(petscRhsVec, vec->getDOFVector(i), nComponents, i);
-
-    VecAssemblyBegin(petscRhsVec);
-    VecAssemblyEnd(petscRhsVec);
-
-    MSG("Fill petsc matrix needed %.5f seconds\n", MPI::Wtime() - wtime);
-  }
-
-
-  void PetscSolverGlobalMatrix::solvePetscMatrix(SystemVector &vec, AdaptInfo *adaptInfo)
-  {
-    FUNCNAME("PetscSolverGlobalMatrix::solvePetscMatrix()");
-
-    int nComponents = vec.getSize();
-
-    // === Set old solution to be initiual guess for PETSc solver. ===
-    if (!zeroStartVector) {
-      VecSet(petscSolVec, 0.0);
-      
-      for (int i = 0; i < nComponents; i++)
-	setDofVector(petscSolVec, vec.getDOFVector(i), nComponents, i, true);
-      
-      VecAssemblyBegin(petscSolVec);
-      VecAssemblyEnd(petscSolVec);
-    }
-
-    // === Init PETSc solver. ===
-    KSPCreate(PETSC_COMM_WORLD, &solver);
-    KSPGetPC(solver, &pc);
-    KSPSetOperators(solver, petscMatrix, petscMatrix, SAME_NONZERO_PATTERN); 
-    KSPSetTolerances(solver, 0.0, 1e-8, PETSC_DEFAULT, PETSC_DEFAULT);
-    KSPSetType(solver, KSPBCGS);
-    KSPMonitorSet(solver, myKSPMonitor, PETSC_NULL, 0);
-    KSPSetFromOptions(solver);
-    PCSetFromOptions(pc);
-
-    // Do not delete the solution vector, use it for the initial guess.
-    if (!zeroStartVector)
-      KSPSetInitialGuessNonzero(solver, PETSC_TRUE);
-
-   
-    // PETSc.
-    KSPSolve(solver, petscRhsVec, petscSolVec);
-
-    // === Transfere values from PETSc's solution vectors to the DOF vectors. ===
-    int nRankDofs = meshDistributor->getNumberRankDofs();
-    PetscScalar *vecPointer;
-    VecGetArray(petscSolVec, &vecPointer);
-
-    for (int i = 0; i < nComponents; i++) {
-      DOFVector<double> &dofvec = *(vec.getDOFVector(i));
-      for (int j = 0; j < nRankDofs; j++)
-	dofvec[meshDistributor->mapLocalToDofIndex(j)] = 
-	  vecPointer[j * nComponents + i]; 
-    }
-
-    VecRestoreArray(petscSolVec, &vecPointer);
-
-
-    // === Synchronize DOFs at common DOFs, i.e., DOFs that correspond to ===
-    // === more than one partition.                                       ===
-    meshDistributor->synchVector(vec);
-
-
-    // Print iteration counter and residual norm of the solution.
-    printSolutionInfo(adaptInfo);
-
-
-    // === Destroy PETSc's variables. ===
-
-    MatDestroy(petscMatrix);
-    VecDestroy(petscRhsVec);
-    VecDestroy(petscSolVec);
-    VecDestroy(petscTmpVec);
-    KSPDestroy(solver);
-  }
-
-
-  void PetscSolverGlobalMatrix::setDofMatrix(DOFMatrix* mat, int dispMult, 
-				      int dispAddRow, int dispAddCol)
-  {
-    FUNCNAME("PetscSolverGlobalMatrix::setDofMatrix()");
-
-    TEST_EXIT(mat)("No DOFMatrix!\n");
-
-    using mtl::tag::row; using mtl::tag::nz; using mtl::begin; using mtl::end;
-    namespace traits= mtl::traits;
-    typedef DOFMatrix::base_matrix_type Matrix;
-
-    traits::col<Matrix>::type col(mat->getBaseMatrix());
-    traits::const_value<Matrix>::type value(mat->getBaseMatrix());
-
-    typedef traits::range_generator<row, Matrix>::type cursor_type;
-    typedef traits::range_generator<nz, cursor_type>::type icursor_type;
-
-    vector<int> cols;
-    vector<double> values;
-    cols.reserve(300);
-    values.reserve(300);
-    
-    vector<int> globalCols;
-
-
-    // === Traverse all rows of the dof matrix and insert row wise the values ===
-    // === to the PETSc matrix.                                               ===
-
-    for (cursor_type cursor = begin<row>(mat->getBaseMatrix()), 
-	   cend = end<row>(mat->getBaseMatrix()); cursor != cend; ++cursor) {
-
-      // Global index of the current row DOF.
-      int globalRowDof = meshDistributor->mapLocalToGlobal(*cursor);
-      // Test if the current row DOF is a periodic DOF.
-      bool periodicRow = meshDistributor->isPeriodicDof(globalRowDof);
-
-      if (!periodicRow) {
-	// === Row DOF index is not periodic. ===
-
-	// Calculate PETSc row index.
-
-	int rowIndex = globalRowDof * dispMult + dispAddRow;
-
-	cols.clear();
-	values.clear();
-
-	for (icursor_type icursor = begin<nz>(cursor), icend = end<nz>(cursor); 
-	     icursor != icend; ++icursor) {
-
-	  // Global index of the current column index.
-	  int globalColDof = meshDistributor->mapLocalToGlobal(col(*icursor));
-	  // Test if the current col dof is a periodic dof.
-	  bool periodicCol = meshDistributor->isPeriodicDof(globalColDof);
-	  // Calculate PETSc col index.
-	  int colIndex = globalColDof * dispMult + dispAddCol;
-
-	  // Ignore all zero entries, expect it is a diagonal entry.
- 	  if (value(*icursor) == 0.0 && rowIndex != colIndex)
- 	    continue;
-
-	  if (!periodicCol) {
-	    // Calculate the exact position of the column index in the PETSc matrix.
-	    cols.push_back(colIndex);
-	    values.push_back(value(*icursor));
-	  } else {
-	    // === Row index is not periodic, but column index is. ===
-
-	    // Create set of all periodic associations of the column index.
-	    std::set<int> perAsc;
-	    std::set<int>& perColAsc = 
-	      meshDistributor->getPerDofAssociations(globalColDof);
-	    for (std::set<int>::iterator it = perColAsc.begin(); 
-		 it != perColAsc.end(); ++it)
-	      if (*it >= -3)
-		perAsc.insert(*it);
-    
-	    // Scale value to the number of periodic associations of the column index.
-	    double scaledValue = 
-	      value(*icursor) * pow(0.5, static_cast<double>(perAsc.size()));
-
-	    
-	    // === Create set of all matrix column indices due to the periodic ===
-	    // === associations of the column DOF index.                       ===
-
-	    vector<int> newCols;
-
-	    // First, add the original matrix index.
-	    newCols.push_back(globalColDof);
-	    
-	    // And add all periodic matrix indices.
-	    for (std::set<int>::iterator it = perAsc.begin(); 
-		 it != perAsc.end(); ++it) {
-	      int nCols = static_cast<int>(newCols.size());
-
-	      for (int i = 0; i < nCols; i++) {
- 		TEST_EXIT_DBG(meshDistributor->isPeriodicDof(newCols[i], *it))
- 		  ("Wrong periodic DOF associations at boundary %d with DOF %d!\n",
-		   *it, newCols[i]);
-
-		newCols.push_back(meshDistributor->getPeriodicMapping(newCols[i], *it));
-	      }
-	    }
-
-	    for (unsigned int i = 0; i < newCols.size(); i++) {
-	      cols.push_back(newCols[i] * dispMult + dispAddCol);
-	      values.push_back(scaledValue);	      
-	    }
-	  }
-	}
-
-	MatSetValues(petscMatrix, 1, &rowIndex, cols.size(), 
-		     &(cols[0]), &(values[0]), ADD_VALUES);	
-      } else {
-	// === Row DOF index is periodic. ===
-
-	// Because this row is periodic, we will have to add the entries of this 
-	// matrix row to multiple rows. The following maps store to each row an
-	// array of column indices and values of the entries that must be added to
-	// the PETSc matrix.
-	map<int, vector<int> > colsMap;
-	map<int, vector<double> > valsMap;
-
-	// Traverse all column entries.
-	for (icursor_type icursor = begin<nz>(cursor), icend = end<nz>(cursor); 
-	     icursor != icend; ++icursor) {
-
-	  // Global index of the current column index.
-	  int globalColDof = meshDistributor->mapLocalToGlobal(col(*icursor));
-
-	  // Ignore all zero entries, expect it is a diagonal entry.
- 	  if (value(*icursor) == 0.0 && globalRowDof != globalColDof)
- 	    continue;
-
-
-	  // === Add all periodic associations of both, the row and the column ===
-	  // === indices to the set perAsc.                                    ===
-
-	  std::set<int> perAsc;
-
-	  if (meshDistributor->isPeriodicDof(globalColDof)) {
-	    std::set<int>& perColAsc = 
-	      meshDistributor->getPerDofAssociations(globalColDof);
-	    for (std::set<int>::iterator it = perColAsc.begin(); 
-		 it != perColAsc.end(); ++it)
-	      if (*it >= -3)
-		perAsc.insert(*it);
-	  }
-
-	  std::set<int>& perRowAsc = 
-	    meshDistributor->getPerDofAssociations(globalRowDof);
-	  for (std::set<int>::iterator it = perRowAsc.begin(); 
-	       it != perRowAsc.end(); ++it)
-	    if (*it >= -3)
-	      perAsc.insert(*it);
-
-	  // Scale the value with respect to the number of periodic associations.
-	  double scaledValue = 
-	    value(*icursor) * pow(0.5, static_cast<double>(perAsc.size()));
-
-
-	  // === Create all matrix entries with respect to the periodic  ===
-	  // === associations of the row and column indices.             ===
-
-	  vector<pair<int, int> > entry;
-	  
-	  // First, add the original entry.
-	  entry.push_back(make_pair(globalRowDof, globalColDof));
-
-	  // Then, traverse the periodic associations of the row and column indices
-	  // and create the corresponding entries.
-	  for (std::set<int>::iterator it = perAsc.begin(); it != perAsc.end(); ++it) {
-	    int nEntry = static_cast<int>(entry.size());
-	    for (int i = 0; i < nEntry; i++) {
-	      int perRowDof = 0;
-	      if (meshDistributor->getPeriodicMapping()[*it].count(entry[i].first))
-		perRowDof = meshDistributor->getPeriodicMapping(entry[i].first, *it);
-	      else
-		perRowDof = entry[i].first;
-
-	      int perColDof;
-	      if (meshDistributor->getPeriodicMapping()[*it].count(entry[i].second))
-		perColDof = meshDistributor->getPeriodicMapping(entry[i].second, *it);
-	      else
-		perColDof = entry[i].second;	      	      
-	      
-
-	      entry.push_back(make_pair(perRowDof, perColDof));
-	    }
-	  }
-
-
-	  // === Translate the matrix entries to PETSc's matrix.
-
-	  for (vector<pair<int, int> >::iterator eIt = entry.begin(); 
-	       eIt != entry.end(); ++eIt) {
-	    // Calculate row and column indices of the PETSc matrix.
-	    int rowIndex = eIt->first * dispMult + dispAddRow;
-	    int colIndex = eIt->second * dispMult + dispAddCol;
-
-	    colsMap[rowIndex].push_back(colIndex);
-	    valsMap[rowIndex].push_back(scaledValue);
-	  }
-	}
-
-
-	// === Finally, add all periodic rows to the PETSc matrix. ===
-
-	for (map<int, vector<int> >::iterator rowIt = colsMap.begin();
-	     rowIt != colsMap.end(); ++rowIt) {
-	  TEST_EXIT_DBG(rowIt->second.size() == valsMap[rowIt->first].size())
-	    ("Should not happen!\n");
-
-	  int rowIndex = rowIt->first;
-	  MatSetValues(petscMatrix, 1, &rowIndex, rowIt->second.size(),
-		       &(rowIt->second[0]), &(valsMap[rowIt->first][0]), ADD_VALUES);
-	}
-      }
-    }
-  }
-
-
-  void PetscSolverGlobalMatrix::setDofVector(Vec& petscVec, DOFVector<double>* vec, 
-				      int dispMult, int dispAdd, bool rankOnly)
-  {
-    FUNCNAME("PetscSolverGlobalMatrix::setDofVector()");
-
-    // Traverse all used DOFs in the dof vector.
-    DOFVector<double>::Iterator dofIt(vec, USED_DOFS);
-    for (dofIt.reset(); !dofIt.end(); ++dofIt) {
-      if (rankOnly && !meshDistributor->getIsRankDof(dofIt.getDOFIndex()))
-	continue;
-
-      // Calculate global row index of the DOF.
-      DegreeOfFreedom globalRowDof = 
-	meshDistributor->mapLocalToGlobal(dofIt.getDOFIndex());
-      // Calculate PETSc index of the row DOF.
-      int index = globalRowDof * dispMult + dispAdd;
-
-      if (meshDistributor->isPeriodicDof(globalRowDof)) {
-	std::set<int>& perAsc = meshDistributor->getPerDofAssociations(globalRowDof);
-	double value = *dofIt / (perAsc.size() + 1.0);
-	VecSetValues(petscVec, 1, &index, &value, ADD_VALUES);
-
-	for (std::set<int>::iterator perIt = perAsc.begin(); perIt != perAsc.end(); ++perIt) {
-	  int mappedDof = meshDistributor->getPeriodicMapping(globalRowDof, *perIt);
-	  int mappedIndex = mappedDof * dispMult + dispAdd;
-	  VecSetValues(petscVec, 1, &mappedIndex, &value, ADD_VALUES);
-	}
-      } else {
-	// The DOF index is not periodic.
-	double value = *dofIt;
-	VecSetValues(petscVec, 1, &index, &value, ADD_VALUES);
-      }
-    }
-  }
-
-
-  void PetscSolverGlobalMatrix::createPetscNnzStructure(Matrix<DOFMatrix*> *mat)
-  {
-    FUNCNAME("PetscSolverGlobalMatrix::createPetscNnzStructure()");
-
-    TEST_EXIT_DBG(!d_nnz)("There is something wrong!\n");
-    TEST_EXIT_DBG(!o_nnz)("There is something wrong!\n");
-
-    int nComponents = mat->getNumRows();
-    int nRankRows = meshDistributor->getNumberRankDofs() * nComponents;
-    d_nnz = new int[nRankRows];
-    o_nnz = new int[nRankRows];
-    for (int i = 0; i < nRankRows; i++) {
-      d_nnz[i] = 0;
-      o_nnz[i] = 0;
-    }
-
-    using mtl::tag::row; using mtl::tag::nz; using mtl::begin; using mtl::end;
-    namespace traits = mtl::traits;
-    typedef DOFMatrix::base_matrix_type Matrix;
-    typedef vector<pair<int, int> > MatrixNnzEntry;
-    typedef map<int, DofContainer> RankToDofContainer;
-
-    // Stores to each rank a list of nnz entries (i.e. pairs of row and column index)
-    // that this rank will send to. These nnz entries will be assembled on this rank,
-    // but because the row DOFs are not DOFs of this rank they will be send to the
-    // owner of the row DOFs.
-    map<int, MatrixNnzEntry> sendMatrixEntry;
-
-    // Maps to each DOF that must be send to another rank the rank number of the
-    // receiving rank.
-    map<DegreeOfFreedom, int> sendDofToRank;
-
-
-    // First, create for all ranks we send data to MatrixNnzEntry object with 0 entries.
-    for (RankToDofContainer::iterator it = meshDistributor->getRecvDofs().begin();
-	 it != meshDistributor->getRecvDofs().end(); ++it) {
-      sendMatrixEntry[it->first].resize(0);
-
-      for (DofContainer::iterator dofIt = it->second.begin();
-	   dofIt != it->second.end(); ++dofIt)
-	sendDofToRank[**dofIt] = it->first;
-    }
-
-
-    std::set<int> recvFromRank;
-    for (RankToDofContainer::iterator it = meshDistributor->getSendDofs().begin();
-	 it != meshDistributor->getSendDofs().end(); ++it)
-      recvFromRank.insert(it->first);
-
-
-    for (int i = 0; i < nComponents; i++) {
-      for (int j = 0; j < nComponents; j++) {
- 	if (!(*mat)[i][j])
-	  continue;
-
-	Matrix bmat = (*mat)[i][j]->getBaseMatrix();
-
-	traits::col<Matrix>::type col(bmat);
-	traits::const_value<Matrix>::type value(bmat);
-	  
-	typedef traits::range_generator<row, Matrix>::type cursor_type;
-	typedef traits::range_generator<nz, cursor_type>::type icursor_type;
-	
-	for (cursor_type cursor = begin<row>(bmat), 
-	       cend = end<row>(bmat); cursor != cend; ++cursor) {
-	  
-	  int globalRowDof = meshDistributor->mapLocalToGlobal(*cursor);
-
-	  vector<int> rows;
-	  rows.push_back(globalRowDof);
-	  vector<int> rowRank;
-	  if (meshDistributor->getIsRankDof(*cursor)) {
-	    rowRank.push_back(meshDistributor->getMpiRank());
-	  } else {
-	    // Find out who is the member of this DOF.
-	    TEST_EXIT_DBG(sendDofToRank.count(*cursor))("DOF %d has no receiver!\n", *cursor);
-	    
-	    rowRank.push_back(sendDofToRank[*cursor]);
-	  }
-
-	  // Map the local row number to the global DOF index and create from it
-	  // the global PETSc row index of this DOF.
-	  
-	  int petscRowIdx = globalRowDof * nComponents + i;
-	  
-	  if (meshDistributor->getIsRankDof(*cursor)) {
-	    	    
-	    // === The current row DOF is a rank dof, so create the corresponding ===
-	    // === nnz values directly on rank's nnz data.                        ===
-	    
-	    // This is the local row index of the local PETSc matrix.
-	    int localPetscRowIdx = 
-	      petscRowIdx - meshDistributor->getRstart() * nComponents;
-	    
-	    TEST_EXIT_DBG(localPetscRowIdx >= 0 && localPetscRowIdx < nRankRows)
-	      ("Should not happen! \n Debug info: localRowIdx = %d   globalRowIndx = %d   petscRowIdx = %d   localPetscRowIdx = %d   rStart = %d   nCompontens = %d   nRankRows = %d\n",
-	       *cursor, 
-	       meshDistributor->mapLocalToGlobal(*cursor), 
-	       petscRowIdx, 
-	       localPetscRowIdx, 
-	       meshDistributor->getRstart(), 
-	       nComponents, 
-	       nRankRows);
-	    
-	    
-	    // Traverse all non zero entries in this row.
-	    for (icursor_type icursor = begin<nz>(cursor), 
-		   icend = end<nz>(cursor); icursor != icend; ++icursor) {
-	      int petscColIdx = 
-		meshDistributor->mapLocalToGlobal(col(*icursor)) * nComponents + j;
-	      
-	      if (value(*icursor) != 0.0 || petscRowIdx == petscColIdx) {
-		// The row DOF is a rank DOF, if also the column is a rank DOF, 
-		// increment the d_nnz values for this row, otherwise the o_nnz value.
-		if (petscColIdx >= meshDistributor->getRstart() * nComponents && 
-		    petscColIdx < meshDistributor->getRstart() * nComponents + nRankRows)
-		  d_nnz[localPetscRowIdx]++;
-		else
-		  o_nnz[localPetscRowIdx]++;
-	      }    
-	    }
-	  } else {
-	    // === The current row DOF is not a rank dof, i.e., it will be created ===
-	    // === on this rank, but after this it will be send to another rank    ===
-	    // === matrix. So we need to send also the corresponding nnz structure ===
-	    // === of this row to the corresponding rank.                          ===
-	    
-	    // Send all non zero entries to the member of the row DOF.
-	    int sendToRank = sendDofToRank[*cursor];
-	    
-	    for (icursor_type icursor = begin<nz>(cursor), 
-		   icend = end<nz>(cursor); icursor != icend; ++icursor) {
-	      if (value(*icursor) != 0.0) {
-		int petscColIdx = 
-		  meshDistributor->mapLocalToGlobal(col(*icursor)) * nComponents + j;
-		
-		sendMatrixEntry[sendToRank].
-		  push_back(make_pair(petscRowIdx, petscColIdx));
-	      }
-	    }
-	    
-	  } // if (isRankDof[*cursor]) ... else ...
-	} // for each row in mat[i][j]
-      } 
-    }
-
-    // === Send and recv the nnz row structure to/from other ranks. ===
-
-    StdMpi<MatrixNnzEntry> stdMpi(meshDistributor->getMpiComm(), true);
-    stdMpi.send(sendMatrixEntry);
-    for (std::set<int>::iterator it = recvFromRank.begin(); 
-	 it != recvFromRank.end(); ++it)
-      stdMpi.recv(*it);
-    stdMpi.startCommunication();
-
-
-    // === Evaluate the nnz structure this rank got from other ranks and add it to ===
-    // === the PETSc nnz data structure.                                           ===
-
-    for (map<int, MatrixNnzEntry>::iterator it = stdMpi.getRecvData().begin();
-	 it != stdMpi.getRecvData().end(); ++it) {
-      if (it->second.size() > 0) {
-	for (unsigned int i = 0; i < it->second.size(); i++) {
-	  int r = it->second[i].first;
-	  int c = it->second[i].second;
-
-	  int localRowIdx = r - meshDistributor->getRstart() * nComponents;
-
-	  TEST_EXIT_DBG(localRowIdx >= 0 && localRowIdx < nRankRows)
-	    ("Got row index %d/%d (nRankRows = %d) from rank %d. Should not happen!\n",
-	     r, localRowIdx, nRankRows, it->first);
-	  
-	  if (c < meshDistributor->getRstart() * nComponents || 
-	      c >= meshDistributor->getRstart() * nComponents + nRankRows)
-	    o_nnz[localRowIdx]++;
-	  else
-	    d_nnz[localRowIdx]++;
-	}
-      }
-    }
-
-    // The above algorithm for calculating the number of nnz per row over-
-    // approximates the value, i.e., the number is always equal or larger to 
-    // the real number of nnz values in the global parallel matrix. For small
-    // matrices, the problem may arise, that the result is larger than the
-    // number of elements in a row. This is fixed in the following.
-
-    if (nRankRows < 100) 
-      for (int i = 0; i < nRankRows; i++)
-	d_nnz[i] = std::min(d_nnz[i], nRankRows);
-  }
-
-
-#ifdef HAVE_PETSC_DEV 
-  void PetscSolverSchur::updateDofData()
-  {
-    FUNCNAME("PetscSolverSchur::updateDofData()");
-
-    TEST_EXIT_DBG(meshDistributor)("No mesh distributor object defined!\n");
-
-    MPI::Intracomm& mpiComm = meshDistributor->getMpiComm();
-
-    typedef map<int, DofContainer> RankToDofContainer;
-    typedef map<DegreeOfFreedom, bool> DofIndexToBool;
-
-    boundaryDofs.clear();
-    std::set<DegreeOfFreedom> boundaryLocalDofs;
-    RankToDofContainer& sendDofs = meshDistributor->getSendDofs();
-    for (RankToDofContainer::iterator rankIt = sendDofs.begin(); 
-	 rankIt != sendDofs.end(); ++rankIt) {
-      for (DofContainer::iterator dofIt = rankIt->second.begin(); 
-	   dofIt != rankIt->second.end(); ++dofIt) {
-	boundaryLocalDofs.insert(**dofIt);
-	  
-	boundaryDofs.insert(meshDistributor->mapLocalToGlobal(**dofIt));
-      }
-    }
-      
-    nBoundaryDofs = boundaryDofs.size();
-    mpi::getDofNumbering(mpiComm, nBoundaryDofs, 
-			 rStartBoundaryDofs, nOverallBoundaryDofs);
-
-    int counter = rStartBoundaryDofs;
-    for (std::set<DegreeOfFreedom>::iterator it = boundaryDofs.begin();
-	 it != boundaryDofs.end(); ++it)
-      mapGlobalBoundaryDof[*it] = counter++;
-
-
-
-    std::set<DegreeOfFreedom> otherBoundaryLocalDofs;
-    RankToDofContainer& recvDofs = meshDistributor->getRecvDofs();
-    for (RankToDofContainer::iterator rankIt = recvDofs.begin(); 
-	 rankIt != recvDofs.end(); ++rankIt)
-      for (DofContainer::iterator dofIt = rankIt->second.begin(); 
-	   dofIt != rankIt->second.end(); ++dofIt)
-	otherBoundaryLocalDofs.insert(**dofIt);
-      
-    interiorDofs.clear();
-    DofIndexToBool& isRankDof = meshDistributor->getIsRankDof();
-    for (DofIndexToBool::iterator dofIt = isRankDof.begin(); 
-	 dofIt != isRankDof.end(); ++dofIt) {
-      if (dofIt->second && 
-	  boundaryLocalDofs.count(dofIt->first) == 0 && 
-	  otherBoundaryLocalDofs.count(dofIt->first) == 0)
-	interiorDofs.insert(meshDistributor->mapLocalToGlobal(dofIt->first));
-    }
-      
-    nInteriorDofs = interiorDofs.size();
-    mpi::getDofNumbering(mpiComm, nInteriorDofs, 
-			 rStartInteriorDofs, nOverallInteriorDofs);
-
-    counter = rStartInteriorDofs;
-    for (std::set<DegreeOfFreedom>::iterator it = interiorDofs.begin();
-	 it != interiorDofs.end(); ++it)
-      mapGlobalInteriorDof[*it] = counter++;
-
-
-    TEST_EXIT_DBG(nInteriorDofs > 0)("Should not happen!\n");
-
-
-    StdMpi<vector<DegreeOfFreedom> > stdMpi(mpiComm, false);
-    for (RankToDofContainer::iterator sendIt = meshDistributor->getSendDofs().begin();
-	 sendIt != meshDistributor->getSendDofs().end(); ++sendIt) {
-      stdMpi.getSendData(sendIt->first).resize(0);
-      stdMpi.getSendData(sendIt->first).reserve(sendIt->second.size());
-      for (DofContainer::iterator dofIt = sendIt->second.begin();
-	   dofIt != sendIt->second.end(); ++dofIt) {
-	int globalSendDof = meshDistributor->mapLocalToGlobal(**dofIt);
-
-	TEST_EXIT_DBG(mapGlobalBoundaryDof.count(globalSendDof))
-	  ("No mapping for boundary DOF %d!\n", globalSendDof);
-
-	stdMpi.getSendData(sendIt->first).push_back(mapGlobalBoundaryDof[globalSendDof]);
-      }
-    }     
-
-    stdMpi.updateSendDataSize();
-    stdMpi.recv(meshDistributor->getRecvDofs());
-    stdMpi.startCommunication();
-
-    for (RankToDofContainer::iterator recvIt = meshDistributor->getRecvDofs().begin();
-	 recvIt != meshDistributor->getRecvDofs().end(); ++recvIt) {
-      int i = 0;
-      for (DofContainer::iterator dofIt = recvIt->second.begin();
-	   dofIt != recvIt->second.end(); ++dofIt) {
-	int globalRecvDof = meshDistributor->mapLocalToGlobal(**dofIt);
-	mapGlobalBoundaryDof[globalRecvDof] = stdMpi.getRecvData(recvIt->first)[i++];
-      }
-    }
-  }
-
-
-  void PetscSolverSchur::fillPetscMatrix(Matrix<DOFMatrix*> *mat, SystemVector *vec)
-  {
-    FUNCNAME("PetscSolverSchur::fillPetscMatrix()");
-
-    updateDofData();
-    
-    int nComponents = mat->getNumRows();
-    int nInteriorRows = nInteriorDofs * nComponents;
-    int nOverallInteriorRows = nOverallInteriorDofs * nComponents;
-    int nBoundaryRows = nBoundaryDofs * nComponents;
-    int nOverallBoundaryRows = nOverallBoundaryDofs * nComponents;
-
-    MSG("INTERIOR LOCAL MAT: %d x %d\n", nInteriorRows, nInteriorRows);
-    MSG("INTERIOR GLOBAL MAT: %d x %d\n", nOverallInteriorRows, nOverallInteriorRows);
-    MSG("BOUNDARY LOCAL MAT: %d x %d\n", nBoundaryRows, nBoundaryRows);
-    MSG("BOUNDARY GLOBAL MAT: %d x %d\n", nOverallBoundaryRows, nOverallBoundaryRows);
-
-    MatCreateMPIAIJ(PETSC_COMM_WORLD, 
-		    nInteriorRows, nInteriorRows, 
-		    nOverallInteriorRows, nOverallInteriorRows,
-		    100, PETSC_NULL, 100, PETSC_NULL, &matA11);
-
-    MatCreateMPIAIJ(PETSC_COMM_WORLD, 
-		    nBoundaryRows, nBoundaryRows, 
-		    nOverallBoundaryRows, nOverallBoundaryRows,
-		    100, PETSC_NULL, 100, PETSC_NULL, &matA22);
-
-    MatCreateMPIAIJ(PETSC_COMM_WORLD, 
-		    nInteriorRows, nBoundaryRows, 
-		    nOverallInteriorRows, nOverallBoundaryRows,
-		    100, PETSC_NULL, 100, PETSC_NULL, &matA12);    
-
-    MatCreateMPIAIJ(PETSC_COMM_WORLD, 
-		    nBoundaryRows, nInteriorRows, 
-		    nOverallBoundaryRows, nOverallInteriorRows,
-		    100, PETSC_NULL, 100, PETSC_NULL, &matA21);
-
-
-    for (int i = 0; i < nComponents; i++)
-      for (int j = 0; j < nComponents; j++)
-	if ((*mat)[i][j])
-	  setDofMatrix((*mat)[i][j], nComponents, i, j);
-
-    MatAssemblyBegin(matA11, MAT_FINAL_ASSEMBLY);
-    MatAssemblyEnd(matA11, MAT_FINAL_ASSEMBLY);
-
-    MatAssemblyBegin(matA12, MAT_FINAL_ASSEMBLY);
-    MatAssemblyEnd(matA12, MAT_FINAL_ASSEMBLY);
-
-    MatAssemblyBegin(matA21, MAT_FINAL_ASSEMBLY);
-    MatAssemblyEnd(matA21, MAT_FINAL_ASSEMBLY);
-
-    MatAssemblyBegin(matA22, MAT_FINAL_ASSEMBLY);
-    MatAssemblyEnd(matA22, MAT_FINAL_ASSEMBLY);
-
-    Mat tmpMat[2][2];
-    tmpMat[0][0] = matA11;
-    tmpMat[0][1] = matA12;
-    tmpMat[1][0] = matA21;
-    tmpMat[1][1] = matA22;
-
-    MatCreateNest(PETSC_COMM_WORLD, 2, PETSC_NULL, 2, PETSC_NULL, 
-		  &tmpMat[0][0], &petscMatrix);
-    MatNestSetVecType(petscMatrix, VECNEST);
-    MatAssemblyBegin(petscMatrix, MAT_FINAL_ASSEMBLY);
-    MatAssemblyEnd(petscMatrix, MAT_FINAL_ASSEMBLY);
-
-
-    int nRankRows = meshDistributor->getNumberRankDofs() * nComponents;
-    int nOverallRows = meshDistributor->getNumberOverallDofs() * nComponents;
-
-    VecCreate(PETSC_COMM_WORLD, &petscRhsVec);
-    VecSetSizes(petscRhsVec, nRankRows, nOverallRows);
-    VecSetType(petscRhsVec, VECMPI);
-
-    VecCreate(PETSC_COMM_WORLD, &petscSolVec);
-    VecSetSizes(petscSolVec, nRankRows, nOverallRows);
-    VecSetType(petscSolVec, VECMPI);
-
-    VecCreate(PETSC_COMM_WORLD, &petscTmpVec);
-    VecSetSizes(petscTmpVec, nRankRows, nOverallRows);
-    VecSetType(petscTmpVec, VECMPI);
-
-
-    for (int i = 0; i < nComponents; i++)
-      setDofVector(petscRhsVec, vec->getDOFVector(i), nComponents, i);
-
-    VecAssemblyBegin(petscRhsVec);
-    VecAssemblyEnd(petscRhsVec);
-  }
-
-
-  void PetscSolverSchur::solvePetscMatrix(SystemVector &vec, AdaptInfo *adaptInfo)
-  {
-    FUNCNAME("PetscSolverSchur::solvePetscMatrix()");
-
-    int nComponents = vec.getSize();
-
-    KSPCreate(PETSC_COMM_WORLD, &solver);
-
-    KSPSetOperators(solver, petscMatrix, petscMatrix, SAME_NONZERO_PATTERN); 
-    KSPSetTolerances(solver, 0.0, 1e-8, PETSC_DEFAULT, PETSC_DEFAULT);
-    KSPMonitorSet(solver, myKSPMonitor, PETSC_NULL, 0);
-    KSPSetFromOptions(solver);
-
-    KSPGetPC(solver, &pc);
-    PCSetType(pc, PCFIELDSPLIT);
-
-    IS interiorIs;
-    ISCreateStride(PETSC_COMM_WORLD, 
-		   nInteriorDofs * nComponents,
-		   (rStartInteriorDofs + rStartBoundaryDofs) * nComponents, 
-		   1, &interiorIs);
-    PCFieldSplitSetIS(pc, "interior", interiorIs);
-
-
-    IS boundaryIs;
-    ISCreateStride(PETSC_COMM_WORLD,
-		   nBoundaryDofs * nComponents,
-		   (rStartInteriorDofs + rStartBoundaryDofs + nInteriorDofs) * nComponents, 
-		   1, &boundaryIs);
-    PCFieldSplitSetIS(pc, "boundary", boundaryIs);
-
-    PCSetFromOptions(pc);
-
-
-    KSPSolve(solver, petscRhsVec, petscSolVec);
-
-    // === Transfere values from PETSc's solution vectors to AMDiS vectors. ===
-
-    PetscScalar *vecPointer;
-    VecGetArray(petscSolVec, &vecPointer);
-
-    for (int i = 0; i < nComponents; i++) {
-      DOFVector<double>::Iterator dofIt(vec.getDOFVector(i), USED_DOFS);
-      for (dofIt.reset(); !dofIt.end(); ++dofIt) {
-	DegreeOfFreedom globalRowDof = 
-	  meshDistributor->mapLocalToGlobal(dofIt.getDOFIndex());
-	if (boundaryDofs.count(globalRowDof)) {
-	  int index =
-	    (mapGlobalBoundaryDof[globalRowDof] - rStartBoundaryDofs + nInteriorDofs) * (i + 1);
-	  *dofIt = vecPointer[index];
-	} else {
-	  int index = 
-	    (mapGlobalInteriorDof[globalRowDof] - rStartInteriorDofs) * (i + 1);
-	  *dofIt = vecPointer[index];
-	}
-      }
-    }
-
-    VecRestoreArray(petscSolVec, &vecPointer);
-
-
-    // === Synchronize DOFs at common DOFs, i.e., DOFs that correspond to ===
-    // === more than one partition.                                       ===
-    meshDistributor->synchVector(vec);
-
-
-
-    // === Print iteration counter and residual norm of the solution. ===
-    printSolutionInfo(adaptInfo);
-
-
-    // === Destroy PETSC's variables. ===
-
-    VecDestroy(petscRhsVec);
-    VecDestroy(petscSolVec);
-    VecDestroy(petscTmpVec);
-
-    MatDestroy(matA11);
-    MatDestroy(matA12);
-    MatDestroy(matA21);
-    MatDestroy(matA22);
-    MatDestroy(petscMatrix);
-
-    KSPDestroy(solver);
-  }
-  
-
-  void PetscSolverSchur::setDofMatrix(DOFMatrix* mat, int dispMult, 
-				      int dispAddRow, int dispAddCol)
-  {
-    FUNCNAME("PetscSolverSchur::setDofMatrix()");
-
-    TEST_EXIT(mat)("No DOFMatrix!\n");
-
-    using mtl::tag::row; using mtl::tag::nz; using mtl::begin; using mtl::end;
-    namespace traits= mtl::traits;
-    typedef DOFMatrix::base_matrix_type Matrix;
-
-    traits::col<Matrix>::type col(mat->getBaseMatrix());
-    traits::const_value<Matrix>::type value(mat->getBaseMatrix());
-
-    typedef traits::range_generator<row, Matrix>::type cursor_type;
-    typedef traits::range_generator<nz, cursor_type>::type icursor_type;
-
-    vector<int> colsBoundary, colsInterior;
-    vector<double> valuesBoundary, valuesInterior;
-    colsBoundary.reserve(300);
-    colsInterior.reserve(300);
-    valuesBoundary.reserve(300);
-    valuesInterior.reserve(300);
-
-    for (cursor_type cursor = begin<row>(mat->getBaseMatrix()), 
-	   cend = end<row>(mat->getBaseMatrix()); cursor != cend; ++cursor) {
-
-      // Global index of the current row DOF.
-      int globalRowDof = meshDistributor->mapLocalToGlobal(*cursor);
-     
-      colsBoundary.clear();
-      colsInterior.clear();
-      valuesBoundary.clear();
-      valuesInterior.clear();
-      
-      for (icursor_type icursor = begin<nz>(cursor), icend = end<nz>(cursor); 
-	   icursor != icend; ++icursor) {
-	int globalColDof = meshDistributor->mapLocalToGlobal(col(*icursor));
-
-	if (boundaryDofs.count(globalColDof)) {
-	  int colIndex = 
-	    mapGlobalBoundaryDof[globalColDof] * dispMult + dispAddCol;
-	  
-	  colsBoundary.push_back(colIndex);
-	  valuesBoundary.push_back(value(*icursor));
-	} else {
-	  int colIndex = 
-	    mapGlobalInteriorDof[globalColDof] * dispMult + dispAddCol;
-
-	  colsInterior.push_back(colIndex);
-	  valuesInterior.push_back(value(*icursor));
-	}
-      }
-
-      if (boundaryDofs.count(globalRowDof)) {
-	int rowIndex = 
-	  mapGlobalBoundaryDof[globalRowDof] * dispMult + dispAddRow;
-
- 	MatSetValues(matA22, 1, &rowIndex, colsBoundary.size(), 
- 		     &(colsBoundary[0]), &(valuesBoundary[0]), ADD_VALUES);
- 	MatSetValues(matA21, 1, &rowIndex, colsInterior.size(), 
- 		     &(colsInterior[0]), &(valuesInterior[0]), ADD_VALUES);
-      } else {
-	int rowIndex = 
-	  mapGlobalInteriorDof[globalRowDof] * dispMult + dispAddRow;
-
-  	MatSetValues(matA11, 1, &rowIndex, colsInterior.size(), 
-  		     &(colsInterior[0]), &(valuesInterior[0]), ADD_VALUES);
-  	MatSetValues(matA12, 1, &rowIndex, colsBoundary.size(), 
-  		     &(colsBoundary[0]), &(valuesBoundary[0]), ADD_VALUES);
-      }
-    }
-  }
-
-
-  void PetscSolverSchur::setDofVector(Vec& petscVec, DOFVector<double>* vec,
-				      int dispMult, int dispAdd, bool rankOnly)
-  {
-    DOFVector<double>::Iterator dofIt(vec, USED_DOFS);
-    for (dofIt.reset(); !dofIt.end(); ++dofIt) {
-      if (rankOnly && !meshDistributor->getIsRankDof(dofIt.getDOFIndex()))
-	continue;
-
-      // Calculate global row index of the dof.
-      DegreeOfFreedom globalRowDof = 
-	meshDistributor->mapLocalToGlobal(dofIt.getDOFIndex());
-
-      double value = *dofIt;
-
-      if (boundaryDofs.count(globalRowDof)) {
-	int index = 
-	  (rStartInteriorDofs + 
-	   nInteriorDofs +
-	   mapGlobalBoundaryDof[globalRowDof]) * dispMult + dispAdd;
-	VecSetValues(petscRhsVec, 1, &index, &value, ADD_VALUES);
-      } else {
-	int index = 
-	  (rStartBoundaryDofs + 
-	   mapGlobalInteriorDof[globalRowDof]) * dispMult + dispAdd;
-	VecSetValues(petscRhsVec, 1, &index, &value, ADD_VALUES);
-      }
-    }
-  }
-#endif
-
 }
diff --git a/AMDiS/src/parallel/PetscSolver.h b/AMDiS/src/parallel/PetscSolver.h
index 25a7e3c8..496959d3 100644
--- a/AMDiS/src/parallel/PetscSolver.h
+++ b/AMDiS/src/parallel/PetscSolver.h
@@ -69,6 +69,11 @@ namespace AMDiS {
     /// Use PETSc to solve the linear system of equations
     virtual void solvePetscMatrix(SystemVector &vec, AdaptInfo *adaptInfo) = 0;
 
+    virtual Flag getBoundaryDofRequirement()
+    {
+      return 0;
+    }
+
   protected:
     void printSolutionInfo(AdaptInfo* adaptInfo,
 			   bool iterationCounter = true,
@@ -92,99 +97,6 @@ namespace AMDiS {
   };
 
 
-  class PetscSolverGlobalMatrix : public PetscSolver
-  {
-  public:
-    PetscSolverGlobalMatrix()
-      : PetscSolver(),
-	d_nnz(NULL),
-	o_nnz(NULL),
-	lastMeshNnz(0),
-	zeroStartVector(false)
-    {
-      GET_PARAMETER(0, "parallel->use zero start vector", "%d", &zeroStartVector);
-    }
-
-    void fillPetscMatrix(Matrix<DOFMatrix*> *mat, SystemVector *vec);
-
-    void solvePetscMatrix(SystemVector &vec, AdaptInfo *adaptInfo);    
-
-  protected:
-    /// Creates a new non zero pattern structure for the PETSc matrix. 
-    void createPetscNnzStructure(Matrix<DOFMatrix*> *mat);
-    
-    /// Takes a DOF matrix and sends the values to the global PETSc matrix.
-    void setDofMatrix(DOFMatrix* mat, int dispMult = 1, 
-		      int dispAddRow = 0, int dispAddCol = 0);
-
-    /// Takes a DOF vector and sends its values to a given PETSc vector.
-    void setDofVector(Vec& petscVec, DOFVector<double>* vec, 
-		      int disMult = 1, int dispAdd = 0, bool rankOnly = false);
-
-  protected:
-    /// Arrays definig the non zero pattern of Petsc's matrix.
-    int *d_nnz, *o_nnz;
-
-    /** \brief
-     * Stores the mesh change index of the mesh the nnz structure was created for.
-     * Therefore, if the mesh change index is higher than this value, we have to create
-     * a new nnz structure for PETSc matrices, because the mesh has been changed and
-     * therefore also the assembled matrix structure.
-     */
-    int lastMeshNnz;
-
-    bool zeroStartVector;
-  };
-
-
-#ifdef HAVE_PETSC_DEV
-  class PetscSolverSchur : public PetscSolver
-  {
-  public:
-    PetscSolverSchur() 
-      : PetscSolver()
-    {}
-
-    void fillPetscMatrix(Matrix<DOFMatrix*> *mat, SystemVector *vec);
-
-    void solvePetscMatrix(SystemVector &vec, AdaptInfo *adaptInfo);
-    
-  protected:
-    void updateDofData();
-
-    /// Takes a DOF matrix and sends the values to the global PETSc matrix.
-    void setDofMatrix(DOFMatrix* mat, int dispMult = 1, 
-		      int dispAddRow = 0, int dispAddCol = 0);
-
-    /// Takes a DOF vector and sends its values to a given PETSc vector.
-    void setDofVector(Vec& petscVec, DOFVector<double>* vec, 
-		      int disMult = 1, int dispAdd = 0, bool rankOnly = false);
-
-  protected:
-    int nBoundaryDofs;
-
-    int rStartBoundaryDofs;
-
-    int nOverallBoundaryDofs;
-
-    std::set<DegreeOfFreedom> boundaryDofs;
-
-    map<DegreeOfFreedom, DegreeOfFreedom> mapGlobalBoundaryDof;
-
-    int nInteriorDofs;
-
-    int rStartInteriorDofs;
-
-    int nOverallInteriorDofs;
-
-    std::set<DegreeOfFreedom> interiorDofs;    
-
-    map<DegreeOfFreedom, DegreeOfFreedom> mapGlobalInteriorDof;
-
-    Mat matA11, matA12, matA21, matA22;
-  };
-#endif
-
 } // namespace AMDiS
 
 #endif
diff --git a/AMDiS/src/parallel/PetscSolverGlobalMatrix.cc b/AMDiS/src/parallel/PetscSolverGlobalMatrix.cc
new file mode 100644
index 00000000..13608e10
--- /dev/null
+++ b/AMDiS/src/parallel/PetscSolverGlobalMatrix.cc
@@ -0,0 +1,640 @@
+//
+// Software License for AMDiS
+//
+// Copyright (c) 2010 Dresden University of Technology 
+// All rights reserved.
+// Authors: Simon Vey, Thomas Witkowski et al.
+//
+// This file is part of AMDiS
+//
+// See also license.opensource.txt in the distribution.
+
+
+#include "parallel/PetscSolverGlobalMatrix.h"
+#include "parallel/StdMpi.h"
+#include "parallel/MpiHelper.h"
+
+namespace AMDiS {
+
+  PetscErrorCode myKSPMonitor(KSP ksp, PetscInt iter, PetscReal rnorm, void *)
+  {    
+    if (iter % 100 == 0 && MPI::COMM_WORLD.Get_rank() == 0)
+      cout << "[0]  Petsc-Iteration " << iter << ": " << rnorm << endl;
+
+    return 0;
+  }
+
+
+  void PetscSolverGlobalMatrix::fillPetscMatrix(Matrix<DOFMatrix*> *mat, SystemVector *vec)
+  {
+    FUNCNAME("PetscSolverGlobalMatrix::fillPetscMatrix()");
+
+    TEST_EXIT_DBG(meshDistributor)("No mesh distributor object defined!\n");
+    TEST_EXIT_DBG(mat)("No DOF matrix defined!\n");
+    TEST_EXIT_DBG(vec)("No DOF vector defined!\n");
+
+    double wtime = MPI::Wtime();
+    int nComponents = mat->getNumRows();
+    int nRankRows = meshDistributor->getNumberRankDofs() * nComponents;
+    int nOverallRows = meshDistributor->getNumberOverallDofs() * nComponents;
+
+    // === Create PETSc vectors (rhs, solution and a temporary vector). ===
+
+    VecCreate(PETSC_COMM_WORLD, &petscRhsVec);
+    VecSetSizes(petscRhsVec, nRankRows, nOverallRows);
+    VecSetType(petscRhsVec, VECMPI);
+
+    VecCreate(PETSC_COMM_WORLD, &petscSolVec);
+    VecSetSizes(petscSolVec, nRankRows, nOverallRows);
+    VecSetType(petscSolVec, VECMPI);
+
+    VecCreate(PETSC_COMM_WORLD, &petscTmpVec);
+    VecSetSizes(petscTmpVec, nRankRows, nOverallRows);
+    VecSetType(petscTmpVec, VECMPI);
+
+    int recvAllValues = 0;
+    int sendValue = static_cast<int>(meshDistributor->getLastMeshChangeIndex() != lastMeshNnz);
+    meshDistributor->getMpiComm().Allreduce(&sendValue, &recvAllValues, 1, MPI_INT, MPI_SUM);
+
+    if (!d_nnz || recvAllValues != 0) {
+      if (d_nnz) {
+	delete [] d_nnz;
+	d_nnz = NULL;
+	delete [] o_nnz;
+	o_nnz = NULL;
+      }
+
+      createPetscNnzStructure(mat);
+      lastMeshNnz = meshDistributor->getLastMeshChangeIndex();
+    }
+
+
+    // === Create PETSc matrix with the computed nnz data structure. ===
+
+    MatCreateMPIAIJ(PETSC_COMM_WORLD, nRankRows, nRankRows, nOverallRows, nOverallRows,
+		    0, d_nnz, 0, o_nnz, &petscMatrix);
+    
+#if (DEBUG != 0)
+    MSG("Fill petsc matrix 1 needed %.5f seconds\n", MPI::Wtime() - wtime);
+#endif
+
+#if (DEBUG != 0)
+    int a, b;
+    MatGetOwnershipRange(petscMatrix, &a, &b);
+    TEST_EXIT(a == meshDistributor->getRstart() * nComponents)
+      ("Wrong matrix ownership range!\n");
+    TEST_EXIT(b == meshDistributor->getRstart() * nComponents + nRankRows)
+      ("Wrong matrix ownership range!\n");
+#endif
+
+
+    // === Transfer values from DOF matrices to the PETSc matrix. === 
+
+    for (int i = 0; i < nComponents; i++)
+      for (int j = 0; j < nComponents; j++)
+	if ((*mat)[i][j])
+	  setDofMatrix((*mat)[i][j], nComponents, i, j);
+
+#if (DEBUG != 0)
+    MSG("Fill petsc matrix 2 needed %.5f seconds\n", MPI::Wtime() - wtime);
+#endif
+
+    MatAssemblyBegin(petscMatrix, MAT_FINAL_ASSEMBLY);
+    MatAssemblyEnd(petscMatrix, MAT_FINAL_ASSEMBLY);
+
+#if (DEBUG != 0)
+    MSG("Fill petsc matrix 3 needed %.5f seconds\n", 
+	TIME_USED(MPI::Wtime(), wtime));
+#endif
+
+    // === Transfer values from DOF vector to the PETSc vector. === 
+
+    for (int i = 0; i < nComponents; i++)
+      setDofVector(petscRhsVec, vec->getDOFVector(i), nComponents, i);
+
+    VecAssemblyBegin(petscRhsVec);
+    VecAssemblyEnd(petscRhsVec);
+
+    MSG("Fill petsc matrix needed %.5f seconds\n", MPI::Wtime() - wtime);
+  }
+
+
+  void PetscSolverGlobalMatrix::solvePetscMatrix(SystemVector &vec, AdaptInfo *adaptInfo)
+  {
+    FUNCNAME("PetscSolverGlobalMatrix::solvePetscMatrix()");
+
+    int nComponents = vec.getSize();
+
+    // === Set the old solution to be the initial guess for the PETSc solver. ===
+    if (!zeroStartVector) {
+      VecSet(petscSolVec, 0.0);
+      
+      for (int i = 0; i < nComponents; i++)
+	setDofVector(petscSolVec, vec.getDOFVector(i), nComponents, i, true);
+      
+      VecAssemblyBegin(petscSolVec);
+      VecAssemblyEnd(petscSolVec);
+    }
+
+    // === Init PETSc solver. ===
+    KSPCreate(PETSC_COMM_WORLD, &solver);
+    KSPGetPC(solver, &pc);
+    KSPSetOperators(solver, petscMatrix, petscMatrix, SAME_NONZERO_PATTERN); 
+    KSPSetTolerances(solver, 0.0, 1e-8, PETSC_DEFAULT, PETSC_DEFAULT);
+    KSPSetType(solver, KSPBCGS);
+    KSPMonitorSet(solver, myKSPMonitor, PETSC_NULL, 0);
+    KSPSetFromOptions(solver);
+    PCSetFromOptions(pc);
+
+    // Use the previous solution as the initial guess (no zero start vector).
+    if (!zeroStartVector)
+      KSPSetInitialGuessNonzero(solver, PETSC_TRUE);
+
+   
+    // Solve the linear system with PETSc.
+    KSPSolve(solver, petscRhsVec, petscSolVec);
+
+    // === Transfer values from PETSc's solution vector to the DOF vectors. ===
+    int nRankDofs = meshDistributor->getNumberRankDofs();
+    PetscScalar *vecPointer;
+    VecGetArray(petscSolVec, &vecPointer);
+
+    for (int i = 0; i < nComponents; i++) {
+      DOFVector<double> &dofvec = *(vec.getDOFVector(i));
+      for (int j = 0; j < nRankDofs; j++)
+	dofvec[meshDistributor->mapLocalToDofIndex(j)] = 
+	  vecPointer[j * nComponents + i]; 
+    }
+
+    VecRestoreArray(petscSolVec, &vecPointer);
+
+
+    // === Synchronize DOFs at common DOFs, i.e., DOFs that correspond to ===
+    // === more than one partition.                                       ===
+    meshDistributor->synchVector(vec);
+
+
+    // Print iteration counter and residual norm of the solution.
+    printSolutionInfo(adaptInfo);
+
+
+    // === Destroy PETSc's variables. ===
+
+    MatDestroy(petscMatrix);
+    VecDestroy(petscRhsVec);
+    VecDestroy(petscSolVec);
+    VecDestroy(petscTmpVec);
+    KSPDestroy(solver);
+  }
+
+
+  void PetscSolverGlobalMatrix::setDofMatrix(DOFMatrix* mat, int dispMult, 
+				      int dispAddRow, int dispAddCol)
+  {
+    FUNCNAME("PetscSolverGlobalMatrix::setDofMatrix()");
+
+    TEST_EXIT(mat)("No DOFMatrix!\n");
+
+    using mtl::tag::row; using mtl::tag::nz; using mtl::begin; using mtl::end;
+    namespace traits= mtl::traits;
+    typedef DOFMatrix::base_matrix_type Matrix;
+
+    traits::col<Matrix>::type col(mat->getBaseMatrix());
+    traits::const_value<Matrix>::type value(mat->getBaseMatrix());
+
+    typedef traits::range_generator<row, Matrix>::type cursor_type;
+    typedef traits::range_generator<nz, cursor_type>::type icursor_type;
+
+    vector<int> cols;
+    vector<double> values;
+    cols.reserve(300);
+    values.reserve(300);
+    
+    vector<int> globalCols;
+
+
+    // === Traverse all rows of the DOF matrix and insert the values row-wise ===
+    // === into the PETSc matrix.                                              ===
+
+    for (cursor_type cursor = begin<row>(mat->getBaseMatrix()), 
+	   cend = end<row>(mat->getBaseMatrix()); cursor != cend; ++cursor) {
+
+      // Global index of the current row DOF.
+      int globalRowDof = meshDistributor->mapLocalToGlobal(*cursor);
+      // Test if the current row DOF is a periodic DOF.
+      bool periodicRow = meshDistributor->isPeriodicDof(globalRowDof);
+
+      if (!periodicRow) {
+	// === Row DOF index is not periodic. ===
+
+	// Calculate PETSc row index.
+
+	int rowIndex = globalRowDof * dispMult + dispAddRow;
+
+	cols.clear();
+	values.clear();
+
+	for (icursor_type icursor = begin<nz>(cursor), icend = end<nz>(cursor); 
+	     icursor != icend; ++icursor) {
+
+	  // Global index of the current column index.
+	  int globalColDof = meshDistributor->mapLocalToGlobal(col(*icursor));
+	  // Test if the current column DOF is a periodic DOF.
+	  bool periodicCol = meshDistributor->isPeriodicDof(globalColDof);
+	  // Calculate PETSc col index.
+	  int colIndex = globalColDof * dispMult + dispAddCol;
+
+	  // Ignore all zero entries, except diagonal entries.
+ 	  if (value(*icursor) == 0.0 && rowIndex != colIndex)
+ 	    continue;
+
+	  if (!periodicCol) {
+	    // Calculate the exact position of the column index in the PETSc matrix.
+	    cols.push_back(colIndex);
+	    values.push_back(value(*icursor));
+	  } else {
+	    // === Row index is not periodic, but column index is. ===
+
+	    // Create set of all periodic associations of the column index.
+	    std::set<int> perAsc;
+	    std::set<int>& perColAsc = 
+	      meshDistributor->getPerDofAssociations(globalColDof);
+	    for (std::set<int>::iterator it = perColAsc.begin(); 
+		 it != perColAsc.end(); ++it)
+	      if (*it >= -3)
+		perAsc.insert(*it);
+    
+	    // Scale the value according to the number of periodic associations of the column index.
+	    double scaledValue = 
+	      value(*icursor) * pow(0.5, static_cast<double>(perAsc.size()));
+
+	    
+	    // === Create set of all matrix column indices due to the periodic ===
+	    // === associations of the column DOF index.                       ===
+
+	    vector<int> newCols;
+
+	    // First, add the original matrix index.
+	    newCols.push_back(globalColDof);
+	    
+	    // And add all periodic matrix indices.
+	    for (std::set<int>::iterator it = perAsc.begin(); 
+		 it != perAsc.end(); ++it) {
+	      int nCols = static_cast<int>(newCols.size());
+
+	      for (int i = 0; i < nCols; i++) {
+ 		TEST_EXIT_DBG(meshDistributor->isPeriodicDof(newCols[i], *it))
+ 		  ("Wrong periodic DOF associations at boundary %d with DOF %d!\n",
+		   *it, newCols[i]);
+
+		newCols.push_back(meshDistributor->getPeriodicMapping(newCols[i], *it));
+	      }
+	    }
+
+	    for (unsigned int i = 0; i < newCols.size(); i++) {
+	      cols.push_back(newCols[i] * dispMult + dispAddCol);
+	      values.push_back(scaledValue);	      
+	    }
+	  }
+	}
+
+	MatSetValues(petscMatrix, 1, &rowIndex, cols.size(), 
+		     &(cols[0]), &(values[0]), ADD_VALUES);	
+      } else {
+	// === Row DOF index is periodic. ===
+
+	// Because this row is periodic, we will have to add the entries of this 
+	// matrix row to multiple rows. The following maps store to each row an
+	// array of column indices and values of the entries that must be added to
+	// the PETSc matrix.
+	map<int, vector<int> > colsMap;
+	map<int, vector<double> > valsMap;
+
+	// Traverse all column entries.
+	for (icursor_type icursor = begin<nz>(cursor), icend = end<nz>(cursor); 
+	     icursor != icend; ++icursor) {
+
+	  // Global index of the current column index.
+	  int globalColDof = meshDistributor->mapLocalToGlobal(col(*icursor));
+
+	  // Ignore all zero entries, except diagonal entries.
+ 	  if (value(*icursor) == 0.0 && globalRowDof != globalColDof)
+ 	    continue;
+
+
+	  // === Add all periodic associations of both the row and the column  ===
+	  // === indices to the set perAsc.                                     ===
+
+	  std::set<int> perAsc;
+
+	  if (meshDistributor->isPeriodicDof(globalColDof)) {
+	    std::set<int>& perColAsc = 
+	      meshDistributor->getPerDofAssociations(globalColDof);
+	    for (std::set<int>::iterator it = perColAsc.begin(); 
+		 it != perColAsc.end(); ++it)
+	      if (*it >= -3)
+		perAsc.insert(*it);
+	  }
+
+	  std::set<int>& perRowAsc = 
+	    meshDistributor->getPerDofAssociations(globalRowDof);
+	  for (std::set<int>::iterator it = perRowAsc.begin(); 
+	       it != perRowAsc.end(); ++it)
+	    if (*it >= -3)
+	      perAsc.insert(*it);
+
+	  // Scale the value with respect to the number of periodic associations.
+	  double scaledValue = 
+	    value(*icursor) * pow(0.5, static_cast<double>(perAsc.size()));
+
+
+	  // === Create all matrix entries with respect to the periodic  ===
+	  // === associations of the row and column indices.             ===
+
+	  vector<pair<int, int> > entry;
+	  
+	  // First, add the original entry.
+	  entry.push_back(make_pair(globalRowDof, globalColDof));
+
+	  // Then, traverse the periodic associations of the row and column indices
+	  // and create the corresponding entries.
+	  for (std::set<int>::iterator it = perAsc.begin(); it != perAsc.end(); ++it) {
+	    int nEntry = static_cast<int>(entry.size());
+	    for (int i = 0; i < nEntry; i++) {
+	      int perRowDof = 0;
+	      if (meshDistributor->getPeriodicMapping()[*it].count(entry[i].first))
+		perRowDof = meshDistributor->getPeriodicMapping(entry[i].first, *it);
+	      else
+		perRowDof = entry[i].first;
+
+	      int perColDof;
+	      if (meshDistributor->getPeriodicMapping()[*it].count(entry[i].second))
+		perColDof = meshDistributor->getPeriodicMapping(entry[i].second, *it);
+	      else
+		perColDof = entry[i].second;	      	      
+	      
+
+	      entry.push_back(make_pair(perRowDof, perColDof));
+	    }
+	  }
+
+
+	  // === Translate the matrix entries to PETSc's matrix. ===
+
+	  for (vector<pair<int, int> >::iterator eIt = entry.begin(); 
+	       eIt != entry.end(); ++eIt) {
+	    // Calculate row and column indices of the PETSc matrix.
+	    int rowIndex = eIt->first * dispMult + dispAddRow;
+	    int colIndex = eIt->second * dispMult + dispAddCol;
+
+	    colsMap[rowIndex].push_back(colIndex);
+	    valsMap[rowIndex].push_back(scaledValue);
+	  }
+	}
+
+
+	// === Finally, add all periodic rows to the PETSc matrix. ===
+
+	for (map<int, vector<int> >::iterator rowIt = colsMap.begin();
+	     rowIt != colsMap.end(); ++rowIt) {
+	  TEST_EXIT_DBG(rowIt->second.size() == valsMap[rowIt->first].size())
+	    ("Should not happen!\n");
+
+	  int rowIndex = rowIt->first;
+	  MatSetValues(petscMatrix, 1, &rowIndex, rowIt->second.size(),
+		       &(rowIt->second[0]), &(valsMap[rowIt->first][0]), ADD_VALUES);
+	}
+      }
+    }
+  }
+
+
+  void PetscSolverGlobalMatrix::setDofVector(Vec& petscVec, DOFVector<double>* vec, 
+				      int dispMult, int dispAdd, bool rankOnly)
+  {
+    FUNCNAME("PetscSolverGlobalMatrix::setDofVector()");
+
+    // Traverse all used DOFs in the dof vector.
+    DOFVector<double>::Iterator dofIt(vec, USED_DOFS);
+    for (dofIt.reset(); !dofIt.end(); ++dofIt) {
+      if (rankOnly && !meshDistributor->getIsRankDof(dofIt.getDOFIndex()))
+	continue;
+
+      // Calculate global row index of the DOF.
+      DegreeOfFreedom globalRowDof = 
+	meshDistributor->mapLocalToGlobal(dofIt.getDOFIndex());
+      // Calculate PETSc index of the row DOF.
+      int index = globalRowDof * dispMult + dispAdd;
+
+      if (meshDistributor->isPeriodicDof(globalRowDof)) {
+	std::set<int>& perAsc = meshDistributor->getPerDofAssociations(globalRowDof);
+	double value = *dofIt / (perAsc.size() + 1.0);
+	VecSetValues(petscVec, 1, &index, &value, ADD_VALUES);
+
+	for (std::set<int>::iterator perIt = perAsc.begin(); perIt != perAsc.end(); ++perIt) {
+	  int mappedDof = meshDistributor->getPeriodicMapping(globalRowDof, *perIt);
+	  int mappedIndex = mappedDof * dispMult + dispAdd;
+	  VecSetValues(petscVec, 1, &mappedIndex, &value, ADD_VALUES);
+	}
+      } else {
+	// The DOF index is not periodic.
+	double value = *dofIt;
+	VecSetValues(petscVec, 1, &index, &value, ADD_VALUES);
+      }
+    }
+  }
+
+
+  void PetscSolverGlobalMatrix::createPetscNnzStructure(Matrix<DOFMatrix*> *mat)
+  {
+    FUNCNAME("PetscSolverGlobalMatrix::createPetscNnzStructure()");
+
+    TEST_EXIT_DBG(!d_nnz)("There is something wrong!\n");
+    TEST_EXIT_DBG(!o_nnz)("There is something wrong!\n");
+
+    int nComponents = mat->getNumRows();
+    int nRankRows = meshDistributor->getNumberRankDofs() * nComponents;
+    d_nnz = new int[nRankRows];
+    o_nnz = new int[nRankRows];
+    for (int i = 0; i < nRankRows; i++) {
+      d_nnz[i] = 0;
+      o_nnz[i] = 0;
+    }
+
+    using mtl::tag::row; using mtl::tag::nz; using mtl::begin; using mtl::end;
+    namespace traits = mtl::traits;
+    typedef DOFMatrix::base_matrix_type Matrix;
+    typedef vector<pair<int, int> > MatrixNnzEntry;
+    typedef map<int, DofContainer> RankToDofContainer;
+
+    // Maps to each rank a list of nnz entries (i.e., pairs of row and column index)
+    // that this rank will send to it. These nnz entries are assembled on this rank,
+    // but because the row DOFs are not owned by this rank, they must be sent to the
+    // owner of the row DOFs.
+    map<int, MatrixNnzEntry> sendMatrixEntry;
+
+    // Maps each DOF that must be sent to another rank to the rank number of the
+    // receiving rank.
+    map<DegreeOfFreedom, int> sendDofToRank;
+
+
+    // First, create for all ranks we send data to a MatrixNnzEntry object with 0 entries.
+    for (RankToDofContainer::iterator it = meshDistributor->getRecvDofs().begin();
+	 it != meshDistributor->getRecvDofs().end(); ++it) {
+      sendMatrixEntry[it->first].resize(0);
+
+      for (DofContainer::iterator dofIt = it->second.begin();
+	   dofIt != it->second.end(); ++dofIt)
+	sendDofToRank[**dofIt] = it->first;
+    }
+
+
+    std::set<int> recvFromRank;
+    for (RankToDofContainer::iterator it = meshDistributor->getSendDofs().begin();
+	 it != meshDistributor->getSendDofs().end(); ++it)
+      recvFromRank.insert(it->first);
+
+
+    for (int i = 0; i < nComponents; i++) {
+      for (int j = 0; j < nComponents; j++) {
+ 	if (!(*mat)[i][j])
+	  continue;
+
+	Matrix bmat = (*mat)[i][j]->getBaseMatrix();
+
+	traits::col<Matrix>::type col(bmat);
+	traits::const_value<Matrix>::type value(bmat);
+	  
+	typedef traits::range_generator<row, Matrix>::type cursor_type;
+	typedef traits::range_generator<nz, cursor_type>::type icursor_type;
+	
+	for (cursor_type cursor = begin<row>(bmat), 
+	       cend = end<row>(bmat); cursor != cend; ++cursor) {
+	  
+	  int globalRowDof = meshDistributor->mapLocalToGlobal(*cursor);
+
+	  vector<int> rows;
+	  rows.push_back(globalRowDof);
+	  vector<int> rowRank;
+	  if (meshDistributor->getIsRankDof(*cursor)) {
+	    rowRank.push_back(meshDistributor->getMpiRank());
+	  } else {
+	    // Find out which rank owns this DOF.
+	    TEST_EXIT_DBG(sendDofToRank.count(*cursor))("DOF %d has no receiver!\n", *cursor);
+	    
+	    rowRank.push_back(sendDofToRank[*cursor]);
+	  }
+
+	  // Map the local row number to the global DOF index and create from it
+	  // the global PETSc row index of this DOF.
+	  
+	  int petscRowIdx = globalRowDof * nComponents + i;
+	  
+	  if (meshDistributor->getIsRankDof(*cursor)) {
+	    	    
+	    // === The current row DOF is a rank DOF, so create the corresponding ===
+	    // === nnz values directly in the rank's nnz data.                     ===
+	    
+	    // This is the local row index of the local PETSc matrix.
+	    int localPetscRowIdx = 
+	      petscRowIdx - meshDistributor->getRstart() * nComponents;
+	    
+	    TEST_EXIT_DBG(localPetscRowIdx >= 0 && localPetscRowIdx < nRankRows)
+	      ("Should not happen! \n Debug info: localRowIdx = %d   globalRowIdx = %d   petscRowIdx = %d   localPetscRowIdx = %d   rStart = %d   nComponents = %d   nRankRows = %d\n",
+	       *cursor, 
+	       meshDistributor->mapLocalToGlobal(*cursor), 
+	       petscRowIdx, 
+	       localPetscRowIdx, 
+	       meshDistributor->getRstart(), 
+	       nComponents, 
+	       nRankRows);
+	    
+	    
+	    // Traverse all non zero entries in this row.
+	    for (icursor_type icursor = begin<nz>(cursor), 
+		   icend = end<nz>(cursor); icursor != icend; ++icursor) {
+	      int petscColIdx = 
+		meshDistributor->mapLocalToGlobal(col(*icursor)) * nComponents + j;
+	      
+	      if (value(*icursor) != 0.0 || petscRowIdx == petscColIdx) {
+		// The row DOF is a rank DOF; if the column is also a rank DOF,
+		// increment the d_nnz value for this row, otherwise the o_nnz value.
+		if (petscColIdx >= meshDistributor->getRstart() * nComponents && 
+		    petscColIdx < meshDistributor->getRstart() * nComponents + nRankRows)
+		  d_nnz[localPetscRowIdx]++;
+		else
+		  o_nnz[localPetscRowIdx]++;
+	      }    
+	    }
+	  } else {
+	    // === The current row DOF is not a rank DOF, i.e., its values are     ===
+	    // === assembled on this rank, but they belong to a matrix row owned   ===
+	    // === by another rank. So we also need to send the corresponding nnz  ===
+	    // === structure of this row to that rank.                             ===
+	    
+	    // Send all non-zero entries to the owner of the row DOF.
+	    int sendToRank = sendDofToRank[*cursor];
+	    
+	    for (icursor_type icursor = begin<nz>(cursor), 
+		   icend = end<nz>(cursor); icursor != icend; ++icursor) {
+	      if (value(*icursor) != 0.0) {
+		int petscColIdx = 
+		  meshDistributor->mapLocalToGlobal(col(*icursor)) * nComponents + j;
+		
+		sendMatrixEntry[sendToRank].
+		  push_back(make_pair(petscRowIdx, petscColIdx));
+	      }
+	    }
+	    
+	  } // if (isRankDof[*cursor]) ... else ...
+	} // for each row in mat[i][j]
+      } 
+    }
+
+    // === Send and recv the nnz row structure to/from other ranks. ===
+
+    StdMpi<MatrixNnzEntry> stdMpi(meshDistributor->getMpiComm(), true);
+    stdMpi.send(sendMatrixEntry);
+    for (std::set<int>::iterator it = recvFromRank.begin(); 
+	 it != recvFromRank.end(); ++it)
+      stdMpi.recv(*it);
+    stdMpi.startCommunication();
+
+
+    // === Evaluate the nnz structure this rank got from other ranks and add it to ===
+    // === the PETSc nnz data structure.                                           ===
+
+    for (map<int, MatrixNnzEntry>::iterator it = stdMpi.getRecvData().begin();
+	 it != stdMpi.getRecvData().end(); ++it) {
+      if (it->second.size() > 0) {
+	for (unsigned int i = 0; i < it->second.size(); i++) {
+	  int r = it->second[i].first;
+	  int c = it->second[i].second;
+
+	  int localRowIdx = r - meshDistributor->getRstart() * nComponents;
+
+	  TEST_EXIT_DBG(localRowIdx >= 0 && localRowIdx < nRankRows)
+	    ("Got row index %d/%d (nRankRows = %d) from rank %d. Should not happen!\n",
+	     r, localRowIdx, nRankRows, it->first);
+	  
+	  if (c < meshDistributor->getRstart() * nComponents || 
+	      c >= meshDistributor->getRstart() * nComponents + nRankRows)
+	    o_nnz[localRowIdx]++;
+	  else
+	    d_nnz[localRowIdx]++;
+	}
+      }
+    }
+
+    // The above algorithm for calculating the number of nnz per row over-
+    // approximates the value, i.e., the number is always equal to or larger
+    // than the real number of nnz values in the global parallel matrix. For
+    // small matrices, the result may even exceed the number of columns of a
+    // row. This is fixed in the following.
+
+    if (nRankRows < 100) 
+      for (int i = 0; i < nRankRows; i++)
+	d_nnz[i] = std::min(d_nnz[i], nRankRows);
+  }
+
+}
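
The nnz arrays computed in createPetscNnzStructure() feed PETSc's standard MPIAIJ preallocation: for every locally owned row, d_nnz counts entries whose columns are owned by the same rank and o_nnz counts entries owned by other ranks. As a reference, here is a minimal standalone sketch of that preallocation pattern, using the same pre-3.2 PETSc calling conventions as this patch; the sizes and counts (nLocal, the 1D-stencil-like estimates) are made up for illustration and are not taken from AMDiS.

// Standalone illustration of d_nnz/o_nnz preallocation (hypothetical example).
#include <petscmat.h>

int main(int argc, char **argv)
{
  PetscInitialize(&argc, &argv, PETSC_NULL, PETSC_NULL);

  // Each rank owns nLocal rows; estimates assume a 1D three-point stencil.
  const PetscInt nLocal = 4;
  PetscInt d_nnz[4], o_nnz[4];
  for (PetscInt i = 0; i < nLocal; i++) {
    d_nnz[i] = 3;   // entries in columns owned by this rank (diagonal block)
    o_nnz[i] = 1;   // entries in columns owned by other ranks (off-diagonal block)
  }

  Mat A;
  MatCreateMPIAIJ(PETSC_COMM_WORLD, nLocal, nLocal,
                  PETSC_DECIDE, PETSC_DECIDE,
                  0, d_nnz, 0, o_nnz, &A);

  // Insert something into the locally owned rows, then assemble.
  PetscInt rStart, rEnd;
  MatGetOwnershipRange(A, &rStart, &rEnd);
  for (PetscInt row = rStart; row < rEnd; row++) {
    PetscScalar v = 2.0;
    MatSetValues(A, 1, &row, 1, &row, &v, ADD_VALUES);
  }
  MatAssemblyBegin(A, MAT_FINAL_ASSEMBLY);
  MatAssemblyEnd(A, MAT_FINAL_ASSEMBLY);

  MatDestroy(A);   // pre-3.2 signature, as used throughout this patch
  PetscFinalize();
  return 0;
}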
diff --git a/AMDiS/src/parallel/PetscSolverGlobalMatrix.h b/AMDiS/src/parallel/PetscSolverGlobalMatrix.h
new file mode 100644
index 00000000..c9efedb0
--- /dev/null
+++ b/AMDiS/src/parallel/PetscSolverGlobalMatrix.h
@@ -0,0 +1,82 @@
+// ============================================================================
+// ==                                                                        ==
+// == AMDiS - Adaptive multidimensional simulations                          ==
+// ==                                                                        ==
+// ==  http://www.amdis-fem.org                                              ==
+// ==                                                                        ==
+// ============================================================================
+//
+// Software License for AMDiS
+//
+// Copyright (c) 2010 Dresden University of Technology 
+// All rights reserved.
+// Authors: Simon Vey, Thomas Witkowski et al.
+//
+// This file is part of AMDiS
+//
+// See also license.opensource.txt in the distribution.
+
+
+
+/** \file PetscSolverGlobalMatrix.h */
+
+#ifndef AMDIS_PETSC_SOLVER_GLOBAL_MATRIX_H
+#define AMDIS_PETSC_SOLVER_GLOBAL_MATRIX_H
+
+#include "AMDiS_fwd.h"
+#include "parallel/PetscSolver.h"
+
+namespace AMDiS {
+
+  using namespace std;
+
+
+  class PetscSolverGlobalMatrix : public PetscSolver
+  {
+  public:
+    PetscSolverGlobalMatrix()
+      : PetscSolver(),
+	d_nnz(NULL),
+	o_nnz(NULL),
+	lastMeshNnz(0),
+	zeroStartVector(false)
+    {
+      GET_PARAMETER(0, "parallel->use zero start vector", "%d", &zeroStartVector);
+    }
+
+    void fillPetscMatrix(Matrix<DOFMatrix*> *mat, SystemVector *vec);
+
+    void solvePetscMatrix(SystemVector &vec, AdaptInfo *adaptInfo);    
+
+  protected:
+    /// Creates a new non-zero pattern structure for the PETSc matrix.
+    void createPetscNnzStructure(Matrix<DOFMatrix*> *mat);
+    
+    /// Takes a DOF matrix and sends the values to the global PETSc matrix.
+    void setDofMatrix(DOFMatrix* mat, int dispMult = 1, 
+		      int dispAddRow = 0, int dispAddCol = 0);
+
+    /// Takes a DOF vector and sends its values to a given PETSc vector.
+    void setDofVector(Vec& petscVec, DOFVector<double>* vec, 
+		      int dispMult = 1, int dispAdd = 0, bool rankOnly = false);
+
+  protected:
+    /// Arrays defining the non-zero pattern of the PETSc matrix.
+    int *d_nnz, *o_nnz;
+
+    /** \brief
+     * Stores the mesh change index of the mesh for which the nnz structure was
+     * created. If the current mesh change index is higher than this value, a new
+     * nnz structure must be created for the PETSc matrices, because the mesh and
+     * therefore the structure of the assembled matrix have changed.
+     */
+    int lastMeshNnz;
+
+    bool zeroStartVector;
+  };
+
+
+}
+
+#endif
+
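
The zeroStartVector flag declared above is read with GET_PARAMETER from the key "parallel->use zero start vector"; if it stays at 0, the previous solution is used as the KSP initial guess. Assuming the usual AMDiS key/value syntax for init files, switching to a zero start vector would look like the following parameter line (illustrative only, not part of this patch):

parallel->use zero start vector: 1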
diff --git a/AMDiS/src/parallel/PetscSolverSchur.cc b/AMDiS/src/parallel/PetscSolverSchur.cc
new file mode 100644
index 00000000..cfc10e91
--- /dev/null
+++ b/AMDiS/src/parallel/PetscSolverSchur.cc
@@ -0,0 +1,425 @@
+//
+// Software License for AMDiS
+//
+// Copyright (c) 2010 Dresden University of Technology 
+// All rights reserved.
+// Authors: Simon Vey, Thomas Witkowski et al.
+//
+// This file is part of AMDiS
+//
+// See also license.opensource.txt in the distribution.
+
+
+#include "parallel/PetscSolverSchur.h"
+#include "parallel/StdMpi.h"
+#include "parallel/MpiHelper.h"
+
+namespace AMDiS {
+
+  using namespace std;
+
+
+#ifdef HAVE_PETSC_DEV 
+  void PetscSolverSchur::updateDofData(int nComponents)
+  {
+    FUNCNAME("PetscSolverSchur::updateDofData()");
+
+    TEST_EXIT_DBG(meshDistributor)("No mesh distributor object defined!\n");
+
+    MPI::Intracomm& mpiComm = meshDistributor->getMpiComm();
+
+    typedef map<int, DofContainer> RankToDofContainer;
+    typedef map<DegreeOfFreedom, bool> DofIndexToBool;
+
+    boundaryDofs.clear();
+    std::set<DegreeOfFreedom> boundaryLocalDofs;
+    RankToDofContainer& sendDofs = meshDistributor->getSendDofs();
+    for (RankToDofContainer::iterator rankIt = sendDofs.begin(); 
+	 rankIt != sendDofs.end(); ++rankIt) {
+      for (DofContainer::iterator dofIt = rankIt->second.begin(); 
+	   dofIt != rankIt->second.end(); ++dofIt) {
+	boundaryLocalDofs.insert(**dofIt);
+	  
+	boundaryDofs.insert(meshDistributor->mapLocalToGlobal(**dofIt));
+      }
+    }
+      
+    nBoundaryDofs = boundaryDofs.size();
+    mpi::getDofNumbering(mpiComm, nBoundaryDofs, 
+			 rStartBoundaryDofs, nOverallBoundaryDofs);
+
+    int counter = rStartBoundaryDofs;
+    mapGlobalBoundaryDof.clear();
+    for (std::set<DegreeOfFreedom>::iterator it = boundaryDofs.begin();
+	 it != boundaryDofs.end(); ++it)
+      mapGlobalBoundaryDof[*it] = counter++;
+
+
+
+    std::set<DegreeOfFreedom> otherBoundaryLocalDofs;
+    RankToDofContainer& recvDofs = meshDistributor->getRecvDofs();
+    for (RankToDofContainer::iterator rankIt = recvDofs.begin(); 
+	 rankIt != recvDofs.end(); ++rankIt)
+      for (DofContainer::iterator dofIt = rankIt->second.begin(); 
+	   dofIt != rankIt->second.end(); ++dofIt)
+	otherBoundaryLocalDofs.insert(**dofIt);
+      
+    interiorDofs.clear();
+    DofIndexToBool& isRankDof = meshDistributor->getIsRankDof();
+    for (DofIndexToBool::iterator dofIt = isRankDof.begin(); 
+	 dofIt != isRankDof.end(); ++dofIt) {
+      if (dofIt->second && 
+	  boundaryLocalDofs.count(dofIt->first) == 0 && 
+	  otherBoundaryLocalDofs.count(dofIt->first) == 0)
+	interiorDofs.insert(meshDistributor->mapLocalToGlobal(dofIt->first));
+    }
+      
+    nInteriorDofs = interiorDofs.size();
+    mpi::getDofNumbering(mpiComm, nInteriorDofs, 
+			 rStartInteriorDofs, nOverallInteriorDofs);
+
+    counter = rStartInteriorDofs;
+    mapGlobalInteriorDof.clear();
+    for (std::set<DegreeOfFreedom>::iterator it = interiorDofs.begin();
+	 it != interiorDofs.end(); ++it)
+      mapGlobalInteriorDof[*it] = counter++;
+
+
+    TEST_EXIT_DBG(nInteriorDofs > 0)("Should not happen!\n");
+
+
+    StdMpi<vector<DegreeOfFreedom> > stdMpi(mpiComm, false);
+    for (RankToDofContainer::iterator sendIt = meshDistributor->getSendDofs().begin();
+	 sendIt != meshDistributor->getSendDofs().end(); ++sendIt) {
+      stdMpi.getSendData(sendIt->first).resize(0);
+      stdMpi.getSendData(sendIt->first).reserve(sendIt->second.size());
+      for (DofContainer::iterator dofIt = sendIt->second.begin();
+	   dofIt != sendIt->second.end(); ++dofIt) {
+	int globalSendDof = meshDistributor->mapLocalToGlobal(**dofIt);
+
+	TEST_EXIT_DBG(mapGlobalBoundaryDof.count(globalSendDof))
+	  ("No mapping for boundary DOF %d!\n", globalSendDof);
+
+	stdMpi.getSendData(sendIt->first).push_back(mapGlobalBoundaryDof[globalSendDof]);
+      }
+    }     
+
+    stdMpi.updateSendDataSize();
+    stdMpi.recv(meshDistributor->getRecvDofs());
+    stdMpi.startCommunication();
+
+    for (RankToDofContainer::iterator recvIt = meshDistributor->getRecvDofs().begin();
+	 recvIt != meshDistributor->getRecvDofs().end(); ++recvIt) {
+      int i = 0;
+      for (DofContainer::iterator dofIt = recvIt->second.begin();
+	   dofIt != recvIt->second.end(); ++dofIt) {
+	int globalRecvDof = meshDistributor->mapLocalToGlobal(**dofIt);
+	mapGlobalBoundaryDof[globalRecvDof] = stdMpi.getRecvData(recvIt->first)[i++];
+	boundaryDofs.insert(globalRecvDof);
+      }
+    }
+
+
+    // === Create PETSc IS structures for interior and boundary DOFs. ===
+
+    ISCreateStride(PETSC_COMM_WORLD, 
+		   nInteriorDofs * nComponents,
+		   (rStartInteriorDofs + rStartBoundaryDofs) * nComponents, 
+		   1, &interiorIs);
+
+    ISCreateStride(PETSC_COMM_WORLD,
+		   nBoundaryDofs * nComponents,
+		   (rStartInteriorDofs + rStartBoundaryDofs + nInteriorDofs) * nComponents, 
+		   1, &boundaryIs);
+  }
+
+
+  void PetscSolverSchur::fillPetscMatrix(Matrix<DOFMatrix*> *mat, SystemVector *vec)
+  {
+    FUNCNAME("PetscSolverSchur::fillPetscMatrix()");
+   
+    int nComponents = mat->getNumRows();
+    updateDofData(nComponents);
+
+    int nInteriorRows = nInteriorDofs * nComponents;
+    int nOverallInteriorRows = nOverallInteriorDofs * nComponents;
+    int nBoundaryRows = nBoundaryDofs * nComponents;
+    int nOverallBoundaryRows = nOverallBoundaryDofs * nComponents;
+
+
+    MatCreateMPIAIJ(PETSC_COMM_WORLD, 
+		    nInteriorRows, nInteriorRows, 
+		    nOverallInteriorRows, nOverallInteriorRows,
+		    100, PETSC_NULL, 100, PETSC_NULL, &matA11);
+
+    MatCreateMPIAIJ(PETSC_COMM_WORLD, 
+		    nBoundaryRows, nBoundaryRows, 
+		    nOverallBoundaryRows, nOverallBoundaryRows,
+		    100, PETSC_NULL, 100, PETSC_NULL, &matA22);
+
+    MatCreateMPIAIJ(PETSC_COMM_WORLD, 
+		    nInteriorRows, nBoundaryRows, 
+		    nOverallInteriorRows, nOverallBoundaryRows,
+		    100, PETSC_NULL, 100, PETSC_NULL, &matA12);    
+
+    MatCreateMPIAIJ(PETSC_COMM_WORLD, 
+		    nBoundaryRows, nInteriorRows, 
+		    nOverallBoundaryRows, nOverallInteriorRows,
+		    100, PETSC_NULL, 100, PETSC_NULL, &matA21);
+
+
+    for (int i = 0; i < nComponents; i++)
+      for (int j = 0; j < nComponents; j++)
+	if ((*mat)[i][j])
+	  setDofMatrix((*mat)[i][j], nComponents, i, j);
+
+    MatAssemblyBegin(matA11, MAT_FINAL_ASSEMBLY);
+    MatAssemblyEnd(matA11, MAT_FINAL_ASSEMBLY);
+
+    MatAssemblyBegin(matA12, MAT_FINAL_ASSEMBLY);
+    MatAssemblyEnd(matA12, MAT_FINAL_ASSEMBLY);
+
+    MatAssemblyBegin(matA21, MAT_FINAL_ASSEMBLY);
+    MatAssemblyEnd(matA21, MAT_FINAL_ASSEMBLY);
+
+    MatAssemblyBegin(matA22, MAT_FINAL_ASSEMBLY);
+    MatAssemblyEnd(matA22, MAT_FINAL_ASSEMBLY);
+
+    Mat tmpMat[2][2];
+    tmpMat[0][0] = matA11;
+    tmpMat[0][1] = matA12;
+    tmpMat[1][0] = matA21;
+    tmpMat[1][1] = matA22;
+
+    IS tmpIS[2];
+    tmpIS[0] = interiorIs;
+    tmpIS[1] = boundaryIs;
+
+    MatCreateNest(PETSC_COMM_WORLD, 2, &tmpIS[0], 2, &tmpIS[0], &tmpMat[0][0], &petscMatrix);
+    MatNestSetVecType(petscMatrix, VECNEST);
+    MatAssemblyBegin(petscMatrix, MAT_FINAL_ASSEMBLY);
+    MatAssemblyEnd(petscMatrix, MAT_FINAL_ASSEMBLY);
+
+
+    int nRankRows = meshDistributor->getNumberRankDofs() * nComponents;
+    int nOverallRows = meshDistributor->getNumberOverallDofs() * nComponents;
+
+    VecCreate(PETSC_COMM_WORLD, &petscRhsVec);
+    VecSetSizes(petscRhsVec, nRankRows, nOverallRows);
+    VecSetType(petscRhsVec, VECMPI);
+
+    VecCreate(PETSC_COMM_WORLD, &petscSolVec);
+    VecSetSizes(petscSolVec, nRankRows, nOverallRows);
+    VecSetType(petscSolVec, VECMPI);
+
+    VecCreate(PETSC_COMM_WORLD, &petscTmpVec);
+    VecSetSizes(petscTmpVec, nRankRows, nOverallRows);
+    VecSetType(petscTmpVec, VECMPI);
+
+
+    for (int i = 0; i < nComponents; i++)
+      setDofVector(petscRhsVec, vec->getDOFVector(i), nComponents, i);
+
+    VecAssemblyBegin(petscRhsVec);
+    VecAssemblyEnd(petscRhsVec);
+  }
+
+
+  void PetscSolverSchur::solvePetscMatrix(SystemVector &vec, AdaptInfo *adaptInfo)
+  {
+    FUNCNAME("PetscSolverSchur::solvePetscMatrix()");
+
+    int nComponents = vec.getSize();
+
+    KSPCreate(PETSC_COMM_WORLD, &solver);
+
+    KSPSetOperators(solver, petscMatrix, petscMatrix, SAME_NONZERO_PATTERN); 
+    KSPSetTolerances(solver, 0.0, 1e-8, PETSC_DEFAULT, PETSC_DEFAULT);
+    KSPSetFromOptions(solver);
+
+    KSPGetPC(solver, &pc);
+    PCSetType(pc, PCFIELDSPLIT);
+    PCFieldSplitSetIS(pc, "interior", interiorIs);
+    PCFieldSplitSetIS(pc, "boundary", boundaryIs);
+    PCSetFromOptions(pc);
+
+
+    KSPSolve(solver, petscRhsVec, petscSolVec);
+
+    // === Transfer values from PETSc's solution vector to the AMDiS vectors. ===
+
+    PetscScalar *vecPointer;
+    VecGetArray(petscSolVec, &vecPointer);
+
+    for (int i = 0; i < nComponents; i++) {
+      DOFVector<double>::Iterator dofIt(vec.getDOFVector(i), USED_DOFS);
+      for (dofIt.reset(); !dofIt.end(); ++dofIt) {
+	DegreeOfFreedom globalRowDof = 
+	  meshDistributor->mapLocalToGlobal(dofIt.getDOFIndex());
+	if (boundaryDofs.count(globalRowDof)) {
+	  int index =
+	    (mapGlobalBoundaryDof[globalRowDof] - rStartBoundaryDofs + nInteriorDofs) * (i + 1);
+	  *dofIt = vecPointer[index];
+	} else {
+	  int index = 
+	    (mapGlobalInteriorDof[globalRowDof] - rStartInteriorDofs) * (i + 1);
+	  *dofIt = vecPointer[index];
+	}
+      }
+    }
+
+    VecRestoreArray(petscSolVec, &vecPointer);
+
+
+    // === Synchronize DOFs at common DOFs, i.e., DOFs that correspond to ===
+    // === more than one partition.                                       ===
+    meshDistributor->synchVector(vec);
+
+
+
+    // === Print iteration counter and residual norm of the solution. ===
+    printSolutionInfo(adaptInfo);
+
+
+    // === Destroy PETSc's variables. ===
+
+    VecDestroy(petscRhsVec);
+    VecDestroy(petscSolVec);
+    VecDestroy(petscTmpVec);
+
+    MatDestroy(matA11);
+    MatDestroy(matA12);
+    MatDestroy(matA21);
+    MatDestroy(matA22);
+    MatDestroy(petscMatrix);
+
+    KSPDestroy(solver);
+  }
+  
+
+  void PetscSolverSchur::setDofMatrix(DOFMatrix* mat, int dispMult, 
+				      int dispAddRow, int dispAddCol)
+  {
+    FUNCNAME("PetscSolverSchur::setDofMatrix()");
+
+    TEST_EXIT(mat)("No DOFMatrix!\n");
+
+    using mtl::tag::row; using mtl::tag::nz; using mtl::begin; using mtl::end;
+    namespace traits= mtl::traits;
+    typedef DOFMatrix::base_matrix_type Matrix;
+
+    traits::col<Matrix>::type col(mat->getBaseMatrix());
+    traits::const_value<Matrix>::type value(mat->getBaseMatrix());
+
+    typedef traits::range_generator<row, Matrix>::type cursor_type;
+    typedef traits::range_generator<nz, cursor_type>::type icursor_type;
+
+    vector<int> colsBoundary, colsInterior;
+    vector<double> valuesBoundary, valuesInterior;
+    colsBoundary.reserve(300);
+    colsInterior.reserve(300);
+    valuesBoundary.reserve(300);
+    valuesInterior.reserve(300);
+
+    for (cursor_type cursor = begin<row>(mat->getBaseMatrix()), 
+	   cend = end<row>(mat->getBaseMatrix()); cursor != cend; ++cursor) {
+
+      // Global index of the current row DOF.
+      int globalRowDof = meshDistributor->mapLocalToGlobal(*cursor);
+     
+      colsBoundary.clear();
+      colsInterior.clear();
+      valuesBoundary.clear();
+      valuesInterior.clear();
+      
+      for (icursor_type icursor = begin<nz>(cursor), icend = end<nz>(cursor); 
+	   icursor != icend; ++icursor) {
+	int globalColDof = meshDistributor->mapLocalToGlobal(col(*icursor));
+
+	if (boundaryDofs.count(globalColDof)) {
+	  TEST_EXIT_DBG(mapGlobalBoundaryDof.count(globalColDof))
+	    ("Should not happen!\n");
+
+	  int colIndex = 
+	    mapGlobalBoundaryDof[globalColDof] * dispMult + dispAddCol;
+	  
+	  colsBoundary.push_back(colIndex);
+	  valuesBoundary.push_back(value(*icursor));
+	} else {
+	  TEST_EXIT_DBG(mapGlobalInteriorDof.count(globalColDof))
+	    ("Cannot find global interior mapping for global column DOF %d!\n", 
+	     globalColDof);
+
+	  int colIndex = 
+	    mapGlobalInteriorDof[globalColDof] * dispMult + dispAddCol;
+
+	  colsInterior.push_back(colIndex);
+	  valuesInterior.push_back(value(*icursor));
+	}
+      }
+
+      if (boundaryDofs.count(globalRowDof)) {
+	TEST_EXIT_DBG(mapGlobalBoundaryDof.count(globalRowDof))
+	  ("Should not happen!\n");
+
+	int rowIndex = 
+	  mapGlobalBoundaryDof[globalRowDof] * dispMult + dispAddRow;
+
+ 	MatSetValues(matA22, 1, &rowIndex, colsBoundary.size(), 
+ 		     &(colsBoundary[0]), &(valuesBoundary[0]), ADD_VALUES);
+ 	MatSetValues(matA21, 1, &rowIndex, colsInterior.size(), 
+ 		     &(colsInterior[0]), &(valuesInterior[0]), ADD_VALUES);
+      } else {
+	TEST_EXIT_DBG(mapGlobalInteriorDof.count(globalRowDof))
+	  ("Cannot find global interior mapping for global row DOF %d!\n", 
+	   globalRowDof);
+
+	int rowIndex = 
+	  mapGlobalInteriorDof[globalRowDof] * dispMult + dispAddRow;
+
+  	MatSetValues(matA11, 1, &rowIndex, colsInterior.size(), 
+  		     &(colsInterior[0]), &(valuesInterior[0]), ADD_VALUES);
+  	MatSetValues(matA12, 1, &rowIndex, colsBoundary.size(), 
+  		     &(colsBoundary[0]), &(valuesBoundary[0]), ADD_VALUES);
+      }
+    }
+  }
+
+
+  void PetscSolverSchur::setDofVector(Vec& petscVec, DOFVector<double>* vec,
+				      int dispMult, int dispAdd, bool rankOnly)
+  {
+    DOFVector<double>::Iterator dofIt(vec, USED_DOFS);
+    for (dofIt.reset(); !dofIt.end(); ++dofIt) {
+      if (rankOnly && !meshDistributor->getIsRankDof(dofIt.getDOFIndex()))
+	continue;
+
+      // Calculate the global row index of the DOF.
+      DegreeOfFreedom globalRowDof = 
+	meshDistributor->mapLocalToGlobal(dofIt.getDOFIndex());
+
+      double value = *dofIt;
+
+      if (boundaryDofs.count(globalRowDof)) {
+	TEST_EXIT_DBG(mapGlobalBoundaryDof.count(globalRowDof))
+	  ("Should not happen!\n");
+
+	int index = 
+	  (rStartInteriorDofs + 
+	   nInteriorDofs +
+	   mapGlobalBoundaryDof[globalRowDof]) * dispMult + dispAdd;
+	VecSetValues(petscRhsVec, 1, &index, &value, ADD_VALUES);
+      } else {
+	TEST_EXIT_DBG(mapGlobalInteriorDof.count(globalRowDof))
+	  ("Should not happen!\n");
+	
+	int index = 
+	  (rStartBoundaryDofs + 
+	   mapGlobalInteriorDof[globalRowDof]) * dispMult + dispAdd;
+	VecSetValues(petscRhsVec, 1, &index, &value, ADD_VALUES);
+      }
+    }
+  }
+#endif
+
+}
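
PetscSolverSchur does not form the Schur complement explicitly; it registers the interior and boundary blocks as two fields of a PCFIELDSPLIT preconditioner, and the concrete treatment (e.g. a Schur-complement factorization) is then chosen via PETSc run-time options. The reduced sketch below shows only that wiring, with the same petsc-dev calls used in solvePetscMatrix(); solver, A and the two index sets are placeholders assumed to be supplied by the caller.

#include <petscksp.h>

/* Minimal field-split setup mirroring PetscSolverSchur::solvePetscMatrix().
   Run-time options such as -pc_fieldsplit_type schur select the actual
   Schur-complement variant. */
static void setupInteriorBoundarySplit(KSP solver, Mat A,
                                       IS isInterior, IS isBoundary)
{
  PC pc;

  KSPSetOperators(solver, A, A, SAME_NONZERO_PATTERN);
  KSPSetTolerances(solver, 0.0, 1e-8, PETSC_DEFAULT, PETSC_DEFAULT);
  KSPSetFromOptions(solver);

  KSPGetPC(solver, &pc);
  PCSetType(pc, PCFIELDSPLIT);
  PCFieldSplitSetIS(pc, "interior", isInterior);   /* first block  */
  PCFieldSplitSetIS(pc, "boundary", isBoundary);   /* second block */
  PCSetFromOptions(pc);
}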
diff --git a/AMDiS/src/parallel/PetscSolverSchur.h b/AMDiS/src/parallel/PetscSolverSchur.h
new file mode 100644
index 00000000..2b7f68cb
--- /dev/null
+++ b/AMDiS/src/parallel/PetscSolverSchur.h
@@ -0,0 +1,90 @@
+// ============================================================================
+// ==                                                                        ==
+// == AMDiS - Adaptive multidimensional simulations                          ==
+// ==                                                                        ==
+// ==  http://www.amdis-fem.org                                              ==
+// ==                                                                        ==
+// ============================================================================
+//
+// Software License for AMDiS
+//
+// Copyright (c) 2010 Dresden University of Technology 
+// All rights reserved.
+// Authors: Simon Vey, Thomas Witkowski et al.
+//
+// This file is part of AMDiS
+//
+// See also license.opensource.txt in the distribution.
+
+
+
+/** \file PetscSolverSchur.h */
+
+#ifndef AMDIS_PETSC_SOLVER_SCHUR_H
+#define AMDIS_PETSC_SOLVER_SCHUR_H
+
+#include "parallel/PetscSolver.h"
+
+namespace AMDiS {
+
+  using namespace std;
+
+
+#ifdef HAVE_PETSC_DEV
+  class PetscSolverSchur : public PetscSolver
+  {
+  public:
+    PetscSolverSchur() 
+      : PetscSolver()
+    {}
+
+    void fillPetscMatrix(Matrix<DOFMatrix*> *mat, SystemVector *vec);
+
+    void solvePetscMatrix(SystemVector &vec, AdaptInfo *adaptInfo);
+
+    Flag getBoundaryDofRequirement()
+    {
+      return MeshDistributor::BOUNDARY_SUBOBJ_SORTED;
+    }
+    
+  protected:
+    void updateDofData(int nComponents);
+
+    /// Takes a DOF matrix and sends the values to the global PETSc matrix.
+    void setDofMatrix(DOFMatrix* mat, int dispMult = 1, 
+		      int dispAddRow = 0, int dispAddCol = 0);
+
+    /// Takes a DOF vector and sends its values to a given PETSc vector.
+    void setDofVector(Vec& petscVec, DOFVector<double>* vec, 
+		      int dispMult = 1, int dispAdd = 0, bool rankOnly = false);
+
+  protected:
+    int nBoundaryDofs;
+
+    int rStartBoundaryDofs;
+
+    int nOverallBoundaryDofs;
+
+    std::set<DegreeOfFreedom> boundaryDofs;
+
+    map<DegreeOfFreedom, DegreeOfFreedom> mapGlobalBoundaryDof;
+
+    int nInteriorDofs;
+
+    int rStartInteriorDofs;
+
+    int nOverallInteriorDofs;
+
+    std::set<DegreeOfFreedom> interiorDofs;    
+
+    map<DegreeOfFreedom, DegreeOfFreedom> mapGlobalInteriorDof;
+
+    Mat matA11, matA12, matA21, matA22;
+
+    IS interiorIs, boundaryIs;
+  };
+#endif
+
+}
+
+#endif
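
The two index sets interiorIs and boundaryIs describe one contiguous block of PETSc indices per rank: the rank's interior unknowns come first, its boundary unknowns directly behind them (see updateDofData() above). The sketch below builds such a pair of index sets for made-up sizes, purely to illustrate the layout; the numbers and the helper name are hypothetical and not taken from AMDiS.

#include <petscis.h>

/* Hypothetical example: one rank with 3 interior and 2 boundary DOFs and a
   single component. rStart stands for the rank's global offset, i.e.
   (rStartInteriorDofs + rStartBoundaryDofs) * nComponents in the solver. */
static void createExampleIndexSets(IS *interiorIs, IS *boundaryIs)
{
  const PetscInt nInterior = 3, nBoundary = 2, rStart = 0;

  /* Interior block: indices rStart .. rStart + nInterior - 1. */
  ISCreateStride(PETSC_COMM_WORLD, nInterior, rStart, 1, interiorIs);

  /* Boundary block follows immediately after the interior block. */
  ISCreateStride(PETSC_COMM_WORLD, nBoundary, rStart + nInterior, 1, boundaryIs);
}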
-- 
GitLab