From cd4895dbfbf45a4eab579d6b4c1261a13396ba3d Mon Sep 17 00:00:00 2001
From: Thomas Witkowski <thomas.witkowski@gmx.de>
Date: Fri, 18 Mar 2011 11:14:05 +0000
Subject: [PATCH] New structure for PETSc solver objects.

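The PETSc-specific parts of the parallel problem class move out of
PetscSolver.{h,cc} into the new files parallel/PetscProblemStat.{h,cc}.
PetscSolver is reduced to a small hierarchy of solver objects: the base
class provides the static factory method getPetscSolver(), and the
subclasses PetscSolverGlobalMatrix and PetscSolverSchur implement
providePetscSolver() to create and configure the KSP and PC objects.
The standalone files PetscSolverSchur.{h,cc} are removed, and the
MeshDistributor constructor no longer takes a name argument (the name
is fixed to "parallel").

The concrete solver is chosen at runtime via the init file parameter
"parallel->solver"; a minimal sketch, assuming the usual AMDiS parameter
file syntax:

    parallel->solver: petsc-schur

Valid values are "petsc" (the default, solving with one global matrix)
and "petsc-schur".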
---
 AMDiS/other/include/Makefile_AMDiS.mk  |   3 +-
 AMDiS/src/DOFMatrix.h                  |   2 +-
 AMDiS/src/ITL_OEMSolver.h              |   4 +-
 AMDiS/src/Makefile.am                  |   4 +-
 AMDiS/src/Makefile.in                  |  26 +-
 AMDiS/src/ProblemVec.h                 |  58 ++-
 AMDiS/src/parallel/MeshDistributor.cc  |   4 +-
 AMDiS/src/parallel/MeshDistributor.h   |  10 +-
 AMDiS/src/parallel/PetscProblemStat.cc | 650 +++++++++++++++++++++++
 AMDiS/src/parallel/PetscProblemStat.h  | 106 ++++
 AMDiS/src/parallel/PetscSolver.cc      | 694 +++----------------------
 AMDiS/src/parallel/PetscSolver.h       | 120 ++---
 AMDiS/src/parallel/PetscSolverSchur.cc |  84 ---
 AMDiS/src/parallel/PetscSolverSchur.h  |  51 --
 14 files changed, 935 insertions(+), 881 deletions(-)
 create mode 100644 AMDiS/src/parallel/PetscProblemStat.cc
 create mode 100644 AMDiS/src/parallel/PetscProblemStat.h
 delete mode 100644 AMDiS/src/parallel/PetscSolverSchur.cc
 delete mode 100644 AMDiS/src/parallel/PetscSolverSchur.h

diff --git a/AMDiS/other/include/Makefile_AMDiS.mk b/AMDiS/other/include/Makefile_AMDiS.mk
index b001345d..71fbb126 100644
--- a/AMDiS/other/include/Makefile_AMDiS.mk
+++ b/AMDiS/other/include/Makefile_AMDiS.mk
@@ -36,7 +36,8 @@ else
 endif
 
 PARMETIS_LIB = -L$(PARMETIS_DIR) -lparmetis -lmetis
-ZOLTAN_LIB = -L$(AMDIS_DIR)/lib/zoltan_build/lib -lzoltan
+#ZOLTAN_LIB = -L$(AMDIS_DIR)/lib/zoltan_build/lib -lzoltan
+ZOLTAN_LIB =
 
 LIBS += $(AMDIS_LIB) $(PNG_LIB)
 LIBS += -lboost_iostreams -lboost_filesystem -lboost_system -lboost_date_time
diff --git a/AMDiS/src/DOFMatrix.h b/AMDiS/src/DOFMatrix.h
index 82d45e3c..6d39ab12 100644
--- a/AMDiS/src/DOFMatrix.h
+++ b/AMDiS/src/DOFMatrix.h
@@ -399,7 +399,7 @@ namespace AMDiS {
     void serialize(std::ostream &out);
 
     /// Reads a matrix from an input stream.
-    void deserialize(::std::istream &in);
+    void deserialize(std::istream &in);
 
     ///
     int memsize();
diff --git a/AMDiS/src/ITL_OEMSolver.h b/AMDiS/src/ITL_OEMSolver.h
index 2983ac57..11157cd1 100644
--- a/AMDiS/src/ITL_OEMSolver.h
+++ b/AMDiS/src/ITL_OEMSolver.h
@@ -52,7 +52,7 @@ namespace AMDiS {
 
   public:
     /// The constructor reads needed parameters and sets solvers \ref name.
-    ITL_OEMSolver(::std::string name) : OEMSolver(name)  {}
+    ITL_OEMSolver(std::string name) : OEMSolver(name)  {}
   
     ~ITL_OEMSolver() {}
 
@@ -102,7 +102,7 @@ namespace AMDiS {
       typedef DOFMatrix::value_type value_type;
   public:
       /// The constructor reads needed parameters and sets solvers \ref name.
-      ITL_OEMSolver_para(::std::string name) 
+      ITL_OEMSolver_para(std::string name) 
 	  : OEMSolver(name), ell(OEMSolver::max_iter) 
       {
 	  GET_PARAMETER(0, name + "->ell", "%d", &ell);
diff --git a/AMDiS/src/Makefile.am b/AMDiS/src/Makefile.am
index 077f1800..a15f9abf 100644
--- a/AMDiS/src/Makefile.am
+++ b/AMDiS/src/Makefile.am
@@ -20,8 +20,8 @@ if USE_PARALLEL_DOMAIN_AMDIS
   parallel/ParallelDebug.cc \
   parallel/ParallelProblemStatBase.cc \
   parallel/ParMetisPartitioner.cc \
+  parallel/PetscProblemStat.cc \
   parallel/PetscSolver.cc \
-  parallel/PetscSolverSchur.cc \
   parallel/StdMpi.cc
   libamdis_la_CXXFLAGS += -DHAVE_PARALLEL_DOMAIN_AMDIS=1
   AMDIS_INCLUDES += -I$(PARMETIS_DIR)
@@ -252,8 +252,8 @@ parallel/MpiHelper.h \
 parallel/ParallelDebug.h \
 parallel/ParallelProblemStatBase.h \
 parallel/ParMetisPartitioner.h \
+parallel/PetscProblemStat.h \
 parallel/PetscSolver.h \
-parallel/PetscSolverSchur.h \
 parallel/StdMpi.h \
 parallel/ZoltanPartitioner.h \
 reinit/BoundaryElementDist.h \
diff --git a/AMDiS/src/Makefile.in b/AMDiS/src/Makefile.in
index ccf2fbbb..8b4a2a1f 100644
--- a/AMDiS/src/Makefile.in
+++ b/AMDiS/src/Makefile.in
@@ -46,8 +46,8 @@ host_triplet = @host@
 @USE_PARALLEL_DOMAIN_AMDIS_TRUE@  parallel/ParallelDebug.cc \
 @USE_PARALLEL_DOMAIN_AMDIS_TRUE@  parallel/ParallelProblemStatBase.cc \
 @USE_PARALLEL_DOMAIN_AMDIS_TRUE@  parallel/ParMetisPartitioner.cc \
+@USE_PARALLEL_DOMAIN_AMDIS_TRUE@  parallel/PetscProblemStat.cc \
 @USE_PARALLEL_DOMAIN_AMDIS_TRUE@  parallel/PetscSolver.cc \
-@USE_PARALLEL_DOMAIN_AMDIS_TRUE@  parallel/PetscSolverSchur.cc \
 @USE_PARALLEL_DOMAIN_AMDIS_TRUE@  parallel/StdMpi.cc
 
 @USE_PARALLEL_DOMAIN_AMDIS_TRUE@am__append_2 = -DHAVE_PARALLEL_DOMAIN_AMDIS=1
@@ -91,8 +91,8 @@ am__libamdis_la_SOURCES_DIST = parallel/ElementObjectData.cc \
 	parallel/MeshDistributor.cc parallel/MeshManipulation.cc \
 	parallel/MeshPartitioner.cc parallel/MpiHelper.cc \
 	parallel/ParallelDebug.cc parallel/ParallelProblemStatBase.cc \
-	parallel/ParMetisPartitioner.cc parallel/PetscSolver.cc \
-	parallel/PetscSolverSchur.cc parallel/StdMpi.cc \
+	parallel/ParMetisPartitioner.cc parallel/PetscProblemStat.cc \
+	parallel/PetscSolver.cc parallel/StdMpi.cc \
 	parallel/ZoltanPartitioner.cc AdaptBase.cc AdaptInfo.cc \
 	AdaptInstationary.cc AdaptStationary.cc Assembler.cc \
 	BasisFunction.cc Boundary.cc BoundaryManager.cc Cholesky.cc \
@@ -137,8 +137,8 @@ am__libamdis_la_SOURCES_DIST = parallel/ElementObjectData.cc \
 @USE_PARALLEL_DOMAIN_AMDIS_TRUE@	libamdis_la-ParallelDebug.lo \
 @USE_PARALLEL_DOMAIN_AMDIS_TRUE@	libamdis_la-ParallelProblemStatBase.lo \
 @USE_PARALLEL_DOMAIN_AMDIS_TRUE@	libamdis_la-ParMetisPartitioner.lo \
+@USE_PARALLEL_DOMAIN_AMDIS_TRUE@	libamdis_la-PetscProblemStat.lo \
 @USE_PARALLEL_DOMAIN_AMDIS_TRUE@	libamdis_la-PetscSolver.lo \
-@USE_PARALLEL_DOMAIN_AMDIS_TRUE@	libamdis_la-PetscSolverSchur.lo \
 @USE_PARALLEL_DOMAIN_AMDIS_TRUE@	libamdis_la-StdMpi.lo
 @USE_ZOLTAN_TRUE@am__objects_2 = libamdis_la-ZoltanPartitioner.lo
 am__objects_3 = $(am__objects_1) $(am__objects_2)
@@ -574,8 +574,8 @@ parallel/MpiHelper.h \
 parallel/ParallelDebug.h \
 parallel/ParallelProblemStatBase.h \
 parallel/ParMetisPartitioner.h \
+parallel/PetscProblemStat.h \
 parallel/PetscSolver.h \
-parallel/PetscSolverSchur.h \
 parallel/StdMpi.h \
 parallel/ZoltanPartitioner.h \
 reinit/BoundaryElementDist.h \
@@ -879,8 +879,8 @@ distclean-compile:
 @AMDEP_TRUE@@am__include@ @am__quote@./$(DEPDIR)/libamdis_la-Parameters.Plo@am__quote@
 @AMDEP_TRUE@@am__include@ @am__quote@./$(DEPDIR)/libamdis_la-Parametric.Plo@am__quote@
 @AMDEP_TRUE@@am__include@ @am__quote@./$(DEPDIR)/libamdis_la-PeriodicBC.Plo@am__quote@
+@AMDEP_TRUE@@am__include@ @am__quote@./$(DEPDIR)/libamdis_la-PetscProblemStat.Plo@am__quote@
 @AMDEP_TRUE@@am__include@ @am__quote@./$(DEPDIR)/libamdis_la-PetscSolver.Plo@am__quote@
-@AMDEP_TRUE@@am__include@ @am__quote@./$(DEPDIR)/libamdis_la-PetscSolverSchur.Plo@am__quote@
 @AMDEP_TRUE@@am__include@ @am__quote@./$(DEPDIR)/libamdis_la-PngWriter.Plo@am__quote@
 @AMDEP_TRUE@@am__include@ @am__quote@./$(DEPDIR)/libamdis_la-PovrayWriter.Plo@am__quote@
 @AMDEP_TRUE@@am__include@ @am__quote@./$(DEPDIR)/libamdis_la-ProblemImplicit.Plo@am__quote@
@@ -1029,6 +1029,13 @@ libamdis_la-ParMetisPartitioner.lo: parallel/ParMetisPartitioner.cc
 @AMDEP_TRUE@@am__fastdepCXX_FALSE@	DEPDIR=$(DEPDIR) $(CXXDEPMODE) $(depcomp) @AMDEPBACKSLASH@
 @am__fastdepCXX_FALSE@	$(LIBTOOL) --tag=CXX --mode=compile $(CXX) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(libamdis_la_CXXFLAGS) $(CXXFLAGS) -c -o libamdis_la-ParMetisPartitioner.lo `test -f 'parallel/ParMetisPartitioner.cc' || echo '$(srcdir)/'`parallel/ParMetisPartitioner.cc
 
+libamdis_la-PetscProblemStat.lo: parallel/PetscProblemStat.cc
+@am__fastdepCXX_TRUE@	if $(LIBTOOL) --tag=CXX --mode=compile $(CXX) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(libamdis_la_CXXFLAGS) $(CXXFLAGS) -MT libamdis_la-PetscProblemStat.lo -MD -MP -MF "$(DEPDIR)/libamdis_la-PetscProblemStat.Tpo" -c -o libamdis_la-PetscProblemStat.lo `test -f 'parallel/PetscProblemStat.cc' || echo '$(srcdir)/'`parallel/PetscProblemStat.cc; \
+@am__fastdepCXX_TRUE@	then mv -f "$(DEPDIR)/libamdis_la-PetscProblemStat.Tpo" "$(DEPDIR)/libamdis_la-PetscProblemStat.Plo"; else rm -f "$(DEPDIR)/libamdis_la-PetscProblemStat.Tpo"; exit 1; fi
+@AMDEP_TRUE@@am__fastdepCXX_FALSE@	source='parallel/PetscProblemStat.cc' object='libamdis_la-PetscProblemStat.lo' libtool=yes @AMDEPBACKSLASH@
+@AMDEP_TRUE@@am__fastdepCXX_FALSE@	DEPDIR=$(DEPDIR) $(CXXDEPMODE) $(depcomp) @AMDEPBACKSLASH@
+@am__fastdepCXX_FALSE@	$(LIBTOOL) --tag=CXX --mode=compile $(CXX) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(libamdis_la_CXXFLAGS) $(CXXFLAGS) -c -o libamdis_la-PetscProblemStat.lo `test -f 'parallel/PetscProblemStat.cc' || echo '$(srcdir)/'`parallel/PetscProblemStat.cc
+
 libamdis_la-PetscSolver.lo: parallel/PetscSolver.cc
 @am__fastdepCXX_TRUE@	if $(LIBTOOL) --tag=CXX --mode=compile $(CXX) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(libamdis_la_CXXFLAGS) $(CXXFLAGS) -MT libamdis_la-PetscSolver.lo -MD -MP -MF "$(DEPDIR)/libamdis_la-PetscSolver.Tpo" -c -o libamdis_la-PetscSolver.lo `test -f 'parallel/PetscSolver.cc' || echo '$(srcdir)/'`parallel/PetscSolver.cc; \
 @am__fastdepCXX_TRUE@	then mv -f "$(DEPDIR)/libamdis_la-PetscSolver.Tpo" "$(DEPDIR)/libamdis_la-PetscSolver.Plo"; else rm -f "$(DEPDIR)/libamdis_la-PetscSolver.Tpo"; exit 1; fi
@@ -1036,13 +1043,6 @@ libamdis_la-PetscSolver.lo: parallel/PetscSolver.cc
 @AMDEP_TRUE@@am__fastdepCXX_FALSE@	DEPDIR=$(DEPDIR) $(CXXDEPMODE) $(depcomp) @AMDEPBACKSLASH@
 @am__fastdepCXX_FALSE@	$(LIBTOOL) --tag=CXX --mode=compile $(CXX) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(libamdis_la_CXXFLAGS) $(CXXFLAGS) -c -o libamdis_la-PetscSolver.lo `test -f 'parallel/PetscSolver.cc' || echo '$(srcdir)/'`parallel/PetscSolver.cc
 
-libamdis_la-PetscSolverSchur.lo: parallel/PetscSolverSchur.cc
-@am__fastdepCXX_TRUE@	if $(LIBTOOL) --tag=CXX --mode=compile $(CXX) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(libamdis_la_CXXFLAGS) $(CXXFLAGS) -MT libamdis_la-PetscSolverSchur.lo -MD -MP -MF "$(DEPDIR)/libamdis_la-PetscSolverSchur.Tpo" -c -o libamdis_la-PetscSolverSchur.lo `test -f 'parallel/PetscSolverSchur.cc' || echo '$(srcdir)/'`parallel/PetscSolverSchur.cc; \
-@am__fastdepCXX_TRUE@	then mv -f "$(DEPDIR)/libamdis_la-PetscSolverSchur.Tpo" "$(DEPDIR)/libamdis_la-PetscSolverSchur.Plo"; else rm -f "$(DEPDIR)/libamdis_la-PetscSolverSchur.Tpo"; exit 1; fi
-@AMDEP_TRUE@@am__fastdepCXX_FALSE@	source='parallel/PetscSolverSchur.cc' object='libamdis_la-PetscSolverSchur.lo' libtool=yes @AMDEPBACKSLASH@
-@AMDEP_TRUE@@am__fastdepCXX_FALSE@	DEPDIR=$(DEPDIR) $(CXXDEPMODE) $(depcomp) @AMDEPBACKSLASH@
-@am__fastdepCXX_FALSE@	$(LIBTOOL) --tag=CXX --mode=compile $(CXX) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(libamdis_la_CXXFLAGS) $(CXXFLAGS) -c -o libamdis_la-PetscSolverSchur.lo `test -f 'parallel/PetscSolverSchur.cc' || echo '$(srcdir)/'`parallel/PetscSolverSchur.cc
-
 libamdis_la-StdMpi.lo: parallel/StdMpi.cc
 @am__fastdepCXX_TRUE@	if $(LIBTOOL) --tag=CXX --mode=compile $(CXX) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(libamdis_la_CXXFLAGS) $(CXXFLAGS) -MT libamdis_la-StdMpi.lo -MD -MP -MF "$(DEPDIR)/libamdis_la-StdMpi.Tpo" -c -o libamdis_la-StdMpi.lo `test -f 'parallel/StdMpi.cc' || echo '$(srcdir)/'`parallel/StdMpi.cc; \
 @am__fastdepCXX_TRUE@	then mv -f "$(DEPDIR)/libamdis_la-StdMpi.Tpo" "$(DEPDIR)/libamdis_la-StdMpi.Plo"; else rm -f "$(DEPDIR)/libamdis_la-StdMpi.Tpo"; exit 1; fi
diff --git a/AMDiS/src/ProblemVec.h b/AMDiS/src/ProblemVec.h
index 57b058f4..ac4f2910 100644
--- a/AMDiS/src/ProblemVec.h
+++ b/AMDiS/src/ProblemVec.h
@@ -39,6 +39,8 @@
 
 namespace AMDiS {
 
+  using namespace std;
+
   struct OperatorPos 
   {
     int row, col;
@@ -51,14 +53,14 @@ namespace AMDiS {
   {
   protected:
     // Defines a mapping type from dof indices to world coordinates.
-    typedef std::map<DegreeOfFreedom, WorldVector<double> > DofToCoord;
+    typedef map<DegreeOfFreedom, WorldVector<double> > DofToCoord;
 
     // Defines a mapping type from dof indices to world coordinates.
-    typedef std::map<WorldVector<double>, DegreeOfFreedom> CoordToDof;
+    typedef map<WorldVector<double>, DegreeOfFreedom> CoordToDof;
 
   public:
     /// Constructor
-    ProblemVec(std::string nameStr,
+    ProblemVec(string nameStr,
 	       ProblemIterationInterface *problemIteration = NULL)
       : StandardProblemIteration(this),
 	name(nameStr),
@@ -227,7 +229,7 @@ namespace AMDiS {
     void writeFiles(AdaptInfo &adaptInfo, bool force);
 
     /// Interpolates fct to \ref solution.
-    void interpolInitialSolution(std::vector<AbstractFunction<double, WorldVector<double> >*> *fct);
+    void interpolInitialSolution(vector<AbstractFunction<double, WorldVector<double> >*> *fct);
 
     /// Adds an operator to \ref A.
     void addMatrixOperator(Operator *op, int i, int j,
@@ -346,7 +348,7 @@ namespace AMDiS {
     }
 
     /// Returns \ref meshes
-    inline std::vector<Mesh*> getMeshes() 
+    inline vector<Mesh*> getMeshes() 
     {
       return meshes; 
     }
@@ -361,19 +363,19 @@ namespace AMDiS {
     }
 
     /// Returns \ref feSpaces.
-    inline std::vector<FiniteElemSpace*> getFeSpaces() 
+    inline vector<FiniteElemSpace*> getFeSpaces() 
     { 
       return feSpaces; 
     }
 
     /// Returns \ref componentSpaces;
-    inline std::vector<FiniteElemSpace*> getComponentFeSpaces() 
+    inline vector<FiniteElemSpace*> getComponentFeSpaces() 
     {
       return componentSpaces;
     }
 
     /// Returns \ref estimator.
-    inline std::vector<Estimator*> getEstimators() 
+    inline vector<Estimator*> getEstimators() 
     { 
       return estimator; 
     }
@@ -409,13 +411,13 @@ namespace AMDiS {
     }
 
     /// Returns \ref marker.
-    inline std::vector<Marker*> getMarkers() 
+    inline vector<Marker*> getMarkers() 
     { 
       return marker; 
     }
 
     /// Returns the name of the problem
-    inline virtual std::string getName() 
+    inline virtual string getName() 
     { 
       return name; 
     }
@@ -445,7 +447,7 @@ namespace AMDiS {
      */
 
     /// Sets \ref estimator.
-    inline void setEstimator(std::vector<Estimator*> est) 
+    inline void setEstimator(vector<Estimator*> est) 
     { 
       estimator = est; 
     }
@@ -511,17 +513,17 @@ namespace AMDiS {
      * Outputs the mesh of the given component, but the values are taken from the 
      * residual error estimator. 
      */
-    void writeResidualMesh(int comp, AdaptInfo *adaptInfo, std::string name);
+    void writeResidualMesh(int comp, AdaptInfo *adaptInfo, string name);
 
     /// Function that implements the serialization procedure.
-    virtual void serialize(std::ostream &out);
+    virtual void serialize(ostream &out);
 
     /// Function that implements the deserialization procedure.
-    virtual void deserialize(std::istream &in);
+    virtual void deserialize(istream &in);
 
 
     /// Returns \ref fileWriters.
-    std::vector<FileWriterInterface*>& getFileWriterList() 
+    vector<FileWriterInterface*>& getFileWriterList() 
     {
       return fileWriters;
     }
@@ -536,7 +538,7 @@ namespace AMDiS {
   protected:
     
     /// Name of this problem.
-    std::string name;
+    string name;
 
     /// Number of problem components
     int nComponents;
@@ -549,16 +551,16 @@ namespace AMDiS {
     int nMeshes;
 
     /// FE spaces of this problem.
-    std::vector<FiniteElemSpace*> feSpaces;
+    vector<FiniteElemSpace*> feSpaces;
 
     /// Meshes of this problem.
-    std::vector<Mesh*> meshes;
+    vector<Mesh*> meshes;
 
     /// Pointer to the fe spaces for the different problem components
-    std::vector<FiniteElemSpace*> componentSpaces;
+    vector<FiniteElemSpace*> componentSpaces;
 
     /// Pointer to the meshes for the different problem components
-    std::vector<Mesh*> componentMeshes;
+    vector<Mesh*> componentMeshes;
 
     /** \brief
      * Stores information about which meshes must be traversed to assemble the
@@ -568,10 +570,10 @@ namespace AMDiS {
     ComponentTraverseInfo traverseInfo;
     
     /// Responsible for element marking.
-    std::vector<Marker*> marker;
+    vector<Marker*> marker;
 
     /// Estimator of this problem. Used in \ref estimate().
-    std::vector<Estimator*> estimator;
+    vector<Estimator*> estimator;
 
     /// Linear solver of this problem. Used in \ref solve().
     OEMSolver *solver;
@@ -595,7 +597,7 @@ namespace AMDiS {
      * be assembled only once. All other matrices will be assembled at every
      * time step.
      */
-    std::vector< std::vector< bool > > assembleMatrixOnlyOnce;
+    vector<vector<bool> > assembleMatrixOnlyOnce;
 
     /** \brief
      * If [i][j] of this field is set to true, the corresponding DOFMatrix of
@@ -603,13 +605,13 @@ namespace AMDiS {
      * to determine, if assembling of a matrix can be ommitted, if it is set
      * to be assembled only once.
      */
-    std::vector< std::vector< bool > > assembledMatrix;
+    vector<vector<bool> > assembledMatrix;
 
     /// Determines whether domain boundaries should be considered at assembling.
     bool useGetBound;
 
     /// Writes the meshes and solution after the adaption loop.
-    std::vector<FileWriterInterface*> fileWriters;
+    vector<FileWriterInterface*> fileWriters;
 
     /** \brief
      * All actions of mesh refinement are performed by refinementManager.
@@ -636,7 +638,7 @@ namespace AMDiS {
      * the problem. This may be used to compute the real error of the computed
      * solution.
      */
-    std::vector<AbstractFunction<double, WorldVector<double> >*> exactSolutionFcts;
+    vector<AbstractFunction<double, WorldVector<double> >*> exactSolutionFcts;
 
     /** \brief
      * If true, the error is not estimated but computed from the exact solution
@@ -654,9 +656,9 @@ namespace AMDiS {
     /// If true, AMDiS prints information about the assembling procedure to the screen.
     bool writeAsmInfo;
 
-    std::map<Operator*, std::vector<OperatorPos> > operators;
+    map<Operator*, vector<OperatorPos> > operators;
 
-    std::map<Operator*, Flag> opFlags;
+    map<Operator*, Flag> opFlags;
   };
 }
 
diff --git a/AMDiS/src/parallel/MeshDistributor.cc b/AMDiS/src/parallel/MeshDistributor.cc
index 4b2f3c65..37ada7ca 100644
--- a/AMDiS/src/parallel/MeshDistributor.cc
+++ b/AMDiS/src/parallel/MeshDistributor.cc
@@ -60,9 +60,9 @@ namespace AMDiS {
   }
 
 
-  MeshDistributor::MeshDistributor(string str)
+  MeshDistributor::MeshDistributor()
     : probStat(0),
-      name(str),
+      name("parallel"),
       feSpace(NULL),
       mesh(NULL),
       refineManager(NULL),
diff --git a/AMDiS/src/parallel/MeshDistributor.h b/AMDiS/src/parallel/MeshDistributor.h
index 89cc6d6c..d21771ad 100644
--- a/AMDiS/src/parallel/MeshDistributor.h
+++ b/AMDiS/src/parallel/MeshDistributor.h
@@ -72,13 +72,13 @@ namespace AMDiS {
     typedef map<const DegreeOfFreedom*, DegreeOfFreedom> DofIndexMap;
 
    /// Maps a boundary type, i.e., a boundary identifier index, to a periodic 
-    /// dof mapping.
+    /// DOF mapping.
     typedef map<BoundaryType, DofMapping> PeriodicDofMap;
 
     typedef vector<MeshStructure> MeshCodeVec;
 
   public:
-    MeshDistributor(string str);
+    MeshDistributor();
 		          
     virtual ~MeshDistributor() {}
 
@@ -106,7 +106,7 @@ namespace AMDiS {
     * This function checks if the mesh has changed on at least one rank. In this case,
      * the interior boundaries are adapted on all ranks such that they fit together on
      * all ranks. Furthermore the function \ref updateLocalGlobalNumbering() is called
-     * to update the dof numberings and mappings on all rank due to the new mesh
+     * to update the DOF numberings and mappings on all ranks due to the new mesh
      * structure.
      *
      * \param[in]  tryRepartition   If this parameter is true, repartitioning may be
@@ -160,7 +160,7 @@ namespace AMDiS {
       return mapLocalGlobalDofs;
     }
 
-    /// Maps a local dof to its global index.
+    /// Maps a local DOF to its global index.
     inline DegreeOfFreedom mapLocalToGlobal(DegreeOfFreedom dof)
     {
       return mapLocalGlobalDofs[dof];
@@ -168,7 +168,7 @@ namespace AMDiS {
 
     DegreeOfFreedom mapGlobalToLocal(DegreeOfFreedom dof);
 
-    /// Maps a local dof to its local index.
+    /// Maps a local DOF to its local index.
     inline DegreeOfFreedom mapLocalToDofIndex(DegreeOfFreedom dof)
     {
       return mapLocalDofIndex[dof];
diff --git a/AMDiS/src/parallel/PetscProblemStat.cc b/AMDiS/src/parallel/PetscProblemStat.cc
new file mode 100644
index 00000000..99a310c0
--- /dev/null
+++ b/AMDiS/src/parallel/PetscProblemStat.cc
@@ -0,0 +1,650 @@
+//
+// Software License for AMDiS
+//
+// Copyright (c) 2010 Dresden University of Technology 
+// All rights reserved.
+// Authors: Simon Vey, Thomas Witkowski et al.
+//
+// This file is part of AMDiS
+//
+// See also license.opensource.txt in the distribution.
+
+
+#include <vector>
+#include <set>
+
+#include "parallel/PetscProblemStat.h"
+#include "parallel/StdMpi.h"
+#include "parallel/ParallelDebug.h"
+#include "parallel/PetscSolver.h"
+#include "DOFVector.h"
+#include "Debug.h"
+#include "SystemVector.h"
+
+#include "petscksp.h"
+
+namespace AMDiS {
+
+  using namespace std;
+
+
+  void PetscProblemStat::solve(AdaptInfo *adaptInfo, bool fixedMatrix)
+  {
+    FUNCNAME("PetscProblemStat::solve()");
+
+    TEST_EXIT(meshDistributor)("Should not happen!\n");
+
+    double wtime = MPI::Wtime();
+
+    fillPetscMatrix(systemMatrix, rhs);
+    solvePetscMatrix(*solution, adaptInfo);   
+
+    INFO(info, 8)("solution of discrete system needed %.5f seconds\n", 
+		  MPI::Wtime() - wtime);
+  }
+
+
+  void PetscProblemStat::setDofMatrix(DOFMatrix* mat, int dispMult, 
+				      int dispAddRow, int dispAddCol)
+  {
+    FUNCNAME("PetscProblemStat::setDofMatrix()");
+
+    TEST_EXIT(mat)("No DOFMatrix!\n");
+
+    using mtl::tag::row; using mtl::tag::nz; using mtl::begin; using mtl::end;
+    namespace traits= mtl::traits;
+    typedef DOFMatrix::base_matrix_type Matrix;
+
+    traits::col<Matrix>::type col(mat->getBaseMatrix());
+    traits::const_value<Matrix>::type value(mat->getBaseMatrix());
+
+    typedef traits::range_generator<row, Matrix>::type cursor_type;
+    typedef traits::range_generator<nz, cursor_type>::type icursor_type;
+
+    vector<int> cols;
+    vector<double> values;
+    cols.reserve(300);
+    values.reserve(300);
+    
+    vector<int> globalCols;
+
+
+    // === Traverse all rows of the DOF matrix and insert the values ===
+    // === row-wise into the PETSc matrix.                           ===
+
+    for (cursor_type cursor = begin<row>(mat->getBaseMatrix()), 
+	   cend = end<row>(mat->getBaseMatrix()); cursor != cend; ++cursor) {
+
+      // Global index of the current row dof.
+      int globalRowDof = meshDistributor->mapLocalToGlobal(*cursor);
+      // Test if the current row dof is a periodic dof.
+      bool periodicRow = meshDistributor->isPeriodicDof(globalRowDof);
+
+      if (!periodicRow) {
+	// === Row DOF index is not periodic. ===
+
+	// Calculate PETSc row index.
+
+	int rowIndex = globalRowDof * dispMult + dispAddRow;
+
+	cols.clear();
+	values.clear();
+
+	for (icursor_type icursor = begin<nz>(cursor), icend = end<nz>(cursor); 
+	     icursor != icend; ++icursor) {
+
+	  // Global index of the current column DOF.
+	  int globalColDof = meshDistributor->mapLocalToGlobal(col(*icursor));
+	  // Test if the current col dof is a periodic dof.
+	  bool periodicCol = meshDistributor->isPeriodicDof(globalColDof);
+	  // Calculate PETSc col index.
+	  int colIndex = globalColDof * dispMult + dispAddCol;
+
+	  // Ignore all zero entries, except if it is a diagonal entry.
+ 	  if (value(*icursor) == 0.0 && rowIndex != colIndex)
+ 	    continue;
+
+	  if (!periodicCol) {
+	    // Calculate the exact position of the column index in the PETSc matrix.
+	    cols.push_back(globalColDof * dispMult + dispAddCol);
+	    values.push_back(value(*icursor));
+	  } else {
+	    // === Row index is not periodic, but column index is. ===
+
+	    // Create set of all periodic associations of the column index.
+	    std::set<int> perAsc;
+	    std::set<int>& perColAsc = 
+	      meshDistributor->getPerDofAssociations(globalColDof);
+	    for (std::set<int>::iterator it = perColAsc.begin(); 
+		 it != perColAsc.end(); ++it)
+	      if (*it >= -3)
+		perAsc.insert(*it);
+    
+	    // Scale the value according to the number of periodic associations of the column index.
+	    double scaledValue = 
+	      value(*icursor) * pow(0.5, static_cast<double>(perAsc.size()));
+
+	    
+	    // === Create set of all matrix column indices due to the periodic ===
+	    // === associations of the column DOF index.                       ===
+
+	    vector<int> newCols;
+
+	    // First, add the original matrix index.
+	    newCols.push_back(globalColDof);
+	    
+	    // And add all periodic matrix indices.
+	    for (std::set<int>::iterator it = perAsc.begin(); 
+		 it != perAsc.end(); ++it) {
+	      int nCols = static_cast<int>(newCols.size());
+
+	      for (int i = 0; i < nCols; i++) {
+ 		TEST_EXIT_DBG(meshDistributor->isPeriodicDof(newCols[i], *it))
+ 		  ("Wrong periodic DOF associations at boundary %d with DOF %d!\n",
+		   *it, newCols[i]);
+
+		newCols.push_back(meshDistributor->getPeriodicMapping(newCols[i], *it));
+	      }
+	    }
+
+	    for (unsigned int i = 0; i < newCols.size(); i++) {
+	      cols.push_back(newCols[i] * dispMult + dispAddCol);
+	      values.push_back(scaledValue);	      
+	    }
+	  }
+	}
+
+	MatSetValues(petscMatrix, 1, &rowIndex, cols.size(), 
+		     &(cols[0]), &(values[0]), ADD_VALUES);	
+      } else {
+	// === Row DOF index is periodic. ===
+
+	// Because this row is periodic, we have to add the entries of this
+	// matrix row to multiple rows. The following maps store for each row an
+	// array of column indices and values of the entries that must be added
+	// to the PETSc matrix.
+	map<int, vector<int> > colsMap;
+	map<int, vector<double> > valsMap;
+
+	// Traverse all column entries.
+	for (icursor_type icursor = begin<nz>(cursor), icend = end<nz>(cursor); 
+	     icursor != icend; ++icursor) {
+
+	  // Global index of the current column DOF.
+	  int globalColDof = meshDistributor->mapLocalToGlobal(col(*icursor));
+
+	  // Ignore all zero entries, except if it is a diagonal entry.
+ 	  if (value(*icursor) == 0.0 && globalRowDof != globalColDof)
+ 	    continue;
+
+
+	  // === Add all periodic associations of both the row and the column ===
+	  // === indices to the set perAsc.                                   ===
+
+	  std::set<int> perAsc;
+
+	  if (meshDistributor->isPeriodicDof(globalColDof)) {
+	    std::set<int>& perColAsc = 
+	      meshDistributor->getPerDofAssociations(globalColDof);
+	    for (std::set<int>::iterator it = perColAsc.begin(); 
+		 it != perColAsc.end(); ++it)
+	      if (*it >= -3)
+		perAsc.insert(*it);
+	  }
+
+	  std::set<int>& perRowAsc = 
+	    meshDistributor->getPerDofAssociations(globalRowDof);
+	  for (std::set<int>::iterator it = perRowAsc.begin(); 
+	       it != perRowAsc.end(); ++it)
+	    if (*it >= -3)
+	      perAsc.insert(*it);
+
+	  // Scale the value with respect to the number of periodic associations.
+	  double scaledValue = 
+	    value(*icursor) * pow(0.5, static_cast<double>(perAsc.size()));
+
+
+	  // === Create all matrix entries with respect to the periodic  ===
+	  // === associations of the row and column indices.             ===
+
+	  vector<pair<int, int> > entry;
+	  
+	  // First, add the original entry.
+	  entry.push_back(make_pair(globalRowDof, globalColDof));
+
+	  // Then, traverse the periodic associations of the row and column indices
+	  // and create the corresponding entries.
+	  for (std::set<int>::iterator it = perAsc.begin(); it != perAsc.end(); ++it) {
+	    int nEntry = static_cast<int>(entry.size());
+	    for (int i = 0; i < nEntry; i++) {
+	      int perRowDof = 0;
+	      if (meshDistributor->getPeriodicMapping()[*it].count(entry[i].first))
+		perRowDof = meshDistributor->getPeriodicMapping(entry[i].first, *it);
+	      else
+		perRowDof = entry[i].first;
+
+	      int perColDof;
+	      if (meshDistributor->getPeriodicMapping()[*it].count(entry[i].second))
+		perColDof = meshDistributor->getPeriodicMapping(entry[i].second, *it);
+	      else
+		perColDof = entry[i].second;	      	      
+	      
+
+	      entry.push_back(make_pair(perRowDof, perColDof));
+	    }
+	  }
+
+
+	  // === Translate the matrix entries into the PETSc matrix. ===
+
+	  for (vector<pair<int, int> >::iterator eIt = entry.begin(); 
+	       eIt != entry.end(); ++eIt) {
+	    // Calculate row and column indices of the PETSc matrix.
+	    int rowIndex = eIt->first * dispMult + dispAddRow;
+	    int colIndex = eIt->second * dispMult + dispAddCol;
+
+	    colsMap[rowIndex].push_back(colIndex);
+	    valsMap[rowIndex].push_back(scaledValue);
+	  }
+	}
+
+
+	// === Finally, add all periodic rows to the PETSc matrix. ===
+
+	for (map<int, vector<int> >::iterator rowIt = colsMap.begin();
+	     rowIt != colsMap.end(); ++rowIt) {
+	  TEST_EXIT_DBG(rowIt->second.size() == valsMap[rowIt->first].size())
+	    ("Should not happen!\n");
+
+	  int rowIndex = rowIt->first;
+	  MatSetValues(petscMatrix, 1, &rowIndex, rowIt->second.size(),
+		       &(rowIt->second[0]), &(valsMap[rowIt->first][0]), ADD_VALUES);
+	}
+      }
+    }
+  }
+
+
+  void PetscProblemStat::setDofVector(Vec& petscVec, DOFVector<double>* vec, 
+				      int dispMult, int dispAdd)
+  {
+    FUNCNAME("PetscProblemStat::setDofVector()");
+
+    // Traverse all used DOFs in the dof vector.
+    DOFVector<double>::Iterator dofIt(vec, USED_DOFS);
+    for (dofIt.reset(); !dofIt.end(); ++dofIt) {
+      // Calculate global row index of the dof.
+      DegreeOfFreedom globalRowDof = 
+	meshDistributor->mapLocalToGlobal(dofIt.getDOFIndex());
+      // Calculate PETSc index of the row dof.
+      int index = globalRowDof * dispMult + dispAdd;
+
+      if (meshDistributor->isPeriodicDof(globalRowDof)) {
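+	// Distribute the value evenly over this DOF and all of its periodic
+	// copies: each of the perAsc.size() + 1 rows receives the same scaled
+	// contribution via ADD_VALUES.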
+	std::set<int>& perAsc = meshDistributor->getPerDofAssociations(globalRowDof);
+	double value = *dofIt / (perAsc.size() + 1.0);
+	VecSetValues(petscVec, 1, &index, &value, ADD_VALUES);
+
+	for (std::set<int>::iterator perIt = perAsc.begin(); perIt != perAsc.end(); ++perIt) {
+	  int mappedDof = meshDistributor->getPeriodicMapping(globalRowDof, *perIt);
+	  int mappedIndex = mappedDof * dispMult + dispAdd;
+	  VecSetValues(petscVec, 1, &mappedIndex, &value, ADD_VALUES);
+	}
+      } else {
+	// The dof index is not periodic.
+	double value = *dofIt;
+	VecSetValues(petscVec, 1, &index, &value, ADD_VALUES);
+      }
+    }
+  }
+
+
+  void PetscProblemStat::createPetscNnzStructure(Matrix<DOFMatrix*> *mat)
+  {
+    FUNCNAME("PetscProblemStat::createPetscNnzStructure()");
+
+    TEST_EXIT_DBG(!d_nnz)("There is something wrong!\n");
+    TEST_EXIT_DBG(!o_nnz)("There is something wrong!\n");
+
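+    // d_nnz[i] and o_nnz[i] count, for local row i, the nonzeros inside the
+    // diagonal block (columns owned by this rank) and outside of it,
+    // respectively, as expected by the preallocation interface of
+    // MatCreateMPIAIJ().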
+    int nRankRows = meshDistributor->getNumberRankDofs() * nComponents;
+    d_nnz = new int[nRankRows];
+    o_nnz = new int[nRankRows];
+    for (int i = 0; i < nRankRows; i++) {
+      d_nnz[i] = 0;
+      o_nnz[i] = 0;
+    }
+
+    using mtl::tag::row; using mtl::tag::nz; using mtl::begin; using mtl::end;
+    namespace traits = mtl::traits;
+    typedef DOFMatrix::base_matrix_type Matrix;
+    typedef vector<pair<int, int> > MatrixNnzEntry;
+    typedef map<int, DofContainer> RankToDofContainer;
+
+    // Stores for each rank the list of nnz entries (i.e., pairs of row and
+    // column index) this rank will send to it. These nnz entries are assembled
+    // on this rank, but because the row DOFs are owned by another rank, they
+    // must be sent to the owner of the row DOFs.
+    map<int, MatrixNnzEntry> sendMatrixEntry;
+
+    // Maps each DOF that must be sent to another rank to the rank number of
+    // the receiving rank.
+    map<DegreeOfFreedom, int> sendDofToRank;
+
+
+    // First, create an empty MatrixNnzEntry object for each rank we send data to.
+    for (RankToDofContainer::iterator it = meshDistributor->getRecvDofs().begin();
+	 it != meshDistributor->getRecvDofs().end(); ++it) {
+      sendMatrixEntry[it->first].resize(0);
+
+      for (DofContainer::iterator dofIt = it->second.begin();
+	   dofIt != it->second.end(); ++dofIt)
+	sendDofToRank[**dofIt] = it->first;
+    }
+
+
+    std::set<int> recvFromRank;
+    for (RankToDofContainer::iterator it = meshDistributor->getSendDofs().begin();
+	 it != meshDistributor->getSendDofs().end(); ++it)
+      recvFromRank.insert(it->first);
+
+
+    for (int i = 0; i < nComponents; i++) {
+      for (int j = 0; j < nComponents; j++) {
+ 	if (!(*mat)[i][j])
+	  continue;
+
+	Matrix bmat = (*mat)[i][j]->getBaseMatrix();
+
+	traits::col<Matrix>::type col(bmat);
+	traits::const_value<Matrix>::type value(bmat);
+	  
+	typedef traits::range_generator<row, Matrix>::type cursor_type;
+	typedef traits::range_generator<nz, cursor_type>::type icursor_type;
+	
+	for (cursor_type cursor = begin<row>(bmat), 
+	       cend = end<row>(bmat); cursor != cend; ++cursor) {
+	  
+	  int globalRowDof = meshDistributor->mapLocalToGlobal(*cursor);
+
+	  vector<int> rows;
+	  rows.push_back(globalRowDof);
+	  vector<int> rowRank;
+	  if (meshDistributor->getIsRankDof(*cursor)) {
+	    rowRank.push_back(meshDistributor->getMpiRank());
+	  } else {
+	    // Find out which rank owns this DOF.
+	    TEST_EXIT_DBG(sendDofToRank.count(*cursor))("Should not happen!\n");
+	    
+	    rowRank.push_back(sendDofToRank[*cursor]);
+	  }
+
+	  // Map the local row number to the global DOF index and create from it
+	  // the global PETSc row index of this DOF.
+	  
+	  int petscRowIdx = globalRowDof * nComponents + i;
+	  
+	  if (meshDistributor->getIsRankDof(*cursor)) {
+	    	    
+	    // === The current row DOF is a rank DOF, so create the corresponding ===
+	    // === nnz values directly in the rank's nnz data.                    ===
+	    
+	    // This is the local row index of the local PETSc matrix.
+	    int localPetscRowIdx = 
+	      petscRowIdx - meshDistributor->getRstart() * nComponents;
+	    
+	    TEST_EXIT_DBG(localPetscRowIdx >= 0 && localPetscRowIdx < nRankRows)
+	      ("Should not happen!\n Debug info: localRowIdx = %d   globalRowIdx = %d   petscRowIdx = %d   localPetscRowIdx = %d   rStart = %d   nComponents = %d   nRankRows = %d\n",
+	       *cursor, meshDistributor->mapLocalToGlobal(*cursor), petscRowIdx, localPetscRowIdx, meshDistributor->getRstart(), nComponents, nRankRows);
+	    
+	    
+	    // Traverse all non zero entries in this row.
+	    for (icursor_type icursor = begin<nz>(cursor), 
+		   icend = end<nz>(cursor); icursor != icend; ++icursor) {
+	      int petscColIdx = 
+		meshDistributor->mapLocalToGlobal(col(*icursor)) * nComponents + j;
+	      
+	      if (value(*icursor) != 0.0 || petscRowIdx == petscColIdx) {
+		// The row DOF is a rank DOF, if also the column is a rank DOF, 
+		// increment the d_nnz values for this row, otherwise the o_nnz value.
+		if (petscColIdx >= meshDistributor->getRstart() * nComponents && 
+		    petscColIdx < meshDistributor->getRstart() * nComponents + nRankRows)
+		  d_nnz[localPetscRowIdx]++;
+		else
+		  o_nnz[localPetscRowIdx]++;
+	      }    
+	    }
+	  } else {
+	    // === The current row DOF is not a rank DOF, i.e., its values are  ===
+	    // === assembled on this rank but then sent to the rank that owns   ===
+	    // === the row. So the corresponding nnz structure of this row must ===
+	    // === be sent to that rank as well.                                ===
+	    
+	    // Send all non zero entries to the owner of the row DOF.
+	    int sendToRank = sendDofToRank[*cursor];
+	    
+	    for (icursor_type icursor = begin<nz>(cursor), 
+		   icend = end<nz>(cursor); icursor != icend; ++icursor) {
+	      if (value(*icursor) != 0.0) {
+		int petscColIdx = 
+		  meshDistributor->mapLocalToGlobal(col(*icursor)) * nComponents + j;
+		
+		sendMatrixEntry[sendToRank].
+		  push_back(make_pair(petscRowIdx, petscColIdx));
+	      }
+	    }
+	    
+	  } // if (isRankDof[*cursor]) ... else ...
+	} // for each row in mat[i][j]
+      } 
+    }
+
+    // === Send and recv the nnz row structure to/from other ranks. ===
+
+    StdMpi<MatrixNnzEntry> stdMpi(meshDistributor->getMpiComm(), true);
+    stdMpi.send(sendMatrixEntry);
+    for (std::set<int>::iterator it = recvFromRank.begin(); 
+	 it != recvFromRank.end(); ++it)
+      stdMpi.recv(*it);
+    stdMpi.startCommunication();
+
+
+    // === Evaluate the nnz structure this rank got from other ranks and add it to ===
+    // === the PETSc nnz data structure.                                           ===
+
+    for (map<int, MatrixNnzEntry>::iterator it = stdMpi.getRecvData().begin();
+	 it != stdMpi.getRecvData().end(); ++it) {
+      if (it->second.size() > 0) {
+	for (unsigned int i = 0; i < it->second.size(); i++) {
+	  int r = it->second[i].first;
+	  int c = it->second[i].second;
+
+	  int localRowIdx = r - meshDistributor->getRstart() * nComponents;
+
+	  TEST_EXIT_DBG(localRowIdx >= 0 && localRowIdx < nRankRows)
+	    ("Got row index %d/%d (nRankRows = %d) from rank %d. Should not happen!\n",
+	     r, localRowIdx, nRankRows, it->first);
+	  
+	  if (c < meshDistributor->getRstart() * nComponents || 
+	      c >= meshDistributor->getRstart() * nComponents + nRankRows)
+	    o_nnz[localRowIdx]++;
+	  else
+	    d_nnz[localRowIdx]++;
+	}
+      }
+    }
+
+    // The above algorithm overapproximates the number of nnz entries per row,
+    // i.e., the result is always equal to or larger than the real number of
+    // nnz values in the global parallel matrix. For small matrices this may
+    // yield values larger than the number of elements in a row, which is
+    // fixed in the following.
+
+    if (nRankRows < 100) 
+      for (int i = 0; i < nRankRows; i++)
+	d_nnz[i] = std::min(d_nnz[i], nRankRows);
+  }
+
+
+  void PetscProblemStat::fillPetscMatrix(Matrix<DOFMatrix*> *mat, SystemVector *vec)
+  {
+    FUNCNAME("PetscProblemStat::fillPetscMatrix()");
+
+    double wtime = MPI::Wtime();
+    int nRankRows = meshDistributor->getNumberRankDofs() * nComponents;
+    int nOverallRows = meshDistributor->getNumberOverallDofs() * nComponents;
+
+    // === Create PETSc vectors (rhs, solution, and a temporary vector). ===
+
+    VecCreate(PETSC_COMM_WORLD, &petscRhsVec);
+    VecSetSizes(petscRhsVec, nRankRows, nOverallRows);
+    VecSetType(petscRhsVec, VECMPI);
+
+    VecCreate(PETSC_COMM_WORLD, &petscSolVec);
+    VecSetSizes(petscSolVec, nRankRows, nOverallRows);
+    VecSetType(petscSolVec, VECMPI);
+
+    VecCreate(PETSC_COMM_WORLD, &petscTmpVec);
+    VecSetSizes(petscTmpVec, nRankRows, nOverallRows);
+    VecSetType(petscTmpVec, VECMPI);
+
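+    // Determine whether the nnz structure must be (re)created: each rank
+    // contributes a flag indicating whether its local mesh has changed since
+    // the last nnz computation, and the flags are summed over all ranks.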
+    int recvAllValues = 0;
+    int sendValue = static_cast<int>(meshDistributor->getLastMeshChangeIndex() != lastMeshNnz);
+    meshDistributor->getMpiComm().Allreduce(&sendValue, &recvAllValues, 1, MPI_INT, MPI_SUM);
+
+    if (!d_nnz || recvAllValues != 0) {
+      if (d_nnz) {
+	delete [] d_nnz;
+	d_nnz = NULL;
+	delete [] o_nnz;
+	o_nnz = NULL;
+      }
+
+      createPetscNnzStructure(mat);
+      lastMeshNnz = meshDistributor->getLastMeshChangeIndex();
+    }
+
+
+    // === Create PETSc matrix with the computed nnz data structure. ===
+
+    MatCreateMPIAIJ(PETSC_COMM_WORLD, nRankRows, nRankRows, nOverallRows, nOverallRows,
+		    0, d_nnz, 0, o_nnz, &petscMatrix);
+    
+#if (DEBUG != 0)
+    INFO(info, 8)("Fill petsc matrix 1 needed %.5f seconds\n", MPI::Wtime() - wtime);
+#endif
+
+#if (DEBUG != 0)
+    int a, b;
+    MatGetOwnershipRange(petscMatrix, &a, &b);
+    TEST_EXIT(a == meshDistributor->getRstart() * nComponents)
+      ("Wrong matrix ownership range!\n");
+    TEST_EXIT(b == meshDistributor->getRstart() * nComponents + nRankRows)
+      ("Wrong matrix ownership range!\n");
+#endif
+
+
+    // === Transfer values from DOF matrices to the PETSc matrix. === 
+
+    for (int i = 0; i < nComponents; i++)
+      for (int j = 0; j < nComponents; j++)
+	if ((*mat)[i][j])
+	  setDofMatrix((*mat)[i][j], nComponents, i, j);
+
+#if (DEBUG != 0)
+    INFO(info, 8)("Fill petsc matrix 2 needed %.5f seconds\n", MPI::Wtime() - wtime);
+#endif
+
+    MatAssemblyBegin(petscMatrix, MAT_FINAL_ASSEMBLY);
+    MatAssemblyEnd(petscMatrix, MAT_FINAL_ASSEMBLY);
+
+#if (DEBUG != 0)
+    INFO(info, 8)("Fill petsc matrix 3 needed %.5f seconds\n", 
+		  TIME_USED(MPI::Wtime(), wtime));
+#endif
+
+    // === Transfer values from the DOF vectors to the PETSc rhs vector. === 
+
+    for (int i = 0; i < nComponents; i++)
+      setDofVector(petscRhsVec, vec->getDOFVector(i), nComponents, i);
+
+    VecAssemblyBegin(petscRhsVec);
+    VecAssemblyEnd(petscRhsVec);
+
+    INFO(info, 8)("Fill petsc matrix needed %.5f seconds\n", MPI::Wtime() - wtime);
+  }
+
+
+  void PetscProblemStat::solvePetscMatrix(SystemVector &vec, AdaptInfo *adaptInfo)
+  {
+    FUNCNAME("PetscProblemStat::solvePetscMatrix()");
+
+#if 0
+    // Set the old solution to be the initial guess for the PETSc solver.
+    for (int i = 0; i < nComponents; i++)
+      setDofVector(petscSolVec, vec->getDOFVector(i), nComponents, i);
+
+    VecAssemblyBegin(petscSolVec);
+    VecAssemblyEnd(petscSolVec);
+#endif
+
+    // === Init PETSc solver. ===
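+    // The concrete KSP/PC configuration is delegated to a PetscSolver object;
+    // PetscSolver::getPetscSolver() selects the implementation at runtime via
+    // the init file parameter "parallel->solver".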
+
+    KSP solver;
+    PC pc;
+    PetscSolver* petscSolver = 
+      PetscSolver::getPetscSolver(petscMatrix, meshDistributor, nComponents);
+    petscSolver->providePetscSolver(solver, pc);
+    delete petscSolver;
+
+    // Do not delete the solution vector, use it for the initial guess.
+    //    KSPSetInitialGuessNonzero(solver, PETSC_TRUE);
+   
+    // === Run PETSc. ===
+
+    KSPSolve(solver, petscRhsVec, petscSolVec);
+
+
+    // === Transfer values from PETSc's solution vector to the DOF vectors. ===
+
+    PetscScalar *vecPointer;
+    VecGetArray(petscSolVec, &vecPointer);
+
+    int nRankDofs = meshDistributor->getNumberRankDofs();
+    for (int i = 0; i < nComponents; i++) {
+      DOFVector<double> &dofvec = *(vec.getDOFVector(i));
+      for (int j = 0; j < nRankDofs; j++)
+	dofvec[meshDistributor->mapLocalToDofIndex(j)] = 
+	  vecPointer[j * nComponents + i]; 
+    }
+
+    VecRestoreArray(petscSolVec, &vecPointer);
+
+
+    // === Synchronize the solution at common DOFs, i.e., DOFs that belong ===
+    // === to more than one partition.                                     ===
+    meshDistributor->synchVector(vec);
+
+
+    // === Print information about the solution process. ===
+
+    int iterations = 0;
+    KSPGetIterationNumber(solver, &iterations);
+    MSG("  Number of iterations: %d\n", iterations);
+    adaptInfo->setSolverIterations(iterations);
+
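+    // Compute the true residual norm ||A x - b||_2 of the computed solution.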
+    double norm = 0.0;
+    MatMult(petscMatrix, petscSolVec, petscTmpVec);
+    VecAXPY(petscTmpVec, -1.0, petscRhsVec);
+    VecNorm(petscTmpVec, NORM_2, &norm);
+    MSG("  Residual norm: %e\n", norm);
+
+
+    // === Destroy PETSc objects. ===
+
+    MatDestroy(petscMatrix);
+    VecDestroy(petscRhsVec);
+    VecDestroy(petscSolVec);
+    VecDestroy(petscTmpVec);
+    KSPDestroy(solver);
+  }
+
+}
diff --git a/AMDiS/src/parallel/PetscProblemStat.h b/AMDiS/src/parallel/PetscProblemStat.h
new file mode 100644
index 00000000..2c38a2b4
--- /dev/null
+++ b/AMDiS/src/parallel/PetscProblemStat.h
@@ -0,0 +1,106 @@
+// ============================================================================
+// ==                                                                        ==
+// == AMDiS - Adaptive multidimensional simulations                          ==
+// ==                                                                        ==
+// ==  http://www.amdis-fem.org                                              ==
+// ==                                                                        ==
+// ============================================================================
+//
+// Software License for AMDiS
+//
+// Copyright (c) 2010 Dresden University of Technology 
+// All rights reserved.
+// Authors: Simon Vey, Thomas Witkowski et al.
+//
+// This file is part of AMDiS
+//
+// See also license.opensource.txt in the distribution.
+
+
+
+/** \file PetscProblemStat.h */
+
+#ifndef AMDIS_PETSC_PROBLEM_STAT_H
+#define AMDIS_PETSC_PROBLEM_STAT_H
+
+#include "AMDiS_fwd.h"
+#include "Global.h"
+#include "MeshDistributor.h"
+#include "ProblemVec.h"
+#include "ProblemInstat.h"
+#include "ParallelProblemStatBase.h"
+
+#include "petsc.h"
+#include "petscsys.h"
+#include "petscao.h"
+
+namespace AMDiS {
+  
+  class PetscProblemStat : public ParallelProblemStatBase
+  {
+  public:
+    PetscProblemStat(std::string nameStr,
+		     ProblemIterationInterface *problemIteration = NULL)
+      : ParallelProblemStatBase(nameStr, problemIteration),
+	d_nnz(NULL),
+	o_nnz(NULL),
+	lastMeshNnz(0)
+    {}
+
+    ~PetscProblemStat()
+    {}
+
+    void solve(AdaptInfo *adaptInfo, bool fixedMatrix = false);
+
+  protected:
+    /// Creates a new nonzero pattern structure for the PETSc matrix.
+    void createPetscNnzStructure(Matrix<DOFMatrix*> *mat);
+    
+    /** \brief
+     * Create a PETSc matrix and PETSc vectors. The given DOF matrices are used
+     * to create the nnz structure of the PETSc matrix, and their values are
+     * transferred to it. The given DOF vectors are used to set the values of
+     * the PETSc rhs vector.
+     *
+     * \param[in] mat  Matrix of DOF matrices that defines the linear system.
+     * \param[in] vec  System vector that provides the rhs DOF vectors.
+     */
+    void fillPetscMatrix(Matrix<DOFMatrix*> *mat, SystemVector *vec);
+
+    /// Takes a DOF matrix and transfers its values to the global PETSc matrix.
+    void setDofMatrix(DOFMatrix* mat, int dispMult = 1, 
+		      int dispAddRow = 0, int dispAddCol = 0);
+
+    /// Takes a DOF vector and transfers its values to a given PETSc vector.
+    void setDofVector(Vec& petscVec, DOFVector<double>* vec, 
+		      int dispMult = 1, int dispAdd = 0);
+
+    /// Uses PETSc to solve the linear system of equations.
+    void solvePetscMatrix(SystemVector &vec, AdaptInfo *adaptInfo);
+
+  protected:
+    /// PETSc matrix structure.
+    Mat petscMatrix;
+
+    /** \brief
+     * PETSc vector structures for the rhs vector, the solution vector, and a
+     * temporary vector for calculating the final residual.
+     */
+    Vec petscRhsVec, petscSolVec, petscTmpVec;
+
+    /// Arrays defining the nonzero pattern of the PETSc matrix.
+    int *d_nnz, *o_nnz;
+
+    /** \brief
+     * Stores the mesh change index of the mesh the nnz structure was created
+     * for. If the mesh change index becomes higher than this value, a new nnz
+     * structure must be created for the PETSc matrices, because the mesh and
+     * therefore also the structure of the assembled matrix have changed.
+     */
+    int lastMeshNnz;
+  };
+
+  typedef PetscProblemStat ParallelProblemStat;
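+
+  // Applications refer to the parallel problem class by the name
+  // ParallelProblemStat; with the typedef above they now get the PETSc-based
+  // implementation. A minimal usage sketch (assuming the usual AMDiS
+  // initialization flags):
+  //
+  //   ParallelProblemStat prob("myProblem");
+  //   prob.initialize(INIT_ALL);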
+
+} // namespace AMDiS
+
+#endif
diff --git a/AMDiS/src/parallel/PetscSolver.cc b/AMDiS/src/parallel/PetscSolver.cc
index 30ff7a35..16bb7e85 100644
--- a/AMDiS/src/parallel/PetscSolver.cc
+++ b/AMDiS/src/parallel/PetscSolver.cc
@@ -10,17 +10,8 @@
 // See also license.opensource.txt in the distribution.
 
 
-#include <vector>
-#include <set>
-
 #include "parallel/PetscSolver.h"
-#include "parallel/StdMpi.h"
-#include "parallel/ParallelDebug.h"
-#include "DOFVector.h"
-#include "Debug.h"
-#include "SystemVector.h"
 
-#include "petscksp.h"
 
 namespace AMDiS {
 
@@ -33,637 +24,102 @@ namespace AMDiS {
 
     return 0;
   }
- 
-
-  void PetscSolver::solve(AdaptInfo *adaptInfo, bool fixedMatrix)
-  {
-    FUNCNAME("PetscSolver::solve()");
 
-    TEST_EXIT(meshDistributor)("Should not happen!\n");
 
-    double wtime = MPI::Wtime();
-
-    fillPetscMatrix(systemMatrix, rhs);
-    solvePetscMatrix(*solution, adaptInfo);   
-
-    INFO(info, 8)("solution of discrete system needed %.5f seconds\n", 
-		  MPI::Wtime() - wtime);
-  }
-
-
-  void PetscSolver::setDofMatrix(DOFMatrix* mat, int dispMult, 
-				 int dispAddRow, int dispAddCol)
+  PetscSolver* PetscSolver::getPetscSolver(Mat& petscMatrix, MeshDistributor *dist, int n)
   {
-    FUNCNAME("PetscSolver::setDofMatrix()");
-
-    TEST_EXIT(mat)("No DOFMatrix!\n");
-
-    using mtl::tag::row; using mtl::tag::nz; using mtl::begin; using mtl::end;
-    namespace traits= mtl::traits;
-    typedef DOFMatrix::base_matrix_type Matrix;
-
-    traits::col<Matrix>::type col(mat->getBaseMatrix());
-    traits::const_value<Matrix>::type value(mat->getBaseMatrix());
+    FUNCNAME("PetscSolver::getPetscSolver()");
 
-    typedef traits::range_generator<row, Matrix>::type cursor_type;
-    typedef traits::range_generator<nz, cursor_type>::type icursor_type;
-
-    vector<int> cols;
-    vector<double> values;
-    cols.reserve(300);
-    values.reserve(300);
+    string name("");
+    GET_PARAMETER(0, "parallel->solver", &name);
     
-    vector<int> globalCols;
-
-
-    // === Traverse all rows of the dof matrix and insert row wise the values ===
-    // === to the PETSc matrix.                                               ===
-
-    for (cursor_type cursor = begin<row>(mat->getBaseMatrix()), 
-	   cend = end<row>(mat->getBaseMatrix()); cursor != cend; ++cursor) {
-
-      // Global index of the current row dof.
-      int globalRowDof = meshDistributor->mapLocalToGlobal(*cursor);
-      // Test if the current row dof is a periodic dof.
-      bool periodicRow = meshDistributor->isPeriodicDof(globalRowDof);
-
-      if (!periodicRow) {
-	// === Row DOF index is not periodic. ===
-
-	// Calculate PETSc row index.
-
-	int rowIndex = globalRowDof * dispMult + dispAddRow;
-
-	cols.clear();
-	values.clear();
-
-	for (icursor_type icursor = begin<nz>(cursor), icend = end<nz>(cursor); 
-	     icursor != icend; ++icursor) {
-
-	  // Global index of the current column index.
-	  int globalColDof = meshDistributor->mapLocalToGlobal(col(*icursor));
-	  // Test if the current col dof is a periodic dof.
-	  bool periodicCol = meshDistributor->isPeriodicDof(globalColDof);
-	  // Calculate PETSc col index.
-	  int colIndex = globalColDof * dispMult + dispAddCol;
-
-	  // Ignore all zero entries, expect it is a diagonal entry.
- 	  if (value(*icursor) == 0.0 && rowIndex != colIndex)
- 	    continue;
-
-	  if (!periodicCol) {
-	    // Calculate the exact position of the column index in the PETSc matrix.
-	    cols.push_back(globalColDof * dispMult + dispAddCol);
-	    values.push_back(value(*icursor));
-	  } else {
-	    // === Row index is not periodic, but column index is. ===
-
-	    // Create set of all periodic associations of the column index.
-	    std::set<int> perAsc;
-	    std::set<int>& perColAsc = 
-	      meshDistributor->getPerDofAssociations(globalColDof);
-	    for (std::set<int>::iterator it = perColAsc.begin(); 
-		 it != perColAsc.end(); ++it)
-	      if (*it >= -3)
-		perAsc.insert(*it);
-    
-	    // Scale value to the number of periodic associations of the column index.
-	    double scaledValue = 
-	      value(*icursor) * pow(0.5, static_cast<double>(perAsc.size()));
-
-	    
-	    // === Create set of all matrix column indices due to the periodic ===
-	    // === associations of the column DOF index.                       ===
-
-	    vector<int> newCols;
-
-	    // First, add the original matrix index.
-	    newCols.push_back(globalColDof);
-	    
-	    // And add all periodic matrix indices.
-	    for (std::set<int>::iterator it = perAsc.begin(); 
-		 it != perAsc.end(); ++it) {
-	      int nCols = static_cast<int>(newCols.size());
-
-	      for (int i = 0; i < nCols; i++) {
- 		TEST_EXIT_DBG(meshDistributor->isPeriodicDof(newCols[i], *it))
- 		  ("Wrong periodic DOF associations at boundary %d with DOF %d!\n",
-		   *it, newCols[i]);
-
-		newCols.push_back(meshDistributor->getPeriodicMapping(newCols[i], *it));
-	      }
-	    }
-
-	    for (unsigned int i = 0; i < newCols.size(); i++) {
-	      cols.push_back(newCols[i] * dispMult + dispAddCol);
-	      values.push_back(scaledValue);	      
-	    }
-	  }
-	}
-
-	MatSetValues(petscMatrix, 1, &rowIndex, cols.size(), 
-		     &(cols[0]), &(values[0]), ADD_VALUES);	
-      } else {
-	// === Row DOF index is periodic. ===
-
-	// Because this row is periodic, we will have to add the entries of this 
-	// matrix row to multiple rows. The following maps store to each row an
-	// array of column indices and values of the entries that must be added to
-	// the PETSc matrix.
-	map<int, vector<int> > colsMap;
-	map<int, vector<double> > valsMap;
-
-	// Traverse all column entries.
-	for (icursor_type icursor = begin<nz>(cursor), icend = end<nz>(cursor); 
-	     icursor != icend; ++icursor) {
-
-	  // Global index of the current column index.
-	  int globalColDof = meshDistributor->mapLocalToGlobal(col(*icursor));
-
-	  // Ignore all zero entries, expect it is a diagonal entry.
- 	  if (value(*icursor) == 0.0 && globalRowDof != globalColDof)
- 	    continue;
-
-
-	  // === Add all periodic associations of both, the row and the column ===
-	  // === indices to the set perAsc.                                    ===
-
-	  std::set<int> perAsc;
-
-	  if (meshDistributor->isPeriodicDof(globalColDof)) {
-	    std::set<int>& perColAsc = 
-	      meshDistributor->getPerDofAssociations(globalColDof);
-	    for (std::set<int>::iterator it = perColAsc.begin(); 
-		 it != perColAsc.end(); ++it)
-	      if (*it >= -3)
-		perAsc.insert(*it);
-	  }
-
-	  std::set<int>& perRowAsc = 
-	    meshDistributor->getPerDofAssociations(globalRowDof);
-	  for (std::set<int>::iterator it = perRowAsc.begin(); 
-	       it != perRowAsc.end(); ++it)
-	    if (*it >= -3)
-	      perAsc.insert(*it);
-
-	  // Scale the value with respect to the number of periodic associations.
-	  double scaledValue = 
-	    value(*icursor) * pow(0.5, static_cast<double>(perAsc.size()));
-
-
-	  // === Create all matrix entries with respect to the periodic  ===
-	  // === associations of the row and column indices.             ===
-
-	  vector<pair<int, int> > entry;
-	  
-	  // First, add the original entry.
-	  entry.push_back(make_pair(globalRowDof, globalColDof));
-
-	  // Then, traverse the periodic associations of the row and column indices
-	  // and create the corresponding entries.
-	  for (std::set<int>::iterator it = perAsc.begin(); it != perAsc.end(); ++it) {
-	    int nEntry = static_cast<int>(entry.size());
-	    for (int i = 0; i < nEntry; i++) {
-	      int perRowDof = 0;
-	      if (meshDistributor->getPeriodicMapping()[*it].count(entry[i].first))
-		perRowDof = meshDistributor->getPeriodicMapping(entry[i].first, *it);
-	      else
-		perRowDof = entry[i].first;
-
-	      int perColDof;
-	      if (meshDistributor->getPeriodicMapping()[*it].count(entry[i].second))
-		perColDof = meshDistributor->getPeriodicMapping(entry[i].second, *it);
-	      else
-		perColDof = entry[i].second;	      	      
-	      
-
-	      entry.push_back(make_pair(perRowDof, perColDof));
-	    }
-	  }
-
-
-	  // === Translate the matrix entries to PETSc's matrix.
-
-	  for (vector<pair<int, int> >::iterator eIt = entry.begin(); 
-	       eIt != entry.end(); ++eIt) {
-	    // Calculate row and column indices of the PETSc matrix.
-	    int rowIndex = eIt->first * dispMult + dispAddRow;
-	    int colIndex = eIt->second * dispMult + dispAddCol;
-
-	    colsMap[rowIndex].push_back(colIndex);
-	    valsMap[rowIndex].push_back(scaledValue);
-	  }
-	}
-
-
-	// === Finally, add all periodic rows to the PETSc matrix. ===
-
-	for (map<int, vector<int> >::iterator rowIt = colsMap.begin();
-	     rowIt != colsMap.end(); ++rowIt) {
-	  TEST_EXIT_DBG(rowIt->second.size() == valsMap[rowIt->first].size())
-	    ("Should not happen!\n");
-
-	  int rowIndex = rowIt->first;
-	  MatSetValues(petscMatrix, 1, &rowIndex, rowIt->second.size(),
-		       &(rowIt->second[0]), &(valsMap[rowIt->first][0]), ADD_VALUES);
-	}
-      }
+    if (name == "petsc-schur") {
+      return new PetscSolverSchur(petscMatrix, dist, n);
+    } else if (name == "petsc" || name == "") {
+      return new PetscSolverGlobalMatrix(petscMatrix, dist, n);
+    } else {
+      ERROR_EXIT("No parallel solver %s available!\n", name.c_str());
     }
   }
 
 
-  void PetscSolver::setDofVector(Vec& petscVec, DOFVector<double>* vec, 
-				 int dispMult, int dispAdd)
+  void PetscSolverGlobalMatrix::providePetscSolver(KSP &solver, PC &pc)
   {
-    FUNCNAME("PetscSolver::setDofVector()");
-
-    // Traverse all used DOFs in the dof vector.
-    DOFVector<double>::Iterator dofIt(vec, USED_DOFS);
-    for (dofIt.reset(); !dofIt.end(); ++dofIt) {
-      // Calculate global row index of the dof.
-      DegreeOfFreedom globalRowDof = 
-	meshDistributor->mapLocalToGlobal(dofIt.getDOFIndex());
-      // Calculate PETSc index of the row dof.
-      int index = globalRowDof * dispMult + dispAdd;
-
-      if (meshDistributor->isPeriodicDof(globalRowDof)) {
-	std::set<int>& perAsc = meshDistributor->getPerDofAssociations(globalRowDof);
-	double value = *dofIt / (perAsc.size() + 1.0);
-	VecSetValues(petscVec, 1, &index, &value, ADD_VALUES);
-
-	for (std::set<int>::iterator perIt = perAsc.begin(); perIt != perAsc.end(); ++perIt) {
-	  int mappedDof = meshDistributor->getPeriodicMapping(globalRowDof, *perIt);
-	  int mappedIndex = mappedDof * dispMult + dispAdd;
-	  VecSetValues(petscVec, 1, &mappedIndex, &value, ADD_VALUES);
-	}
-      } else {
-	// The dof index is not periodic.
-	double value = *dofIt;
-	VecSetValues(petscVec, 1, &index, &value, ADD_VALUES);
-      }
-    }
+    FUNCNAME("PetscSolverGlobalMatrix::providePetscSolver()");
+    
+    KSPCreate(PETSC_COMM_WORLD, &solver);
+    KSPGetPC(solver, &pc);
+    
+    KSPSetOperators(solver, matrix, matrix, SAME_NONZERO_PATTERN); 
+    KSPSetTolerances(solver, 0.0, 1e-8, PETSC_DEFAULT, PETSC_DEFAULT);
+    KSPSetType(solver, KSPBCGS);
+    KSPMonitorSet(solver, myKSPMonitor, PETSC_NULL, 0);
+    KSPSetFromOptions(solver);
+    PCSetFromOptions(pc);
   }
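
myKSPMonitor itself is not part of this hunk; a callback with the signature expected by KSPMonitorSet() looks roughly like this (a sketch, not the actual AMDiS implementation):

#include <petscksp.h>

// Sketch of a monitor callback matching the signature KSPMonitorSet() expects;
// the real myKSPMonitor is defined elsewhere in the AMDiS sources.
PetscErrorCode myKSPMonitorSketch(KSP ksp, PetscInt iter, PetscReal rnorm, void *)
{
  // PetscPrintf() prints from the first rank of the communicator only.
  PetscPrintf(PETSC_COMM_WORLD, "  PETSc iteration %d: residual norm %e\n",
              iter, rnorm);
  return 0;
}
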
 
 
-  void PetscSolver::createPetscNnzStructure(Matrix<DOFMatrix*> *mat)
+  void PetscSolverSchur::providePetscSolver(KSP &solver, PC &pc)
   {
-    FUNCNAME("PetscSolver::createPetscNnzStructure()");
-
-    TEST_EXIT_DBG(!d_nnz)("There is something wrong!\n");
-    TEST_EXIT_DBG(!o_nnz)("There is something wrong!\n");
-
-    int nRankRows = meshDistributor->getNumberRankDofs() * nComponents;
-    d_nnz = new int[nRankRows];
-    o_nnz = new int[nRankRows];
-    for (int i = 0; i < nRankRows; i++) {
-      d_nnz[i] = 0;
-      o_nnz[i] = 0;
-    }
-
-    using mtl::tag::row; using mtl::tag::nz; using mtl::begin; using mtl::end;
-    namespace traits = mtl::traits;
-    typedef DOFMatrix::base_matrix_type Matrix;
-    typedef vector<pair<int, int> > MatrixNnzEntry;
+    FUNCNAME("PetscSolverSchur::providePetscSolver()");
+    
     typedef map<int, DofContainer> RankToDofContainer;
-
-    // Stores, for each rank, the list of nnz entries (i.e., pairs of row and
-    // column indices) this rank will send to it. These nnz entries are assembled
-    // on this rank, but because the row DOFs are not owned by this rank, they
-    // must be sent to the owner of the row DOFs.
-    map<int, MatrixNnzEntry> sendMatrixEntry;
-
-    // Maps each DOF that must be sent to another rank to the number of the
-    // receiving rank.
-    map<DegreeOfFreedom, int> sendDofToRank;
-
-
-    // First, create an empty MatrixNnzEntry object for every rank we send data to.
-    for (RankToDofContainer::iterator it = meshDistributor->getRecvDofs().begin();
-	 it != meshDistributor->getRecvDofs().end(); ++it) {
-      sendMatrixEntry[it->first].resize(0);
-
-      for (DofContainer::iterator dofIt = it->second.begin();
-	   dofIt != it->second.end(); ++dofIt)
-	sendDofToRank[**dofIt] = it->first;
-    }
-
-
-    std::set<int> recvFromRank;
-    for (RankToDofContainer::iterator it = meshDistributor->getSendDofs().begin();
-	 it != meshDistributor->getSendDofs().end(); ++it)
-      recvFromRank.insert(it->first);
-
-
-    for (int i = 0; i < nComponents; i++) {
-      for (int j = 0; j < nComponents; j++) {
- 	if (!(*mat)[i][j])
-	  continue;
-
-	Matrix bmat = (*mat)[i][j]->getBaseMatrix();
-
-	traits::col<Matrix>::type col(bmat);
-	traits::const_value<Matrix>::type value(bmat);
-	  
-	typedef traits::range_generator<row, Matrix>::type cursor_type;
-	typedef traits::range_generator<nz, cursor_type>::type icursor_type;
-	
-	for (cursor_type cursor = begin<row>(bmat), 
-	       cend = end<row>(bmat); cursor != cend; ++cursor) {
-	  
-	  int globalRowDof = meshDistributor->mapLocalToGlobal(*cursor);
-
-	  vector<int> rows;
-	  rows.push_back(globalRowDof);
-	  vector<int> rowRank;
-	  if (meshDistributor->getIsRankDof(*cursor)) {
-	    rowRank.push_back(meshDistributor->getMpiRank());
-	  } else {
-	    // Find out which rank owns this DOF.
-	    TEST_EXIT_DBG(sendDofToRank.count(*cursor))("Should not happen!\n");
-	    
-	    rowRank.push_back(sendDofToRank[*cursor]);
-	  }
-
-	  // Map the local row number to the global DOF index and create from it
-	  // the global PETSc row index of this DOF.
-	  
-	  int petscRowIdx = globalRowDof * nComponents + i;
-	  
-	  if (meshDistributor->getIsRankDof(*cursor)) {
-	    	    
-	    // === The current row DOF is a rank dof, so create the corresponding ===
-	    // === nnz values directly on rank's nnz data.                        ===
-	    
-	    // This is the local row index of the local PETSc matrix.
-	    int localPetscRowIdx = 
-	      petscRowIdx - meshDistributor->getRstart() * nComponents;
-	    
-	    TEST_EXIT_DBG(localPetscRowIdx >= 0 && localPetscRowIdx < nRankRows)
-	      ("Should not happen!\n Debug info: localRowIdx = %d   globalRowIdx = %d   petscRowIdx = %d   localPetscRowIdx = %d   rStart = %d   nComponents = %d   nRankRows = %d\n",
-	       *cursor, meshDistributor->mapLocalToGlobal(*cursor), petscRowIdx, localPetscRowIdx, meshDistributor->getRstart(), nComponents, nRankRows);
-	    
-	    
-	    // Traverse all non zero entries in this row.
-	    for (icursor_type icursor = begin<nz>(cursor), 
-		   icend = end<nz>(cursor); icursor != icend; ++icursor) {
-	      int petscColIdx = 
-		meshDistributor->mapLocalToGlobal(col(*icursor)) * nComponents + j;
-	      
-	      if (value(*icursor) != 0.0 || petscRowIdx == petscColIdx) {
-		// The row DOF is a rank DOF; if the column is also a rank DOF,
-		// increment the d_nnz value for this row, otherwise the o_nnz value.
-		if (petscColIdx >= meshDistributor->getRstart() * nComponents && 
-		    petscColIdx < meshDistributor->getRstart() * nComponents + nRankRows)
-		  d_nnz[localPetscRowIdx]++;
-		else
-		  o_nnz[localPetscRowIdx]++;
-	      }    
-	    }
-	  } else {
-	    // === The current row DOF is not a rank DOF, i.e., its entries are   ===
-	    // === assembled on this rank but belong to the matrix of another     ===
-	    // === rank. So the corresponding nnz structure of this row must also ===
-	    // === be sent to that rank.                                          ===
-	    
-	    // Send all non-zero entries to the owner of the row DOF.
-	    int sendToRank = sendDofToRank[*cursor];
-	    
-	    for (icursor_type icursor = begin<nz>(cursor), 
-		   icend = end<nz>(cursor); icursor != icend; ++icursor) {
-	      if (value(*icursor) != 0.0) {
-		int petscColIdx = 
-		  meshDistributor->mapLocalToGlobal(col(*icursor)) * nComponents + j;
-		
-		sendMatrixEntry[sendToRank].
-		  push_back(make_pair(petscRowIdx, petscColIdx));
-	      }
-	    }
-	    
-	  } // if (isRankDof[*cursor]) ... else ...
-	} // for each row in mat[i][j]
-      } 
-    }
-
-    // === Send and recv the nnz row structure to/from other ranks. ===
-
-    StdMpi<MatrixNnzEntry> stdMpi(meshDistributor->getMpiComm(), true);
-    stdMpi.send(sendMatrixEntry);
-    for (std::set<int>::iterator it = recvFromRank.begin(); 
-	 it != recvFromRank.end(); ++it)
-      stdMpi.recv(*it);
-    stdMpi.startCommunication();
-
-
-    // === Evaluate the nnz structure this rank got from other ranks and add it to ===
-    // === the PETSc nnz data structure.                                           ===
-
-    for (map<int, MatrixNnzEntry>::iterator it = stdMpi.getRecvData().begin();
-	 it != stdMpi.getRecvData().end(); ++it) {
-      if (it->second.size() > 0) {
-	for (unsigned int i = 0; i < it->second.size(); i++) {
-	  int r = it->second[i].first;
-	  int c = it->second[i].second;
-
-	  int localRowIdx = r - meshDistributor->getRstart() * nComponents;
-
-	  TEST_EXIT_DBG(localRowIdx >= 0 && localRowIdx < nRankRows)
-	    ("Got row index %d/%d (nRankRows = %d) from rank %d. Should not happen!\n",
-	     r, localRowIdx, nRankRows, it->first);
+    typedef map<DegreeOfFreedom, bool> DofIndexToBool;
+    
+    std::set<DegreeOfFreedom> boundaryDofsSet;
+    std::set<DegreeOfFreedom> boundaryLocalDofs;
+    RankToDofContainer& sendDofs = meshDistributor->getSendDofs();
+    for (RankToDofContainer::iterator rankIt = sendDofs.begin(); 
+	 rankIt != sendDofs.end(); ++rankIt)
+      for (DofContainer::iterator dofIt = rankIt->second.begin(); 
+	   dofIt != rankIt->second.end(); ++dofIt) {
+	boundaryLocalDofs.insert(**dofIt);
 	  
-	  if (c < meshDistributor->getRstart() * nComponents || 
-	      c >= meshDistributor->getRstart() * nComponents + nRankRows)
-	    o_nnz[localRowIdx]++;
-	  else
-	    d_nnz[localRowIdx]++;
-	}
+	for (int i = 0; i < nComponents; i++)
+	  boundaryDofsSet.insert(meshDistributor->mapLocalToGlobal(**dofIt) * nComponents + i);
       }
-    }
-
-    // The above algorithm for calculating the number of nnz per row over-
-    // approximates the value, i.e., the result is always equal to or larger
-    // than the real number of nnz values in the global parallel matrix. For
-    // small matrices the result may even exceed the number of entries a row
-    // can hold. This is fixed in the following.
-
-    if (nRankRows < 100) 
-      for (int i = 0; i < nRankRows; i++)
-	d_nnz[i] = std::min(d_nnz[i], nRankRows);
-  }
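
To recap the classification used throughout this routine: a rank owns the contiguous PETSc row block [rStart * nComponents, rStart * nComponents + nRankRows); column indices inside that block count toward d_nnz (the diagonal block), all others toward o_nnz (the off-diagonal block). A sketch of the predicate, with hypothetical names:

// Hypothetical helper mirroring the d_nnz/o_nnz decision above;
// firstRow corresponds to meshDistributor->getRstart() * nComponents.
inline bool inDiagonalBlock(int petscColIdx, int firstRow, int nRankRows)
{
  return petscColIdx >= firstRow && petscColIdx < firstRow + nRankRows;
}
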
-
-
-  void PetscSolver::fillPetscMatrix(Matrix<DOFMatrix*> *mat, SystemVector *vec)
-  {
-    FUNCNAME("PetscSolver::fillPetscMatrix()");
-
-    double wtime = MPI::Wtime();
-    int nRankRows = meshDistributor->getNumberRankDofs() * nComponents;
-    int nOverallRows = meshDistributor->getNumberOverallDofs() * nComponents;
-
-    // === Create PETSc vector (rhs, solution and a temporary vector). ===
-
-    VecCreate(PETSC_COMM_WORLD, &petscRhsVec);
-    VecSetSizes(petscRhsVec, nRankRows, nOverallRows);
-    VecSetType(petscRhsVec, VECMPI);
-
-    VecCreate(PETSC_COMM_WORLD, &petscSolVec);
-    VecSetSizes(petscSolVec, nRankRows, nOverallRows);
-    VecSetType(petscSolVec, VECMPI);
-
-    VecCreate(PETSC_COMM_WORLD, &petscTmpVec);
-    VecSetSizes(petscTmpVec, nRankRows, nOverallRows);
-    VecSetType(petscTmpVec, VECMPI);
-
-    int recvAllValues = 0;
-    int sendValue = static_cast<int>(meshDistributor->getLastMeshChangeIndex() != lastMeshNnz);
-    meshDistributor->getMpiComm().Allreduce(&sendValue, &recvAllValues, 1, MPI_INT, MPI_SUM);
-
-    if (!d_nnz || recvAllValues != 0) {
-      if (d_nnz) {
-	delete [] d_nnz;
-	d_nnz = NULL;
-	delete [] o_nnz;
-	o_nnz = NULL;
-      }
-
-      createPetscNnzStructure(mat);
-      lastMeshNnz = meshDistributor->getLastMeshChangeIndex();
-    }
-
-
-    // === Create PETSc matrix with the computed nnz data structure. ===
-
-    MatCreateMPIAIJ(PETSC_COMM_WORLD, nRankRows, nRankRows, nOverallRows, nOverallRows,
-		    0, d_nnz, 0, o_nnz, &petscMatrix);
-    
-#if (DEBUG != 0)
-    INFO(info, 8)("Fill petsc matrix 1 needed %.5f seconds\n", MPI::Wtime() - wtime);
-#endif
-
-#if (DEBUG != 0)
-    int a, b;
-    MatGetOwnershipRange(petscMatrix, &a, &b);
-    TEST_EXIT(a == meshDistributor->getRstart() * nComponents)
-      ("Wrong matrix ownership range!\n");
-    TEST_EXIT(b == meshDistributor->getRstart() * nComponents + nRankRows)
-      ("Wrong matrix ownership range!\n");
-#endif
-
-
-    // === Transfer values from DOF matrices to the PETSc matrix. === 
-
-    for (int i = 0; i < nComponents; i++)
-      for (int j = 0; j < nComponents; j++)
-	if ((*mat)[i][j])
-	  setDofMatrix((*mat)[i][j], nComponents, i, j);
-
-#if (DEBUG != 0)
-    INFO(info, 8)("Fill petsc matrix 2 needed %.5f seconds\n", MPI::Wtime() - wtime);
-#endif
-
-    MatAssemblyBegin(petscMatrix, MAT_FINAL_ASSEMBLY);
-    MatAssemblyEnd(petscMatrix, MAT_FINAL_ASSEMBLY);
-
-#if (DEBUG != 0)
-    INFO(info, 8)("Fill petsc matrix 3 needed %.5f seconds\n", 
-		  TIME_USED(MPI::Wtime(), wtime));
-#endif
-
-    // === Transfer values from DOF vector to the PETSc vector. === 
-
-    for (int i = 0; i < nComponents; i++)
-      setDofVector(petscRhsVec, vec->getDOFVector(i), nComponents, i);
-
-    VecAssemblyBegin(petscRhsVec);
-    VecAssemblyEnd(petscRhsVec);
-
-    INFO(info, 8)("Fill petsc matrix needed %.5f seconds\n", MPI::Wtime() - wtime);
-  }
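
The Allreduce above implements a global "has any rank's mesh changed" test; isolated, the pattern is (sketch, hypothetical names):

#include <mpi.h>

// Each rank contributes 0 or 1; the MPI_SUM reduction is nonzero iff at
// least one rank saw a mesh change, so all ranks rebuild the nnz structure
// together.
bool anyRankMeshChanged(MPI::Intracomm &comm, bool changedLocally)
{
  int sendValue = static_cast<int>(changedLocally);
  int recvAllValues = 0;
  comm.Allreduce(&sendValue, &recvAllValues, 1, MPI_INT, MPI_SUM);
  return recvAllValues != 0;
}
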
-
-
-  void PetscSolver::solvePetscMatrix(SystemVector &vec, AdaptInfo *adaptInfo)
-  {
-    FUNCNAME("PetscSolver::solvePetscMatrix()");
-
-#if 0
-    // Set the old solution as the initial guess for the PETSc solver.
-    for (int i = 0; i < nComponents; i++)
-      setDofVector(petscSolVec, vec->getDOFVector(i), nComponents, i);
-
-    VecAssemblyBegin(petscSolVec);
-    VecAssemblyEnd(petscSolVec);
-#endif
-
-    // === Init PETSc solver. ===
-
-    KSP solver;
-    PC pc;
-
-    providePetscSolver(solver, pc);
-
-    // Do not delete the solution vector; use it as the initial guess.
-    //    KSPSetInitialGuessNonzero(solver, PETSC_TRUE);
-   
-    // === Run PETSc. ===
-
-    KSPSolve(solver, petscRhsVec, petscSolVec);
-
-
-    // === Transfer values from PETSc's solution vectors to the DOF vectors. ===
-
-    PetscScalar *vecPointer;
-    VecGetArray(petscSolVec, &vecPointer);
-
-    int nRankDofs = meshDistributor->getNumberRankDofs();
-    for (int i = 0; i < nComponents; i++) {
-      DOFVector<double> &dofvec = *(vec.getDOFVector(i));
-      for (int j = 0; j < nRankDofs; j++)
-	dofvec[meshDistributor->mapLocalToDofIndex(j)] = 
-	  vecPointer[j * nComponents + i]; 
-    }
-
-    VecRestoreArray(petscSolVec, &vecPointer);
-
-
-    // === Synchronize the solution at shared DOFs, i.e., DOFs that belong to ===
-    // === more than one partition.                                           ===
-    meshDistributor->synchVector(vec);
-
-
-    // === Print information about solution process. ===
-
-    int iterations = 0;
-    KSPGetIterationNumber(solver, &iterations);
-    MSG("  Number of iterations: %d\n", iterations);
-    adaptInfo->setSolverIterations(iterations);
-
-    double norm = 0.0;
-    MatMult(petscMatrix, petscSolVec, petscTmpVec);
-    VecAXPY(petscTmpVec, -1.0, petscRhsVec);
-    VecNorm(petscTmpVec, NORM_2, &norm);
-    MSG("  Residual norm: %e\n", norm);
-
-
-    // === Destroy Petsc's variables. ===
-
-    MatDestroy(petscMatrix);
-    VecDestroy(petscRhsVec);
-    VecDestroy(petscSolVec);
-    VecDestroy(petscTmpVec);
-    KSPDestroy(solver);
-  }
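
Per the diffstat, this solve routine moves to PetscProblemStat.cc; its explicit residual check, factored out for reference (a sketch under the patch's pre-3.2 PETSc API):

#include <petscksp.h>

// Sketch: true residual ||b - A*x||_2, as computed at the end of
// solvePetscMatrix() above (tmp = A*x; tmp -= b; norm = ||tmp||_2).
double residualNorm(Mat A, Vec x, Vec b, Vec tmp)
{
  double norm = 0.0;
  MatMult(A, x, tmp);
  VecAXPY(tmp, -1.0, b);
  VecNorm(tmp, NORM_2, &norm);
  return norm;
}
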
-
-
-  void PetscSolver::providePetscSolver(KSP &solver, PC &pc)
-  {
-    FUNCNAME("PetscSolver::providePetscSolver()");
+      
+    vector<DegreeOfFreedom> boundaryDofs(boundaryDofsSet.begin(), 
+					 boundaryDofsSet.end());
+      
+    std::set<DegreeOfFreedom> otherBoundaryLocalDofs;
+    RankToDofContainer& recvDofs = meshDistributor->getRecvDofs();
+    for (RankToDofContainer::iterator rankIt = recvDofs.begin(); 
+	 rankIt != recvDofs.end(); ++rankIt)
+      for (DofContainer::iterator dofIt = rankIt->second.begin(); 
+	   dofIt != rankIt->second.end(); ++dofIt)
+	otherBoundaryLocalDofs.insert(**dofIt);
+      
+    std::set<DegreeOfFreedom> interiorDofsSet;
+    DofIndexToBool& isRankDof = meshDistributor->getIsRankDof();
+    for (DofIndexToBool::iterator dofIt = isRankDof.begin(); 
+	 dofIt != isRankDof.end(); ++dofIt)
+      if (dofIt->second && 
+	  boundaryLocalDofs.count(dofIt->first) == 0 && 
+	  otherBoundaryLocalDofs.count(dofIt->first) == 0)
+	for (int i = 0; i < nComponents; i++)	    
+	  interiorDofsSet.insert(meshDistributor->mapLocalToGlobal(dofIt->first) * nComponents + i);
+      
+    vector<DegreeOfFreedom> interiorDofs(interiorDofsSet.begin(), 
+					 interiorDofsSet.end());
 
     KSPCreate(PETSC_COMM_WORLD, &solver);
     KSPGetPC(solver, &pc);
-
-    KSPSetOperators(solver, petscMatrix, petscMatrix, SAME_NONZERO_PATTERN); 
+    PCSetType(pc, PCFIELDSPLIT);
+      
+    IS interiorIs;
+    ISCreateGeneral(PETSC_COMM_WORLD, interiorDofs.size(), &(interiorDofs[0]), 
+		    PETSC_COPY_VALUES, &interiorIs);
+    PCFieldSplitSetIS(pc, "interior", interiorIs);
+      
+    IS boundaryIs;
+    ISCreateGeneral(PETSC_COMM_WORLD, boundaryDofs.size(), &(boundaryDofs[0]), 
+		    PETSC_COPY_VALUES, &boundaryIs);
+    PCFieldSplitSetIS(pc, "boundary", boundaryIs);
+     
+     
+    KSPSetOperators(solver, matrix, matrix, SAME_NONZERO_PATTERN); 
     KSPSetTolerances(solver, 0.0, 1e-8, PETSC_DEFAULT, PETSC_DEFAULT);
-    KSPSetType(solver, KSPBCGS);
-    KSPMonitorSet(solver, myKSPMonitor, PETSC_NULL, 0);
     KSPSetFromOptions(solver);
     PCSetFromOptions(pc);
   }
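
Because KSPSetFromOptions()/PCSetFromOptions() run last, the two named splits can be tuned at runtime. A sketch of the programmatic equivalent of command-line options such as "-pc_fieldsplit_type schur -fieldsplit_interior_pc_type ilu", using the two-argument PetscOptionsSetValue() of this PETSc generation; the concrete solver and preconditioner choices are illustrative only:

#include <petscksp.h>

// Hypothetical: must run before KSPSetFromOptions()/PCSetFromOptions()
// pick the options up. Option names follow PETSc's fieldsplit convention
// for the splits named "interior" and "boundary" above.
void configureFieldSplitOptions()
{
  PetscOptionsSetValue("-pc_fieldsplit_type", "schur");
  PetscOptionsSetValue("-fieldsplit_interior_ksp_type", "preonly");
  PetscOptionsSetValue("-fieldsplit_interior_pc_type", "ilu");
  PetscOptionsSetValue("-fieldsplit_boundary_ksp_type", "gmres");
}
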
diff --git a/AMDiS/src/parallel/PetscSolver.h b/AMDiS/src/parallel/PetscSolver.h
index 7aeecbc9..d123be9d 100644
--- a/AMDiS/src/parallel/PetscSolver.h
+++ b/AMDiS/src/parallel/PetscSolver.h
@@ -20,92 +20,66 @@
 
 /** \file PetscSolver.h */
 
-#ifndef AMDIS_PETSCSOLVER_H
-#define AMDIS_PETSCSOLVER_H
+#ifndef AMDIS_PETSC_SOLVER_H
+#define AMDIS_PETSC_SOLVER_H
 
-#include "AMDiS_fwd.h"
-#include "Global.h"
-#include "MeshDistributor.h"
-#include "ProblemVec.h"
-#include "ProblemInstat.h"
-#include "ParallelProblemStatBase.h"
-
-#include "petsc.h"
-#include "petscsys.h"
-#include "petscao.h"
+#include "parallel/PetscProblemStat.h"
+#include "CreatorInterface.h"
 
 namespace AMDiS {
-  
-  class PetscSolver : public ParallelProblemStatBase
-  {
-  public:
-    PetscSolver(std::string nameStr,
-		ProblemIterationInterface *problemIteration = NULL)
-      : ParallelProblemStatBase(nameStr, problemIteration),
-	d_nnz(NULL),
-	o_nnz(NULL),
-	lastMeshNnz(0)
-    {}
 
-    ~PetscSolver()
+  using namespace std;
+
+  class PetscSolver
+  {
+  protected:
+    PetscSolver(Mat& petscMatrix, MeshDistributor *dist, int n) 
+      : matrix(petscMatrix),
+	meshDistributor(dist),
+	nComponents(n)
     {}
 
-    void solve(AdaptInfo *adaptInfo, bool fixedMatrix = false);
+  public:
+    virtual ~PetscSolver() {}
 
-  protected:
-    /// Creates a new non-zero pattern structure for the PETSc matrix. 
-    void createPetscNnzStructure(Matrix<DOFMatrix*> *mat);
-    
-    /** \brief
-     * Create a PETSc matrix and PETSc vectors. The given DOF matrices are used to
-     * create the nnz structure of the PETSc matrix, and their values are transferred
-     * to it. The given DOF vectors are used to set the values of the PETSc rhs vector.
-     *
-     * \param[in] mat
-     * \param[in] vec
-     */
-    void fillPetscMatrix(Matrix<DOFMatrix*> *mat, SystemVector *vec);
-
-    /// Takes a DOF matrix and sends its values to the global PETSc matrix.
-    void setDofMatrix(DOFMatrix* mat, int dispMult = 1, 
-		      int dispAddRow = 0, int dispAddCol = 0);
-
-    /// Takes a DOF vector and sends its values to a given PETSc vector.
-    void setDofVector(Vec& petscVec, DOFVector<double>* vec, 
-		      int dispMult = 1, int dispAdd = 0);
-
-    /// Use PETSc to solve the linear system of equations
-    void solvePetscMatrix(SystemVector &vec, AdaptInfo *adaptInfo);
-
-  private:
     /// Creates the PETSc solver and preconditioner objects. Implemented by the
     /// subclasses to set up their specific solver configuration.
-    virtual void providePetscSolver(KSP &solver, PC &pc);
+    virtual void providePetscSolver(KSP &solver, PC &pc) = 0;
+
+    static PetscSolver* getPetscSolver(Mat& petscMatrix, 
+				       MeshDistributor *dist, 
+				       int n);
 
   protected:
-    /// Petsc's matrix structure.
-    Mat petscMatrix;
-
-    /** \brief
-     * Petsc's vector structures for the rhs vector, the solution vector and a
-     * temporary vector for calculating the final residuum.
-     */
-    Vec petscRhsVec, petscSolVec, petscTmpVec;
-
-    /// Arrays defining the non-zero pattern of PETSc's matrix.
-    int *d_nnz, *o_nnz;
-
-    /** \brief
-     * Stores the mesh change index of the mesh the nnz structure was created for.
-     * If the mesh change index is higher than this value, a new nnz structure must
-     * be created for the PETSc matrices, because the mesh, and thus the assembled
-     * matrix structure, has changed.
-     */
-    int lastMeshNnz;
+    Mat& matrix;
+
+    MeshDistributor *meshDistributor;
+
+    int nComponents;
   };
 
-  typedef PetscSolver ParallelProblemStat;
 
-} //namespace AMDiS
+  class PetscSolverGlobalMatrix : public PetscSolver
+  {
+  public:
+    PetscSolverGlobalMatrix(Mat& petscMatrix, MeshDistributor *dist, int n)
+      : PetscSolver(petscMatrix, dist, n)
+    {}
+
+    void providePetscSolver(KSP &solver, PC &pc);
+  };
+
+
+  class PetscSolverSchur : public PetscSolver
+  {
+  public:
+    PetscSolverSchur(Mat& petscMatrix, MeshDistributor *dist, int n)
+      : PetscSolver(petscMatrix, dist, n)
+    {}
+
+    void providePetscSolver(KSP &solver, PC &pc);
+  };
+
+} // namespace AMDiS
 
 #endif
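
With providePetscSolver() now pure virtual, adding a further scheme is one small subclass plus a branch in getPetscSolver(). A hypothetical example; PetscSolverJacobi does not exist in this patch:

// Hypothetical subclass following the pattern of PetscSolverGlobalMatrix.
class PetscSolverJacobi : public PetscSolver
{
public:
  PetscSolverJacobi(Mat& petscMatrix, MeshDistributor *dist, int n)
    : PetscSolver(petscMatrix, dist, n)
  {}

  void providePetscSolver(KSP &solver, PC &pc)
  {
    KSPCreate(PETSC_COMM_WORLD, &solver);
    KSPGetPC(solver, &pc);
    KSPSetOperators(solver, matrix, matrix, SAME_NONZERO_PATTERN);
    PCSetType(pc, PCJACOBI);
    KSPSetFromOptions(solver);
    PCSetFromOptions(pc);
  }
};
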
diff --git a/AMDiS/src/parallel/PetscSolverSchur.cc b/AMDiS/src/parallel/PetscSolverSchur.cc
deleted file mode 100644
index b28db0fa..00000000
--- a/AMDiS/src/parallel/PetscSolverSchur.cc
+++ /dev/null
@@ -1,84 +0,0 @@
-//
-// Software License for AMDiS
-//
-// Copyright (c) 2010 Dresden University of Technology 
-// All rights reserved.
-// Authors: Simon Vey, Thomas Witkowski et al.
-//
-// This file is part of AMDiS
-//
-// See also license.opensource.txt in the distribution.
-
-
-#include <vector>
-#include <set>
-
-#include "parallel/PetscSolverSchur.h"
-
-namespace AMDiS {
-
-  void PetscSolverSchur::providePetscSolver(KSP &solver, PC &pc)
-  {
-    FUNCNAME("PetscSolver::providePetscSolver()");
-
-    typedef map<int, DofContainer> RankToDofContainer;
-    typedef map<DegreeOfFreedom, bool> DofIndexToBool;
-
-    std::set<DegreeOfFreedom> boundaryDofsSet;
-    std::set<DegreeOfFreedom> boundaryLocalDofs;
-    RankToDofContainer& sendDofs = meshDistributor->getSendDofs();
-    for (RankToDofContainer::iterator rankIt = sendDofs.begin(); 
-	 rankIt != sendDofs.end(); ++rankIt)
-      for (DofContainer::iterator dofIt = rankIt->second.begin(); 
-	   dofIt != rankIt->second.end(); ++dofIt) {
-	boundaryLocalDofs.insert(**dofIt);
-
-	for (int i = 0; i < nComponents; i++)
-	  boundaryDofsSet.insert(meshDistributor->mapLocalToGlobal(**dofIt) * nComponents + i);
-      }
-
-    vector<DegreeOfFreedom> boundaryDofs(boundaryDofsSet.begin(), 
-					 boundaryDofsSet.end());
-
-    std::set<DegreeOfFreedom> otherBoundaryLocalDofs;
-    RankToDofContainer& recvDofs = meshDistributor->getRecvDofs();
-    for (RankToDofContainer::iterator rankIt = recvDofs.begin(); 
-	 rankIt != recvDofs.end(); ++rankIt)
-      for (DofContainer::iterator dofIt = rankIt->second.begin(); 
-	   dofIt != rankIt->second.end(); ++dofIt)
-	otherBoundaryLocalDofs.insert(**dofIt);
-
-    std::set<DegreeOfFreedom> interiorDofsSet;
-    DofIndexToBool& isRankDof = meshDistributor->getIsRankDof();
-    for (DofIndexToBool::iterator dofIt = isRankDof.begin(); 
-	 dofIt != isRankDof.end(); ++dofIt)
-      if (dofIt->second && 
-	  boundaryLocalDofs.count(dofIt->first) == 0 && 
-	  otherBoundaryLocalDofs.count(dofIt->first) == 0)
-	for (int i = 0; i < nComponents; i++)	    
-	  interiorDofsSet.insert(meshDistributor->mapLocalToGlobal(dofIt->first) * nComponents + i);
-
-    vector<DegreeOfFreedom> interiorDofs(interiorDofsSet.begin(), 
-					 interiorDofsSet.end());
-
-    IS interiorIs;
-    ISCreateGeneral(PETSC_COMM_WORLD, interiorDofs.size(), &(interiorDofs[0]), 
-		    PETSC_COPY_VALUES, &interiorIs);
-    PCFieldSplitSetIS(pc, "interior", interiorIs);
-
-    IS boundaryIs;
-    ISCreateGeneral(PETSC_COMM_WORLD, boundaryDofs.size(), &(boundaryDofs[0]), 
-		    PETSC_COPY_VALUES, &boundaryIs);
-    PCFieldSplitSetIS(pc, "boundary", boundaryIs);
-
-    KSPCreate(PETSC_COMM_WORLD, &solver);
-    KSPGetPC(solver, &pc);
-    PCSetType(pc, PCFIELDSPLIT);
-
-    KSPSetOperators(solver, petscMatrix, petscMatrix, SAME_NONZERO_PATTERN); 
-    KSPSetTolerances(solver, 0.0, 1e-8, PETSC_DEFAULT, PETSC_DEFAULT);
-    KSPSetFromOptions(solver);
-    PCSetFromOptions(pc);
-  }
-
-}
diff --git a/AMDiS/src/parallel/PetscSolverSchur.h b/AMDiS/src/parallel/PetscSolverSchur.h
deleted file mode 100644
index 297622a8..00000000
--- a/AMDiS/src/parallel/PetscSolverSchur.h
+++ /dev/null
@@ -1,51 +0,0 @@
-// ============================================================================
-// ==                                                                        ==
-// == AMDiS - Adaptive multidimensional simulations                          ==
-// ==                                                                        ==
-// ==  http://www.amdis-fem.org                                              ==
-// ==                                                                        ==
-// ============================================================================
-//
-// Software License for AMDiS
-//
-// Copyright (c) 2010 Dresden University of Technology 
-// All rights reserved.
-// Authors: Simon Vey, Thomas Witkowski et al.
-//
-// This file is part of AMDiS
-//
-// See also license.opensource.txt in the distribution.
-
-
-
-/** \file PetscSolver.h */
-
-#ifndef AMDIS_PETSC_SOLVER_SCHUR_H
-#define AMDIS_PETSC_SOLVER_SCHUR_H
-
-#include "AMDiS_fwd.h"
-#include "PetscSolver.h"
-
-namespace AMDiS {
-  
-  class PetscSolverSchur : public PetscSolver
-  {
-  public:
-    PetscSolverSchur(std::string nameStr,
-		     ProblemIterationInterface *problemIteration = NULL)
-      : PetscSolver(nameStr, problemIteration)
-    {}
-
-    ~PetscSolverSchur()
-    {}
-
-
-  private:
-    /// Creates the fieldsplit preconditioner to solve the Schur complement system.
-    void providePetscSolver(KSP &solver, PC &pc);
-
-  };
-
-} //namespace AMDiS
-
-#endif
-- 
GitLab