From c541effad81726b647b390a72c1d75f093a0ff6f Mon Sep 17 00:00:00 2001
From: Thomas Witkowski <thomas.witkowski@gmx.de>
Date: Tue, 8 Jun 2010 14:59:15 +0000
Subject: [PATCH] Several changes in parallel AMDiS interface.

---
 AMDiS/libtool                            |  87 +++---
 AMDiS/src/parallel/GlobalMatrixSolver.cc | 120 +++++---
 AMDiS/src/parallel/GlobalMatrixSolver.h  |  21 +-
 AMDiS/src/parallel/ParallelDomainBase.cc | 343 ++++++++++-------------
 AMDiS/src/parallel/ParallelDomainBase.h  | 143 +++++-----
 AMDiS/src/parallel/ParallelDomainDbg.cc  |  18 +-
 AMDiS/src/parallel/ParallelDomainDbg.h   |  18 +-
 7 files changed, 365 insertions(+), 385 deletions(-)

diff --git a/AMDiS/libtool b/AMDiS/libtool
index 6c93ff35..d350b762 100755
--- a/AMDiS/libtool
+++ b/AMDiS/libtool
@@ -30,10 +30,10 @@
 # the same distribution terms that you use for the rest of that program.
 
 # A sed program that does not truncate output.
-SED="/bin/sed"
+SED="/usr/bin/sed"
 
 # Sed that helps us avoid accidentally triggering echo(1) options like -n.
-Xsed="/bin/sed -e 1s/^X//"
+Xsed="/usr/bin/sed -e 1s/^X//"
 
 # The HP-UX ksh and POSIX shell print the target directory to stdout
 # if CDPATH is set.
@@ -44,7 +44,7 @@ available_tags=" CXX F77"
 
 # ### BEGIN LIBTOOL CONFIG
 
-# Libtool was configured on host NWRW13:
+# Libtool was configured on host p1d066:
 
 # Shell to use when invoking shell scripts.
 SHELL="/bin/sh"
@@ -66,12 +66,12 @@ fast_install=yes
 
 # The host system.
 host_alias=
-host=i686-pc-linux-gnu
+host=x86_64-unknown-linux-gnu
 host_os=linux-gnu
 
 # The build system.
 build_alias=
-build=i686-pc-linux-gnu
+build=x86_64-unknown-linux-gnu
 build_os=linux-gnu
 
 # An echo program that does not interpret backslashes.
@@ -82,25 +82,22 @@ AR="ar"
 AR_FLAGS="cru"
 
 # A C compiler.
-LTCC="gcc"
+LTCC="/licsoft/libraries/openmpi/1.2.6/64bit/bin/mpicc"
 
 # LTCC compiler flags.
 LTCFLAGS="-g -O2"
 
 # A language-specific compiler.
-CC="gcc"
+CC="/licsoft/libraries/openmpi/1.2.6/64bit/bin/mpicc"
 
 # Is the compiler the GNU C compiler?
 with_gcc=yes
 
-gcc_dir=`gcc -print-file-name=. | /bin/sed 's,/\.$,,'`
-gcc_ver=`gcc -dumpversion`
-
 # An ERE matcher.
 EGREP="grep -E"
 
 # The linker used to build libraries.
-LD="/usr/bin/ld"
+LD="/usr/x86_64-suse-linux/bin/ld -m elf_x86_64"
 
 # Whether we need hard or soft links.
 LN_S="ln -s"
@@ -174,7 +171,7 @@ dlopen_self=unknown
 dlopen_self_static=unknown
 
 # Compiler flag to prevent dynamic linking.
-link_static_flag="-static"
+link_static_flag=""
 
 # Compiler flag to turn off builtin functions.
 no_builtin_flag=" -fno-builtin"
@@ -232,11 +229,11 @@ striplib="strip --strip-unneeded"
 
 # Dependencies to place before the objects being linked to create a
 # shared library.
-predep_objects=`echo "" | $SED -e "s@${gcc_dir}@\${gcc_dir}@g;s@${gcc_ver}@\${gcc_ver}@g"`
+predep_objects=""
 
 # Dependencies to place after the objects being linked to create a
 # shared library.
-postdep_objects=`echo "" | $SED -e "s@${gcc_dir}@\${gcc_dir}@g;s@${gcc_ver}@\${gcc_ver}@g"`
+postdep_objects=""
 
 # Dependencies to place before the objects being linked to create a
 # shared library.
@@ -248,7 +245,7 @@ postdeps=""
 
 # The library search path used internally by the compiler when linking
 # a shared library.
-compiler_lib_search_path=`echo "" | $SED -e "s@${gcc_dir}@\${gcc_dir}@g;s@${gcc_ver}@\${gcc_ver}@g"`
+compiler_lib_search_path=""
 
 # Method to check whether dependent libraries are shared objects.
 deplibs_check_method="pass_all"
@@ -328,10 +325,10 @@ variables_saved_for_relink="PATH LD_LIBRARY_PATH LD_RUN_PATH GCC_EXEC_PREFIX COM
 link_all_deplibs=unknown
 
 # Compile-time system search path for libraries
-sys_lib_search_path_spec=`echo " /u/backofen/adds/local/lib/i386-redhat-linux/4.1.2/ /u/backofen/adds/local/lib/ /usr/lib/gcc/i386-redhat-linux/4.1.2/ /usr/lib/gcc/i386-redhat-linux/4.1.2/ /usr/lib/gcc/i386-redhat-linux/4.1.2/../../../../i386-redhat-linux/lib/i386-redhat-linux/4.1.2/ /usr/lib/gcc/i386-redhat-linux/4.1.2/../../../../i386-redhat-linux/lib/ /usr/lib/gcc/i386-redhat-linux/4.1.2/../../../i386-redhat-linux/4.1.2/ /usr/lib/gcc/i386-redhat-linux/4.1.2/../../../ /lib/i386-redhat-linux/4.1.2/ /lib/ /usr/lib/i386-redhat-linux/4.1.2/ /usr/lib/" | $SED -e "s@${gcc_dir}@\${gcc_dir}@g;s@${gcc_ver}@\${gcc_ver}@g"`
+sys_lib_search_path_spec=" /fastfs/wir/local/lib/x86_64-suse-linux/4.1.2/ /fastfs/wir/local/lib/../lib64/ /usr/lib64/gcc/x86_64-suse-linux/4.1.2/ /usr/lib/gcc/x86_64-suse-linux/4.1.2/ /usr/lib64/gcc/x86_64-suse-linux/4.1.2/../../../../x86_64-suse-linux/lib/x86_64-suse-linux/4.1.2/ /usr/lib64/gcc/x86_64-suse-linux/4.1.2/../../../../x86_64-suse-linux/lib/../lib64/ /usr/lib64/gcc/x86_64-suse-linux/4.1.2/../../../x86_64-suse-linux/4.1.2/ /usr/lib64/gcc/x86_64-suse-linux/4.1.2/../../../../lib64/ /lib/x86_64-suse-linux/4.1.2/ /lib/../lib64/ /usr/lib/x86_64-suse-linux/4.1.2/ /usr/lib/../lib64/ /fastfs/wir/local/lib/ /usr/lib64/gcc/x86_64-suse-linux/4.1.2/../../../../x86_64-suse-linux/lib/ /usr/lib64/gcc/x86_64-suse-linux/4.1.2/../../../ /lib/ /usr/lib/"
 
 # Run-time system search path for libraries
-sys_lib_dlsearch_path_spec="/lib /usr/lib /usr/lib/mysql /usr/lib/octave-3.0.1 /usr/lib/qt-3.3/lib /usr/lib/qt4/lib "
+sys_lib_dlsearch_path_spec="/lib /usr/lib /usr/X11R6/lib64/Xaw3d /usr/X11R6/lib64 /usr/X11R6/lib/Xaw3d /usr/X11R6/lib /usr/x86_64-suse-linux/lib /usr/local/lib64 /usr/local/lib /opt/kde3/lib64 /opt/kde3/lib /opt/gnome/lib64 /opt/gnome/lib /lib64 /lib /usr/lib64 /usr/lib /opt/cluster/intel/cce/9.1.042/lib /opt/cluster/intel/fce/9.1.036/lib /opt/cluster/Pathscale3.0/lib/2.9.99 /opt/cluster/Pathscale3.0/lib/2.9.99/32 /work/licsoft/compilers/pgi/linux86-64/6.2/lib /work/licsoft/compilers/pgi/linux86-64/6.2/libso "
 
 # Fix the shell variable $srcfile for the compiler.
 fix_srcfile_path=""
@@ -6763,7 +6760,7 @@ build_old_libs=`case $build_libtool_libs in yes) $echo no;; *) $echo yes;; esac`
 # End:
 # ### BEGIN LIBTOOL TAG CONFIG: CXX
 
-# Libtool was configured on host NWRW13:
+# Libtool was configured on host p1d066:
 
 # Shell to use when invoking shell scripts.
 SHELL="/bin/sh"
@@ -6785,12 +6782,12 @@ fast_install=yes
 
 # The host system.
 host_alias=
-host=i686-pc-linux-gnu
+host=x86_64-unknown-linux-gnu
 host_os=linux-gnu
 
 # The build system.
 build_alias=
-build=i686-pc-linux-gnu
+build=x86_64-unknown-linux-gnu
 build_os=linux-gnu
 
 # An echo program that does not interpret backslashes.
@@ -6801,25 +6798,22 @@ AR="ar"
 AR_FLAGS="cru"
 
 # A C compiler.
-LTCC="gcc"
+LTCC="/licsoft/libraries/openmpi/1.2.6/64bit/bin/mpicc"
 
 # LTCC compiler flags.
 LTCFLAGS="-g -O2"
 
 # A language-specific compiler.
-CC="g++"
+CC="/licsoft/libraries/openmpi/1.2.6/64bit/bin/mpiCC"
 
 # Is the compiler the GNU C compiler?
 with_gcc=yes
 
-gcc_dir=`gcc -print-file-name=. | /bin/sed 's,/\.$,,'`
-gcc_ver=`gcc -dumpversion`
-
 # An ERE matcher.
 EGREP="grep -E"
 
 # The linker used to build libraries.
-LD="/usr/bin/ld"
+LD="/usr/x86_64-suse-linux/bin/ld -m elf_x86_64"
 
 # Whether we need hard or soft links.
 LN_S="ln -s"
@@ -6893,7 +6887,7 @@ dlopen_self=unknown
 dlopen_self_static=unknown
 
 # Compiler flag to prevent dynamic linking.
-link_static_flag="-static"
+link_static_flag=""
 
 # Compiler flag to turn off builtin functions.
 no_builtin_flag=" -fno-builtin"
@@ -6948,11 +6942,11 @@ striplib="strip --strip-unneeded"
 
 # Dependencies to place before the objects being linked to create a
 # shared library.
-predep_objects=`echo "/usr/lib/gcc/i386-redhat-linux/4.1.2/../../../crti.o /usr/lib/gcc/i386-redhat-linux/4.1.2/crtbeginS.o" | $SED -e "s@${gcc_dir}@\${gcc_dir}@g;s@${gcc_ver}@\${gcc_ver}@g"`
+predep_objects="/usr/lib64/gcc/x86_64-suse-linux/4.1.2/../../../../lib64/crti.o /usr/lib64/gcc/x86_64-suse-linux/4.1.2/crtbeginS.o"
 
 # Dependencies to place after the objects being linked to create a
 # shared library.
-postdep_objects=`echo "/usr/lib/gcc/i386-redhat-linux/4.1.2/crtendS.o /usr/lib/gcc/i386-redhat-linux/4.1.2/../../../crtn.o" | $SED -e "s@${gcc_dir}@\${gcc_dir}@g;s@${gcc_ver}@\${gcc_ver}@g"`
+postdep_objects="/usr/lib64/gcc/x86_64-suse-linux/4.1.2/crtendS.o /usr/lib64/gcc/x86_64-suse-linux/4.1.2/../../../../lib64/crtn.o"
 
 # Dependencies to place before the objects being linked to create a
 # shared library.
@@ -6960,11 +6954,11 @@ predeps=""
 
 # Dependencies to place after the objects being linked to create a
 # shared library.
-postdeps="-lstdc++ -lm -lgcc_s -lc -lgcc_s"
+postdeps="-lmpi_cxx -lmpi -lopen-rte -lopen-pal -libverbs -lrt -lnuma -ldl -lnsl -lutil -ldl -lstdc++ -lm -lgcc_s -lpthread -lc -lgcc_s"
 
 # The library search path used internally by the compiler when linking
 # a shared library.
-compiler_lib_search_path=`echo "-L/u/backofen/adds/local/lib -L/usr/lib/gcc/i386-redhat-linux/4.1.2 -L/usr/lib/gcc/i386-redhat-linux/4.1.2 -L/usr/lib/gcc/i386-redhat-linux/4.1.2/../../.." | $SED -e "s@${gcc_dir}@\${gcc_dir}@g;s@${gcc_ver}@\${gcc_ver}@g"`
+compiler_lib_search_path="-L/usr/lib64 -L/licsoft/libraries/openmpi/1.2.6/64bit/lib -L/usr/lib64/gcc/x86_64-suse-linux/4.1.2 -L/usr/lib64/gcc/x86_64-suse-linux/4.1.2/../../../../lib64 -L/lib/../lib64 -L/usr/lib/../lib64 -L/fastfs/wir/local/lib -L/usr/lib64/gcc/x86_64-suse-linux/4.1.2/../../../../x86_64-suse-linux/lib -L/usr/lib64/gcc/x86_64-suse-linux/4.1.2/../../.."
 
 # Method to check whether dependent libraries are shared objects.
 deplibs_check_method="pass_all"
@@ -7044,10 +7038,10 @@ variables_saved_for_relink="PATH LD_LIBRARY_PATH LD_RUN_PATH GCC_EXEC_PREFIX COM
 link_all_deplibs=unknown
 
 # Compile-time system search path for libraries
-sys_lib_search_path_spec=`echo " /u/backofen/adds/local/lib/i386-redhat-linux/4.1.2/ /u/backofen/adds/local/lib/ /usr/lib/gcc/i386-redhat-linux/4.1.2/ /usr/lib/gcc/i386-redhat-linux/4.1.2/ /usr/lib/gcc/i386-redhat-linux/4.1.2/../../../../i386-redhat-linux/lib/i386-redhat-linux/4.1.2/ /usr/lib/gcc/i386-redhat-linux/4.1.2/../../../../i386-redhat-linux/lib/ /usr/lib/gcc/i386-redhat-linux/4.1.2/../../../i386-redhat-linux/4.1.2/ /usr/lib/gcc/i386-redhat-linux/4.1.2/../../../ /lib/i386-redhat-linux/4.1.2/ /lib/ /usr/lib/i386-redhat-linux/4.1.2/ /usr/lib/" | $SED -e "s@${gcc_dir}@\${gcc_dir}@g;s@${gcc_ver}@\${gcc_ver}@g"`
+sys_lib_search_path_spec=" /fastfs/wir/local/lib/x86_64-suse-linux/4.1.2/ /fastfs/wir/local/lib/../lib64/ /usr/lib64/gcc/x86_64-suse-linux/4.1.2/ /usr/lib/gcc/x86_64-suse-linux/4.1.2/ /usr/lib64/gcc/x86_64-suse-linux/4.1.2/../../../../x86_64-suse-linux/lib/x86_64-suse-linux/4.1.2/ /usr/lib64/gcc/x86_64-suse-linux/4.1.2/../../../../x86_64-suse-linux/lib/../lib64/ /usr/lib64/gcc/x86_64-suse-linux/4.1.2/../../../x86_64-suse-linux/4.1.2/ /usr/lib64/gcc/x86_64-suse-linux/4.1.2/../../../../lib64/ /lib/x86_64-suse-linux/4.1.2/ /lib/../lib64/ /usr/lib/x86_64-suse-linux/4.1.2/ /usr/lib/../lib64/ /fastfs/wir/local/lib/ /usr/lib64/gcc/x86_64-suse-linux/4.1.2/../../../../x86_64-suse-linux/lib/ /usr/lib64/gcc/x86_64-suse-linux/4.1.2/../../../ /lib/ /usr/lib/"
 
 # Run-time system search path for libraries
-sys_lib_dlsearch_path_spec="/lib /usr/lib /usr/lib/mysql /usr/lib/octave-3.0.1 /usr/lib/qt-3.3/lib /usr/lib/qt4/lib "
+sys_lib_dlsearch_path_spec="/lib /usr/lib /usr/X11R6/lib64/Xaw3d /usr/X11R6/lib64 /usr/X11R6/lib/Xaw3d /usr/X11R6/lib /usr/x86_64-suse-linux/lib /usr/local/lib64 /usr/local/lib /opt/kde3/lib64 /opt/kde3/lib /opt/gnome/lib64 /opt/gnome/lib /lib64 /lib /usr/lib64 /usr/lib /opt/cluster/intel/cce/9.1.042/lib /opt/cluster/intel/fce/9.1.036/lib /opt/cluster/Pathscale3.0/lib/2.9.99 /opt/cluster/Pathscale3.0/lib/2.9.99/32 /work/licsoft/compilers/pgi/linux86-64/6.2/lib /work/licsoft/compilers/pgi/linux86-64/6.2/libso "
 
 # Fix the shell variable $srcfile for the compiler.
 fix_srcfile_path=""
@@ -7071,7 +7065,7 @@ include_expsyms=""
 
 # ### BEGIN LIBTOOL TAG CONFIG: F77
 
-# Libtool was configured on host NWRW13:
+# Libtool was configured on host p1d066:
 
 # Shell to use when invoking shell scripts.
 SHELL="/bin/sh"
@@ -7093,12 +7087,12 @@ fast_install=yes
 
 # The host system.
 host_alias=
-host=i686-pc-linux-gnu
+host=x86_64-unknown-linux-gnu
 host_os=linux-gnu
 
 # The build system.
 build_alias=
-build=i686-pc-linux-gnu
+build=x86_64-unknown-linux-gnu
 build_os=linux-gnu
 
 # An echo program that does not interpret backslashes.
@@ -7109,25 +7103,22 @@ AR="ar"
 AR_FLAGS="cru"
 
 # A C compiler.
-LTCC="gcc"
+LTCC="/licsoft/libraries/openmpi/1.2.6/64bit/bin/mpicc"
 
 # LTCC compiler flags.
 LTCFLAGS="-g -O2"
 
 # A language-specific compiler.
-CC="f95"
+CC="g77"
 
 # Is the compiler the GNU C compiler?
-with_gcc=yes
-
-gcc_dir=`gcc -print-file-name=. | /bin/sed 's,/\.$,,'`
-gcc_ver=`gcc -dumpversion`
+with_gcc=
 
 # An ERE matcher.
 EGREP="grep -E"
 
 # The linker used to build libraries.
-LD="/usr/bin/ld"
+LD="/usr/x86_64-suse-linux/bin/ld -m elf_x86_64"
 
 # Whether we need hard or soft links.
 LN_S="ln -s"
@@ -7259,11 +7250,11 @@ striplib="strip --strip-unneeded"
 
 # Dependencies to place before the objects being linked to create a
 # shared library.
-predep_objects=`echo "" | $SED -e "s@${gcc_dir}@\${gcc_dir}@g;s@${gcc_ver}@\${gcc_ver}@g"`
+predep_objects=""
 
 # Dependencies to place after the objects being linked to create a
 # shared library.
-postdep_objects=`echo "" | $SED -e "s@${gcc_dir}@\${gcc_dir}@g;s@${gcc_ver}@\${gcc_ver}@g"`
+postdep_objects=""
 
 # Dependencies to place before the objects being linked to create a
 # shared library.
@@ -7275,7 +7266,7 @@ postdeps=""
 
 # The library search path used internally by the compiler when linking
 # a shared library.
-compiler_lib_search_path=`echo "" | $SED -e "s@${gcc_dir}@\${gcc_dir}@g;s@${gcc_ver}@\${gcc_ver}@g"`
+compiler_lib_search_path=""
 
 # Method to check whether dependent libraries are shared objects.
 deplibs_check_method="pass_all"
@@ -7355,10 +7346,10 @@ variables_saved_for_relink="PATH LD_LIBRARY_PATH LD_RUN_PATH GCC_EXEC_PREFIX COM
 link_all_deplibs=unknown
 
 # Compile-time system search path for libraries
-sys_lib_search_path_spec=`echo " /u/backofen/adds/local/lib/i386-redhat-linux/4.1.2/ /u/backofen/adds/local/lib/ /usr/lib/gcc/i386-redhat-linux/4.1.2/ /usr/lib/gcc/i386-redhat-linux/4.1.2/ /usr/lib/gcc/i386-redhat-linux/4.1.2/../../../../i386-redhat-linux/lib/i386-redhat-linux/4.1.2/ /usr/lib/gcc/i386-redhat-linux/4.1.2/../../../../i386-redhat-linux/lib/ /usr/lib/gcc/i386-redhat-linux/4.1.2/../../../i386-redhat-linux/4.1.2/ /usr/lib/gcc/i386-redhat-linux/4.1.2/../../../ /lib/i386-redhat-linux/4.1.2/ /lib/ /usr/lib/i386-redhat-linux/4.1.2/ /usr/lib/" | $SED -e "s@${gcc_dir}@\${gcc_dir}@g;s@${gcc_ver}@\${gcc_ver}@g"`
+sys_lib_search_path_spec=" /fastfs/wir/local/lib/x86_64-suse-linux/3.3.5/ /fastfs/wir/local/lib/ /usr/lib64/gcc-lib/x86_64-suse-linux/3.3.5/ /usr/lib/gcc/x86_64-suse-linux/3.3.5/ /usr/lib64/gcc-lib/x86_64-suse-linux/3.3.5/../../../../x86_64-suse-linux/lib/x86_64-suse-linux/3.3.5/ /usr/lib64/gcc-lib/x86_64-suse-linux/3.3.5/../../../../x86_64-suse-linux/lib/ /usr/lib64/gcc-lib/x86_64-suse-linux/3.3.5/../../../x86_64-suse-linux/3.3.5/ /usr/lib64/gcc-lib/x86_64-suse-linux/3.3.5/../../../ /lib/x86_64-suse-linux/3.3.5/ /lib/ /usr/lib/x86_64-suse-linux/3.3.5/ /usr/lib/"
 
 # Run-time system search path for libraries
-sys_lib_dlsearch_path_spec="/lib /usr/lib /usr/lib/mysql /usr/lib/octave-3.0.1 /usr/lib/qt-3.3/lib /usr/lib/qt4/lib "
+sys_lib_dlsearch_path_spec="/lib /usr/lib /usr/X11R6/lib64/Xaw3d /usr/X11R6/lib64 /usr/X11R6/lib/Xaw3d /usr/X11R6/lib /usr/x86_64-suse-linux/lib /usr/local/lib64 /usr/local/lib /opt/kde3/lib64 /opt/kde3/lib /opt/gnome/lib64 /opt/gnome/lib /lib64 /lib /usr/lib64 /usr/lib /opt/cluster/intel/cce/9.1.042/lib /opt/cluster/intel/fce/9.1.036/lib /opt/cluster/Pathscale3.0/lib/2.9.99 /opt/cluster/Pathscale3.0/lib/2.9.99/32 /work/licsoft/compilers/pgi/linux86-64/6.2/lib /work/licsoft/compilers/pgi/linux86-64/6.2/libso "
 
 # Fix the shell variable $srcfile for the compiler.
 fix_srcfile_path=""
diff --git a/AMDiS/src/parallel/GlobalMatrixSolver.cc b/AMDiS/src/parallel/GlobalMatrixSolver.cc
index 1771747b..173a008d 100644
--- a/AMDiS/src/parallel/GlobalMatrixSolver.cc
+++ b/AMDiS/src/parallel/GlobalMatrixSolver.cc
@@ -16,17 +16,36 @@ namespace AMDiS {
     return 0;
   }
  
-  void GlobalMatrixSolver::solve()
+
+  void GlobalMatrixSolver::addToMeshDistributor(MeshDistributor& m)
+  {
+    meshDistributor = &m;
+    m.addProblemStat(this);
+  }
+
+
+  void GlobalMatrixSolver::buildAfterCoarsen(AdaptInfo *adaptInfo, Flag flag,
+					     bool assembleMatrix,
+					     bool assembleVector)
+  {
+    meshDistributor->checkMeshChange();
+    ProblemVec::buildAfterCoarsen(adaptInfo, flag, assembleMatrix, assembleVector);
+  }
+
+
+  void GlobalMatrixSolver::solve(AdaptInfo *adaptInfo, bool fixedMatrix)
   {
     FUNCNAME("GlobalMatrixSolver::solve()");
 
+    TEST_EXIT(meshDistributor)("Should not happen!\n");
+
 #ifdef _OPENMP
     double wtime = omp_get_wtime();
 #endif
     clock_t first = clock();
 
-    fillPetscMatrix(probStat->getSystemMatrix(), probStat->getRhs());
-    solvePetscMatrix(*(probStat->getSolution()));
+    fillPetscMatrix(systemMatrix, rhs);
+    solvePetscMatrix(*solution);
 
 #ifdef _OPENMP
     INFO(info, 8)("solution of discrete system needed %.5f seconds system time / %.5f seconds wallclock time\n",
@@ -71,9 +90,9 @@ namespace AMDiS {
       values.clear();
 
       // Global index of the current row dof.
-      int globalRowDof = mapLocalGlobalDofs[*cursor];
+      DegreeOfFreedom globalRowDof = meshDistributor->mapLocalToGlobal(*cursor);
       // Test if the current row dof is a periodic dof.
-      bool periodicRow = (periodicDof.count(globalRowDof) > 0);
+      bool periodicRow = (meshDistributor->getPeriodicDofMap().count(globalRowDof) > 0);
 
 
       // === Traverse all non zero entries of the row and produce vector cols ===
@@ -86,16 +105,18 @@ namespace AMDiS {
 	// Set only non null values.
 	if (value(*icursor) != 0.0) {
 	  // Global index of the current column index.
-	  int globalColDof = mapLocalGlobalDofs[col(*icursor)];
+	  int globalColDof = meshDistributor->mapLocalToGlobal(col(*icursor));
 	  // Calculate the exact position of the column index in the petsc matrix.
 	  int colIndex = globalColDof * dispMult + dispAddCol;
 
 	  // If the current row is not periodic, but the current dof index is periodic,
 	  // we have to duplicate the value to the other corresponding periodic columns.
- 	  if (periodicRow == false && periodicDof.count(globalColDof) > 0) {
+ 	  if (periodicRow == false && 
+	      meshDistributor->getPeriodicDofMap().count(globalColDof) > 0) {
 	    // The value is assign to n matrix entries, therefore, every entry 
 	    // has only 1/n value of the original entry.
-	    double scalFactor = 1.0 / (periodicDof[globalColDof].size() + 1.0);
+	    double scalFactor = 
+	      1.0 / (meshDistributor->getPeriodicDof(globalColDof).size() + 1.0);
 
 	    // Insert original entry.
  	    cols.push_back(colIndex);
@@ -103,8 +124,8 @@ namespace AMDiS {
 
 	    // Insert the periodic entries.
  	    for (std::set<DegreeOfFreedom>::iterator it = 
-		   periodicDof[globalColDof].begin();
- 		 it != periodicDof[globalColDof].end(); ++it) {
+		   meshDistributor->getPeriodicDof(globalColDof).begin();
+ 		 it != meshDistributor->getPeriodicDof(globalColDof).end(); ++it) {
  	      cols.push_back(*it * dispMult + dispAddCol);
  	      values.push_back(value(*icursor) * scalFactor);
 	    }
@@ -126,7 +147,8 @@ namespace AMDiS {
       if (periodicRow) {
 	// The row dof is periodic, so send dof to all the corresponding rows.
 
-	double scalFactor = 1.0 / (periodicDof[globalRowDof].size() + 1.0);
+	double scalFactor = 
+	  1.0 / (meshDistributor->getPeriodicDof(globalRowDof).size() + 1.0);
 	
 	int diagIndex = -1;
 	for (int i = 0; i < static_cast<int>(values.size()); i++) {
@@ -149,8 +171,8 @@ namespace AMDiS {
 	  values[diagIndex] = 0.0;
 
 	// Send the row to all periodic row indices.
-	for (std::set<DegreeOfFreedom>::iterator it = periodicDof[globalRowDof].begin();
-	     it != periodicDof[globalRowDof].end(); ++it) {
+	for (std::set<DegreeOfFreedom>::iterator it = meshDistributor->getPeriodicDof(globalRowDof).begin();
+	     it != meshDistributor->getPeriodicDof(globalRowDof).end(); ++it) {
 	  int perRowIndex = *it * dispMult + dispAddRow;
 	  MatSetValues(petscMatrix, 1, &perRowIndex, cols.size(), 
 		       &(cols[0]), &(values[0]), ADD_VALUES);
@@ -172,18 +194,18 @@ namespace AMDiS {
     DOFVector<double>::Iterator dofIt(vec, USED_DOFS);
     for (dofIt.reset(); !dofIt.end(); ++dofIt) {
       // Calculate global row index of the dof.
-      int globalRow = mapLocalGlobalDofs[dofIt.getDOFIndex()];
+      DegreeOfFreedom globalRow = meshDistributor->mapLocalToGlobal(dofIt.getDOFIndex());
       // Calculate petsc index of the row dof.
       int index = globalRow * dispMult + dispAdd;
 
-      if (periodicDof.count(globalRow) > 0) {
+      if (meshDistributor->getPeriodicDofMap().count(globalRow) > 0) {
 	// The dof index is periodic, so devide the value to all dof entries.
 
-	double value = *dofIt / (periodicDof[globalRow].size() + 1.0);
+	double value = *dofIt / (meshDistributor->getPeriodicDof(globalRow).size() + 1.0);
 	VecSetValues(petscVec, 1, &index, &value, ADD_VALUES);
 
-	for (std::set<DegreeOfFreedom>::iterator it = periodicDof[globalRow].begin();
-	     it != periodicDof[globalRow].end(); ++it) {
+	for (std::set<DegreeOfFreedom>::iterator it = meshDistributor->getPeriodicDof(globalRow).begin();
+	     it != meshDistributor->getPeriodicDof(globalRow).end(); ++it) {
 	  index = *it * dispMult + dispAdd;
 	  VecSetValues(petscVec, 1, &index, &value, ADD_VALUES);
 	}
@@ -204,6 +226,7 @@ namespace AMDiS {
     TEST_EXIT_DBG(!d_nnz)("There is something wrong!\n");
     TEST_EXIT_DBG(!o_nnz)("There is something wrong!\n");
 
+    int nRankRows = meshDistributor->getNumberRankDofs() * nComponents;
     d_nnz = new int[nRankRows];
     o_nnz = new int[nRankRows];
     for (int i = 0; i < nRankRows; i++) {
@@ -238,21 +261,23 @@ namespace AMDiS {
 
 	    // Map the local row number to the global DOF index and create from it
 	    // the global PETSc row index of this DOF.
-	    int petscRowIdx = mapLocalGlobalDofs[*cursor] * nComponents + i;
+	    int petscRowIdx = 
+	      meshDistributor->mapLocalToGlobal(*cursor) * nComponents + i;
 
-	    if (isRankDof[*cursor]) {
+	    if (meshDistributor->getIsRankDof(*cursor)) {
 
 	      // === The current row DOF is a rank dof, so create the corresponding ===
 	      // === nnz values directly on rank's nnz data.                        ===
 
 	      // This is the local row index of the local PETSc matrix.
-	      int localPetscRowIdx = petscRowIdx - rstart * nComponents;
+	      int localPetscRowIdx = 
+		petscRowIdx - meshDistributor->getRstart() * nComponents;
 
 #if (DEBUG != 0)    
 	      if (localPetscRowIdx < 0 || localPetscRowIdx >= nRankRows) {
-		std::cout << "ERROR in rank: " << mpiRank << std::endl;
+		std::cout << "ERROR in rank: " << meshDistributor->getMpiRank() << std::endl;
 		std::cout << "  Wrong r = " << localPetscRowIdx << " " << *cursor 
-			  << " " << mapLocalGlobalDofs[*cursor] << " " 
+			  << " " << meshDistributor->mapLocalToGlobal(*cursor) << " " 
 			  << nRankRows << std::endl;
 		ERROR_EXIT("Should not happen!\n");
 	      }
@@ -262,19 +287,21 @@ namespace AMDiS {
 	      for (icursor_type icursor = begin<nz>(cursor), 
 		     icend = end<nz>(cursor); icursor != icend; ++icursor) {
 		if (value(*icursor) != 0.0) {
-		  int petscColIdx = mapLocalGlobalDofs[col(*icursor)] * nComponents + j;
+		  int petscColIdx = 
+		    meshDistributor->mapLocalToGlobal(col(*icursor)) * nComponents + j;
 
 		  // The row DOF is a rank DOF, if also the column is a rank DOF, 
 		  // increment the d_nnz values for this row, otherwise the o_nnz value.
-		  if (petscColIdx >= rstart * nComponents && 
-		      petscColIdx < rstart * nComponents + nRankRows)
+		  if (petscColIdx >= meshDistributor->getRstart() * nComponents && 
+		      petscColIdx < meshDistributor->getRstart() * nComponents + nRankRows)
 		    d_nnz[localPetscRowIdx]++;
 		  else
 		    o_nnz[localPetscRowIdx]++;
 		}    
 	      }
 	    } else {
-	      
+	      typedef std::map<int, DofContainer> RankToDofContainer;
+
 	      // === The current row DOF is not a rank dof, i.e., it will be created ===
 	      // === on this rank, but after this it will be send to another rank    ===
 	      // === matrix. So we need to send also the corresponding nnz structure ===
@@ -282,17 +309,11 @@ namespace AMDiS {
 
 	      // Find out who is the member of this DOF.
 	      int sendToRank = -1;
-	      for (RankToDofContainer::iterator it = recvDofs.begin();
-		   it != recvDofs.end(); ++it) {
+	      for (RankToDofContainer::iterator it = meshDistributor->getRecvDofs().begin();
+		   it != meshDistributor->getRecvDofs().end(); ++it) {
 		for (DofContainer::iterator dofIt = it->second.begin();
 		     dofIt != it->second.end(); ++dofIt) {
 		  if (**dofIt == *cursor) {
-
-		    if (petscRowIdx == 6717) {
-		      debug::writeDofMesh(mpiRank, *cursor, feSpace);
-		      MSG("SEND DOF TO: %d/%d\n", it->first, *cursor);
-		    }
-
 		    sendToRank = it->first;
 		    break;
 		  }
@@ -308,7 +329,8 @@ namespace AMDiS {
 	      for (icursor_type icursor = begin<nz>(cursor), 
 		     icend = end<nz>(cursor); icursor != icend; ++icursor) {
 		if (value(*icursor) != 0.0) {
-		  int petscColIdx = mapLocalGlobalDofs[col(*icursor)] * nComponents + j;
+		  int petscColIdx = 
+		    meshDistributor->mapLocalToGlobal(col(*icursor)) * nComponents + j;
 		  
 		  sendMatrixEntry[sendToRank].
 		    push_back(std::make_pair(petscRowIdx, petscColIdx));
@@ -323,9 +345,9 @@ namespace AMDiS {
 
     // === Send and recv the nnz row structure to/from other ranks. ===
 
-    StdMpi<MatrixNnzEntry> stdMpi(mpiComm, true);
+    StdMpi<MatrixNnzEntry> stdMpi(meshDistributor->getMpiComm(), true);
     stdMpi.send(sendMatrixEntry);
-    stdMpi.recv(sendDofs);
+    stdMpi.recv(meshDistributor->getSendDofs());
     stdMpi.startCommunication<int>(MPI_INT);
 
     // === Evaluate the nnz structure this rank got from other ranks and add it to ===
@@ -338,13 +360,14 @@ namespace AMDiS {
 	  int r = it->second[i].first;
 	  int c = it->second[i].second;
 
-	  int localRowIdx = r - rstart * nComponents;
+	  int localRowIdx = r - meshDistributor->getRstart() * nComponents;
 
 	  TEST_EXIT_DBG(localRowIdx >= 0 && localRowIdx < nRankRows)
 	    ("Got row index %d/%d (nRankRows = %d) from rank %d. Should not happen!\n",
 	     r, localRowIdx, nRankRows, it->first);
 	  
-	  if (c < rstart * nComponents || c >= rstart * nComponents + nRankRows)
+	  if (c < meshDistributor->getRstart() * nComponents || 
+	      c >= meshDistributor->getRstart() * nComponents + nRankRows)
 	    o_nnz[localRowIdx]++;
 	  else
 	    d_nnz[localRowIdx]++;
@@ -359,6 +382,8 @@ namespace AMDiS {
     FUNCNAME("GlobalMatrixSolver::fillPetscMatrix()");
 
     clock_t first = clock();
+    int nRankRows = meshDistributor->getNumberRankDofs() * nComponents;
+    int nOverallRows = meshDistributor->getNumberOverallDofs() * nComponents;
 
     // === Create PETSc vector (rhs, solution and a temporary vector). ===
 
@@ -374,14 +399,14 @@ namespace AMDiS {
     VecSetSizes(petscTmpVec, nRankRows, nOverallRows);
     VecSetType(petscTmpVec, VECMPI);
 
-    if (!d_nnz || lastMeshChangeIndex != lastMeshNnz) {
+    if (!d_nnz || meshDistributor->getLastMeshChangeIndex() != lastMeshNnz) {
       if (d_nnz) {
 	delete [] d_nnz;
 	delete [] o_nnz;
       }
 
       createPetscNnzStructure(mat);
-      lastMeshNnz = lastMeshChangeIndex;
+      lastMeshNnz = meshDistributor->getLastMeshChangeIndex();
     }
 
     // === Create PETSc matrix with the computed nnz data structure. ===
@@ -394,8 +419,10 @@ namespace AMDiS {
 #if (DEBUG != 0)
     int a, b;
     MatGetOwnershipRange(petscMatrix, &a, &b);
-    TEST_EXIT(a == rstart * nComponents)("Wrong matrix ownership range!\n");
-    TEST_EXIT(b == rstart * nComponents + nRankRows)("Wrong matrix ownership range!\n");
+    TEST_EXIT(a == meshDistributor->getRstart() * nComponents)
+      ("Wrong matrix ownership range!\n");
+    TEST_EXIT(b == meshDistributor->getRstart() * nComponents + nRankRows)
+      ("Wrong matrix ownership range!\n");
 #endif
 
     // === Transfer values from DOF matrices to the PETSc matrix. === 
@@ -456,10 +483,11 @@ namespace AMDiS {
     PetscScalar *vecPointer;
     VecGetArray(petscSolVec, &vecPointer);
 
+    int nRankDofs = meshDistributor->getNumberRankDofs();
     for (int i = 0; i < nComponents; i++) {
       DOFVector<double> *dofvec = vec.getDOFVector(i);
       for (int j = 0; j < nRankDofs; j++)
-	(*dofvec)[mapLocalToDofIndex[j]] = vecPointer[j * nComponents + i];      
+	(*dofvec)[meshDistributor->mapLocalToGlobal(j)] = vecPointer[j * nComponents + i];
     }
 
     VecRestoreArray(petscSolVec, &vecPointer);
@@ -467,7 +495,7 @@ namespace AMDiS {
 
     // === Synchronize dofs at common dofs, i.e., dofs that correspond to more ===
     // === than one partition.                                                 ===
-    synchVector(vec);
+    meshDistributor->synchVector(vec);
 
 
     // === Print information about solution process. ===
diff --git a/AMDiS/src/parallel/GlobalMatrixSolver.h b/AMDiS/src/parallel/GlobalMatrixSolver.h
index 165c9056..25228935 100644
--- a/AMDiS/src/parallel/GlobalMatrixSolver.h
+++ b/AMDiS/src/parallel/GlobalMatrixSolver.h
@@ -33,12 +33,14 @@
 #include "petscao.h"
 
 namespace AMDiS {
-
-  class GlobalMatrixSolver : public ParallelDomainBase
+  
+  class GlobalMatrixSolver : public ProblemVec
   {
   public:
-    GlobalMatrixSolver(ProblemVec *problemStat, ProblemInstatVec *problemInstat)
-      : ParallelDomainBase(problemStat, problemInstat),
+    GlobalMatrixSolver(std::string nameStr,
+		       ProblemIterationInterface *problemIteration = NULL)
+      : ProblemVec(nameStr, problemIteration),
+	meshDistributor(NULL),
 	d_nnz(NULL),
 	o_nnz(NULL),
 	lastMeshNnz(0)
@@ -47,7 +49,13 @@ namespace AMDiS {
     ~GlobalMatrixSolver()
     {}
 
-    void solve();
+    void addToMeshDistributor(MeshDistributor&);
+
+    void buildAfterCoarsen(AdaptInfo *adaptInfo, Flag flag,
+			   bool assembleMatrix = true,
+			   bool assembleVector = true);
+
+    void solve(AdaptInfo *adaptInfo, bool fixedMatrix = false);
 
   protected:
     /// Creates a new non zero pattern structure for the Petsc matrix. 
@@ -74,6 +82,8 @@ namespace AMDiS {
     void solvePetscMatrix(SystemVector &vec);
 
   protected:
+    MeshDistributor *meshDistributor;
+
     /// Petsc's matrix structure.
     Mat petscMatrix;
 
@@ -95,7 +105,6 @@ namespace AMDiS {
     int lastMeshNnz;
   };
 
-  typedef GlobalMatrixSolver ParallelDomainVec;
 
 } //namespace AMDiS
 
diff --git a/AMDiS/src/parallel/ParallelDomainBase.cc b/AMDiS/src/parallel/ParallelDomainBase.cc
index 18b68c29..5db0992a 100644
--- a/AMDiS/src/parallel/ParallelDomainBase.cc
+++ b/AMDiS/src/parallel/ParallelDomainBase.cc
@@ -36,69 +36,38 @@ namespace AMDiS {
   }
 
 
-  ParallelDomainBase::ParallelDomainBase(ProblemVec *problemStat,
-					 ProblemInstatVec *problemInstat)
-    : iterationIF(problemStat),
-      timeIF(problemInstat),
-      probStat(problemStat),
-      name(problemStat->getName()),
-      feSpace(problemStat->getFeSpace(0)),
-      mesh(feSpace->getMesh()),
-      refineManager(problemStat->getRefinementManager(0)),
-      info(problemStat->getInfo()),
+  MeshDistributor::MeshDistributor(std::string str)
+    : probStat(0),
+      name(str),
+      feSpace(NULL),
+      mesh(NULL),
+      refineManager(NULL),
+      info(10),
+      partitioner(NULL),
       initialPartitionMesh(true),
       nRankDofs(0),
+      nOverallDofs(0),
       rstart(0),
-      nComponents(problemStat->getNumComponents()),
       deserialized(false),
       lastMeshChangeIndex(0)
   {
-    FUNCNAME("ParallelDomainBase::ParalleDomainBase()");
-
-    TEST_EXIT(mesh->getNumberOfDOFAdmin() == 1)
-      ("Only meshes with one DOFAdmin are supported!\n");
-    TEST_EXIT(mesh->getDOFAdmin(0).getNumberOfPreDOFs(0) == 0)
-      ("Wrong pre dof number for DOFAdmin!\n");
+    FUNCNAME("MeshDistributor::MeshDistributor()");
 
     mpiRank = MPI::COMM_WORLD.Get_rank();
     mpiSize = MPI::COMM_WORLD.Get_size();
     mpiComm = MPI::COMM_WORLD;
-    partitioner = new ParMetisPartitioner(mesh, &mpiComm);
-
-    // Test if all fe spaces are equal. Yet, different fe spaces are not supported for
-    // domain parallelization.
-    const FiniteElemSpace *fe = probStat->getFeSpace(0);
-    for (int i = 0; i < nComponents; i++)
-      TEST_EXIT(fe == probStat->getFeSpace(i))
-	("Parallelization does not supported different FE spaces!\n");
-
-    // Create parallel serialization file writer, if needed.
-    int writeSerialization = 0;
-    GET_PARAMETER(0, name + "->output->write serialization", "%d", &writeSerialization);
-    if (writeSerialization)
-      probStat->getFileWriterList().push_back(new Serializer<ParallelDomainBase>(this));
-
-    int readSerialization = 0;
-    GET_PARAMETER(0, name + "->input->read serialization", "%d", &readSerialization);
-    if (readSerialization) {
-      std::string filename = "";
-      GET_PARAMETER(0, name + "->input->serialization filename", &filename);
-      filename += ".p" + lexical_cast<std::string>(mpiRank);
-      MSG("Start serialization with %s\n", filename.c_str());
-      std::ifstream in(filename.c_str());
-      deserialize(in);
-      in.close();
-    }
   }
 
 
-  void ParallelDomainBase::initParallelization(AdaptInfo *adaptInfo)
+  void MeshDistributor::initParallelization(AdaptInfo *adaptInfo)
   {
-    FUNCNAME("ParallelDomainBase::initParallelization()");
+    FUNCNAME("MeshDistributor::initParallelization()");
 
     TEST_EXIT(mpiSize > 1)
       ("Parallelization does not work with only one process!\n");
 
+    TEST_EXIT(mesh)("No mesh has been defined for mesh distribution!\n");
+
     // If the problem has been already read from a file, we do not need to do anything.
     if (deserialized)
       return;
@@ -188,33 +157,41 @@ namespace AMDiS {
 
     /// === Set DOF rank information to all matrices and vectors. ===
 
-    for (int i = 0; i < nComponents; i++) {
-      for (int j = 0; j < nComponents; j++)
- 	if (probStat->getSystemMatrix(i, j))
- 	  probStat->getSystemMatrix(i, j)->setRankDofs(isRankDof);      
-
-      TEST_EXIT_DBG(probStat->getRhs()->getDOFVector(i))("No rhs vector!\n");
-      TEST_EXIT_DBG(probStat->getSolution()->getDOFVector(i))("No solution vector!\n");
+    for (unsigned int i = 0; i < probStat.size(); i++) {
+      int nComponents = probStat[i]->getNumComponents();
+      for (int j = 0; j < nComponents; j++) {
+	for (int k = 0; k < nComponents; k++)
+	  if (probStat[i]->getSystemMatrix(j, k))
+	    probStat[i]->getSystemMatrix(j, k)->setRankDofs(isRankDof);
 
-      probStat->getRhs()->getDOFVector(i)->setRankDofs(isRankDof);
-      probStat->getSolution()->getDOFVector(i)->setRankDofs(isRankDof);
+	TEST_EXIT_DBG(probStat[i]->getRhs()->getDOFVector(j))("No RHS vector!\n");
+	TEST_EXIT_DBG(probStat[i]->getSolution()->getDOFVector(j))("No solution vector!\n");
+	
+	probStat[i]->getRhs()->getDOFVector(j)->setRankDofs(isRankDof);
+	probStat[i]->getSolution()->getDOFVector(j)->setRankDofs(isRankDof);
+      }
     }
 
+
     // === Remove periodic boundary conditions in sequential problem definition. ===
 
     // Remove periodic boundaries in boundary manager on matrices and vectors.
-    for (int i = 0; i < nComponents; i++) {
+    for (unsigned int i = 0; i < probStat.size(); i++) {
+      int nComponents = probStat[i]->getNumComponents();
+
       for (int j = 0; j < nComponents; j++) {
-	DOFMatrix* mat = probStat->getSystemMatrix(i, j);
- 	if (mat && mat->getBoundaryManager())
-	  removeBoundaryCondition(const_cast<BoundaryIndexMap&>(mat->getBoundaryManager()->getBoundaryConditionMap()));
+	for (int k = 0; k < nComponents; k++) {
+	  DOFMatrix* mat = probStat[i]->getSystemMatrix(j, k);
+	  if (mat && mat->getBoundaryManager())
+	    removeBoundaryCondition(const_cast<BoundaryIndexMap&>(mat->getBoundaryManager()->getBoundaryConditionMap()));
+	}
+	
+	if (probStat[i]->getSolution()->getDOFVector(j)->getBoundaryManager())
+	  removeBoundaryCondition(const_cast<BoundaryIndexMap&>(probStat[i]->getSolution()->getDOFVector(j)->getBoundaryManager()->getBoundaryConditionMap()));
+	
+	if (probStat[i]->getRhs()->getDOFVector(j)->getBoundaryManager())
+	  removeBoundaryCondition(const_cast<BoundaryIndexMap&>(probStat[i]->getRhs()->getDOFVector(j)->getBoundaryManager()->getBoundaryConditionMap()));
       }
-
-      if (probStat->getSolution()->getDOFVector(i)->getBoundaryManager())
-	removeBoundaryCondition(const_cast<BoundaryIndexMap&>(probStat->getSolution()->getDOFVector(i)->getBoundaryManager()->getBoundaryConditionMap()));
-
-      if (probStat->getRhs()->getDOFVector(i)->getBoundaryManager())
-	removeBoundaryCondition(const_cast<BoundaryIndexMap&>(probStat->getRhs()->getDOFVector(i)->getBoundaryManager()->getBoundaryConditionMap()));
     }
 
     // Remove periodic boundaries on elements in mesh.
@@ -227,13 +204,72 @@ namespace AMDiS {
   }
 
 
-  void ParallelDomainBase::exitParallelization(AdaptInfo *adaptInfo)
+  void MeshDistributor::addProblemStat(ProblemVec *probVec)
+  {
+    FUNCNAME("MeshDistributor::addProblemStat()");
+
+    if (feSpace != NULL) {
+      std::vector<FiniteElemSpace*> feSpaces = probVec->getFeSpaces();
+      for (unsigned int i = 0; i < feSpaces.size(); i++) {
+	TEST_EXIT(feSpace == feSpaces[i])
+	  ("Parallelization is not supported for multiple FE spaces!\n");
+      }
+    } else {
+      feSpace = probVec->getFeSpace(0);
+      mesh = feSpace->getMesh();
+      info = probVec->getInfo();
+      
+      TEST_EXIT(mesh->getNumberOfDOFAdmin() == 1)
+	("Only meshes with one DOFAdmin are supported!\n");
+      TEST_EXIT(mesh->getDOFAdmin(0).getNumberOfPreDOFs(0) == 0)
+	("Wrong pre dof number for DOFAdmin!\n");
+      
+      switch (mesh->getDim()) {
+      case 2:
+	refineManager = new RefinementManager2d();
+	break;
+      case 3:
+	refineManager = new RefinementManager3d();
+	break;
+      default:
+	ERROR_EXIT("This should not happen for dim = %d!\n", mesh->getDim());
+      }
+
+      partitioner = new ParMetisPartitioner(mesh, &mpiComm);
+    }
+
+    // Create parallel serialization file writer, if needed.
+    int writeSerialization = 0;
+    GET_PARAMETER(0, probVec->getName() + "->output->write serialization", "%d", &writeSerialization);
+    if (writeSerialization)
+      probVec->getFileWriterList().push_back(new Serializer<MeshDistributor>(this));
+
+    int readSerialization = 0;
+    GET_PARAMETER(0, probVec->getName() + "->input->read serialization", "%d", &readSerialization);
+    if (readSerialization) {
+      ERROR_EXIT("Must be reimplemented!\n");
+#if 0      
+      std::string filename = "";
+      GET_PARAMETER(0, probVec->getName() + "->input->serialization filename", &filename);
+      filename += ".p" + lexical_cast<std::string>(mpiRank);
+      MSG("Start serialization with %s\n", filename.c_str());
+      std::ifstream in(filename.c_str());
+      deserialize(in);
+      in.close();
+#endif
+    }
+
+    probStat.push_back(probVec);
+  }
+
+
+  void MeshDistributor::exitParallelization(AdaptInfo *adaptInfo)
   {}
 
   
-  void ParallelDomainBase::updateDofAdmins()
+  void MeshDistributor::updateDofAdmins()
   {
-    FUNCNAME("ParallelDomainBase::updateDofAdmins()");
+    FUNCNAME("MeshDistributor::updateDofAdmins()");
 
     for (int i = 0; i < mesh->getNumberOfDOFAdmin(); i++) {
       DOFAdmin& admin = const_cast<DOFAdmin&>(mesh->getDOFAdmin(i));
@@ -255,9 +291,9 @@ namespace AMDiS {
   }
 
 
-  void ParallelDomainBase::testForMacroMesh()
+  void MeshDistributor::testForMacroMesh()
   {
-    FUNCNAME("ParallelDomainBase::testForMacroMesh()");
+    FUNCNAME("MeshDistributor::testForMacroMesh()");
 
     int nMacroElements = 0;
 
@@ -277,7 +313,7 @@ namespace AMDiS {
   }
 
 
-  void ParallelDomainBase::synchVector(DOFVector<double> &vec)
+  void MeshDistributor::synchVector(DOFVector<double> &vec)
   {
     StdMpi<std::vector<double> > stdMpi(mpiComm);
 
@@ -306,8 +342,9 @@ namespace AMDiS {
   }
 
 
-  void ParallelDomainBase::synchVector(SystemVector &vec)
+  void MeshDistributor::synchVector(SystemVector &vec)
   {
+    int nComponents = vec.getSize();
     StdMpi<std::vector<double> > stdMpi(mpiComm);
 
     for (RankToDofContainer::iterator sendIt = sendDofs.begin();
@@ -346,7 +383,7 @@ namespace AMDiS {
   }
 
 
-  void ParallelDomainBase::removeBoundaryCondition(BoundaryIndexMap& boundaryMap)
+  void MeshDistributor::removeBoundaryCondition(BoundaryIndexMap& boundaryMap)
   {
     BoundaryIndexMap::iterator it = boundaryMap.begin();
     while (it != boundaryMap.end()) {
@@ -358,9 +395,9 @@ namespace AMDiS {
   }
 
 
-  void ParallelDomainBase::checkMeshChange()
+  void MeshDistributor::checkMeshChange()
   {
-    FUNCNAME("ParallelDomainBase::checkMeshChange()");
+    FUNCNAME("MeshDistributor::checkMeshChange()");
 
     // === If mesh has not been changed on all ranks, return. ===
 
@@ -423,9 +460,9 @@ namespace AMDiS {
   }
 
   
-  bool ParallelDomainBase::checkAndAdaptBoundary(RankToBoundMap &allBound)
+  bool MeshDistributor::checkAndAdaptBoundary(RankToBoundMap &allBound)
   {
-    FUNCNAME("ParallelDomainBase::checkAndAdaptBoundary()");
+    FUNCNAME("MeshDistributor::checkAndAdaptBoundary()");
 
     // === Create mesh structure codes for all ranks boundary elements. ===
        
@@ -481,13 +518,13 @@ namespace AMDiS {
   }
 
 
-  bool ParallelDomainBase::fitElementToMeshCode(MeshStructure &code, 
+  bool MeshDistributor::fitElementToMeshCode(MeshStructure &code, 
 						Element *el, 
 						GeoIndex subObj,
 						int ithObj, 
 						int elType)
   {
-    FUNCNAME("ParallelDomainBase::fitElementToMeshCode()");
+    FUNCNAME("MeshDistributor::fitElementToMeshCode()");
 
     TEST_EXIT_DBG(el)("No element given!\n");
 
@@ -563,13 +600,13 @@ namespace AMDiS {
   }
 
 
-  bool ParallelDomainBase::fitElementToMeshCode2(MeshStructure &code, 
+  bool MeshDistributor::fitElementToMeshCode2(MeshStructure &code, 
 						 TraverseStack &stack,
 						 GeoIndex subObj,
 						 int ithObj, 
 						 int elType)
   {
-    FUNCNAME("ParallelDomainBase::fitElementToMeshCode2()");
+    FUNCNAME("MeshDistributor::fitElementToMeshCode2()");
 
     ElInfo *elInfo = stack.getElInfo();
 
@@ -632,7 +669,7 @@ namespace AMDiS {
   }
 
   
-  void ParallelDomainBase::serialize(std::ostream &out, DofContainer &data)
+  void MeshDistributor::serialize(std::ostream &out, DofContainer &data)
   {    
     int vecSize = data.size();
     SerUtil::serialize(out, vecSize);
@@ -643,10 +680,10 @@ namespace AMDiS {
   }
 
 
-  void ParallelDomainBase::deserialize(std::istream &in, DofContainer &data,
+  void MeshDistributor::deserialize(std::istream &in, DofContainer &data,
 				       std::map<int, const DegreeOfFreedom*> &dofMap)
   {
-    FUNCNAME("ParallelDomainBase::deserialize()");
+    FUNCNAME("MeshDistributor::deserialize()");
 
     int vecSize = 0;
     SerUtil::deserialize(in, vecSize);
@@ -663,7 +700,7 @@ namespace AMDiS {
   }
 
 
-  void ParallelDomainBase::serialize(std::ostream &out, RankToDofContainer &data)
+  void MeshDistributor::serialize(std::ostream &out, RankToDofContainer &data)
   {
     int mapSize = data.size();
     SerUtil::serialize(out, mapSize);
@@ -675,7 +712,7 @@ namespace AMDiS {
   }
 
   
-  void ParallelDomainBase::deserialize(std::istream &in, RankToDofContainer &data,
+  void MeshDistributor::deserialize(std::istream &in, RankToDofContainer &data,
 				       std::map<int, const DegreeOfFreedom*> &dofMap)
   {
     int mapSize = 0;
@@ -688,9 +725,9 @@ namespace AMDiS {
   }
 
 
-  double ParallelDomainBase::setElemWeights(AdaptInfo *adaptInfo) 
+  double MeshDistributor::setElemWeights(AdaptInfo *adaptInfo) 
   {
-    FUNCNAME("ParallelDomainBase::setElemWeights()");
+    FUNCNAME("MeshDistributor::setElemWeights()");
 
     double localWeightSum = 0.0;
     elemWeights.clear();
@@ -729,9 +766,9 @@ namespace AMDiS {
   }
 
 
-  void ParallelDomainBase::partitionMesh(AdaptInfo *adaptInfo)
+  void MeshDistributor::partitionMesh(AdaptInfo *adaptInfo)
   {
-    FUNCNAME("ParallelDomainBase::partitionMesh()");
+    FUNCNAME("MeshDistributor::partitionMesh()");
 
     if (initialPartitionMesh) {
       initialPartitionMesh = false;
@@ -746,18 +783,9 @@ namespace AMDiS {
   }
 
 
-  void ParallelDomainBase::solveInitialProblem(AdaptInfo *adaptInfo)
-  {     
-    if (timeIF)
-      timeIF->solveInitialProblem(adaptInfo);
-    
-    synchVector(*(probStat->getSolution()));
-  }
-
-
-  void ParallelDomainBase::createInteriorBoundaryInfo()
+  void MeshDistributor::createInteriorBoundaryInfo()
   {
-    FUNCNAME("ParallelDomainBase::createInteriorBoundaryInfo()");
+    FUNCNAME("MeshDistributor::createInteriorBoundaryInfo()");
 
     // === First, create the interior boundaries based on macro element's  ===
     // === neighbour informations.                                         ===
@@ -867,9 +895,9 @@ namespace AMDiS {
   }
 
 
-  void ParallelDomainBase::createMacroElementInteriorBoundaryInfo()
+  void MeshDistributor::createMacroElementInteriorBoundaryInfo()
   {
-    FUNCNAME("ParallelDomainBase::createMacroElementInteriorBoundaryInfo()");
+    FUNCNAME("MeshDistributor::createMacroElementInteriorBoundaryInfo()");
 
     int nNeighbours = mesh->getGeo(NEIGH);
     int dim = mesh->getDim();
@@ -953,9 +981,9 @@ namespace AMDiS {
   }
 
 
-  void ParallelDomainBase::createSubstructureInteriorBoundaryInfo()
+  void MeshDistributor::createSubstructureInteriorBoundaryInfo()
   {
-    FUNCNAME("ParallelDomainBase::createSubstructureInteriorBoundaryInfo()");
+    FUNCNAME("MeshDistributor::createSubstructureInteriorBoundaryInfo()");
 
     // === Seach for all vertices/edges, which are part of an interior boundary,  ===
     // === but are not part of the interior boundaries that are created based on  ===
@@ -1280,9 +1308,9 @@ namespace AMDiS {
   }
 
 
-  void ParallelDomainBase::removeMacroElements()
+  void MeshDistributor::removeMacroElements()
   {
-    FUNCNAME("ParallelDomainBase::removeMacroElements()");
+    FUNCNAME("MeshDistributor::removeMacroElements()");
 
     std::set<MacroElement*> macrosToRemove;
     for (std::deque<MacroElement*>::iterator it = mesh->firstMacroElement();
@@ -1299,9 +1327,9 @@ namespace AMDiS {
   }
 
 
-  void ParallelDomainBase::createLocalGlobalNumbering()
+  void MeshDistributor::createLocalGlobalNumbering()
   {
-    FUNCNAME("ParallelDomainBase::createLocalGlobalNumbering()");
+    FUNCNAME("MeshDistributor::createLocalGlobalNumbering()");
 
     // === Get rank information about DOFs. ===
 
@@ -1313,7 +1341,7 @@ namespace AMDiS {
     createDofMemberInfo(partitionDofs, rankDofs, rankAllDofs, boundaryDofs, vertexDof);
 
     nRankDofs = rankDofs.size();
-    int nOverallDofs = partitionDofs.size();
+    nOverallDofs = partitionDofs.size();
 
 
     // === Get starting position for global rank dof ordering. ====
@@ -1469,16 +1497,13 @@ namespace AMDiS {
     createLocalMappings(rankDofsNewLocalIndex, rankOwnedDofsNewLocalIndex,
 			rankDofsNewGlobalIndex);
 
-    nRankRows = nRankDofs * nComponents;
-    nOverallRows = nOverallDofs * nComponents;
-
     lastMeshChangeIndex = mesh->getChangeIndex();
   }
 
 
-  void ParallelDomainBase::updateLocalGlobalNumbering()
+  void MeshDistributor::updateLocalGlobalNumbering()
   {
-    FUNCNAME("ParallelDomainBase::updateLocalGlobalNumbering()");
+    FUNCNAME("MeshDistributor::updateLocalGlobalNumbering()");
 
 #if (DEBUG != 0)
     debug::ElementIdxToDofs elMap;
@@ -1604,7 +1629,7 @@ namespace AMDiS {
 
     // === Calculate number of overall DOFs of all partitions. ===
 
-    int nOverallDofs = 0;
+    nOverallDofs = 0;
     mpiComm.Allreduce(&nRankDofs, &nOverallDofs, 1, MPI_INT, MPI_SUM);
 
 
@@ -1672,9 +1697,6 @@ namespace AMDiS {
     createLocalMappings(rankDofsNewLocalIndex, rankOwnedDofsNewLocalIndex,
 			rankDofsNewGlobalIndex);
 
-    nRankRows = nRankDofs * nComponents;
-    nOverallRows = nOverallDofs * nComponents;
-
     // === Update dof admins due to new number of dofs. ===
   
     updateDofAdmins();
@@ -1689,7 +1711,7 @@ namespace AMDiS {
   }
 
 
-  void ParallelDomainBase::createLocalMappings(DofIndexMap &rankDofsNewLocalIndex,
+  void MeshDistributor::createLocalMappings(DofIndexMap &rankDofsNewLocalIndex,
 					       DofIndexMap &rankOwnedDofsNewLocalIndex,
 					       DofIndexMap &rankDofsNewGlobalIndex)
   {
@@ -1711,7 +1733,7 @@ namespace AMDiS {
   }
 
   
-  void ParallelDomainBase::createDofMemberInfo(DofToPartitions& partitionDofs,
+  void MeshDistributor::createDofMemberInfo(DofToPartitions& partitionDofs,
 					       DofContainer& rankOwnedDofs,
 					       DofContainer& rankAllDofs,
 					       DofToRank& boundaryDofs,
@@ -1799,9 +1821,9 @@ namespace AMDiS {
   }
 
 
-  void ParallelDomainBase::createPeriodicMap()
+  void MeshDistributor::createPeriodicMap()
   {
-    FUNCNAME("ParallelDomainBase::createPeriodicMap()");
+    FUNCNAME("MeshDistributor::createPeriodicMap()");
 
     if (periodicBoundary.boundary.size() == 0)
       return;
@@ -1923,64 +1945,7 @@ namespace AMDiS {
   }
 
 
-  Flag ParallelDomainBase::oneIteration(AdaptInfo *adaptInfo, Flag toDo)
-  {
-    FUNCNAME("ParallelDomainBase::oneIteration()");
-
-    Flag flag = buildAndAdapt(adaptInfo, toDo);
-
-    if (toDo.isSet(SOLVE))
-      solve();
-
-    if (toDo.isSet(SOLVE_RHS))
-      ERROR_EXIT("Not yet implemented!\n");
-
-    if (toDo.isSet(ESTIMATE))
-      iterationIF->getProblem()->estimate(adaptInfo);
-
-    return flag;
-  }
-
-
-  Flag ParallelDomainBase::buildAndAdapt(AdaptInfo *adaptInfo, Flag toDo)
-  {
-    FUNCNAME("ParallelDomainBase::buildAndAdapt()");
-
-    Flag flag = 0, markFlag = 0;
-    ProblemStatBase *problem = iterationIF->getProblem();
-
-    if (toDo.isSet(MARK))
-      markFlag = problem->markElements(adaptInfo);
-    else
-      markFlag = 3;
-
-    if (toDo.isSet(BUILD))
-      problem->buildBeforeRefine(adaptInfo, markFlag);
-
-    // refine
-    if (toDo.isSet(ADAPT) && markFlag.isSet(MESH_REFINED))
-      flag = problem->refineMesh(adaptInfo);
-
-    if (toDo.isSet(BUILD))
-      problem->buildBeforeCoarsen(adaptInfo, markFlag);
-
-    // coarsen
-    if (toDo.isSet(ADAPT) && markFlag.isSet(MESH_COARSENED))
-      flag |= problem->coarsenMesh(adaptInfo);
-
-    checkMeshChange();
-
-    if (toDo.isSet(BUILD))
-      problem->buildAfterCoarsen(adaptInfo, markFlag, true, true);
-
-    if (toDo.isSet(BUILD_RHS))
-      problem->buildAfterCoarsen(adaptInfo, markFlag, false, true);
-
-    return flag;
-  }
-
-
-  void ParallelDomainBase::serialize(std::ostream &out)
+  void MeshDistributor::serialize(std::ostream &out)
   {
     SerUtil::serialize(out, elemWeights);
     SerUtil::serialize(out, initialPartitionMesh);
@@ -2002,14 +1967,10 @@ namespace AMDiS {
     serialize(out, periodicDof);
 
     SerUtil::serialize(out, rstart);
-    SerUtil::serialize(out, nRankRows);
-    SerUtil::serialize(out, nOverallRows);
-
-    probStat->serialize(out);
   }
 
 
-  void ParallelDomainBase::deserialize(std::istream &in)
+  void MeshDistributor::deserialize(std::istream &in)
   {
     SerUtil::deserialize(in, elemWeights);
     SerUtil::deserialize(in, initialPartitionMesh);
@@ -2048,16 +2009,12 @@ namespace AMDiS {
     deserialize(in, vertexDof, dofMap);
 
     SerUtil::deserialize(in, rstart);
-    SerUtil::deserialize(in, nRankRows);
-    SerUtil::deserialize(in, nOverallRows);
 
     deserialized = true;
-
-    probStat->deserialize(in);
   }
 
 
-  void ParallelDomainBase::serialize(std::ostream &out, PeriodicDofMap &data)
+  void MeshDistributor::serialize(std::ostream &out, PeriodicDofMap &data)
   {
     int mapSize = data.size();
     SerUtil::serialize(out, mapSize);
@@ -2071,7 +2028,7 @@ namespace AMDiS {
   }
 
 
-  void ParallelDomainBase::deserialize(std::istream &in, PeriodicDofMap &data)
+  void MeshDistributor::deserialize(std::istream &in, PeriodicDofMap &data)
   {
     data.clear();
 
@@ -2089,9 +2046,9 @@ namespace AMDiS {
   }
 
  
-  void ParallelDomainBase::writePartitioningMesh(std::string filename)
+  void MeshDistributor::writePartitioningMesh(std::string filename)
   {
-    FUNCNAME("ParallelDomainBase::writePartitioningMesh()");
+    FUNCNAME("MeshDistributor::writePartitioningMesh()");
 
     std::map<int, double> vec;    
     TraverseStack stack;
diff --git a/AMDiS/src/parallel/ParallelDomainBase.h b/AMDiS/src/parallel/ParallelDomainBase.h
index e21b614e..aae1f2a8 100644
--- a/AMDiS/src/parallel/ParallelDomainBase.h
+++ b/AMDiS/src/parallel/ParallelDomainBase.h
@@ -42,8 +42,7 @@ namespace AMDiS {
   
   class ParMetisPartitioner;
 
-  class ParallelDomainBase : public ProblemIterationInterface,
-			     public ProblemTimeInterface
+  class MeshDistributor
   {
   protected:
     /// Defines a mapping type from DOFs to rank numbers.
@@ -74,14 +73,15 @@ namespace AMDiS {
     typedef std::vector<MeshStructure> MeshCodeVec;
 
   public:
-    ParallelDomainBase(ProblemVec *problemStat,
-		       ProblemInstatVec *problemInstat);
-		       
-    virtual ~ParallelDomainBase() {}
+    MeshDistributor(std::string str);
+		          
+    virtual ~MeshDistributor() {}
 
-    virtual void initParallelization(AdaptInfo *adaptInfo);
+    void initParallelization(AdaptInfo *adaptInfo);
 
-    virtual void exitParallelization(AdaptInfo *adaptInfo);
+    void exitParallelization(AdaptInfo *adaptInfo);
+
+    void addProblemStat(ProblemVec *probVec);
 
     /** \brief
      * This function checks if the mesh has changed on at least on rank. In this case,
@@ -107,74 +107,97 @@ namespace AMDiS {
 
     void partitionMesh(AdaptInfo *adaptInfo);
 
-    virtual void setTime(AdaptInfo *adaptInfo) 
+    inline virtual std::string getName() 
+    { 
+      return name; 
+    }
+    
+    /// Returns \ref nRankDOFs, the number of DOFs in the rank mesh.
+    inline int getNumberRankDofs() 
     {
-      if (timeIF) 
-	timeIF->setTime(adaptInfo);      
+      return nRankDofs;
     }
 
-    virtual void initTimestep(AdaptInfo *adaptInfo) 
+    inline int getNumberOverallDofs()
     {
-      if (timeIF) 
-	timeIF->initTimestep(adaptInfo);
+      return nOverallDofs;
     }
 
-    virtual void closeTimestep(AdaptInfo *adaptInfo) 
+    inline DegreeOfFreedom mapLocalToGlobal(DegreeOfFreedom dof)
     {
-      if (timeIF) 
-	timeIF->closeTimestep(adaptInfo);
+      return mapLocalGlobalDofs[dof];
     }
 
-    virtual void solveInitialProblem(AdaptInfo *adaptInfo);
-  
-    virtual void transferInitialSolution(AdaptInfo *adaptInfo) 
+    inline std::set<DegreeOfFreedom>& getPeriodicDof(DegreeOfFreedom dof)
     {
-      if (timeIF) 
-	timeIF->transferInitialSolution(adaptInfo);
+      return periodicDof[dof];
     }
 
-    virtual void beginIteration(AdaptInfo *adaptInfo) 
+    inline PeriodicDofMap& getPeriodicDofMap()
     {
-      iterationIF->beginIteration(adaptInfo);
+      return periodicDof;
     }
 
-    virtual Flag oneIteration(AdaptInfo *adaptInfo, Flag toDo = FULL_ITERATION);
-
-    virtual Flag buildAndAdapt(AdaptInfo *adaptInfo, Flag toDo);
+    inline bool getIsRankDof(DegreeOfFreedom dof)
+    {
+      return isRankDof[dof];
+    }
 
-    virtual void endIteration(AdaptInfo *adaptInfo) 
+    inline long getLastMeshChangeIndex()
     {
-      iterationIF->endIteration(adaptInfo);
+      return lastMeshChangeIndex;
     }
 
-    virtual void solve() = 0;
+    inline int getRstart()
+    {
+      return rstart;
+    }
 
-    virtual int getNumProblems() 
+    inline int getMpiRank()
     {
-      return 0;
+      return mpiRank;
     }
 
-    inline virtual std::string getName() 
-    { 
-      return name; 
+    inline MPI::Intracomm& getMpiComm()
+    {
+      return mpiComm;
     }
 
-    /// Returns \ref nRankDOFs, the number of DOFs in the rank mesh.
-    int getNumberRankDofs() 
+    inline RankToDofContainer& getSendDofs()
     {
-      return nRankDofs;
+      return sendDofs;
     }
 
-    virtual ProblemStatBase *getProblem(int number = 0) 
+    inline RankToDofContainer& getRecvDofs()
     {
-      return NULL;
+      return recvDofs;
     }
 
     // Writes all data of this object to an output stream.
-    virtual void serialize(std::ostream &out);
+    void serialize(std::ostream &out);
 
     // Reads the object data from an input stream.
-    virtual void deserialize(std::istream &in);
+    void deserialize(std::istream &in);
+
+    /** \brief
+     * This function must be used if the values of a DOFVector must be synchronised
+     * over all ranks. That means, that each rank sends the values of the DOFs, which
+     * are owned by the rank and lie on an interior boundary, to all other ranks also
+     * having these DOFs.
+     *
+     * This function must be used, for example, after the linear system is solved, or
+     * after the DOFVector is set by some user defined functions, e.g., initial
+     * solution functions.
+     */    
+    void synchVector(DOFVector<double> &vec);
+
+    /** \brief
+     * Works in the same way as the function above defined for DOFVectors. Due to
+     * performance, this function does not call \ref synchVector for each DOFVector,
+     * but instead sends all values of all DOFVectors all at once.
+     */
+    void synchVector(SystemVector &vec);
+
 
   protected:
     /** \brief
@@ -271,25 +294,6 @@ namespace AMDiS {
      */
     void writePartitioningMesh(std::string filename);
 
-    /** \brief
-     * This function must be used if the values of a DOFVector must be synchronised
-     * over all ranks. That means, that each rank sends the values of the DOFs, which
-     * are owned by the rank and lie on an interior bounday, to all other ranks also
-     * having these DOFs.
-     *
-     * This function must be used, for example, after the lineary system is solved, or
-     * after the DOFVector is set by some user defined functions, e.g., initial
-     * solution functions.
-     */    
-    void synchVector(DOFVector<double> &vec);
-
-    /** \brief
-     * Works in the same way as the function above defined for DOFVectors. Due to
-     * performance, this function does not call \ref synchVector for each DOFVector,
-     * but instead sends all values of all DOFVectors all at once.
-     */
-    void synchVector(SystemVector &vec);
-
     // Removes all periodic boundaries from a given boundary map.
     void removeBoundaryCondition(BoundaryIndexMap& boundaryMap);
 
@@ -369,7 +373,7 @@ namespace AMDiS {
     ProblemTimeInterface *timeIF;
 
     ///
-    ProblemVec *probStat;
+    std::vector<ProblemVec*> probStat;
 
     /// The rank of the current process.
     int mpiRank;
@@ -427,6 +431,9 @@ namespace AMDiS {
     /// Number of DOFs in the rank mesh.
     int nRankDofs;
 
+    ///
+    int nOverallDofs;
+
     /** \brief 
      * Defines the interior boundaries of the domain that result from partitioning
      * the whole mesh. Contains only the boundaries, which are owned by the rank, i.e.,
@@ -490,18 +497,6 @@ namespace AMDiS {
     /// Is the index of the first row of the linear system, which is owned by the rank.
     int rstart;
 
-    /** \brief
-     * Number of components of the equation. Is used to calculate the exact number
-     * of rows in the the overall linear system.
-     */
-    int nComponents;
-
-    /// Number of rows of the whole linear system that are stored on this rank.
-    int nRankRows;
-
-    /// Overall number of the rows in the lineary system.
-    int nOverallRows;
-
     /** \brief
      * If the problem definition has been read from a serialization file, this 
      * variable is true, otherwise it is false. This variable is used to stop the
diff --git a/AMDiS/src/parallel/ParallelDomainDbg.cc b/AMDiS/src/parallel/ParallelDomainDbg.cc
index feba8923..bbba1f80 100644
--- a/AMDiS/src/parallel/ParallelDomainDbg.cc
+++ b/AMDiS/src/parallel/ParallelDomainDbg.cc
@@ -9,11 +9,11 @@
 
 namespace AMDiS {
 
-  void ParallelDomainDbg::testInteriorBoundary(ParallelDomainBase &pdb)
+  void ParallelDomainDbg::testInteriorBoundary(MeshDistributor &pdb)
   {
     FUNCNAME("ParallelDomainDbg::testInteriorBoundary()");
 
-    typedef ParallelDomainBase::RankToBoundMap RankToBoundMap;
+    typedef MeshDistributor::RankToBoundMap RankToBoundMap;
 
     std::vector<int*> sendBuffers, recvBuffers;
 
@@ -69,7 +69,7 @@ namespace AMDiS {
   }
 
 
-  void ParallelDomainDbg::testCommonDofs(ParallelDomainBase &pdb, bool printCoords)
+  void ParallelDomainDbg::testCommonDofs(MeshDistributor &pdb, bool printCoords)
   {
     FUNCNAME("ParallelDomainDbg::testCommonDofs()");
 
@@ -209,7 +209,7 @@ namespace AMDiS {
   }
 
 
-  void ParallelDomainDbg::testAllElements(ParallelDomainBase &pdb)
+  void ParallelDomainDbg::testAllElements(MeshDistributor &pdb)
   {
     FUNCNAME("ParallelDomainDbg::testAllElements()");
 
@@ -240,7 +240,7 @@ namespace AMDiS {
   }
 
 
-  void ParallelDomainDbg::testDofContainerCommunication(ParallelDomainBase &pdb, 
+  void ParallelDomainDbg::testDofContainerCommunication(MeshDistributor &pdb, 
 							RankToDofContainer &sendDofs,
 							RankToDofContainer &recvDofs)
   {
@@ -264,7 +264,7 @@ namespace AMDiS {
   }
 
 
-  void ParallelDomainDbg::printMapLocalGlobal(ParallelDomainBase &pdb, int rank)
+  void ParallelDomainDbg::printMapLocalGlobal(MeshDistributor &pdb, int rank)
   {    
     if (rank == -1 || pdb.mpiRank == rank) {
       std::cout << "====== DOF MAP LOCAL -> GLOBAL ====== " << std::endl;
@@ -307,7 +307,7 @@ namespace AMDiS {
   }
 
 
-  void ParallelDomainDbg::printMapPeriodic(ParallelDomainBase &pdb, int rank)
+  void ParallelDomainDbg::printMapPeriodic(MeshDistributor &pdb, int rank)
   {
     FUNCNAME("ParallelDomainDbg::printMapPeriodic()");
 
@@ -341,7 +341,7 @@ namespace AMDiS {
   }
 
   
-  void ParallelDomainDbg::printRankDofs(ParallelDomainBase &pdb, 
+  void ParallelDomainDbg::printRankDofs(MeshDistributor &pdb, 
 					int rank, 
 					DofContainer& rankDofs,
 					DofContainer& rankAllDofs)
@@ -370,7 +370,7 @@ namespace AMDiS {
   }
 
 
-  void ParallelDomainDbg::printBoundaryInfo(ParallelDomainBase &pdb)
+  void ParallelDomainDbg::printBoundaryInfo(MeshDistributor &pdb)
   {
     FUNCNAME("ParallelDomainDbg::printBoundaryInfo()");
 
diff --git a/AMDiS/src/parallel/ParallelDomainDbg.h b/AMDiS/src/parallel/ParallelDomainDbg.h
index e00f4571..1102dd98 100644
--- a/AMDiS/src/parallel/ParallelDomainDbg.h
+++ b/AMDiS/src/parallel/ParallelDomainDbg.h
@@ -29,7 +29,7 @@ namespace AMDiS {
   class ParallelDomainDbg 
   {
   protected:
-    typedef ParallelDomainBase::RankToDofContainer RankToDofContainer;
+    typedef MeshDistributor::RankToDofContainer RankToDofContainer;
 
   public:
     /** \brief
@@ -37,7 +37,7 @@ namespace AMDiS {
      *
      * \param[in]  pdb   Parallel problem definition used for debugging.
      */
-    static void testInteriorBoundary(ParallelDomainBase &pdb);
+    static void testInteriorBoundary(MeshDistributor &pdb);
 
     /** \brief
      * This function is used for debugging only. It traverses all interior boundaries
@@ -49,14 +49,14 @@ namespace AMDiS {
      * \param[in]  printCoords   If true, the coords of all common dofs are printed
      *                           to the screen.
      */
-    static void testCommonDofs(ParallelDomainBase &pdb, bool printCoords = false);
+    static void testCommonDofs(MeshDistributor &pdb, bool printCoords = false);
 
     /** \brief
     * Tests if all elements in the macro mesh are member of exactly one rank.
      *
      * \param[in]  pdb           Parallel problem definition used for debugging.
      */
-    static void testAllElements(ParallelDomainBase &pdb);
+    static void testAllElements(MeshDistributor &pdb);
 
     /** \brief
      * Tests for all ranks simultaneously, if the number of all send and received 
@@ -66,7 +66,7 @@ namespace AMDiS {
      * \param[in]  sendDofs   The map of all DOFs the rank will send.
      * \param[in]  recvDofs   The map of all DOFs the rank will receive.
      */
-    static void testDofContainerCommunication(ParallelDomainBase &pdb, 
+    static void testDofContainerCommunication(MeshDistributor &pdb, 
 					      RankToDofContainer &sendDofs,
 					      RankToDofContainer &recvDofs);
 
@@ -78,7 +78,7 @@ namespace AMDiS {
      * \param[in]  rank    If specified, only the information from the given rank 
      *                     is printed.
      */
-    static void printMapLocalGlobal(ParallelDomainBase &pdb, int rank = -1);
+    static void printMapLocalGlobal(MeshDistributor &pdb, int rank = -1);
 
     /** \brief
      * This function is used for debugging only. It prints all information about
@@ -88,7 +88,7 @@ namespace AMDiS {
      * \param[in] rank   If specified, only the information from the given rank 
      *                   is printed.
      */
-    static void printMapPeriodic(ParallelDomainBase &pdb, int rank = -1);
+    static void printMapPeriodic(MeshDistributor &pdb, int rank = -1);
 
     /** \brief
      * This function is used for debugging only. It prints information about dofs
@@ -101,7 +101,7 @@ namespace AMDiS {
      *                           by rank.
      * \param[in]  rankAllDofs   List of all dofs in ranks partition.
      */
-    static void printRankDofs(ParallelDomainBase &pdb,
+    static void printRankDofs(MeshDistributor &pdb,
 			      int rank, 
 			      DofContainer& rankDofs, 
 			      DofContainer& rankAllDofs);
@@ -111,7 +111,7 @@ namespace AMDiS {
      *
      * \param[in]  pdb           Parallel problem definition used for debugging.
      */
-    static void printBoundaryInfo(ParallelDomainBase &pdb);
+    static void printBoundaryInfo(MeshDistributor &pdb);
   };
   
 } // namespace AMDiS
-- 
GitLab