diff --git a/AMDiS/src/parallel/MeshDistributor.cc b/AMDiS/src/parallel/MeshDistributor.cc
index 20eb83d3f6e6b201a48707516c72939e9a68aced..1aabfaea6784ec1e63d1b55b7c369f4a6a99ab10 100644
--- a/AMDiS/src/parallel/MeshDistributor.cc
+++ b/AMDiS/src/parallel/MeshDistributor.cc
@@ -29,6 +29,7 @@
 #include "parallel/CheckerPartitioner.h"
 #include "parallel/MpiHelper.h"
 #include "parallel/DofComm.h"
+#include "parallel/ParallelProblemStatBase.h"
 #include "io/ElementFileWriter.h"
 #include "io/MacroInfo.h"
 #include "io/MacroWriter.h"
@@ -145,6 +146,10 @@ namespace AMDiS {
     if (initialized)
       return;
 
+    double first = MPI::Wtime();
+    MSG("Initialization phase 1 needed %.5f seconds\n", 
+	first - ParallelProblemStatBase::initTimeStamp);
+
     TEST_EXIT(mpiSize > 1)
       ("Parallelization does not work with only one process!\n");
     TEST_EXIT(feSpaces.size() > 0)
@@ -348,6 +353,7 @@ namespace AMDiS {
       elObjDb.clear();
 
     initialized = true;
+    MSG("Init parallelization needed %.5f seconds\n", MPI::Wtime() - first);
   }
 
 
@@ -1325,8 +1331,10 @@ namespace AMDiS {
     bool partitioningSucceed = 
       partitioner->partition(elemWeights, ADAPTIVE_REPART);
     if (!partitioningSucceed) {
+      MPI::COMM_WORLD.Barrier();
       repartitioningFailed = 20;
       MSG("Mesh partitioner created an empty partition!\n");
+      MSG("Mesh repartitioning needed %.5f seconds\n", MPI::Wtime() - timePoint);
       return;
     }
 
@@ -1334,9 +1342,23 @@ namespace AMDiS {
     // In case the partitioner does not create a new mesh partition, return
     // without any changes.
     if (!partitioner->meshChanged()) {
-      repartitioningFailed = 20;
       MSG("Mesh partitioner did not create a new partition!\n");
-      return;
+      MSG("Trying to refine the partitioning!\n");
+      partitioningSucceed = partitioner->partition(elemWeights, REFINE_PART);
+      if (partitioningSucceed) {
+	MSG("OKAY, GOOD SO FAR!\n");
+	if (partitioner->meshChanged())
+	  MSG("AND YES, THAT DID IT!\n");
+	else
+	  MSG("NO, UNFORTUNATELY NOT!\n");
+      }
+
+      if (!partitioningSucceed || !partitioner->meshChanged()) {
+	MPI::COMM_WORLD.Barrier();
+	repartitioningFailed = 20;
+	MSG("Mesh repartitioning needed %.5f seconds\n", MPI::Wtime() - timePoint);
+	return;
+      }
     }
 
 
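Note on the two hunks above: the control flow now distinguishes an empty partition (give up immediately) from an unchanged one (retry with a refinement-only pass before giving up). A minimal sketch of the resulting logic, assuming the partitioner interface used in the diff and omitting the timing and logging calls:

    // First attempt: full adaptive repartitioning.
    if (!partitioner->partition(elemWeights, ADAPTIVE_REPART)) {
      repartitioningFailed = 20;   // signal failure, as in the diff
      return;
    }

    // Partitioning succeeded but may equal the current distribution; in
    // that case, try the cheaper refinement mode before giving up.
    if (!partitioner->meshChanged()) {
      bool succeed = partitioner->partition(elemWeights, REFINE_PART);
      if (!succeed || !partitioner->meshChanged()) {
        repartitioningFailed = 20;
        return;
      }
    }

The MPI::COMM_WORLD.Barrier() before each failure path ensures all ranks reach the timing MSG together, so the reported repartitioning time is comparable across processes.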
diff --git a/AMDiS/src/parallel/ParMetisPartitioner.cc b/AMDiS/src/parallel/ParMetisPartitioner.cc
index ce0967ff270c16df79e7803b0c86ffda0f14229b..8a52fbc5e50aa9c97ae6601d81fe59bd8a10de3a 100644
--- a/AMDiS/src/parallel/ParMetisPartitioner.cc
+++ b/AMDiS/src/parallel/ParMetisPartitioner.cc
@@ -299,9 +299,9 @@ namespace AMDiS {
       tpwgts[i] = 1.0 / static_cast<double>(nparts);
    
     float scale = 10000.0 / maxWgt;
-    for (int i = 0; i < nElements; i++)
+    for (int i = 0; i < nElements; i++)   
       wgts[i] = floatWgts[i];
-      //      wgts[i] = static_cast<int>(floatWgts[i] * scale);
+    //      wgts[i] = static_cast<int>(floatWgts[i] * scale);
 
 
     // === Start ParMETIS. ===
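Note on the hunk above: it only re-indents the commented-out scaling line so that it no longer reads like the body of the loop. Since ParMETIS takes integer weight arrays, the active assignment wgts[i] = floatWgts[i] truncates the float weights; the commented-out line preserves the scaled-integer variant. A sketch of that variant, reusing the names from the diff:

    // Map the float element weights onto the integer range [0, 10000]
    // before handing them to ParMETIS (sketch of the disabled variant).
    float scale = 10000.0f / maxWgt;
    for (int i = 0; i < nElements; i++)
      wgts[i] = static_cast<int>(floatWgts[i] * scale);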
diff --git a/AMDiS/src/parallel/ParallelProblemStatBase.cc b/AMDiS/src/parallel/ParallelProblemStatBase.cc
index 6fd3382fa88cdf80c50b002c7657be1cb6c47bdc..0960bb7d272ddeeebefc5c581617b409f0c48b9f 100644
--- a/AMDiS/src/parallel/ParallelProblemStatBase.cc
+++ b/AMDiS/src/parallel/ParallelProblemStatBase.cc
@@ -17,6 +17,19 @@
 
 namespace AMDiS {
 
+  double ParallelProblemStatBase::initTimeStamp = 0.0;
+
+
+  ParallelProblemStatBase::ParallelProblemStatBase(std::string nameStr,
+						   ProblemIterationInterface *problemIteration)
+    : ProblemStatSeq(nameStr, problemIteration),
+      meshDistributor(NULL)
+  {
+    initTimeStamp = MPI::Wtime();
+    mpi::globalMin(initTimeStamp);
+  }
+
+
   void ParallelProblemStatBase::buildAfterCoarsen(AdaptInfo *adaptInfo, Flag flag,
 						  bool assembleMatrix,
 						  bool assembleVector)
@@ -37,6 +50,9 @@ namespace AMDiS {
 					   ProblemStatSeq *adoptProblem,
 					   Flag adoptFlag)
   {
+    MSG("Initialization phase 0 needed %.5f seconds\n", 
+	MPI::Wtime() - initTimeStamp);
+
     ProblemStatSeq::initialize(initFlag, adoptProblem, adoptFlag);
 
     MeshDistributor::addProblemStatGlobal(this);
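The constructor added above timestamps the earliest problem construction and reduces it to the global minimum, since MPI::Wtime() clocks are not guaranteed to be synchronized across ranks (only when the implementation sets MPI_WTIME_IS_GLOBAL). The diff only calls mpi::globalMin; a plausible shape for such a helper, given as an assumption rather than the actual AMDiS implementation:

    #include <mpi.h>

    // In-place all-reduce taking the minimum of 'value' over all ranks
    // (assumed equivalent of mpi::globalMin).
    inline void globalMin(double &value)
    {
      MPI_Allreduce(MPI_IN_PLACE, &value, 1, MPI_DOUBLE, MPI_MIN,
                    MPI_COMM_WORLD);
    }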
diff --git a/AMDiS/src/parallel/ParallelProblemStatBase.h b/AMDiS/src/parallel/ParallelProblemStatBase.h
index 6eeb5c67f0f06aa372158030a5ef60934bc70fe2..aa6eca77fd133901c1755155d6bc73e6940d378e 100644
--- a/AMDiS/src/parallel/ParallelProblemStatBase.h
+++ b/AMDiS/src/parallel/ParallelProblemStatBase.h
@@ -32,10 +32,7 @@ namespace AMDiS {
   {
   public:
     ParallelProblemStatBase(std::string nameStr,
-			    ProblemIterationInterface *problemIteration = NULL)
-      : ProblemStatSeq(nameStr, problemIteration),
-	meshDistributor(NULL)
-    {}
+			    ProblemIterationInterface *problemIteration = NULL);
 
     virtual ~ParallelProblemStatBase() {}
 
@@ -51,6 +48,9 @@ namespace AMDiS {
 
   protected:
     MeshDistributor *meshDistributor;
+
+  public:
+    static double initTimeStamp;
   };
 
 }
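Together with the .cc changes, the header now exposes initTimeStamp so that MeshDistributor can report the time since the earliest problem construction: phase 0 spans construction to ProblemStatSeq::initialize(), phase 1 spans initialize() to MeshDistributor::initParallelization(). A condensed, AMDiS-independent sketch of the pattern (hypothetical names):

    #include <mpi.h>
    #include <cstdio>

    static double initTimeStamp = 0.0;   // set once, in the first constructor

    void markStart()                     // called from the constructor
    {
      initTimeStamp = MPI_Wtime();       // then reduced to the global minimum
    }

    void logPhase(const char *phase)     // called at each later milestone
    {
      std::printf("%s needed %.5f seconds\n", phase,
                  MPI_Wtime() - initTimeStamp);
    }

Making the timestamp a public static member keeps the diff small; a static accessor would avoid the second public: section in the class.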
diff --git a/AMDiS/src/parallel/PetscSolverFeti.cc b/AMDiS/src/parallel/PetscSolverFeti.cc
index 597374c657ad6a1ddd9caa71e22c869992e9c01a..b0863e35679409300ba46470c60bc3f77acf9838 100644
--- a/AMDiS/src/parallel/PetscSolverFeti.cc
+++ b/AMDiS/src/parallel/PetscSolverFeti.cc
@@ -1006,9 +1006,9 @@ namespace AMDiS {
       // matTmp = inv(A_BB) trans(J) trans(Q)
       Mat qT, jTqT;
       MatTranspose(mat_augmented_lagrange, MAT_INITIAL_MATRIX, &qT);
-      Mat jT;
+      //      Mat jT;
       MSG("START COMPUTING MAT TRANS\n");
-      MatTranspose(mat_lagrange, MAT_INITIAL_MATRIX, &jT);
+      //      MatTranspose(mat_lagrange, MAT_INITIAL_MATRIX, &jT);
       MSG("DONE\n");      
       MatTransposeMatMult(mat_lagrange, qT, MAT_INITIAL_MATRIX, PETSC_DEFAULT, 
 			  &jTqT);
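Note on the hunk above: MatTransposeMatMult(A, B, ...) computes trans(A)*B directly, so the explicit transpose of mat_lagrange was redundant and is commented out (the bracketing START/DONE messages now time nothing and could arguably go as well). For comparison, the two equivalent ways to form the product in the PETSc C API:

    /* Explicit transpose, then multiply: materializes the intermediate jT. */
    Mat jT, jTqT;
    MatTranspose(mat_lagrange, MAT_INITIAL_MATRIX, &jT);
    MatMatMult(jT, qT, MAT_INITIAL_MATRIX, PETSC_DEFAULT, &jTqT);
    MatDestroy(&jT);

    /* Fused operation: no intermediate transpose is stored. */
    MatTransposeMatMult(mat_lagrange, qT, MAT_INITIAL_MATRIX, PETSC_DEFAULT,
                        &jTqT);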
@@ -2118,35 +2118,6 @@ namespace AMDiS {
       interfaceDofMap.createVec(vecRhsInterface);
       interfaceDofMap.createVec(vecSolInterface);
 
-      {
-	// PetscViewer petscView;
-	// PetscViewerBinaryOpen(PETSC_COMM_WORLD, "sol0.vec", 
-	// 		      FILE_MODE_READ, &petscView);
-	
-	// VecLoad(vecSolInterface, petscView);
-	// PetscViewerDestroy(&petscView);
-      } 
-      {
-	//	PetscViewer petscView;
-	// PetscViewerBinaryOpen(PETSC_COMM_WORLD, "sol1.vec", 
-	// 		      FILE_MODE_READ, &petscView);
-	
-	// VecLoad(vecSolLagrange, petscView);
-	// PetscViewerDestroy(&petscView);
-      } 
-
-      {
-	int n;
-	VecGetSize(vecSolInterface, &n);
-	double sum;
-	VecSum(vecSolInterface, &sum);
-	sum = -sum / static_cast<int>(n);
-	MSG("AVRG = %e\n", sum);
-      }
-
-
-
-
       Vec vecRhsArray[2] = {vecRhsInterface, vecRhsLagrange}; 
       VecCreateNest(mpiCommGlobal, 2, PETSC_NULL, vecRhsArray, &vecRhs);
 
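The block deleted above was debug-only: commented-out binary reloads of earlier solutions plus a check of the mean interface (pressure) value. Should that check be needed again, a cleaned-up sketch using the vector name from the diff:

    /* Print the negated average entry of vecSolInterface, as the removed
       debug block did. */
    PetscInt n;
    PetscScalar sum;
    VecGetSize(vecSolInterface, &n);
    VecSum(vecSolInterface, &sum);
    PetscPrintf(PETSC_COMM_WORLD, "AVRG = %e\n", (double)(-sum / n));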
@@ -2262,38 +2233,8 @@ namespace AMDiS {
     PetscSolverFetiDebug::debugFeti(*this, vecRhs);
 
     // === Solve with FETI-DP operator. ===
-    KSPSetInitialGuessNonzero(ksp_feti, PETSC_TRUE);
     KSPSolve(ksp_feti, vecRhs, vecSol);
 
-      {
-	int n;
-	VecGetSize(vecSolInterface, &n);
-	double sum;
-	VecSum(vecSolInterface, &sum);
-	sum = -sum / static_cast<int>(n);
-	MSG("SOL PRESSURE AVRG = %e\n", sum);
-      }
-
-    
-
-    {
-      PetscViewer petscView;
-      PetscViewerBinaryOpen(PETSC_COMM_WORLD, "sol0.vec", 
-			    FILE_MODE_WRITE, &petscView);
-      VecView(vecSolInterface, petscView);
-      PetscViewerDestroy(&petscView);
-    }
-
-
-    {
-      PetscViewer petscView;
-      PetscViewerBinaryOpen(PETSC_COMM_WORLD, "sol1.vec", 
-			    FILE_MODE_WRITE, &petscView);
-      VecView(vecSolLagrange, petscView);
-      PetscViewerDestroy(&petscView);
-    }
-
-
     if (printTimings) {
       MPI::COMM_WORLD.Barrier();
       MSG("FETI-DP timing 10: %.5f seconds (application of FETI-DP operator)\n",
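The second hunk likewise strips debug-only code: the post-solve pressure average, the binary dumps of the interface and Lagrange solution vectors, and the KSPSetInitialGuessNonzero call, so KSPSolve now starts from PETSc's default zero initial guess. If such a dump is ever needed again, the removed pattern was, in sketch form:

    /* Write a vector to a binary file for offline comparison. */
    PetscViewer viewer;
    PetscViewerBinaryOpen(PETSC_COMM_WORLD, "sol0.vec", FILE_MODE_WRITE,
                          &viewer);
    VecView(vecSolInterface, viewer);
    PetscViewerDestroy(&viewer);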