diff --git a/AMDiS/CMakeLists.txt b/AMDiS/CMakeLists.txt
index 93f88a9ad9ddbdecebe77dbb9653a36c9cb1a3ba..5a4c8414fa3ac6622fff1a75040c3dc1ff39f5e3 100644
--- a/AMDiS/CMakeLists.txt
+++ b/AMDiS/CMakeLists.txt
@@ -267,6 +267,7 @@ if(ENABLE_PARALLEL_DOMAIN)
 			${SOURCE_DIR}/parallel/PetscSolverFetiTimings.cc
 			${SOURCE_DIR}/parallel/PetscSolverGlobalMatrix.cc
 			${SOURCE_DIR}/parallel/PetscSolverGlobalBlockMatrix.cc
+			${SOURCE_DIR}/parallel/PetscSolverNavierStokes.cc
 			${SOURCE_DIR}/parallel/PetscSolverSchur.cc)
 	elseif(ENABLE_PARALLEL_DOMAIN STREQUAL "PMTL")
 		set(MTL_INCLUDE_DIR "")
diff --git a/AMDiS/src/parallel/PetscProblemStat.cc b/AMDiS/src/parallel/PetscProblemStat.cc
index 0d4424682b2eaf17ce6d914c4bdee6aac9f3437d..b9c57b2edef382d4bab44aaee373e83f0a840db8 100644
--- a/AMDiS/src/parallel/PetscProblemStat.cc
+++ b/AMDiS/src/parallel/PetscProblemStat.cc
@@ -18,6 +18,7 @@
 #include "parallel/PetscSolver.h"
 #include "parallel/MpiHelper.h"
 #include "parallel/BddcMlSolver.h"
+#include "parallel/PetscSolverNavierStokes.h"
 
 namespace AMDiS {
 
@@ -48,6 +49,8 @@ namespace AMDiS {
 #else
       ERROR_EXIT("AMDiS was compiled without BDDC-ML support!\n");
 #endif
+    } else if (tmp == "petsc-stokes") {
+      petscSolver = new PetscSolverNavierStokes();
     } else {
       ERROR_EXIT("No parallel solver %s available!\n", tmp.c_str());
     }
diff --git a/AMDiS/src/parallel/PetscSolverGlobalMatrix.cc b/AMDiS/src/parallel/PetscSolverGlobalMatrix.cc
index 94dbd8cd1f1873f31af35f8c6ccd90b7d6e7369e..b3c0208b009da5c53f2dd852757738cb910ba1df 100644
--- a/AMDiS/src/parallel/PetscSolverGlobalMatrix.cc
+++ b/AMDiS/src/parallel/PetscSolverGlobalMatrix.cc
@@ -53,6 +53,8 @@ namespace AMDiS {
 #endif
 
     matAssembly();
+
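+    // Remove Dirichlet rows from the assembled matrix before the solver
+    // and preconditioner objects are created.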
+    removeDirichletRows(seqMat, dofMap, getMatInterior());
     
     if (printMatInfo) {
       MatInfo matInfo;
@@ -65,24 +67,13 @@ namespace AMDiS {
       MSG("  nz unneeded: %d\n", static_cast<int>(matInfo.nz_unneeded));
     }
 
-    // === Init PETSc solver. ===
 
-    KSPCreate(mpiCommGlobal, &kspInterior);
-    KSPGetPC(kspInterior, &pcInterior);
-    KSPSetOperators(kspInterior, getMatInterior(), getMatInterior(), 
-		    SAME_NONZERO_PATTERN); 
-    KSPSetTolerances(kspInterior, 0.0, 1e-8, PETSC_DEFAULT, PETSC_DEFAULT);
-    KSPSetType(kspInterior, KSPBCGS);
-    KSPSetOptionsPrefix(kspInterior, kspPrefix.c_str());
-    KSPSetFromOptions(kspInterior);
+    // === Init PETSc solver and preconditioner objects. ===
 
+    initSolver(kspInterior);
+    KSPGetPC(kspInterior, &pcInterior);
     initPreconditioner(pcInterior);
 
-    // Do not delete the solution vector, use it for the initial guess.
-    if (!zeroStartVector)
-      KSPSetInitialGuessNonzero(kspInterior, PETSC_TRUE);
-
-    removeDirichletRows(seqMat, dofMap, getMatInterior());
 
 #if (DEBUG != 0)
     MSG("Fill petsc matrix 3 needed %.5f seconds\n", MPI::Wtime() - wtime);
@@ -450,11 +441,11 @@ namespace AMDiS {
   {
     FUNCNAME("PetscSolverGlobalMatrix::destroyMatrixData()");
 
-    exitPreconditioner(pcInterior);
-
     matDestroy();
 
-    KSPDestroy(&kspInterior);
+    exitPreconditioner(pcInterior);
+
+    exitSolver(kspInterior);
   }
 
 
@@ -596,14 +587,50 @@ namespace AMDiS {
 	   isNames[i].c_str());
       }
 
-      IS is;
-      interiorMap->createIndexSet(is, blockComponents[0], nComponents);
-      PCFieldSplitSetIS(pc, isNames[i].c_str(), is);
-      ISDestroy(&is);
+      createFieldSplit(pc, isNames[i], blockComponents);
     }
   }
 
 
+  void PetscSolverGlobalMatrix::createFieldSplit(PC pc, 
+						 string splitName, 
+						 vector<int> &components)
+  {
+    FUNCNAME("PetscSolverGlobalMatrix::createFieldSplit()");
+
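+    // Only contiguous component ranges are supported: the index set spans
+    // components.size() components starting at components[0].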
+    IS is;
+    interiorMap->createIndexSet(is, components[0], components.size());
+    PCFieldSplitSetIS(pc, splitName.c_str(), is);
+    ISDestroy(&is);
+  }
+
+
+  void PetscSolverGlobalMatrix::initSolver(KSP &ksp)
+  {
+    FUNCNAME("PetscSolverGlobalMatrix::initSolver()");
+
+    KSPCreate(mpiCommGlobal, &ksp);
+    KSPSetOperators(ksp, getMatInterior(), getMatInterior(), 
+		    SAME_NONZERO_PATTERN); 
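+    // rtol = 0 disables the relative criterion, so the solver iterates
+    // until the absolute residual norm drops below 1e-8.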
+    KSPSetTolerances(ksp, 0.0, 1e-8, PETSC_DEFAULT, PETSC_DEFAULT);
+    KSPSetType(ksp, KSPBCGS);
+    KSPSetOptionsPrefix(ksp, kspPrefix.c_str());
+    KSPSetFromOptions(ksp);
+
+    // Do not delete the solution vector, use it for the initial guess.
+    if (!zeroStartVector)
+      KSPSetInitialGuessNonzero(ksp, PETSC_TRUE);
+  }
+
+
+  void PetscSolverGlobalMatrix::exitSolver(KSP &ksp)
+  {
+    FUNCNAME("PetscSolverGlobalMatrix::exitSolver()");
+
+    KSPDestroy(&ksp);
+  }
+
+
   void PetscSolverGlobalMatrix::initPreconditioner(PC pc)
   {
     FUNCNAME("PetscSolverGlobalMatrix::initPreconditioner()");
diff --git a/AMDiS/src/parallel/PetscSolverGlobalMatrix.h b/AMDiS/src/parallel/PetscSolverGlobalMatrix.h
index da09a39da6ec3a85f8a1f9bd75eec5e7ea5b21ad..0a5f4089a18107970c825a6e8820a1eef8c3bb26 100644
--- a/AMDiS/src/parallel/PetscSolverGlobalMatrix.h
+++ b/AMDiS/src/parallel/PetscSolverGlobalMatrix.h
@@ -71,8 +71,27 @@ namespace AMDiS {
 			     ParallelDofMapping &dofMap,
 			     Vec mpiVec);
 
+    /// Reads field split information and creates a splitting based on
+    /// component numbers.
     void createFieldSplit(PC pc);
 
+    /** \brief 
+     * Creates a new field split for a preconditioner object.
+     *
+     * \param[in] pc          PETSc preconditioner object, must be of
+     *                        type PCFIELDSPLIT
+     * \param[in] splitName   Name of the field split, can be used to set other
+     *                        parameters of the PCFIELDSPLIT preconditioner on
+     *                        the command line.
+     * \param[in] components  System component numbers of the field split. At
+     *                        the moment, only contiguous component ranges
+     *                        are allowed.
+     */
+    void createFieldSplit(PC pc, string splitName, vector<int> &components);
+
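+    /// Creates the KSP object and sets the basic solver parameters.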
+    virtual void initSolver(KSP &ksp);
+
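+    /// Destroys the KSP object.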
+    virtual void exitSolver(KSP &ksp);
+
     virtual void initPreconditioner(PC pc);
 
     virtual void exitPreconditioner(PC pc);
diff --git a/AMDiS/src/parallel/PetscSolverNavierStokes.cc b/AMDiS/src/parallel/PetscSolverNavierStokes.cc
new file mode 100644
index 0000000000000000000000000000000000000000..cc9f819dc82d021b8ab67fe1b990589caa426630
--- /dev/null
+++ b/AMDiS/src/parallel/PetscSolverNavierStokes.cc
@@ -0,0 +1,56 @@
+//
+// Software License for AMDiS
+//
+// Copyright (c) 2010 Dresden University of Technology 
+// All rights reserved.
+// Authors: Simon Vey, Thomas Witkowski et al.
+//
+// This file is part of AMDiS
+//
+// See also license.opensource.txt in the distribution.
+
+
+
+#include "parallel/PetscSolverNavierStokes.h"
+
+namespace AMDiS {
+
+  using namespace std;
+
+  
+  void PetscSolverNavierStokes::initSolver(KSP &ksp)
+  {
+    FUNCNAME("PetscSolverNavierStokes::initSolver()");
+
+    MSG("RUN NAVIER STOKES SOLVER INIT!\n");
+
+    KSPCreate(mpiCommGlobal, &ksp);
+    KSPSetOperators(ksp, getMatInterior(), getMatInterior(), 
+		    SAME_NONZERO_PATTERN); 
+    KSPSetTolerances(ksp, 0.0, 1e-8, PETSC_DEFAULT, PETSC_DEFAULT);
+    KSPSetType(ksp, KSPGMRES);
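+    // All runtime options of this solver take the prefix "ns_",
+    // e.g., -ns_ksp_rtol <tol>.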
+    KSPSetOptionsPrefix(ksp, "ns_");
+    KSPSetFromOptions(ksp);
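+    // Print the residual norm in each iteration.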
+    KSPMonitorSet(ksp, KSPMonitorDefault, PETSC_NULL, PETSC_NULL);
+  }
+
+
+  void PetscSolverNavierStokes::initPreconditioner(PC pc)
+  {
+    FUNCNAME("PetscSolverNavierStokes::initPreconditioner()");
+
+    MSG("RUN NAVIER STOKES PRECONDITIONER INIT!\n");
+
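+    // The 2D Navier-Stokes system consists of two velocity components
+    // (0 and 1) and one pressure component (2).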
+    vector<int> velocityComponents;
+    velocityComponents.push_back(0);
+    velocityComponents.push_back(1);
+    vector<int> pressureComponent;
+    pressureComponent.push_back(2);
+
+    PCSetType(pc, PCFIELDSPLIT);
+    // The Schur factorization type takes effect only for a Schur type
+    // field split, so it must be selected explicitly.
+    PCFieldSplitSetType(pc, PC_FIELDSPLIT_SCHUR);
+    PCFieldSplitSetSchurFactType(pc, PC_FIELDSPLIT_SCHUR_FACT_FULL);
+    createFieldSplit(pc, "velocity", velocityComponents);
+    createFieldSplit(pc, "pressure", pressureComponent);
+  }
+
+}
diff --git a/AMDiS/src/parallel/PetscSolverNavierStokes.h b/AMDiS/src/parallel/PetscSolverNavierStokes.h
new file mode 100644
index 0000000000000000000000000000000000000000..57a869947f8f948ac08f685392abe3075a32847d
--- /dev/null
+++ b/AMDiS/src/parallel/PetscSolverNavierStokes.h
@@ -0,0 +1,47 @@
+// ============================================================================
+// ==                                                                        ==
+// == AMDiS - Adaptive multidimensional simulations                          ==
+// ==                                                                        ==
+// ==  http://www.amdis-fem.org                                              ==
+// ==                                                                        ==
+// ============================================================================
+//
+// Software License for AMDiS
+//
+// Copyright (c) 2010 Dresden University of Technology 
+// All rights reserved.
+// Authors: Simon Vey, Thomas Witkowski et al.
+//
+// This file is part of AMDiS
+//
+// See also license.opensource.txt in the distribution.
+
+
+
+/** \file PetscSolverNavierStokes.h */
+
+#ifndef AMDIS_PETSC_SOLVER_NAVIER_STOKES_H
+#define AMDIS_PETSC_SOLVER_NAVIER_STOKES_H
+
+#include "parallel/PetscSolverGlobalMatrix.h"
+
+namespace AMDiS {
+
+  using namespace std;
+
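+  /** \brief
+   * PETSc solver for 2D Navier-Stokes problems. Based on a global matrix
+   * and a PCFIELDSPLIT preconditioner with a full Schur complement
+   * factorization that separates the velocity and pressure components.
+   */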
+  class PetscSolverNavierStokes : public PetscSolverGlobalMatrix
+  {
+  public:
+    PetscSolverNavierStokes()
+      : PetscSolverGlobalMatrix()
+    {}
+
+  protected:
+    void initSolver(KSP &ksp);
+
+    void initPreconditioner(PC pc);
+  };
+
+}
+
+#endif