From ae15b32c577ec65819b7e0872e37e9a4d1017a5f Mon Sep 17 00:00:00 2001
From: Thomas Witkowski <thomas.witkowski@gmx.de>
Date: Mon, 20 Aug 2012 14:51:51 +0000
Subject: [PATCH] Now it compiles, but certainly does not work yet.

---
 .../parallel/PetscSolverGlobalBlockMatrix.cc  | 19 +++++++++---------
 AMDiS/src/parallel/PetscSolverSchur.cc        | 20 ++++++++++---------
 2 files changed, 20 insertions(+), 19 deletions(-)

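This patch drops the per-solver mat[0][0] bookkeeping in favor of a shared
petscData object that owns the interior PETSc matrix and its lifecycle
(create, assembly, destroy). The helper class itself is not part of this
diff, so the following is only a sketch of the interface the call sites
below assume; the class name, member names, and the exact parameter types
(ParallelDofMapping, MPI::Intracomm) are reconstructed from the calls and
may differ from the real AMDiS code:

  // Sketch only, reconstructed from the call sites in this patch.
  class PetscMatrixData
  {
  public:
    // Stores the DOF mappings and communicators and creates the
    // interior matrix object.
    void create(ParallelDofMapping *interiorMap,
                ParallelDofMapping *coarseSpaceMap,
                int subdomainLevel,
                MPI::Intracomm &mpiCommLocal,
                MPI::Intracomm &mpiCommGlobal);

    // Returns a reference, so call sites can pass the matrix by value
    // (KSPSetOperators) or take its address (MatCreateNest, MatDestroy).
    Mat& getInteriorMat() { return matIntInt; }

    // Replaces the MatAssemblyBegin/MatAssemblyEnd pairs in the solvers.
    void assembly()
    {
      MatAssemblyBegin(matIntInt, MAT_FINAL_ASSEMBLY);
      MatAssemblyEnd(matIntInt, MAT_FINAL_ASSEMBLY);
    }

    // Replaces the scattered MatDestroy(&mat[0][0]) calls.
    void destroy() { MatDestroy(&matIntInt); }

  private:
    Mat matIntInt;
  };
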
diff --git a/AMDiS/src/parallel/PetscSolverGlobalBlockMatrix.cc b/AMDiS/src/parallel/PetscSolverGlobalBlockMatrix.cc
index d2daf4b2..04e6cc44 100644
--- a/AMDiS/src/parallel/PetscSolverGlobalBlockMatrix.cc
+++ b/AMDiS/src/parallel/PetscSolverGlobalBlockMatrix.cc
@@ -19,17 +19,16 @@ namespace AMDiS {
   void PetscSolverGlobalBlockMatrix::fillPetscMatrix(Matrix<DOFMatrix*> *seqMat)
   {
     FUNCNAME("PetscSolverGlobalBlockMatrix::fillPetscMatrix()");
-#if 0
+
     TEST_EXIT_DBG(meshDistributor)("No mesh distributor object defined!\n");
     TEST_EXIT_DBG(interiorMap)("No parallel mapping object defined!\n");
     TEST_EXIT_DBG(seqMat)("No DOF matrix defined!\n");
 
-    mat.resize(1);
-    mat[0].resize(1);
-    Mat &matIntInt = mat[0][0];
+    double wtime = MPI::Wtime();
 
+    petscData.create(interiorMap, coarseSpaceMap, 
+		     subdomainLevel, mpiCommLocal, mpiCommGlobal);
 
-    double wtime = MPI::Wtime();
     const FiniteElemSpace *feSpace = meshDistributor->getFeSpace(0);
     nComponents = seqMat->getNumRows();
     int nRankRows = (*interiorMap)[feSpace].nRankDofs;
@@ -87,22 +86,22 @@ namespace AMDiS {
 
     MatCreateNest(mpiCommGlobal, 
 		  nBlocks, PETSC_NULL, nBlocks, PETSC_NULL, 
-		  &(nestMat[0]), &matIntInt);
+		  &(nestMat[0]), &petscData.getInteriorMat());
 
 #if (DEBUG != 0)
     MSG("Fill petsc matrix 2 needed %.5f seconds\n", MPI::Wtime() - wtime);
 #endif
 
-    MatAssemblyBegin(matIntInt, MAT_FINAL_ASSEMBLY);
-    MatAssemblyEnd(matIntInt, MAT_FINAL_ASSEMBLY);
+    petscData.assembly();
 
     // === Init PETSc solver. ===
     KSPCreate(mpiCommGlobal, &kspInterior);
-    KSPSetOperators(kspInterior, matIntInt, matIntInt, SAME_NONZERO_PATTERN); 
+    KSPSetOperators(kspInterior, 
+		    petscData.getInteriorMat(), 
+		    petscData.getInteriorMat(), SAME_NONZERO_PATTERN); 
     KSPSetFromOptions(kspInterior);
 
     MSG("Fill petsc matrix needed %.5f seconds\n", MPI::Wtime() - wtime);
-#endif
   }
 
 
diff --git a/AMDiS/src/parallel/PetscSolverSchur.cc b/AMDiS/src/parallel/PetscSolverSchur.cc
index 3cbc0d01..1560b8ad 100644
--- a/AMDiS/src/parallel/PetscSolverSchur.cc
+++ b/AMDiS/src/parallel/PetscSolverSchur.cc
@@ -180,9 +180,8 @@ namespace AMDiS {
   {
     FUNCNAME("PetscSolverSchur::fillPetscMatrix()");
 
-    mat.resize(1);
-    mat[0].resize(1);
-    Mat &matIntInt = mat[0][0];
+    petscData.create(interiorMap, coarseSpaceMap, 
+		     subdomainLevel, mpiCommLocal, mpiCommGlobal);
 
     const FiniteElemSpace *feSpace = meshDistributor->getFeSpace(0);
     int nComponents = seqMat->getNumRows();
@@ -242,11 +241,11 @@ namespace AMDiS {
     tmpIS[0] = interiorIs;
     tmpIS[1] = boundaryIs;
 
-    MatCreateNest(mpiCommGlobal, 2, &tmpIS[0], 2, &tmpIS[0], &tmpMat[0][0], &matIntInt);
-    MatNestSetVecType(matIntInt, VECNEST);
-    MatAssemblyBegin(matIntInt, MAT_FINAL_ASSEMBLY);
-    MatAssemblyEnd(matIntInt, MAT_FINAL_ASSEMBLY);
+    MatCreateNest(mpiCommGlobal, 2, &tmpIS[0], 2, &tmpIS[0], &tmpMat[0][0], 
+		  &petscData.getInteriorMat());
+    MatNestSetVecType(petscData.getInteriorMat(), VECNEST);
 
+    petscData.assembly();
 
     int nRankRows = (*interiorMap)[feSpace].nRankDofs * nComponents;
     int nOverallRows = (*interiorMap)[feSpace].nOverallDofs * nComponents;
@@ -284,7 +283,9 @@ namespace AMDiS {
 
     KSPCreate(mpiCommGlobal, &kspInterior);
 
-    KSPSetOperators(kspInterior, mat[0][0], mat[0][0], SAME_NONZERO_PATTERN); 
+    KSPSetOperators(kspInterior, 
+		    petscData.getInteriorMat(), 
+		    petscData.getInteriorMat(), SAME_NONZERO_PATTERN); 
     KSPSetTolerances(kspInterior, 0.0, 1e-8, PETSC_DEFAULT, PETSC_DEFAULT);
     KSPSetFromOptions(kspInterior);
 
@@ -335,7 +336,8 @@ namespace AMDiS {
     MatDestroy(&matA12);
     MatDestroy(&matA21);
     MatDestroy(&matA22);
-    MatDestroy(&mat[0][0]);
+
+    petscData.destroy();
 
     KSPDestroy(&kspInterior);
   }
-- 
GitLab
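
A note on the design, assuming the sketched interface above: wrapping the
interior matrix in petscData keeps the assembly pair (MatAssemblyBegin and
MatAssemblyEnd with MAT_FINAL_ASSEMBLY) and the final MatDestroy in one
place instead of duplicating them in PetscSolverGlobalBlockMatrix and
PetscSolverSchur, and both solvers now reach the matrix through the same
getInteriorMat() accessor, so a later change to how the interior matrix is
stored only touches the helper class.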