diff --git a/AMDiS/src/parallel/PetscSolverGlobalBlockMatrix.cc b/AMDiS/src/parallel/PetscSolverGlobalBlockMatrix.cc
index d2daf4b2439191d39d3998689cb54c7cbb719fb6..04e6cc4403619e99c72a02e9e204a4c75825c26c 100644
--- a/AMDiS/src/parallel/PetscSolverGlobalBlockMatrix.cc
+++ b/AMDiS/src/parallel/PetscSolverGlobalBlockMatrix.cc
@@ -19,17 +19,16 @@ namespace AMDiS {
   void PetscSolverGlobalBlockMatrix::fillPetscMatrix(Matrix<DOFMatrix*> *seqMat)
   {
     FUNCNAME("PetscSolverGlobalBlockMatrix::fillPetscMatrix()");
-#if 0
+
     TEST_EXIT_DBG(meshDistributor)("No mesh distributor object defined!\n");
     TEST_EXIT_DBG(interiorMap)("No parallel mapping object defined!\n");
     TEST_EXIT_DBG(seqMat)("No DOF matrix defined!\n");
 
-    mat.resize(1);
-    mat[0].resize(1);
-    Mat &matIntInt = mat[0][0];
+    double wtime = MPI::Wtime();
+    petscData.create(interiorMap, coarseSpaceMap,
+                     subdomainLevel, mpiCommLocal, mpiCommGlobal);
 
-    double wtime = MPI::Wtime();
 
     const FiniteElemSpace *feSpace = meshDistributor->getFeSpace(0);
     nComponents = seqMat->getNumRows();
     int nRankRows = (*interiorMap)[feSpace].nRankDofs;
@@ -87,22 +86,22 @@ namespace AMDiS {
     MatCreateNest(mpiCommGlobal, nBlocks, PETSC_NULL, nBlocks, PETSC_NULL,
-                  &(nestMat[0]), &matIntInt);
+                  &(nestMat[0]), &petscData.getInteriorMat());
 
 #if (DEBUG != 0)
     MSG("Fill petsc matrix 2 needed %.5f seconds\n", MPI::Wtime() - wtime);
 #endif
 
-    MatAssemblyBegin(matIntInt, MAT_FINAL_ASSEMBLY);
-    MatAssemblyEnd(matIntInt, MAT_FINAL_ASSEMBLY);
+    petscData.assembly();
 
 
     // === Init PETSc solver. ===
 
     KSPCreate(mpiCommGlobal, &kspInterior);
-    KSPSetOperators(kspInterior, matIntInt, matIntInt, SAME_NONZERO_PATTERN);
+    KSPSetOperators(kspInterior,
+                    petscData.getInteriorMat(),
+                    petscData.getInteriorMat(), SAME_NONZERO_PATTERN);
     KSPSetFromOptions(kspInterior);
 
     MSG("Fill petsc matrix needed %.5f seconds\n", MPI::Wtime() - wtime);
-#endif
   }
diff --git a/AMDiS/src/parallel/PetscSolverSchur.cc b/AMDiS/src/parallel/PetscSolverSchur.cc
index 3cbc0d019399009cdd7e3a3a251dfb62f9aaef7b..1560b8ade5070018338c8a698e68d208c46e819c 100644
--- a/AMDiS/src/parallel/PetscSolverSchur.cc
+++ b/AMDiS/src/parallel/PetscSolverSchur.cc
@@ -180,9 +180,8 @@ namespace AMDiS {
   {
     FUNCNAME("PetscSolverSchur::fillPetscMatrix()");
 
-    mat.resize(1);
-    mat[0].resize(1);
-    Mat &matIntInt = mat[0][0];
+    petscData.create(interiorMap, coarseSpaceMap,
+                     subdomainLevel, mpiCommLocal, mpiCommGlobal);
 
     const FiniteElemSpace *feSpace = meshDistributor->getFeSpace(0);
     int nComponents = seqMat->getNumRows();
@@ -242,11 +241,11 @@ namespace AMDiS {
     tmpIS[0] = interiorIs;
     tmpIS[1] = boundaryIs;
 
-    MatCreateNest(mpiCommGlobal, 2, &tmpIS[0], 2, &tmpIS[0], &tmpMat[0][0], &matIntInt);
-    MatNestSetVecType(matIntInt, VECNEST);
-    MatAssemblyBegin(matIntInt, MAT_FINAL_ASSEMBLY);
-    MatAssemblyEnd(matIntInt, MAT_FINAL_ASSEMBLY);
+    MatCreateNest(mpiCommGlobal, 2, &tmpIS[0], 2, &tmpIS[0], &tmpMat[0][0],
+                  &petscData.getInteriorMat());
+    MatNestSetVecType(petscData.getInteriorMat(), VECNEST);
+    petscData.assembly();
 
 
     int nRankRows = (*interiorMap)[feSpace].nRankDofs * nComponents;
     int nOverallRows = (*interiorMap)[feSpace].nOverallDofs * nComponents;
@@ -284,7 +283,9 @@ namespace AMDiS {
 
     KSPCreate(mpiCommGlobal, &kspInterior);
-    KSPSetOperators(kspInterior, mat[0][0], mat[0][0], SAME_NONZERO_PATTERN);
+    KSPSetOperators(kspInterior,
+                    petscData.getInteriorMat(),
+                    petscData.getInteriorMat(), SAME_NONZERO_PATTERN);
     KSPSetTolerances(kspInterior, 0.0, 1e-8, PETSC_DEFAULT, PETSC_DEFAULT);
     KSPSetFromOptions(kspInterior);
 
 
@@ -335,7 +336,8 @@ namespace AMDiS {
     MatDestroy(&matA12);
     MatDestroy(&matA21);
     MatDestroy(&matA22);
-    MatDestroy(&mat[0][0]);
+
+    petscData.destroy();
 
     KSPDestroy(&kspInterior);
   }
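
Note on the refactoring: both solvers now delegate the interior PETSc matrix to a shared `petscData` member instead of managing a raw `Mat` (the former `mat[0][0]`) directly. The patch does not show the definition of that member, so the following is only a minimal sketch of the interface the calls above appear to rely on (create / getInteriorMat / assembly / destroy); the class name, parameter types, and internals are assumptions, not the actual AMDiS implementation.

#include <petscmat.h>

class ParallelDofMapping;          // real type lives elsewhere in AMDiS

// Hypothetical container type behind the "petscData" member used in the patch.
class PetscMatrixDataSketch
{
public:
  PetscMatrixDataSketch()
    : matIntInt(PETSC_NULL)
  {}

  // Stores the DOF mappings and communicators; a real implementation would
  // also create and preallocate the interior matrix here.
  void create(ParallelDofMapping *interiorMap_,
              ParallelDofMapping *coarseSpaceMap_,
              int subdomainLevel_,
              MPI_Comm mpiCommLocal_,
              MPI_Comm mpiCommGlobal_)
  {
    interiorMap = interiorMap_;
    coarseSpaceMap = coarseSpaceMap_;
    subdomainLevel = subdomainLevel_;
    mpiCommLocal = mpiCommLocal_;
    mpiCommGlobal = mpiCommGlobal_;
    // Matrix creation/preallocation omitted in this sketch.
  }

  // Returned by reference so callers can pass &petscData.getInteriorMat()
  // as the output argument of MatCreateNest(), as done in both solvers.
  Mat& getInteriorMat()
  {
    return matIntInt;
  }

  // Replaces the inline MatAssemblyBegin/MatAssemblyEnd pair.
  void assembly()
  {
    MatAssemblyBegin(matIntInt, MAT_FINAL_ASSEMBLY);
    MatAssemblyEnd(matIntInt, MAT_FINAL_ASSEMBLY);
  }

  // Replaces the former MatDestroy(&mat[0][0]).
  void destroy()
  {
    MatDestroy(&matIntInt);
  }

private:
  Mat matIntInt;
  ParallelDofMapping *interiorMap;
  ParallelDofMapping *coarseSpaceMap;
  int subdomainLevel;
  MPI_Comm mpiCommLocal;
  MPI_Comm mpiCommGlobal;
};

With a container of this shape, the matrix filled via MatCreateNest() and the operator handed to KSPSetOperators() are guaranteed to be the same object, which is exactly what both fillPetscMatrix() implementations above rely on.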