diff --git a/AMDiS/src/parallel/PetscHelper.cc b/AMDiS/src/parallel/PetscHelper.cc
index 28e377df80eedf3dd8872d8b3afb69f248f686c0..431e24a2886a7d3e75f1a08a268d360e2df9e5a8 100644
--- a/AMDiS/src/parallel/PetscHelper.cc
+++ b/AMDiS/src/parallel/PetscHelper.cc
@@ -304,16 +304,12 @@ namespace AMDiS
       PCSetFromOptions(pc);
 
 #ifndef NDEBUG
-      MSG("PetscOptionsView:\n");
-      PetscViewer viewer;
-      PetscViewerCreate(PETSC_COMM_WORLD, &viewer);
-      PetscViewerSetType(viewer, PETSCVIEWERASCII);
-#if (PETSC_VERSION_MINOR >= 7)
-      PetscOptionsView(PETSC_NULL, viewer);
-#else
-      PetscOptionsView(viewer);
-#endif
-      PetscViewerDestroy(&viewer);
+      MSG("PetscOptionsView:\n");
+      PetscViewer viewer;
+      PetscViewerCreate(PETSC_COMM_WORLD, &viewer);
+      PetscViewerSetType(viewer, PETSCVIEWERASCII);
+      petsc::options_view(viewer);
+      PetscViewerDestroy(&viewer);
 #endif
     }
 
@@ -334,19 +330,15 @@ namespace AMDiS
     void createSolver(MPI::Intracomm comm, KSP &ksp, Mat m, std::string kspPrefix, int info)
     {
       KSPCreate(comm, &ksp);
-#if (PETSC_VERSION_MINOR >= 5)
-      KSPSetOperators(ksp, m, m);
-#else
-      KSPSetOperators(ksp, m, m, SAME_NONZERO_PATTERN);
-#endif
+      petsc::ksp_set_operators(ksp, m, m);
       KSPSetTolerances(ksp, 0.0, 1e-8, PETSC_DEFAULT, PETSC_DEFAULT);
       KSPSetType(ksp, KSPBCGS);
       KSPSetOptionsPrefix(ksp, kspPrefix.c_str());
       if (info >= 10)
-        KSPMonitorSet(ksp, PETSC_MONITOR_CAST(KSPMonitorDefault), PETSC_NULL, PETSC_NULL);
+        petsc::ksp_monitor_set(ksp, KSPMonitorDefault);
       else if (info >= 20)
-        KSPMonitorSet(ksp, PETSC_MONITOR_CAST(KSPMonitorTrueResidualNorm), PETSC_NULL, PETSC_NULL);
+        petsc::ksp_monitor_set(ksp, KSPMonitorTrueResidualNorm);
     }
 
   } // end namespace petsc_helper
diff --git a/AMDiS/src/parallel/PetscHelper.h b/AMDiS/src/parallel/PetscHelper.h
index 41da017d58509e844ddfa2bccdf8df34ed43ca60..4ac633ff8a849efa4e12fe20b42c4de2d0d27fd5 100644
--- a/AMDiS/src/parallel/PetscHelper.h
+++ b/AMDiS/src/parallel/PetscHelper.h
@@ -31,13 +31,6 @@
 #include <petsc.h>
 #include "AMDiS_fwd.h"
 
-#if (PETSC_VERSION_MINOR >= 7)
-  #define PETSC_MONITOR_CAST(...) \
-    (PetscErrorCode (*)(KSP,PetscInt,PetscReal,void*))(__VA_ARGS__)
-#else
-  #define PETSC_MONITOR_CAST(...) \
-    __VA_ARGS__
-#endif
 namespace AMDiS
 {
   namespace Parallel
@@ -126,7 +119,82 @@ namespace AMDiS
     void createSolver(MPI::Intracomm comm, KSP &ksp, Mat m,
                       std::string kspPrefix = "", int info = 0);
 
   } // end namespace petsc_helper
+  } // end namespace Parallel
+
+
+  // functions for PETSc API changes
+  namespace petsc
+  {
+    inline PetscErrorCode options_view(PetscViewer viewer)
+    {
+#if (PETSC_VERSION_MINOR >= 7)
+      return PetscOptionsView(PETSC_NULL, viewer);
+#else
+      return PetscOptionsView(viewer);
+#endif
+    }
+
+    inline PetscErrorCode options_insert_string(const char in_str[])
+    {
+#if (PETSC_VERSION_MINOR >= 7)
+      return PetscOptionsInsertString(PETSC_NULL, in_str);
+#else
+      return PetscOptionsInsertString(in_str);
+#endif
+    }
+
+    inline PetscErrorCode ksp_set_operators(KSP ksp, Mat Amat, Mat Pmat)
+    {
+#if (PETSC_VERSION_MINOR >= 5)
+      return KSPSetOperators(ksp, Amat, Pmat);
+#else
+      return KSPSetOperators(ksp, Amat, Pmat, SAME_NONZERO_PATTERN);
+#endif
+    }
+
+    inline PetscErrorCode ksp_get_operators(KSP ksp, Mat *Amat, Mat *Pmat)
+    {
+#if (PETSC_VERSION_MINOR >= 5)
+      return KSPGetOperators(ksp, Amat, Pmat);
+#else
+      return KSPGetOperators(ksp, Amat, Pmat, PETSC_NULL);
+#endif
+    }
+
+    template <class Monitor>
+    inline PetscErrorCode ksp_monitor_set(KSP ksp, Monitor monitor)
+    {
+#if (PETSC_VERSION_MINOR >= 7)
+      PetscViewerAndFormat *vf;
+      PetscErrorCode ierr;
+      ierr = PetscViewerAndFormatCreate(PETSC_VIEWER_STDOUT_WORLD, PETSC_VIEWER_DEFAULT, &vf); CHKERRQ(ierr);
+      ierr = KSPMonitorSet(ksp, (PetscErrorCode (*)(KSP,PetscInt,PetscReal,void*))monitor, vf,
+                           (PetscErrorCode (*)(void**))PetscViewerAndFormatDestroy); CHKERRQ(ierr);
+      return ierr;
+#else
+      return KSPMonitorSet(ksp, monitor, PETSC_NULL, PETSC_NULL);
+#endif
+    }
+
+    inline PetscErrorCode mat_create_vecs(Mat mat, Vec *right, Vec *left)
+    {
+#if (PETSC_VERSION_MINOR >= 6)
+      return MatCreateVecs(mat, right, left);
+#else
+      return MatGetVecs(mat, right, left);
+#endif
+    }
+
+    inline PetscErrorCode mat_nullspace_remove(MatNullSpace sp, Vec vec)
+    {
+#if (PETSC_VERSION_MINOR >= 5)
+      return MatNullSpaceRemove(sp, vec);
+#else
+      return MatNullSpaceRemove(sp, vec, PETSC_NULL);
+#endif
+    }
+
+  } // end namespace petsc
 } // end namespace AMDiS
 
 #endif
diff --git a/AMDiS/src/parallel/PetscSolver.cc b/AMDiS/src/parallel/PetscSolver.cc
index 0393acba8820d5df4463df41527c9ecaf2a4511e..c6ccd17a8b9c754e505aa6e7113af98eedfae6c3 100644
--- a/AMDiS/src/parallel/PetscSolver.cc
+++ b/AMDiS/src/parallel/PetscSolver.cc
@@ -26,6 +26,7 @@
 #include "parallel/MeshDistributor.h"
 #include "parallel/MpiHelper.h"
 #include "parallel/ParallelDofMapping.h"
+#include "parallel/PetscHelper.h"
 #include "parallel/PetscSolver.h"
 #include "parallel/StdMpi.h"
 
@@ -49,7 +50,7 @@ namespace AMDiS { namespace Parallel {
 
     Parameters::get(name + "->ksp", kspStr);
     if (kspStr != "")
-      petsc_options_insert_string(kspStr.c_str());
+      petsc::options_insert_string(kspStr.c_str());
 
     Parameters::get(name + "->remove rhs null space", removeRhsNullspace);
     Parameters::get(name + "->has constant null space", hasConstantNullspace);
diff --git a/AMDiS/src/parallel/PetscSolverCahnHilliard.cc b/AMDiS/src/parallel/PetscSolverCahnHilliard.cc
index ec8e1edf67444543bfde33ec33ac1bba4fe6cd50..10c915d12383b9ac8eb87f12395d861a3dd5074e 100644
--- a/AMDiS/src/parallel/PetscSolverCahnHilliard.cc
+++ b/AMDiS/src/parallel/PetscSolverCahnHilliard.cc
@@ -85,15 +85,11 @@ namespace AMDiS { namespace Parallel {
   {
     // Create FGMRES based outer solver
     KSPCreate(meshDistributor->getMpiComm(0), &ksp);
-#if (PETSC_VERSION_MINOR >= 5)
-    KSPSetOperators(ksp, getMatInterior(), getMatInterior());
-#else
-    KSPSetOperators(ksp, getMatInterior(), getMatInterior(), SAME_NONZERO_PATTERN);
-#endif
+    petsc::ksp_set_operators(ksp, getMatInterior(), getMatInterior());
     if (getInfo() >= 10)
-      KSPMonitorSet(ksp, PETSC_MONITOR_CAST(KSPMonitorDefault), PETSC_NULL, PETSC_NULL);
+      petsc::ksp_monitor_set(ksp, KSPMonitorDefault);
     else if (getInfo() >= 20)
-      KSPMonitorSet(ksp, PETSC_MONITOR_CAST(KSPMonitorTrueResidualNorm), PETSC_NULL, PETSC_NULL);
+      petsc::ksp_monitor_set(ksp, KSPMonitorTrueResidualNorm);
     petsc_helper::setSolver(ksp, "ch_", KSPFGMRES, PCNONE, getRelative(), getTolerance(), getMaxIterations());
     KSPSetFromOptions(ksp);
 
diff --git a/AMDiS/src/parallel/PetscSolverCahnHilliard2.cc b/AMDiS/src/parallel/PetscSolverCahnHilliard2.cc
index 29b67b812202d7caa09017731832f9cbcc6bc46b..55bc9c2331e6fb71f902c1c4207f41af83173bf2 100644
--- a/AMDiS/src/parallel/PetscSolverCahnHilliard2.cc
+++ b/AMDiS/src/parallel/PetscSolverCahnHilliard2.cc
@@ -67,11 +67,7 @@ namespace AMDiS { namespace Parallel {
     /// create new solver for S
     KSP kspS;
     KSPCreate(*data->mpiCommGlobal, &kspS);
-#if (PETSC_VERSION_MINOR >= 5)
-    KSPSetOperators(kspS, S, S);
-#else
-    KSPSetOperators(kspS, S, S, SAME_NONZERO_PATTERN);
-#endif
+    petsc::ksp_set_operators(kspS, S, S);
     petsc_helper::setSolver(kspS, "S_", KSPFGMRES, PCSHELL, 1e-6, 1e-8, 1);
     {
       PC pc;
@@ -117,15 +113,11 @@ namespace AMDiS { namespace Parallel {
 
     MSG("CREATE POS 1: %p\n", &ksp);
     KSPCreate(domainComm, &ksp);
-#if (PETSC_VERSION_MINOR >= 5)
-    KSPSetOperators(ksp, getMatInterior(), getMatInterior());
-#else
-    KSPSetOperators(ksp, getMatInterior(), getMatInterior(), SAME_NONZERO_PATTERN);
-#endif
+    petsc::ksp_set_operators(ksp, getMatInterior(), getMatInterior());
     if (getInfo() >= 10)
-      KSPMonitorSet(ksp, PETSC_MONITOR_CAST(KSPMonitorDefault), PETSC_NULL, PETSC_NULL);
+      petsc::ksp_monitor_set(ksp, KSPMonitorDefault);
     else if (getInfo() >= 20)
-      KSPMonitorSet(ksp, PETSC_MONITOR_CAST(KSPMonitorTrueResidualNorm), PETSC_NULL, PETSC_NULL);
+      petsc::ksp_monitor_set(ksp, KSPMonitorTrueResidualNorm);
     petsc_helper::setSolver(ksp, "ch_", KSPFGMRES, PCNONE, getRelative(), getTolerance(), getMaxIterations());
 
     if (useOldInitialGuess)
diff --git a/AMDiS/src/parallel/PetscSolverFeti.cc b/AMDiS/src/parallel/PetscSolverFeti.cc
index 43e1f8c6068ca35e69be98d3e0d2989d0acdb673..a53499a67345690692de1c09784b0447f342d132 100644
--- a/AMDiS/src/parallel/PetscSolverFeti.cc
+++ b/AMDiS/src/parallel/PetscSolverFeti.cc
@@ -1051,11 +1051,7 @@ namespace AMDiS { namespace Parallel {
     }
 
     KSPCreate(domainComm, &ksp_schur_primal);
-#if (PETSC_VERSION_MINOR >= 5)
-    KSPSetOperators(ksp_schur_primal, mat_schur_primal, mat_schur_primal);
-#else
-    KSPSetOperators(ksp_schur_primal, mat_schur_primal, mat_schur_primal, SAME_NONZERO_PATTERN);
-#endif
+    petsc::ksp_set_operators(ksp_schur_primal, mat_schur_primal, mat_schur_primal);
     KSPSetOptionsPrefix(ksp_schur_primal, "schur_primal_");
     KSPSetType(ksp_schur_primal, KSPGMRES);
     KSPSetFromOptions(ksp_schur_primal);
@@ -1075,11 +1071,7 @@ namespace AMDiS { namespace Parallel {
     // === Create KSP solver object and set appropriate solver options. ===
 
     KSPCreate(domainComm, &ksp_schur_primal);
-#if (PETSC_VERSION_MINOR >= 5)
-    KSPSetOperators(ksp_schur_primal, mat_schur_primal, mat_schur_primal);
-#else
-    KSPSetOperators(ksp_schur_primal, mat_schur_primal, mat_schur_primal, SAME_NONZERO_PATTERN);
-#endif
+    petsc::ksp_set_operators(ksp_schur_primal, mat_schur_primal, mat_schur_primal);
     KSPSetOptionsPrefix(ksp_schur_primal, "schur_primal_");
     KSPSetType(ksp_schur_primal, KSPPREONLY);
     PC pc_schur_primal;
@@ -1365,11 +1357,7 @@ namespace AMDiS { namespace Parallel {
     }
 
     KSPCreate(domainComm, &ksp_feti);
-#if (PETSC_VERSION_MINOR >= 5)
-    KSPSetOperators(ksp_feti, mat_feti, mat_feti);
-#else
-    KSPSetOperators(ksp_feti, mat_feti, mat_feti, SAME_NONZERO_PATTERN);
-#endif
+    petsc::ksp_set_operators(ksp_feti, mat_feti, mat_feti);
     KSPSetOptionsPrefix(ksp_feti, "feti_");
     KSPSetType(ksp_feti, KSPGMRES);
     KSPSetTolerances(ksp_feti, 0, 1e-8, 1e+3, 1000);
@@ -1382,9 +1370,9 @@ namespace AMDiS { namespace Parallel {
     Parameters::get(name + "->feti->monitor", monitor);
     if (monitor) {
       if (stokesMode)
-        KSPMonitorSet(ksp_feti, PETSC_MONITOR_CAST(KSPMonitorFetiStokes), &fetiKspData, PETSC_NULL);
+        petsc::ksp_monitor_set(ksp_feti, KSPMonitorFetiStokes);
       else
-        KSPMonitorSet(ksp_feti, PETSC_MONITOR_CAST(KSPMonitorTrueResidualNorm), PETSC_NULL, PETSC_NULL);
+        petsc::ksp_monitor_set(ksp_feti, KSPMonitorTrueResidualNorm);
     }
 
 
@@ -1443,11 +1431,7 @@ namespace AMDiS { namespace Parallel {
                     (void(*)(void))petscMultMatFetiInexact);
 
     KSPCreate(domainComm, &ksp_feti);
-#if (PETSC_VERSION_MINOR >= 5)
-    KSPSetOperators(ksp_feti, mat_feti, mat_feti);
-#else
-    KSPSetOperators(ksp_feti, mat_feti, mat_feti, SAME_NONZERO_PATTERN);
-#endif
+    petsc::ksp_set_operators(ksp_feti, mat_feti, mat_feti);
     KSPSetOptionsPrefix(ksp_feti, "feti_");
     KSPSetType(ksp_feti, KSPGMRES);
     KSPSetTolerances(ksp_feti, 0, 1e-8, 1e+3, 1000);
@@ -1464,11 +1448,7 @@ namespace AMDiS { namespace Parallel {
     createVec(localDofMap, fetiInexactPreconData.tmp_vec_b0);
 
     KSPCreate(domainComm, &(fetiInexactPreconData.ksp_pc_feti));
-#if (PETSC_VERSION_MINOR >= 5)
-    KSPSetOperators(fetiInexactPreconData.ksp_pc_feti, mat_lagrange, mat_lagrange);
-#else
-    KSPSetOperators(fetiInexactPreconData.ksp_pc_feti, mat_lagrange, mat_lagrange, SAME_NONZERO_PATTERN);
-#endif
+    petsc::ksp_set_operators(fetiInexactPreconData.ksp_pc_feti, mat_lagrange, mat_lagrange);
     KSPGetPC(fetiInexactPreconData.ksp_pc_feti, &(fetiInexactPreconData.pc_feti));
     createFetiPreconLumped(fetiInexactPreconData.pc_feti);
 
@@ -1505,17 +1485,11 @@ namespace AMDiS { namespace Parallel {
       VecCreateMPI(meshDistributor->getMeshLevelData().getMpiComm(meshLevel),
                    localDofMap.getRankDofs(), nGlobalOverallInterior,
                    &(lumpedData->tmp_vec_b0));
-#if (PETSC_VERSION_MINOR >= 6)
-      MatCreateVecs(mat_duals_duals, PETSC_NULL,
-                    &(lumpedData->tmp_vec_duals0));
-      MatCreateVecs(mat_duals_duals, PETSC_NULL,
-                    &(lumpedData->tmp_vec_duals1));
-#else
-      MatGetVecs(mat_duals_duals, PETSC_NULL,
-                 &(lumpedData->tmp_vec_duals0));
-      MatGetVecs(mat_duals_duals, PETSC_NULL,
-                 &(lumpedData->tmp_vec_duals1));
-#endif
+
+      petsc::mat_create_vecs(mat_duals_duals, PETSC_NULL,
+                             &(lumpedData->tmp_vec_duals0));
+      petsc::mat_create_vecs(mat_duals_duals, PETSC_NULL,
+                             &(lumpedData->tmp_vec_duals1));
 
     for (int component = 0; component < static_cast<int>(componentSpaces.size()); component++) {
 
@@ -1607,11 +1581,7 @@ namespace AMDiS { namespace Parallel {
         ("Stokes mode does not yet support the Dirichlet precondition!\n");
 
       KSPCreate(PETSC_COMM_SELF, &ksp_interior);
-#if (PETSC_VERSION_MINOR >= 5)
-      KSPSetOperators(ksp_interior, mat_interior_interior, mat_interior_interior);
-#else
-      KSPSetOperators(ksp_interior, mat_interior_interior, mat_interior_interior, SAME_NONZERO_PATTERN);
-#endif
+      petsc::ksp_set_operators(ksp_interior, mat_interior_interior, mat_interior_interior);
       KSPSetOptionsPrefix(ksp_interior, "precon_interior_");
       KSPSetType(ksp_interior, KSPPREONLY);
       PC pc_interior;
@@ -1635,21 +1605,13 @@ namespace AMDiS { namespace Parallel {
       VecCreateMPI(meshDistributor->getMeshLevelData().getMpiComm(meshLevel),
                    localDofMap.getRankDofs(), nGlobalOverallInterior,
                    &(fetiDirichletPreconData.tmp_vec_b));
-#if (PETSC_VERSION_MINOR >= 6)
-      MatCreateVecs(mat_duals_duals, PETSC_NULL,
-                    &(fetiDirichletPreconData.tmp_vec_duals0));
-      MatCreateVecs(mat_duals_duals, PETSC_NULL,
-                    &(fetiDirichletPreconData.tmp_vec_duals1));
-      MatCreateVecs(mat_interior_interior, PETSC_NULL,
-                    &(fetiDirichletPreconData.tmp_vec_interior));
-#else
-      MatGetVecs(mat_duals_duals, PETSC_NULL,
+
+      petsc::mat_create_vecs(mat_duals_duals, PETSC_NULL,
                  &(fetiDirichletPreconData.tmp_vec_duals0));
-      MatGetVecs(mat_duals_duals, PETSC_NULL,
+      petsc::mat_create_vecs(mat_duals_duals, PETSC_NULL,
                  &(fetiDirichletPreconData.tmp_vec_duals1));
-      MatGetVecs(mat_interior_interior, PETSC_NULL,
+      petsc::mat_create_vecs(mat_interior_interior, PETSC_NULL,
                  &(fetiDirichletPreconData.tmp_vec_interior));
-#endif
 
       TEST_EXIT_DBG(subDomainIsLocal)
         ("Should not happen, check usage of localDofMap!\n");
@@ -2509,13 +2471,8 @@ namespace AMDiS { namespace Parallel {
 
     Vec vecRhs, vecSol;
     Vec vecRhsLagrange, vecSolLagrange;
-#if (PETSC_VERSION_MINOR >= 6)
-    MatCreateVecs(mat_lagrange, PETSC_NULL, &vecRhsLagrange);
-    MatCreateVecs(mat_lagrange, PETSC_NULL, &vecSolLagrange);
-#else
-    MatGetVecs(mat_lagrange, PETSC_NULL, &vecRhsLagrange);
-    MatGetVecs(mat_lagrange, PETSC_NULL, &vecSolLagrange);
-#endif
+    petsc::mat_create_vecs(mat_lagrange, PETSC_NULL, &vecRhsLagrange);
+    petsc::mat_create_vecs(mat_lagrange, PETSC_NULL, &vecSolLagrange);
     vecRhs = vecRhsLagrange;
     vecSol = vecSolLagrange;
 
diff --git a/AMDiS/src/parallel/PetscSolverGlobalBlockMatrix.cc b/AMDiS/src/parallel/PetscSolverGlobalBlockMatrix.cc
index aea0fb81c930d466f99929f1a81089ce4a76c9b0..46c6ae869b4e03e840d3b633a22f1807421124d9 100644
--- a/AMDiS/src/parallel/PetscSolverGlobalBlockMatrix.cc
+++ b/AMDiS/src/parallel/PetscSolverGlobalBlockMatrix.cc
@@ -20,6 +20,7 @@
 
 #include "parallel/PetscSolverGlobalBlockMatrix.h"
+#include "parallel/PetscHelper.h"
 #include "parallel/StdMpi.h"
 #include "parallel/MpiHelper.h"
 
@@ -150,11 +151,7 @@ namespace AMDiS { namespace Parallel {
     FUNCNAME("PetscSolverGlobalBlockMatrix::initSolver()");
 
     KSPCreate(domainComm, &ksp);
-#if (PETSC_VERSION_MINOR >= 5)
-    KSPSetOperators(ksp, getMatInterior(), getMatInterior());
-#else
-    KSPSetOperators(ksp, getMatInterior(), getMatInterior(), SAME_NONZERO_PATTERN);
-#endif
+    petsc::ksp_set_operators(ksp, getMatInterior(), getMatInterior());
     KSPSetOptionsPrefix(ksp, kspPrefix.c_str());
     KSPSetFromOptions(ksp);
   }
diff --git a/AMDiS/src/parallel/PetscSolverGlobalMatrix.cc b/AMDiS/src/parallel/PetscSolverGlobalMatrix.cc
index 626b5407d39185dc8efb98f5eb64c78c44206e2c..d58d07a101d6ebebe1074876b06050e31038e357 100644
--- a/AMDiS/src/parallel/PetscSolverGlobalMatrix.cc
+++ b/AMDiS/src/parallel/PetscSolverGlobalMatrix.cc
@@ -22,6 +22,7 @@
 // #include "DirichletBC.h"
 #include "DOFVector.h"
 #include "parallel/PetscSolverGlobalMatrix.h"
+#include "parallel/PetscHelper.h"
 #include "parallel/StdMpi.h"
 #include "parallel/MpiHelper.h"
 #include "solver/PetscTypes.h"
@@ -50,15 +51,15 @@ namespace AMDiS { namespace Parallel {
 
     if (params.matSolverPackage.find(kspSolver) != params.matSolverPackage.end()) {
       // direct solvers
-      petsc_options_insert_string(("-" + kspPrefix + "ksp_type preonly").c_str());
-      petsc_options_insert_string(("-" + kspPrefix + "pc_type lu").c_str());
-      petsc_options_insert_string(("-" + kspPrefix + "pc_factor_mat_solver_package " + kspSolver).c_str());
+      petsc::options_insert_string(("-" + kspPrefix + "ksp_type preonly").c_str());
+      petsc::options_insert_string(("-" + kspPrefix + "pc_type lu").c_str());
+      petsc::options_insert_string(("-" + kspPrefix + "pc_factor_mat_solver_package " + kspSolver).c_str());
       setMaxIterations(1);
       zeroStartVector = true;
       matSolverPackage = true;
     } else if (params.emptyParam.find(kspSolver) == params.emptyParam.end() && solverName != "petsc") {
       // other solvers
-      petsc_options_insert_string(("-" + kspPrefix + "ksp_type " + kspSolver).c_str());
+      petsc::options_insert_string(("-" + kspPrefix + "ksp_type " + kspSolver).c_str());
     }
 
     // set the preconditioner
@@ -70,17 +71,17 @@ namespace AMDiS { namespace Parallel {
     Parameters::get(name + "->right precon", precon);
     if (!matSolverPackage && params.emptyParam.find(precon) == params.emptyParam.end()) {
       precon = (params.preconMap.find(precon) != params.preconMap.end() ? params.preconMap[precon] : precon);
-      petsc_options_insert_string(("-" + kspPrefix + "pc_type " + precon).c_str());
+      petsc::options_insert_string(("-" + kspPrefix + "pc_type " + precon).c_str());
     }
-    petsc_options_insert_string(("-" + kspPrefix + "ksp_max_it " + boost::lexical_cast<std::string>(getMaxIterations())).c_str());
-    petsc_options_insert_string(("-" + kspPrefix + "ksp_rtol " + boost::lexical_cast<std::string>(getRelative())).c_str());
-    petsc_options_insert_string(("-" + kspPrefix + "ksp_atol " + boost::lexical_cast<std::string>(getTolerance())).c_str());
+    petsc::options_insert_string(("-" + kspPrefix + "ksp_max_it " + boost::lexical_cast<std::string>(getMaxIterations())).c_str());
+    petsc::options_insert_string(("-" + kspPrefix + "ksp_rtol " + boost::lexical_cast<std::string>(getRelative())).c_str());
+    petsc::options_insert_string(("-" + kspPrefix + "ksp_atol " + boost::lexical_cast<std::string>(getTolerance())).c_str());
     if (getInfo() >= 20)
-      petsc_options_insert_string(("-" + kspPrefix + "ksp_monitor_true_residual").c_str());
+      petsc::options_insert_string(("-" + kspPrefix + "ksp_monitor_true_residual").c_str());
     else if (getInfo() >= 10)
-      petsc_options_insert_string(("-" + kspPrefix + "ksp_monitor").c_str());
+      petsc::options_insert_string(("-" + kspPrefix + "ksp_monitor").c_str());
 
     if (!matSolverPackage) {
       Parameters::get(name + "->use zero start vector", zeroStartVector);
@@ -95,11 +96,7 @@ namespace AMDiS { namespace Parallel {
       PetscViewer viewer;
       PetscViewerCreate(PETSC_COMM_WORLD, &viewer);
       PetscViewerSetType(viewer, PETSCVIEWERASCII);
-#if (PETSC_VERSION_MINOR >= 7)
-      PetscOptionsView(PETSC_NULL, viewer);
-#else
-      PetscOptionsView(viewer);
-#endif
+      petsc::options_view(viewer);
       PetscViewerDestroy(&viewer);
     }
 
@@ -333,11 +330,7 @@ namespace AMDiS { namespace Parallel {
     // === Create solver for the non primal (thus local) variables. ===
 
     KSPCreate(domainComm, &kspInterior);
-#if (PETSC_VERSION_MINOR >= 5)
-    KSPSetOperators(kspInterior, getMatInterior(), getMatInterior());
-#else
-    KSPSetOperators(kspInterior, getMatInterior(), getMatInterior(), SAME_NONZERO_PATTERN);
-#endif
+    petsc::ksp_set_operators(kspInterior, getMatInterior(), getMatInterior());
     KSPSetOptionsPrefix(kspInterior, "interior_");
     KSPSetType(kspInterior, KSPPREONLY);
     KSPGetPC(kspInterior, &pcInterior);
@@ -460,11 +453,7 @@ namespace AMDiS { namespace Parallel {
         TEST_EXIT_DBG(coarseSpaceMap.empty())("Not supported!\n");
 
         MSG("Remove nullspace from rhs vector.\n");
-#if (PETSC_VERSION_MINOR >= 5)
-        MatNullSpaceRemove(matNullspace, getVecRhsInterior());
-#else
-        MatNullSpaceRemove(matNullspace, getVecRhsInterior(), PETSC_NULL);
-#endif
+        petsc::mat_nullspace_remove(matNullspace, getVecRhsInterior());
       }
     } else {
       TEST_EXIT(removeRhsNullspace == false)
@@ -617,11 +606,7 @@ namespace AMDiS { namespace Parallel {
   void PetscSolverGlobalMatrix::initSolver(KSP &ksp)
   {
     KSPCreate(domainComm, &ksp);
-#if (PETSC_VERSION_MINOR >= 5)
-    KSPSetOperators(ksp, getMatInterior(), getMatInterior());
-#else
-    KSPSetOperators(ksp, getMatInterior(), getMatInterior(), SAME_NONZERO_PATTERN);
-#endif
+    petsc::ksp_set_operators(ksp, getMatInterior(), getMatInterior());
     KSPSetTolerances(ksp, 0.0, 1e-8, PETSC_DEFAULT, PETSC_DEFAULT);
     KSPSetType(ksp, KSPBCGS);
     KSPSetOptionsPrefix(ksp, kspPrefix.c_str());
@@ -897,11 +882,7 @@ namespace AMDiS { namespace Parallel {
 
     if (test) {
       Vec tmp;
-#if (PETSC_VERSION_MINOR >= 6)
-      MatCreateVecs(getMatInterior(), &tmp, PETSC_NULL);
-#else
-      MatGetVecs(getMatInterior(), &tmp, PETSC_NULL);
-#endif
+      petsc::mat_create_vecs(getMatInterior(), &tmp, PETSC_NULL);
       MatMult(getMatInterior(), nullSpaceBasis, tmp);
       PetscReal n;
       VecNorm(tmp, NORM_2, &n);
@@ -914,11 +895,7 @@ namespace AMDiS { namespace Parallel {
     MatNullSpaceCreate(domainComm, PETSC_FALSE, 1, &nullSpaceBasis, &matNullSpace);
 
     Mat amat, pmat;
-#if (PETSC_VERSION_MINOR >= 5)
-    KSPGetOperators(ksp, &amat, &pmat);
-#else
-    KSPGetOperators(ksp, &amat, &pmat, PETSC_NULL);
-#endif
+    petsc::ksp_get_operators(ksp, &amat, &pmat);
     MatSetNullSpace(amat, matNullSpace);
     MatNullSpaceDestroy(&matNullSpace);
 
@@ -931,11 +908,7 @@ namespace AMDiS { namespace Parallel {
     MatNullSpace matNullSpace;
     MatNullSpaceCreate(domainComm, PETSC_TRUE, 0, PETSC_NULL, &matNullSpace);
     Mat amat, pmat;
-#if (PETSC_VERSION_MINOR >= 5)
-    KSPGetOperators(ksp, &amat, &pmat);
-#else
-    KSPGetOperators(ksp, &amat, &pmat, PETSC_NULL);
-#endif
+    petsc::ksp_get_operators(ksp, &amat, &pmat);
     MatSetNullSpace(amat, matNullSpace);
     MatNullSpaceDestroy(&matNullSpace);
   }
diff --git a/AMDiS/src/parallel/PetscSolverNSCH.cc b/AMDiS/src/parallel/PetscSolverNSCH.cc
index 0a065a65ea3eb9383009fd8f9e22c0199ceb182b..8f361206c0becf9188c38e652a201ba0e93a9e30 100644
--- a/AMDiS/src/parallel/PetscSolverNSCH.cc
+++ b/AMDiS/src/parallel/PetscSolverNSCH.cc
@@ -184,12 +184,8 @@ namespace AMDiS { namespace Parallel {
     // Create FGMRES based outer solver
     MSG("CREATE POS 1: %p\n", &ksp);
     KSPCreate(domainComm, &ksp);
-#if (PETSC_VERSION_MINOR >= 5)
-    KSPSetOperators(ksp, getMatInterior(), getMatInterior());
-#else
-    KSPSetOperators(ksp, getMatInterior(), getMatInterior(), SAME_NONZERO_PATTERN);
-#endif
-    KSPMonitorSet(ksp, PETSC_MONITOR_CAST(KSPMonitorTrueResidualNorm), PETSC_NULL, PETSC_NULL);
+    petsc::ksp_set_operators(ksp, getMatInterior(), getMatInterior());
+    petsc::ksp_monitor_set(ksp, KSPMonitorTrueResidualNorm);
     petsc_helper::setSolver(ksp, "ch_", KSPFGMRES, PCSHELL, getRelative(), getTolerance(), getMaxIterations());
     setConstantNullSpace(ksp, componentSpaces[0]->getMesh()->getDim(), true);
   }
@@ -337,12 +333,7 @@ namespace AMDiS { namespace Parallel {
 
     /// create kspVelocity
     KSPCreate((meshDistributor->getMpiComm(0)), &(matShellContext.kspVelocity));
-
-#if (PETSC_VERSION_MINOR >= 5)
-    KSPSetOperators(matShellContext.kspVelocity, matShellContext.velocityMat, matShellContext.velocityMat);
-#else
-    KSPSetOperators(matShellContext.kspVelocity, matShellContext.velocityMat, matShellContext.velocityMat, SAME_NONZERO_PATTERN);
-#endif
+    petsc::ksp_set_operators(matShellContext.kspVelocity, matShellContext.velocityMat, matShellContext.velocityMat);
 
     /// regularize the Laplace matrix
     if (regularizeLaplace)
@@ -351,11 +342,7 @@ namespace AMDiS { namespace Parallel {
       rows[0]=0;
       MatZeroRows(laplaceMatrixSolver->getMatInterior(), 1, rows, 0, PETSC_NULL, PETSC_NULL);
       KSPCreate((meshDistributor->getMpiComm(0)), &(matShellContext.kspLaplace));
-#if (PETSC_VERSION_MINOR >= 5)
-      KSPSetOperators(matShellContext.kspLaplace, laplaceMatrixSolver->getMatInterior(), laplaceMatrixSolver->getMatInterior());
-#else
-      KSPSetOperators(matShellContext.kspLaplace, laplaceMatrixSolver->getMatInterior(), laplaceMatrixSolver->getMatInterior(), SAME_NONZERO_PATTERN);
-#endif
+      petsc::ksp_set_operators(matShellContext.kspLaplace, laplaceMatrixSolver->getMatInterior(), laplaceMatrixSolver->getMatInterior());
     }
     else {
       matShellContext.kspLaplace=laplaceMatrixSolver->getSolver();
diff --git a/AMDiS/src/parallel/PetscSolverNavierStokes.cc b/AMDiS/src/parallel/PetscSolverNavierStokes.cc
index 4098b347afec7dfe3dc1cd8b0b89adc2e13f825e..08b73abe2e40e649e62fd4652ab5a953bdb3660d 100644
--- a/AMDiS/src/parallel/PetscSolverNavierStokes.cc
+++ b/AMDiS/src/parallel/PetscSolverNavierStokes.cc
@@ -117,15 +117,11 @@ namespace AMDiS { namespace Parallel {
 
     MSG("CREATE POS 1: %p\n", &ksp);
     KSPCreate(domainComm, &ksp);
-#if (PETSC_VERSION_MINOR >= 5)
-    KSPSetOperators(ksp, getMatInterior(), getMatInterior());
-#else
-    KSPSetOperators(ksp, getMatInterior(), getMatInterior(), SAME_NONZERO_PATTERN);
-#endif
+    petsc::ksp_set_operators(ksp, getMatInterior(), getMatInterior());
     if (getInfo() >= 10)
-      KSPMonitorSet(ksp, PETSC_MONITOR_CAST(KSPMonitorDefault), PETSC_NULL, PETSC_NULL);
+      petsc::ksp_monitor_set(ksp, KSPMonitorDefault);
     else if (getInfo() >= 20)
-      KSPMonitorSet(ksp, PETSC_MONITOR_CAST(KSPMonitorTrueResidualNorm), PETSC_NULL, PETSC_NULL);
+      petsc::ksp_monitor_set(ksp, KSPMonitorTrueResidualNorm);
     petsc_helper::setSolver(ksp, "ns_", KSPFGMRES, PCNONE, getRelative(), getTolerance(), getMaxIterations());
 
     // Create null space information.
diff --git a/AMDiS/src/parallel/PetscSolverSchur.cc b/AMDiS/src/parallel/PetscSolverSchur.cc
index 1f7f6ac52e5b86af4a6e24811cd600946e4c09f4..9e8226089385035c9f375602270e5e7f02fb1bea 100644
--- a/AMDiS/src/parallel/PetscSolverSchur.cc
+++ b/AMDiS/src/parallel/PetscSolverSchur.cc
@@ -20,6 +20,7 @@
 
 #include "parallel/PetscSolverSchur.h"
+#include "parallel/PetscHelper.h"
 #include "parallel/StdMpi.h"
 #include "parallel/MpiHelper.h"
 
@@ -293,11 +294,7 @@ namespace AMDiS { namespace Parallel {
 
 
     KSPCreate(domainComm, &kspInterior);
-#if (PETSC_VERSION_MINOR >= 5)
-    KSPSetOperators(kspInterior, getMatInterior(), getMatInterior());
-#else
-    KSPSetOperators(kspInterior, getMatInterior(), getMatInterior(), SAME_NONZERO_PATTERN);
-#endif
+    petsc::ksp_set_operators(kspInterior, getMatInterior(), getMatInterior());
     KSPSetTolerances(kspInterior, 0.0, 1e-8, PETSC_DEFAULT, PETSC_DEFAULT);
     KSPSetFromOptions(kspInterior);
 
diff --git a/AMDiS/src/solver/PetscSolver.hh b/AMDiS/src/solver/PetscSolver.hh
index 51a057c91803c3a3b65f484e6ca679f1424c70a9..acece30df9e4145c97ce9e325235f3b47834c3a0 100644
--- a/AMDiS/src/solver/PetscSolver.hh
+++ b/AMDiS/src/solver/PetscSolver.hh
@@ -47,30 +47,30 @@ namespace AMDiS {
 
       if (params.matSolverPackage[kspSolver]) {
         // direct solvers
-        petsc_options_insert_string(("-" + kspPrefix + "ksp_type preonly").c_str());
-        petsc_options_insert_string(("-" + kspPrefix + "pc_type lu").c_str());
-        petsc_options_insert_string(("-" + kspPrefix + "pc_factor_mat_solver_package " + (kspSolver != "direct" ? kspSolver : "umfpack")).c_str());
+        petsc::options_insert_string(("-" + kspPrefix + "ksp_type preonly").c_str());
+        petsc::options_insert_string(("-" + kspPrefix + "pc_type lu").c_str());
+        petsc::options_insert_string(("-" + kspPrefix + "pc_factor_mat_solver_package " + (kspSolver != "direct" ? kspSolver : "umfpack")).c_str());
         oem.setMaxIterations(1);
         zeroStartVector = true;
         matSolverPackage = true;
       } else if (!params.emptyParam[kspSolver]) {
         // other solvers
-        petsc_options_insert_string(("-" + kspPrefix + "ksp_type " + kspSolver).c_str());
+        petsc::options_insert_string(("-" + kspPrefix + "ksp_type " + kspSolver).c_str());
       }
 
       // set the preconditioner
       setPrecon();
 
       if (oem.getInfo() >= 20)
-        petsc_options_insert_string(("-" + kspPrefix + "ksp_monitor_true_residual").c_str());
+        petsc::options_insert_string(("-" + kspPrefix + "ksp_monitor_true_residual").c_str());
       else if (oem.getInfo() >= 10)
-        petsc_options_insert_string(("-" + kspPrefix + "ksp_monitor").c_str());
+        petsc::options_insert_string(("-" + kspPrefix + "ksp_monitor").c_str());
 
       // command line string
       std::string kspString = "";
       Parameters::get(oem.getName() + "->ksp", kspString);
       if (kspString != "")
-        petsc_options_insert_string(kspString.c_str());
+        petsc::options_insert_string(kspString.c_str());
     }
 
@@ -128,11 +128,7 @@ namespace AMDiS {
     void PetscRunner<M,V>::createSubSolver(KSP &ksp_, Mat m, std::string kspPrefix_)
     {
       KSPCreate(PETSC_COMM_SELF, &ksp_);
-#if (PETSC_VERSION_MINOR >= 5)
-      KSPSetOperators(ksp_, m, m);
-#else
-      KSPSetOperators(ksp_, m, m, SAME_NONZERO_PATTERN);
-#endif
+      petsc::ksp_set_operators(ksp_, m, m);
       KSPSetOptionsPrefix(ksp_, kspPrefix_.c_str());
       KSPSetFromOptions(ksp_);
     }
diff --git a/AMDiS/src/solver/PetscTypes.h b/AMDiS/src/solver/PetscTypes.h
index 805a6cbde7643fd6755acd959a9a814f8184c5da..936b9f3f6f0ed90278a8f093949908d2e5638da4 100644
--- a/AMDiS/src/solver/PetscTypes.h
+++ b/AMDiS/src/solver/PetscTypes.h
@@ -144,16 +144,6 @@ namespace AMDiS {
   void operator>>(const PetscVectorNested& dest,
                   VecMap<SystemVector, Mapper>& rhs);
 
-  inline PetscErrorCode petsc_options_insert_string(const char in_str[])
-  {
-#if (PETSC_VERSION_MINOR >= 7)
-    return PetscOptionsInsertString(NULL, in_str);
-#else
-    return PetscOptionsInsertString(in_str);
-#endif
-  }
-
-
 } // end namespace AMDiS
 
 #include "solver/PetscTypes.hh"
diff --git a/extensions/demo/cahn_hilliard/src/PetscSolverCahnHilliard_DD.cc b/extensions/demo/cahn_hilliard/src/PetscSolverCahnHilliard_DD.cc
index d2b7fc69f2c1840a62aea10c1268166851ae323d..ab4e556e20b2a4a677d724566fef5d0307f43517 100644
--- a/extensions/demo/cahn_hilliard/src/PetscSolverCahnHilliard_DD.cc
+++ b/extensions/demo/cahn_hilliard/src/PetscSolverCahnHilliard_DD.cc
@@ -68,11 +68,7 @@ namespace AMDiS { namespace Parallel {
     /// create new solver for S
     KSP kspS;
     KSPCreate(*data->mpiCommGlobal, &kspS);
-#if (PETSC_VERSION_MINOR >= 5)
-    KSPSetOperators(kspS, S, S);
-#else
-    KSPSetOperators(kspS, S, S, SAME_NONZERO_PATTERN);
-#endif
+    petsc::ksp_set_operators(kspS, S, S);
     petsc_helper::setSolver(kspS, "S_", KSPFGMRES, PCSHELL, 1e-6, 1e-8, 5);
     {
       PC pc;
@@ -118,15 +114,11 @@ namespace AMDiS { namespace Parallel {
 
     MSG("CREATE POS 1: %p\n", &ksp);
     KSPCreate(domainComm, &ksp);
-#if (PETSC_VERSION_MINOR >= 5)
-    KSPSetOperators(ksp, getMatInterior(), getMatInterior());
-#else
-    KSPSetOperators(ksp, getMatInterior(), getMatInterior(), SAME_NONZERO_PATTERN);
-#endif
+    petsc::ksp_set_operators(ksp, getMatInterior(), getMatInterior());
     if (getInfo() >= 10)
-      KSPMonitorSet(ksp, KSPMonitorDefault, PETSC_NULL, PETSC_NULL);
+      petsc::ksp_monitor_set(ksp, KSPMonitorDefault);
     else if (getInfo() >= 20)
-      KSPMonitorSet(ksp, KSPMonitorTrueResidualNorm, PETSC_NULL, PETSC_NULL);
+      petsc::ksp_monitor_set(ksp, KSPMonitorTrueResidualNorm);
     petsc_helper::setSolver(ksp, "ch_", KSPFGMRES, PCNONE, getRelative(), getTolerance(), getMaxIterations());
 
     if (useOldInitialGuess)
diff --git a/extensions/preconditioner/PetscSolverNavierStokes2.cc b/extensions/preconditioner/PetscSolverNavierStokes2.cc
index 44ab7b20847f7f92a7ae2bc2eb1c558cb7d31f5b..5d8142f3ee8cf352a4213b3441005c38a4a9440c 100644
--- a/extensions/preconditioner/PetscSolverNavierStokes2.cc
+++ b/extensions/preconditioner/PetscSolverNavierStokes2.cc
@@ -133,15 +133,11 @@ namespace AMDiS { namespace Parallel {
   {
     // Create FGMRES based outer solver
     KSPCreate(domainComm, &ksp);
-#if (PETSC_VERSION_MINOR >= 5)
-    KSPSetOperators(ksp, getMatInterior(), getMatInterior());
-#else
-    KSPSetOperators(ksp, getMatInterior(), getMatInterior(), SAME_NONZERO_PATTERN);
-#endif
+    petsc::ksp_set_operators(ksp, getMatInterior(), getMatInterior());
     if (getInfo() >= 10)
-      KSPMonitorSet(ksp, PETSC_MONITOR_CAST(KSPMonitorDefault), PETSC_NULL, PETSC_NULL);
+      petsc::ksp_monitor_set(ksp, KSPMonitorDefault);
     else if (getInfo() >= 20)
-      KSPMonitorSet(ksp, PETSC_MONITOR_CAST(KSPMonitorTrueResidualNorm), PETSC_NULL, PETSC_NULL);
+      petsc::ksp_monitor_set(ksp, KSPMonitorTrueResidualNorm);
     petsc_helper::setSolver(ksp, "ns_", KSPFGMRES, PCNONE, getRelative(), getTolerance(), getMaxIterations());
 
     // Create null space information.
diff --git a/extensions/preconditioner/PetscSolverPfc.cc b/extensions/preconditioner/PetscSolverPfc.cc
index ac7e38480dc4162a504e1988344a1ae381f93365..cd140e9fab42d97cd1dd2669570fcd6c28d63a16 100644
--- a/extensions/preconditioner/PetscSolverPfc.cc
+++ b/extensions/preconditioner/PetscSolverPfc.cc
@@ -78,15 +78,11 @@ namespace AMDiS { namespace Parallel {
   {
     // Create FGMRES based outer solver
     KSPCreate(meshDistributor->getMpiComm(0), &ksp);
-#if (PETSC_VERSION_MINOR >= 5)
-    KSPSetOperators(ksp, getMatInterior(), getMatInterior());
-#else
-    KSPSetOperators(ksp, getMatInterior(), getMatInterior(), SAME_NONZERO_PATTERN);
-#endif
+    petsc::ksp_set_operators(ksp, getMatInterior(), getMatInterior());
     if (getInfo() >= 10)
-      KSPMonitorSet(ksp, PETSC_MONITOR_CAST(KSPMonitorDefault), PETSC_NULL, PETSC_NULL);
+      petsc::ksp_monitor_set(ksp, KSPMonitorDefault);
     else if (getInfo() >= 20)
-      KSPMonitorSet(ksp, PETSC_MONITOR_CAST(KSPMonitorTrueResidualNorm), PETSC_NULL, PETSC_NULL);
+      petsc::ksp_monitor_set(ksp, KSPMonitorTrueResidualNorm);
     petsc_helper::setSolver(ksp, "pfc_", KSPFGMRES, PCNONE, getRelative(), getTolerance(), getMaxIterations());
     KSPSetFromOptions(ksp);
 
diff --git a/extensions/preconditioner/PetscSolverPfc_diag.cc b/extensions/preconditioner/PetscSolverPfc_diag.cc
index 68fa80c9a7050ae0ad417df2f12eb42c763a081a..6a040a4f3de041eefbca10b3bca872df3efa1423 100644
--- a/extensions/preconditioner/PetscSolverPfc_diag.cc
+++ b/extensions/preconditioner/PetscSolverPfc_diag.cc
@@ -76,15 +76,11 @@ namespace AMDiS { namespace Parallel {
   {
     // Create FGMRES based outer solver
     KSPCreate(meshDistributor->getMpiComm(0), &ksp);
-#if (PETSC_VERSION_MINOR >= 5)
-    KSPSetOperators(ksp, getMatInterior(), getMatInterior());
-#else
-    KSPSetOperators(ksp, getMatInterior(), getMatInterior(), SAME_NONZERO_PATTERN);
-#endif
+    petsc::ksp_set_operators(ksp, getMatInterior(), getMatInterior());
     if (getInfo() >= 10)
-      KSPMonitorSet(ksp, PETSC_MONITOR_CAST(KSPMonitorDefault), PETSC_NULL, PETSC_NULL);
+      petsc::ksp_monitor_set(ksp, KSPMonitorDefault);
     else if (getInfo() >= 20)
-      KSPMonitorSet(ksp, PETSC_MONITOR_CAST(KSPMonitorTrueResidualNorm), PETSC_NULL, PETSC_NULL);
+      petsc::ksp_monitor_set(ksp, KSPMonitorTrueResidualNorm);
     petsc_helper::setSolver(ksp, "pfc_", KSPFGMRES, PCNONE, getRelative(), getTolerance(), getMaxIterations());
 
     KSPSetFromOptions(ksp);
@@ -167,11 +163,7 @@ namespace AMDiS { namespace Parallel {
     VecDestroy(&x);
     MatDestroy(&DK);
 
-#if (PETSC_VERSION_MINOR >= 5)
-    KSPSetOperators(data.kspS, matS, matS);
-#else
-    KSPSetOperators(data.kspS, matS, matS, SAME_NONZERO_PATTERN);
-#endif
+    petsc::ksp_set_operators(data.kspS, matS, matS);
 
     // === Setup preconditioner data ===
     data.delta = delta;
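
Note: the patch replaces the scattered #if (PETSC_VERSION_MINOR >= ...) guards with the inline wrappers in the new AMDiS::petsc namespace, so each PETSc API break is absorbed in exactly one place (3.5 dropped the MatStructure argument of KSPSetOperators/KSPGetOperators and MatNullSpaceRemove, 3.6 renamed MatGetVecs to MatCreateVecs, and 3.7 added an options-database handle to PetscOptionsView/PetscOptionsInsertString and switched KSPMonitorSet to PetscViewerAndFormat contexts). A minimal sketch of what a call site looks like once parallel/PetscHelper.h is included; it is illustrative only, solveWithCompatWrappers and its arguments are hypothetical and not part of the patch, and error-code checking is omitted for brevity:

// Hypothetical call site (not part of the patch) using the AMDiS::petsc
// compatibility wrappers; PetscErrorCode return values are ignored for brevity.
#include <petsc.h>
#include "parallel/PetscHelper.h"

using namespace AMDiS;

void solveWithCompatWrappers(MPI_Comm comm, Mat A, Vec rhs, Vec sol)
{
  KSP ksp;
  KSPCreate(comm, &ksp);

  // Resolves to the 3-argument signature on PETSc >= 3.5, and to the
  // 4-argument signature with SAME_NONZERO_PATTERN on older releases.
  petsc::ksp_set_operators(ksp, A, A);

  // On PETSc >= 3.7 this creates the required PetscViewerAndFormat context;
  // on older releases it passes PETSC_NULL, so the caller never sees either.
  petsc::ksp_monitor_set(ksp, KSPMonitorTrueResidualNorm);

  // Supplies the options-database handle that PETSc >= 3.7 expects.
  petsc::options_insert_string("-ksp_type fgmres");

  KSPSetFromOptions(ksp);
  KSPSolve(ksp, rhs, sol);
  KSPDestroy(&ksp);
}

Because the wrappers are header-only inline functions that forward the PetscErrorCode, existing call sites can keep their CHKERRQ-style error handling, and no new translation unit or link dependency is introduced.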