/******************************************************************************
 *
 * AMDiS - Adaptive multidimensional simulations
 *
 * Copyright (C) 2013 Dresden University of Technology. All Rights Reserved.
 * Web: https://fusionforge.zih.tu-dresden.de/projects/amdis
 *
 * Authors:
 * Simon Vey, Thomas Witkowski, Andreas Naumann, Simon Praetorius, et al.
 *
 * This file is provided AS IS with NO WARRANTY OF ANY KIND, INCLUDING THE
 * WARRANTY OF DESIGN, MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE.
 *
 *
 * This file is part of AMDiS
 *
 * See also license.opensource.txt in the distribution.
 *
 ******************************************************************************/


/** \file PetscHelper.h */

#ifndef AMDIS_PETSCHELPER_H
#define AMDIS_PETSCHELPER_H

#include <mpi.h>
#include <map>
#include <vector>
#include <petsc.h>
#include "AMDiS_fwd.h"

namespace AMDiS { namespace Parallel {

  /** \brief
   * In this namespace, we collect several auxiliary functions for using
   * PETSc in AMDiS. Many of these functions may be replaced by new PETSc
   * functions in upcoming versions.
   */
  namespace petsc_helper {

    /// Defines a PETSc matrix column-wise: a sparse column is a pair of
    /// row indices and the corresponding values.
    typedef std::pair<std::vector<int>, std::vector<double> > SparseCol;
    typedef std::map<int, SparseCol> PetscMatCol;

    /** \brief
     * Returns for a distributed matrix on each rank the local matrix in a
     * sparse column format.
     *
     * \param[in]  mat     PETSc distributed matrix.
     * \param[out] matCol  The sparse column representation of the local matrix.
     */
    void getMatLocalColumn(Mat mat, PetscMatCol &matCol);

    /** \brief
     * Sets a local column vector in a distributed matrix.
     *
     * \param[out] mat     Distributed matrix.
     * \param[in]  column  Column index.
     * \param[in]  vec     Column vector.
     */
    void setMatLocalColumn(Mat mat, int column, Vec vec);

    /** \brief
     * Creates a local PETSc vector representing the column of a matrix
     * stored in \ref PetscMatCol type format.
     *
     * \param[in]  matCol  Sparse column representation.
     * \param[out] vec     Vector representing one column of the matrix.
     */
    void getColumnVec(const SparseCol &matCol, Vec vec);

    /** \brief
     * Computes the matrix-matrix product inv(A) B = C. Matrices B and C
     * are distributed matrices. Matrix A is a local matrix on each MPI
     * task. The overall number of rows of the local matrices A must equal
     * the number of distributed rows in B.
     *
     * \param[in]  mpiComm  MPI communicator object (must fit with ksp).
     * \param[in]  ksp      inv(A) matrix given by a PETSc solver object.
     * \param[in]  mat0     Matrix B.
     * \param[out] mat1     Resulting matrix C, created inside the function.
     */
    void blockMatMatSolve(MPI::Intracomm mpiComm, KSP ksp, Mat mat0, Mat &mat1);

    /** \brief
     * Converts a 2x2 nested matrix to a MATAIJ matrix (thus not nested).
     *
     * \param[in]  matNest  Nested input matrix.
     * \param[out] mat      Matrix of type MATAIJ, created inside this function.
     */
    void matNestConvert(Mat matNest, Mat &mat);
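    // --------------------------------------------------------------------
    // Usage sketch (not part of the original interface): a minimal,
    // hypothetical helper showing how the column-wise accessors above are
    // meant to be combined, assuming `src' and `dst' share the same row
    // distribution and `tmp' is a compatible work vector.
    inline void copyLocalColumns(Mat src, Mat dst, Vec tmp)
    {
      // Extract all local columns of the distributed matrix at once.
      PetscMatCol matCol;
      getMatLocalColumn(src, matCol);

      // Rebuild each sparse column as a PETSc vector and write it back
      // into the corresponding column of the target matrix.
      for (PetscMatCol::const_iterator it = matCol.begin();
           it != matCol.end(); ++it) {
        getColumnVec(it->second, tmp);
        setMatLocalColumn(dst, it->first, tmp);
      }
    }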
    void setSolverWithLu(KSP ksp, const char* kspPrefix,
                         KSPType kspType, PCType pcType,
                         const MatSolverPackage matSolverPackage,
                         PetscReal rtol = PETSC_DEFAULT,
                         PetscReal atol = PETSC_DEFAULT,
                         PetscInt maxIt = PETSC_DEFAULT);

    void setSolver(KSP ksp, const char* kspPrefix,
                   KSPType kspType, PCType pcType,
                   PetscReal rtol = PETSC_DEFAULT,
                   PetscReal atol = PETSC_DEFAULT,
                   PetscInt maxIt = PETSC_DEFAULT);

    void createSolver(MPI::Intracomm comm, KSP &ksp, Mat m,
                      std::string kspPrefix = "", int info = 0);

  } // end namespace petsc_helper
} // end namespace Parallel

  // wrapper functions for PETSc API changes between minor versions
  namespace petsc {

    inline PetscErrorCode options_view(PetscViewer viewer)
    {
#if (PETSC_VERSION_MINOR >= 7)
      return PetscOptionsView(PETSC_NULL, viewer);
#else
      return PetscOptionsView(viewer);
#endif
    }

    inline PetscErrorCode options_insert_string(const char in_str[])
    {
#if (PETSC_VERSION_MINOR >= 7)
      return PetscOptionsInsertString(PETSC_NULL, in_str);
#else
      return PetscOptionsInsertString(in_str);
#endif
    }

    inline PetscErrorCode ksp_set_operators(KSP ksp, Mat Amat, Mat Pmat)
    {
#if (PETSC_VERSION_MINOR >= 5)
      return KSPSetOperators(ksp, Amat, Pmat);
#else
      return KSPSetOperators(ksp, Amat, Pmat, SAME_NONZERO_PATTERN);
#endif
    }

    inline PetscErrorCode ksp_get_operators(KSP ksp, Mat *Amat, Mat *Pmat)
    {
#if (PETSC_VERSION_MINOR >= 5)
      return KSPGetOperators(ksp, Amat, Pmat);
#else
      // Before 3.5 the fourth argument is an optional MatStructure output,
      // which we do not need here.
      return KSPGetOperators(ksp, Amat, Pmat, PETSC_NULL);
#endif
    }

    template <typename Monitor>
    inline PetscErrorCode ksp_monitor_set(KSP ksp, Monitor monitor)
    {
#if (PETSC_VERSION_MINOR >= 7)
      // Since 3.7, monitors receive a PetscViewerAndFormat context.
      PetscViewerAndFormat *vf;
      PetscErrorCode ierr;
      ierr = PetscViewerAndFormatCreate(PETSC_VIEWER_STDOUT_WORLD,
                                        PETSC_VIEWER_DEFAULT, &vf); CHKERRQ(ierr);
      ierr = KSPMonitorSet(ksp,
                           (PetscErrorCode (*)(KSP,PetscInt,PetscReal,void*))monitor,
                           vf,
                           (PetscErrorCode (*)(void**))PetscViewerAndFormatDestroy);
      CHKERRQ(ierr);
      return ierr;
#else
      return KSPMonitorSet(ksp, monitor, PETSC_NULL, PETSC_NULL);
#endif
    }

    inline PetscErrorCode mat_create_vecs(Mat mat, Vec *right, Vec *left)
    {
#if (PETSC_VERSION_MINOR >= 6)
      return MatCreateVecs(mat, right, left);
#else
      return MatGetVecs(mat, right, left);
#endif
    }

    inline PetscErrorCode mat_nullspace_remove(MatNullSpace sp, Vec vec)
    {
#if (PETSC_VERSION_MINOR >= 5)
      return MatNullSpaceRemove(sp, vec);
#else
      return MatNullSpaceRemove(sp, vec, PETSC_NULL);
#endif
    }

  } // end namespace petsc
} // end namespace AMDiS

#endif
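/*
 * Example (illustrative sketch, not part of the original header):
 * configuring a KSP as a direct LU solver through the helpers above.
 * `comm' and `mat' are assumed to be a valid MPI communicator and an
 * assembled PETSc matrix; MATSOLVERMUMPS requires a PETSc build with
 * MUMPS support.
 *
 *   KSP ksp;
 *   AMDiS::Parallel::petsc_helper::createSolver(comm, ksp, mat, "block_");
 *   AMDiS::Parallel::petsc_helper::setSolverWithLu(ksp, "block_",
 *       KSPPREONLY, PCLU, MATSOLVERMUMPS);
 *   AMDiS::petsc::ksp_set_operators(ksp, mat, mat);
 */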