From 323d3c5c351fb9914da48cbb47322d3ef73bf60c Mon Sep 17 00:00:00 2001 From: Thomas Witkowski <thomas.witkowski@gmx.de> Date: Mon, 29 Oct 2012 15:43:16 +0000 Subject: [PATCH] d --- AMDiS/src/parallel/PetscSolverFeti.cc | 83 ++++++- AMDiS/src/parallel/PetscSolverFeti.h | 6 + .../src/parallel/PetscSolverFetiOperators.cc | 35 ++- AMDiS/src/parallel/PetscSolverGlobalMatrix.cc | 203 ------------------ 4 files changed, 111 insertions(+), 216 deletions(-) diff --git a/AMDiS/src/parallel/PetscSolverFeti.cc b/AMDiS/src/parallel/PetscSolverFeti.cc index 5cdc1ce4..597374c6 100644 --- a/AMDiS/src/parallel/PetscSolverFeti.cc +++ b/AMDiS/src/parallel/PetscSolverFeti.cc @@ -43,6 +43,7 @@ namespace AMDiS { rStartInterior(0), nGlobalOverallInterior(0), printTimings(false), + dirichletMode(0), stokesMode(false), augmentedLagrange(false), pressureComponent(-1) @@ -108,8 +109,10 @@ namespace AMDiS { if (subdomain == NULL) { subdomain = new PetscSolverGlobalMatrix(""); subdomain->setSymmetric(isSymmetric); - // subdomain->setHandleDirichletRows(false); - subdomain->setHandleDirichletRows(true); + if (dirichletMode == 0) + subdomain->setHandleDirichletRows(true); + else + subdomain->setHandleDirichletRows(false); if (meshLevel == 0) { subdomain->setMeshDistributor(meshDistributor, @@ -143,15 +146,15 @@ namespace AMDiS { { FUNCNAME("PetscSolverFeti::createDirichletData()"); - return; - - int nComponents = mat.getSize(); - for (int component = 0; component < nComponents; component++) { - DOFMatrix* dofMat = mat[component][component]; - if (!dofMat) - continue; - - dirichletRows[component] = dofMat->getDirichletRows(); + if (dirichletMode == 1) { + int nComponents = mat.getSize(); + for (int component = 0; component < nComponents; component++) { + DOFMatrix* dofMat = mat[component][component]; + if (!dofMat) + continue; + + dirichletRows[component] = dofMat->getDirichletRows(); + } } } @@ -2115,6 +2118,35 @@ namespace AMDiS { interfaceDofMap.createVec(vecRhsInterface); interfaceDofMap.createVec(vecSolInterface); + { + // PetscViewer petscView; + // PetscViewerBinaryOpen(PETSC_COMM_WORLD, "sol0.vec", + // FILE_MODE_READ, &petscView); + + // VecLoad(vecSolInterface, petscView); + // PetscViewerDestroy(&petscView); + } + { + // PetscViewer petscView; + // PetscViewerBinaryOpen(PETSC_COMM_WORLD, "sol1.vec", + // FILE_MODE_READ, &petscView); + + // VecLoad(vecSolLagrange, petscView); + // PetscViewerDestroy(&petscView); + } + + { + int n; + VecGetSize(vecSolInterface, &n); + double sum; + VecSum(vecSolInterface, &sum); + sum = -sum / static_cast<int>(n); + MSG("AVRG = %e\n", sum); + } + + + + Vec vecRhsArray[2] = {vecRhsInterface, vecRhsLagrange}; VecCreateNest(mpiCommGlobal, 2, PETSC_NULL, vecRhsArray, &vecRhs); @@ -2230,8 +2262,37 @@ namespace AMDiS { PetscSolverFetiDebug::debugFeti(*this, vecRhs); // === Solve with FETI-DP operator. 
===
+    KSPSetInitialGuessNonzero(ksp_feti, PETSC_TRUE);
     KSPSolve(ksp_feti, vecRhs, vecSol);
 
+    {
+      int n;
+      VecGetSize(vecSolInterface, &n);
+      double sum;
+      VecSum(vecSolInterface, &sum);
+      sum = -sum / static_cast<int>(n);
+      MSG("SOL PRESSURE AVRG = %e\n", sum);
+    }
+
+
+
+    {
+      PetscViewer petscView;
+      PetscViewerBinaryOpen(PETSC_COMM_WORLD, "sol0.vec",
+                            FILE_MODE_WRITE, &petscView);
+      VecView(vecSolInterface, petscView);
+      PetscViewerDestroy(&petscView);
+    }
+
+
+    {
+      PetscViewer petscView;
+      PetscViewerBinaryOpen(PETSC_COMM_WORLD, "sol1.vec",
+                            FILE_MODE_WRITE, &petscView);
+      VecView(vecSolLagrange, petscView);
+      PetscViewerDestroy(&petscView);
+    }
+
     if (printTimings) {
       MPI::COMM_WORLD.Barrier();
diff --git a/AMDiS/src/parallel/PetscSolverFeti.h b/AMDiS/src/parallel/PetscSolverFeti.h
index f761230e..7b73c2e5 100644
--- a/AMDiS/src/parallel/PetscSolverFeti.h
+++ b/AMDiS/src/parallel/PetscSolverFeti.h
@@ -334,6 +334,12 @@ namespace AMDiS {
 
     int nOverallEdges;
 
+    /// There are two different Dirichlet modes:
+    /// 0: Dirichlet rows are zeroed and a diagonal element is set to one.
+    /// 1: Dirichlet rows are removed (this mode does not work correctly, but
+    ///    many functions are prepared to make use of it).
+    int dirichletMode;
+
     /// If true, the FETI-DP solver is applied to a Stokes like problem. Thus,
     /// there is a pressure variable which is not part of the coarse grid
     /// problem.
diff --git a/AMDiS/src/parallel/PetscSolverFetiOperators.cc b/AMDiS/src/parallel/PetscSolverFetiOperators.cc
index 2b66bd67..f73c2c06 100644
--- a/AMDiS/src/parallel/PetscSolverFetiOperators.cc
+++ b/AMDiS/src/parallel/PetscSolverFetiOperators.cc
@@ -252,12 +252,33 @@ namespace AMDiS {
 
     double wtime = MPI::Wtime();
 
+    {
+      MatNullSpace nullSpace;
+      MatGetNullSpace(mat, &nullSpace);
+
+      PetscBool hasConst;
+      PetscInt nVec;
+      const Vec *vecs;
+      // MatNullSpaceGetVecs(nullSpace, &hasConst, &nVec, &vecs);
+      MatNullSpaceRemove(nullSpace, x, PETSC_NULL);
+    }
+
+
     Vec x_interface, x_lagrange, y_interface, y_lagrange;
     VecNestGetSubVec(x, 0, &x_interface);
     VecNestGetSubVec(x, 1, &x_lagrange);
     VecNestGetSubVec(y, 0, &y_interface);
     VecNestGetSubVec(y, 1, &y_lagrange);
 
+    {
+      int n;
+      VecGetSize(x_interface, &n);
+      double sum;
+      VecSum(x_interface, &sum);
+      sum = -sum / static_cast<int>(n);
+      MSG("xbegin = %e\n", sum);
+    }
+
     void *ctx;
     MatShellGetContext(mat, &ctx);
     FetiData* data = static_cast<FetiData*>(ctx);
@@ -316,6 +337,16 @@ namespace AMDiS {
     // y_interface = A_{\Gamma B} tmp_vec_b0
     MatMult(data->subSolver->getMatCoarseInterior(1), data->tmp_vec_b0, y_interface);
 
+    {
+      int n;
+      VecGetSize(y_interface, &n);
+      double sum;
+      VecSum(y_interface, &sum);
+      sum = -sum / static_cast<int>(n);
+      MSG("yend = %e\n", sum);
+    }
+
+
     // tmp_vec_primal0 = S_{\Pi\Pi}^{-1} (v_{\Pi} - A_{\Pi B} A_{BB}^{-1} v_{B})
     // tmp_vec_interface = A_{\Gamma \Pi} tmp_vec_primal0
     MatMult(data->subSolver->getMatCoarse(1, 0), data->tmp_vec_primal0, data->tmp_vec_interface);
@@ -466,12 +497,12 @@ namespace AMDiS {
     VecNestGetSubVec(yvec, 0, &y_interface);
     VecNestGetSubVec(yvec, 1, &y_lagrange);
 
-    KSPSolve(data->ksp_mass, x_interface, y_interface);
+    VecCopy(x_interface, y_interface);
+    // KSPSolve(data->ksp_mass, x_interface, y_interface);
 
     MatMultTranspose(*(data->mat_lagrange_scaled), x_lagrange, data->tmp_vec_b0);
 
     // === Restriction of the B nodes to the boundary nodes. 
=== - int nLocalB; int nLocalDuals; VecGetLocalSize(data->tmp_vec_b0, &nLocalB); diff --git a/AMDiS/src/parallel/PetscSolverGlobalMatrix.cc b/AMDiS/src/parallel/PetscSolverGlobalMatrix.cc index f2a02ccc..7cb273bf 100644 --- a/AMDiS/src/parallel/PetscSolverGlobalMatrix.cc +++ b/AMDiS/src/parallel/PetscSolverGlobalMatrix.cc @@ -65,8 +65,6 @@ namespace AMDiS { matAssembly(); - // removeDirichletRows(seqMat); - if (printMatInfo) { MatInfo matInfo; MatGetInfo(getMatInterior(), MAT_GLOBAL_SUM, &matInfo); @@ -246,8 +244,6 @@ namespace AMDiS { matAssembly(); - // removeDirichletRows(seqMat); - // === Create solver for the non primal (thus local) variables. === KSPCreate(mpiCommLocal, &kspInterior); @@ -289,8 +285,6 @@ namespace AMDiS { vecRhsAssembly(); - // removeDirichletRows(vec); - // === For debugging allow to write the rhs vector to a file. === bool dbgWriteRhs = false; @@ -479,203 +473,6 @@ namespace AMDiS { } - void PetscSolverGlobalMatrix::removeDirichletRows(Matrix<DOFMatrix*> *seqMat) - { - FUNCNAME("PetscSolverGlobalMatrix::removeDirichletRows()"); - - if (!handleDirichletRows) - return; - - vector<int> dRowsInterior, dRowsCoarse; - vector<int> dColsInterior, dColsCoarse; - vector<double> dValuesInterior, dValuesCoarse; - - int nComponents = seqMat->getSize(); - - for (int rowComp = 0; rowComp < nComponents; rowComp++) { - bool dirichletRow = false; - int dirichletMainCol = -1; - - for (int colComp = 0; colComp < nComponents; colComp++) { - DOFMatrix *dofMat = (*seqMat)[rowComp][colComp]; - if (!dofMat) - continue; - - BoundaryIndexMap& bounds = - const_cast<BoundaryIndexMap&>(dofMat->getBoundaryManager()->getBoundaryConditionMap()); - - for (BoundaryIndexMap::iterator bIt = bounds.begin(); bIt != bounds.end(); ++bIt) { - if (bIt->second && bIt->second->isDirichlet()) { - dirichletRow = true; - if ((dynamic_cast<DirichletBC*>(bIt->second))->applyBoundaryCondition()) { - dirichletMainCol = colComp; - break; - } - } - } - } - - if (!dirichletRow) - continue; - - DOFMatrix *dofMat = (*seqMat)[rowComp][dirichletMainCol]; - TEST_EXIT(dofMat->getRowFeSpace() == dofMat->getColFeSpace()) - ("I have to think about this scenario! Really possible?\n"); - - - std::set<DegreeOfFreedom> &dRows = dofMat->getDirichletRows(); - for (std::set<DegreeOfFreedom>::iterator dofIt = dRows.begin(); - dofIt != dRows.end(); ++dofIt) { - if (hasCoarseSpace()) { - bool isRowCoarse = isCoarseSpace(rowComp, *dofIt); - bool isColCoarse = isCoarseSpace(dirichletMainCol, *dofIt); - TEST_EXIT(isRowCoarse == isColCoarse) - ("Really possible? 
So reimplement AMDiS from the scratch!\n"); - - if (isRowCoarse) { - ParallelDofMapping *rowCoarseSpace = coarseSpaceMap[rowComp]; - ParallelDofMapping *colCoarseSpace = coarseSpaceMap[dirichletMainCol]; - - MultiIndex multiIndex; - if ((*rowCoarseSpace)[rowComp].find(*dofIt, multiIndex) && - (*rowCoarseSpace)[rowComp].isRankDof(*dofIt)) { - int rowIndex = rowCoarseSpace->getLocalMatIndex(rowComp, *dofIt); - int colIndex = colCoarseSpace->getLocalMatIndex(dirichletMainCol, *dofIt); - dRowsCoarse.push_back(rowIndex); - dColsCoarse.push_back(colIndex); - dValuesCoarse.push_back(1.0); - } - } else { - - MultiIndex multiIndex; - if ((*interiorMap)[rowComp].find(*dofIt, multiIndex) && - (*interiorMap)[rowComp].isRankDof(*dofIt)) { - int rowIndex = interiorMap->getLocalMatIndex(rowComp, *dofIt); - int colIndex = interiorMap->getLocalMatIndex(dirichletMainCol, *dofIt); - dRowsInterior.push_back(rowIndex); - dColsInterior.push_back(colIndex); - dValuesInterior.push_back(1.0); - } - } - } else { - MultiIndex multiIndex; - if ((*interiorMap)[rowComp].find(*dofIt, multiIndex) && - (*interiorMap)[rowComp].isRankDof(*dofIt)) { - int rowIndex = interiorMap->getMatIndex(rowComp, multiIndex.global); - int colIndex = interiorMap->getMatIndex(dirichletMainCol, multiIndex.global); - dRowsInterior.push_back(rowIndex); - dColsInterior.push_back(colIndex); - dValuesInterior.push_back(1.0); - } - } - } - } - - { - Mat mpiMat = getMatInterior(); - MatZeroRows(mpiMat, dRowsInterior.size(), &(dRowsInterior[0]), 0.0, - PETSC_NULL, PETSC_NULL); - - for (int i = 0; i < static_cast<int>(dRowsInterior.size()); i++) - MatSetValue(mpiMat, dRowsInterior[i], dColsInterior[i], - dValuesInterior[i], INSERT_VALUES); - - MatAssemblyBegin(mpiMat, MAT_FINAL_ASSEMBLY); - MatAssemblyEnd(mpiMat, MAT_FINAL_ASSEMBLY); - } - - if (hasCoarseSpace()) { - MatZeroRows(getMatInteriorCoarse(0), - dRowsInterior.size(), &(dRowsInterior[0]), 0.0, PETSC_NULL, PETSC_NULL); - MatZeroRows(getMatInteriorCoarse(1), - dRowsInterior.size(), &(dRowsInterior[0]), 0.0, PETSC_NULL, PETSC_NULL); - - - MatZeroRows(getMatCoarseInterior(0), - dRowsCoarse.size(), &(dRowsCoarse[0]), 0.0, PETSC_NULL, PETSC_NULL); - - Mat mpiMat = getMatCoarse(); - MatZeroRows(mpiMat, dRowsCoarse.size(), &(dRowsCoarse[0]), 0.0, - PETSC_NULL, PETSC_NULL); - - for (int i = 0; i < static_cast<int>(dRowsCoarse.size()); i++) - MatSetValue(mpiMat, dRowsCoarse[i], dColsCoarse[i], - dValuesCoarse[i], INSERT_VALUES); - - MatAssemblyBegin(mpiMat, MAT_FINAL_ASSEMBLY); - MatAssemblyEnd(mpiMat, MAT_FINAL_ASSEMBLY); - } - } - - - void PetscSolverGlobalMatrix::removeDirichletRows(SystemVector *seqVec) - { - FUNCNAME("PetscSolverGlobalMatrix::removeDirichletRows()"); - - if (!handleDirichletRows) - return; - - int cInterior = 0; - int cCoarse = 0; - - int nComponents = seqVec->getSize(); - for (int component = 0; component < nComponents; component++) { - DOFVector<double> *dofVec = seqVec->getDOFVector(component); - map<DegreeOfFreedom, double>& dValues = dofVec->getDirichletValues(); - - for (map<DegreeOfFreedom, double>::iterator dofIt = dValues.begin(); - dofIt != dValues.end(); ++dofIt) { - if (hasCoarseSpace()) { - if (isCoarseSpace(component, dofIt->first)) { - ParallelDofMapping *rowCoarseSpace = coarseSpaceMap[component]; - MultiIndex multiIndex; - if ((*rowCoarseSpace)[component].find(dofIt->first, multiIndex) && - (*rowCoarseSpace)[component].isRankDof(dofIt->first)) { - if (dofIt->second > 0.5) - cCoarse++; - VecSetValue(getVecRhsCoarse(), - rowCoarseSpace->getMatIndex(component, 
multiIndex.global), - dofIt->second, INSERT_VALUES); - } - } else { - MultiIndex multiIndex; - if ((*interiorMap)[component].find(dofIt->first, multiIndex) && - (*interiorMap)[component].isRankDof(dofIt->first)) { - if (dofIt->second > 0.5) - cInterior++; - VecSetValue(getVecRhsInterior(), - interiorMap->getLocalMatIndex(component, dofIt->first), - dofIt->second, INSERT_VALUES); - } - } - } else { - MultiIndex multiIndex; - if ((*interiorMap)[component].find(dofIt->first, multiIndex) && - (*interiorMap)[component].isRankDof(dofIt->first)) { - if (dofIt->second > 0.5) - cInterior++; - VecSetValue(getVecRhsInterior(), - interiorMap->getMatIndex(component, multiIndex.global), - dofIt->second, INSERT_VALUES); - } - } - } - } - - VecAssemblyBegin(getVecRhsInterior()); - VecAssemblyEnd(getVecRhsInterior()); - - if (hasCoarseSpace()) { - VecAssemblyBegin(getVecRhsCoarse()); - VecAssemblyEnd(getVecRhsCoarse()); - } - - mpi::globalAdd(cInterior); - mpi::globalAdd(cCoarse); - MSG("WORKED ON DIRICHLET DOFS: %d %d\n", cInterior, cCoarse); - } - - void PetscSolverGlobalMatrix::createFieldSplit(PC pc) { FUNCNAME("PetscSolverGlobalMatrix::createFieldSplit()"); -- GitLab
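
Note: the AVRG, xbegin/yend and SOL PRESSURE AVRG messages added by this patch all print the negative of the mean value of the interface (pressure) part of a vector, i.e. they check whether the pressure component has zero average once the constant null space has been removed. A minimal, self-contained sketch of that check and of removing the constant mode by hand is shown below; the helper name is illustrative only and is not part of AMDiS or of this patch.

#include <petscvec.h>

// Illustrative helper (not AMDiS API): subtract the mean value from a
// distributed PETSc vector so that it has zero average; afterwards
// VecSum(v, ...) returns (numerically) zero.
static PetscErrorCode removeConstantMode(Vec v)
{
  PetscInt    n;
  PetscScalar sum;

  VecGetSize(v, &n);        // global number of entries
  VecSum(v, &sum);          // sum of all entries over all ranks
  VecShift(v, -sum / n);    // shift every entry by minus the mean
  return 0;
}

This is essentially what MatNullSpaceRemove does for a constant null space, which is what the new block in the FETI mat-mult calls before splitting the nested vector.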
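The dirichletMode comment added to PetscSolverFeti.h distinguishes zeroing Dirichlet rows (mode 0) from removing them (mode 1). A minimal sketch of mode-0 style handling with plain PETSc calls follows, assuming the global row indices and boundary values have already been collected; the helper name and signature are illustrative only.

#include <petscmat.h>
#include <vector>

// Illustrative helper (not AMDiS API): zero the given rows of A, put 1.0 on
// their diagonal and write the prescribed Dirichlet values into the
// right-hand side, so the Dirichlet DOFs stay in the system as the trivial
// equations u_i = g_i (mode-0 behaviour).
static PetscErrorCode applyDirichletRows(Mat A, Vec rhs,
                                         const std::vector<PetscInt> &rows,
                                         const std::vector<PetscScalar> &values)
{
  for (std::size_t i = 0; i < rows.size(); i++)
    VecSetValue(rhs, rows[i], values[i], INSERT_VALUES);
  VecAssemblyBegin(rhs);
  VecAssemblyEnd(rhs);

  // MatZeroRows zeroes the listed rows and replaces the diagonal entry by 1.0.
  if (!rows.empty())
    MatZeroRows(A, static_cast<PetscInt>(rows.size()), &rows[0],
                1.0, PETSC_NULL, PETSC_NULL);
  return 0;
}

In the patch itself, dirichletMode == 0 lets the subdomain solver handle the Dirichlet rows this way (setHandleDirichletRows(true)), whereas mode 1 only collects the Dirichlet rows in createDirichletData() for later removal.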