From 06dc55fb89bdcd66b6ecaeab9a7c677443c0b0d4 Mon Sep 17 00:00:00 2001
From: Thomas Witkowski <thomas.witkowski@gmx.de>
Date: Tue, 7 Jun 2011 06:08:17 +0000
Subject: [PATCH] Removed UMFPACK library from AMDiS.

---
 AMDiS/src/Global.cc                      |  9 +++--
 AMDiS/src/Global.h                       |  2 +-
 AMDiS/src/Mesh.cc                        | 24 ++++++++-----
 AMDiS/src/parallel/CheckerPartitioner.cc |  8 +++++
 AMDiS/src/parallel/PetscProblemStat.cc   | 21 +++---------
 AMDiS/src/parallel/PetscSolverFeti.cc    | 43 +++++++++++++++++++-----
 6 files changed, 70 insertions(+), 37 deletions(-)

diff --git a/AMDiS/src/Global.cc b/AMDiS/src/Global.cc
index c18e9013..941b12eb 100644
--- a/AMDiS/src/Global.cc
+++ b/AMDiS/src/Global.cc
@@ -332,7 +332,7 @@ namespace AMDiS {
   }
 
 
-  void processMemUsage(double& vm_usage, double& resident_set)
+  void processMemUsage(double& vm_usage, double& resident_set, bool inMegaByte)
   {
     using std::ios_base;
     using std::ifstream;
@@ -361,8 +361,13 @@ namespace AMDiS {
     // in case x86-64 is configured to use 2MB pages
     long page_size_kb = sysconf(_SC_PAGE_SIZE) / 1024;
 
-    vm_usage = vsize / 1024.0;
+    vm_usage = vsize / 1024.0;
     resident_set = rss * page_size_kb;
+
+    if (inMegaByte) {
+      vm_usage /= 1024.0;
+      resident_set /= 1024.0;
+    }
   }
 
 
diff --git a/AMDiS/src/Global.h b/AMDiS/src/Global.h
index 557600fa..0be17a4b 100644
--- a/AMDiS/src/Global.h
+++ b/AMDiS/src/Global.h
@@ -109,7 +109,7 @@ namespace AMDiS {
 
   void waitSec(int seconds);
 
-  void processMemUsage(double& vm_usage, double& resident_set);
+  void processMemUsage(double& vm_usage, double& resident_set, bool inMegaByte = true);
 
   /// Content comparision of two pointers. Used e.g. for find_if
   template<typename T>
diff --git a/AMDiS/src/Mesh.cc b/AMDiS/src/Mesh.cc
index 0b151343..e3c7615a 100644
--- a/AMDiS/src/Mesh.cc
+++ b/AMDiS/src/Mesh.cc
@@ -1213,18 +1213,19 @@ namespace AMDiS {
     Parameters::get(name + "->check", check);
     Parameters::get(name + "->preserve coarse dofs", preserveCoarseDOFs);
 
-    if (macroFilename.length()) {
-      // In parallel computations, check if a finer macro mesh is required.
+    TEST_EXIT(macroFilename.length())
+      ("No mesh defined for parameter %s->macro file name !\n", name.c_str());
+
+    // In parallel computations, check if a finer macro mesh is required.
 #ifdef HAVE_PARALLEL_DOMAIN_AMDIS
-      checkParallelMacroFile(macroFilename, periodicFilename, check);
+    checkParallelMacroFile(macroFilename, periodicFilename, check);
 #endif
 
-      macroFileInfo =
-        MacroReader::readMacro(macroFilename, this, periodicFilename, check);
+    macroFileInfo =
+      MacroReader::readMacro(macroFilename, this, periodicFilename, check);
 
-      if (!valueFilename.length())
-        clearMacroFileInfo();
-    }
+    if (!valueFilename.length())
+      clearMacroFileInfo();
 
     initialized = true;
   }
@@ -1251,11 +1252,18 @@
     localAdmin->setNumberOfDofs(admin[0]->getNumberOfDofs());
     testMesh.addDOFAdmin(localAdmin);
 
+    MSG("START READ FILE %s\n", macroFilename);
+
     MacroInfo *testMacroInfo =
       MacroReader::readMacro(macroFilename, &testMesh, periodicFilename, check);
     testMacroInfo->clear();
     delete testMacroInfo;
 
+    MSG("TEST MESH HAS %d ELEMENTS\n", testMesh.getNumberOfMacros());
+
+    MPI::Finalize();
+    exit(0);
+
 
 
     // === Check the mesh structure. ===
diff --git a/AMDiS/src/parallel/CheckerPartitioner.cc b/AMDiS/src/parallel/CheckerPartitioner.cc
index 7ba10fea..4dfa87a7 100644
--- a/AMDiS/src/parallel/CheckerPartitioner.cc
+++ b/AMDiS/src/parallel/CheckerPartitioner.cc
@@ -25,6 +25,8 @@ namespace AMDiS {
 
     int nElementsPerBlock = (mesh->getDim() == 2 ? 2 : 6);
 
+    MSG("ELS_PER_BLOCK = %d\n", nElementsPerBlock);
+
     TraverseStack stack;
     ElInfo *elInfo = stack.traverseFirst(mesh, 0, Mesh::CALL_EL_LEVEL);
     while (elInfo) {
@@ -33,6 +35,12 @@
       int elInRank = elIndex / nElementsPerBlock;
       TEST_EXIT_DBG(elInRank < mpiSize)("Should not happen!\n");
 
+      if (elInRank == mpiRank) {
+        MSG("EL %d IS MY!\n", elIndex);
+      } else {
+        MSG("EL %d IS IN RANK %d\n", elIndex, elInRank);
+      }
+
       elementInRank[elIndex] = (elInRank == mpiRank);
 
       partitionMap[elIndex] = elInRank;
diff --git a/AMDiS/src/parallel/PetscProblemStat.cc b/AMDiS/src/parallel/PetscProblemStat.cc
index 5a3f624b..747a10b7 100644
--- a/AMDiS/src/parallel/PetscProblemStat.cc
+++ b/AMDiS/src/parallel/PetscProblemStat.cc
@@ -70,16 +70,11 @@ namespace AMDiS {
     double wtime = MPI::Wtime();
 
     double vm, rss;
-    processMemUsage(vm, rss);
-    vm /= 1024.0;
-    rss /= 1024.0;
-
+    processMemUsage(vm, rss);
     MSG("STAGE 1\n");
-    MSG("My memory usage is VM = %.1f MB RSS = %.1f MB\n", vm, rss);
-
+    MSG("My memory usage is VM = %.1f MB RSS = %.1f MB\n", vm, rss);
     mpi::globalAdd(vm);
     mpi::globalAdd(rss);
-
     MSG("Overall memory usage is VM = %.1f MB RSS = %.1f MB\n", vm, rss);
 
     petscSolver->setMeshDistributor(meshDistributor);
@@ -87,28 +82,20 @@
 
 
     processMemUsage(vm, rss);
-    vm /= 1024.0;
-    rss /= 1024.0;
     MSG("STAGE 2\n");
-    MSG("My memory usage is VM = %.1f MB RSS = %.1f MB\n", vm, rss);
-
+    MSG("My memory usage is VM = %.1f MB RSS = %.1f MB\n", vm, rss);
     mpi::globalAdd(vm);
     mpi::globalAdd(rss);
-
     MSG("Overall memory usage is VM = %.1f MB RSS = %.1f MB\n", vm, rss);
 
     petscSolver->solvePetscMatrix(*solution, adaptInfo);
 
     processMemUsage(vm, rss);
-    vm /= 1024.0;
-    rss /= 1024.0;
     MSG("STAGE 3\n");
-    MSG("My memory usage is VM = %.1f MB RSS = %.1f MB\n", vm, rss);
-
+    MSG("My memory usage is VM = %.1f MB RSS = %.1f MB\n", vm, rss);
     mpi::globalAdd(vm);
     mpi::globalAdd(rss);
-
     MSG("Overall memory usage is VM = %.1f MB RSS = %.1f MB\n", vm, rss);
 
 
 
diff --git a/AMDiS/src/parallel/PetscSolverFeti.cc b/AMDiS/src/parallel/PetscSolverFeti.cc
index d0274160..423c1427 100644
--- a/AMDiS/src/parallel/PetscSolverFeti.cc
+++ b/AMDiS/src/parallel/PetscSolverFeti.cc
@@ -856,6 +856,10 @@
   {
     FUNCNAME("PetscSolverFeti::fillPetscMatrix()");
 
+//     double vm, mem;
+//     processMemUsage(vm, mem);
+//     MSG("MEM INFO 1 = %f\n", mem);
+
     nComponents = vec->getSize();
 
     // === Create all sets and indices. ===
@@ -874,45 +878,48 @@
 
     MatCreateMPIAIJ(PETSC_COMM_WORLD,
                     nRowsRankB, nRowsRankB,
                     nRowsOverallB, nRowsOverallB,
-                    100, PETSC_NULL, 100, PETSC_NULL, &mat_b_b);
+                    30, PETSC_NULL, 0, PETSC_NULL, &mat_b_b);
 
     MatCreateMPIAIJ(PETSC_COMM_WORLD,
                     nRowsRankPrimal, nRowsRankPrimal,
                     nRowsOverallPrimal, nRowsOverallPrimal,
-                    10, PETSC_NULL, 10, PETSC_NULL, &mat_primal_primal);
+                    30, PETSC_NULL, 30, PETSC_NULL, &mat_primal_primal);
 
     MatCreateMPIAIJ(PETSC_COMM_WORLD,
                     nRowsRankB, nRowsRankPrimal,
                     nRowsOverallB, nRowsOverallPrimal,
-                    100, PETSC_NULL, 100, PETSC_NULL, &mat_b_primal);
+                    30, PETSC_NULL, 30, PETSC_NULL, &mat_b_primal);
 
     MatCreateMPIAIJ(PETSC_COMM_WORLD,
                     nRowsRankPrimal, nRowsRankB,
                     nRowsOverallPrimal, nRowsOverallB,
-                    100, PETSC_NULL, 100, PETSC_NULL, &mat_primal_b);
+                    30, PETSC_NULL, 30, PETSC_NULL, &mat_primal_b);
 
 
     // === Create matrices for FETI-DP preconditioner. ===
 
     if (fetiPreconditioner != FETI_NONE)
       MatCreateSeqAIJ(PETSC_COMM_SELF,
-                      nRowsDual, nRowsDual, 100, PETSC_NULL,
+                      nRowsDual, nRowsDual, 30, PETSC_NULL,
                       &mat_duals_duals);
 
     if (fetiPreconditioner == FETI_DIRICHLET) {
       MatCreateSeqAIJ(PETSC_COMM_SELF,
-                      nRowsInterior, nRowsInterior, 100, PETSC_NULL,
+                      nRowsInterior, nRowsInterior, 30, PETSC_NULL,
                       &mat_interior_interior);
 
       MatCreateSeqAIJ(PETSC_COMM_SELF,
-                      nRowsInterior, nRowsDual, 100, PETSC_NULL,
+                      nRowsInterior, nRowsDual, 30, PETSC_NULL,
                       &mat_interior_duals);
 
       MatCreateSeqAIJ(PETSC_COMM_SELF,
-                      nRowsDual, nRowsInterior, 100, PETSC_NULL,
+                      nRowsDual, nRowsInterior, 30, PETSC_NULL,
                       &mat_duals_interior);
     }
 
+//     processMemUsage(vm, mem);
+//     MSG("MEM INFO 2 = %f\n", mem);
+
 
     // === Prepare traverse of sequentially created matrices. ===
@@ -1121,7 +1128,9 @@
         }
       }
     }
-  
+
+//     processMemUsage(vm, mem);
+//     MSG("MEM INFO 3 = %f\n", mem);
 
 
     // === Start global assembly procedure. ===
@@ -1137,6 +1146,8 @@
     MatAssemblyBegin(mat_primal_b, MAT_FINAL_ASSEMBLY);
     MatAssemblyEnd(mat_primal_b, MAT_FINAL_ASSEMBLY);
 
+//     processMemUsage(vm, mem);
+//     MSG("MEM INFO 4 = %f\n", mem);
 
 
     // === Start global assembly procedure for preconditioner matrices. ===
@@ -1157,6 +1168,9 @@
     }
 
+//     processMemUsage(vm, mem);
+//     MSG("MEM INFO 5 = %f\n", mem);
+
 
     // === Create and fill PETSc's right hand side vectors. ===
 
     VecCreate(PETSC_COMM_WORLD, &f_b);
@@ -1196,20 +1210,31 @@
     VecAssemblyBegin(f_primal);
     VecAssemblyEnd(f_primal);
 
+//     processMemUsage(vm, mem);
+//     MSG("MEM INFO 6 = %f\n", mem);
+
 
     // === Create and fill PETSc matrix for Lagrange constraints. ===
 
     createMatLagrange();
 
+//     processMemUsage(vm, mem);
+//     MSG("MEM INFO 7 = %f\n", mem);
 
 
     // === Create PETSc solver for the Schur complement on primal variables. ===
 
     createSchurPrimalKsp();
 
+//     processMemUsage(vm, mem);
+//     MSG("MEM INFO 8 = %f\n", mem);
+
 
     // === Create PETSc solver for the FETI-DP operator. ===
 
     createFetiKsp();
 
+
+//     processMemUsage(vm, mem);
+//     MSG("MEM INFO 9 = %f\n", mem);
   }
-- 
GitLab