diff --git a/AMDiS/src/ParallelDomainBase.cc b/AMDiS/src/ParallelDomainBase.cc
index eb5ee22b12f0fcd76194f95841b62fbdb4767238..cd1fceaed3a4a095db02f7ad0425b51309ccb670 100644
--- a/AMDiS/src/ParallelDomainBase.cc
+++ b/AMDiS/src/ParallelDomainBase.cc
@@ -1040,6 +1040,7 @@ namespace AMDiS {
 				     periodicBoundary.boundary.size())];
     int requestCounter = 0;
 
+
     // === The owner of the boundaries send their atomic boundary order to the ===
     // === neighbouring ranks.                                                 ===
 
@@ -1070,6 +1071,10 @@ namespace AMDiS {
 
     MPI::Request::Waitall(requestCounter, request);
 
+
+    // === The information about all neighbouring boundaries has been received. So ===
+    // === the rank tests if its own atomic boundaries are in the same order. If   ===
+    // === not, the atomic boundaries are swapped to the correct order.            ===
 
     int i = 0;
     for (RankToBoundMap::iterator rankIt = otherIntBoundary.boundary.begin();
@@ -1111,7 +1116,7 @@ namespace AMDiS {
 
     sendBuffers.clear();
 
-    // === Deal with periodic boundaries. ===
+    // === Do the same for the periodic boundaries. ===
 
     requestCounter = 0;
 
@@ -1213,53 +1218,6 @@ namespace AMDiS {
     createDofMemberInfo(partitionDOFs, rankDofs, rankAllDofs, boundaryDofs, vertexDof);
 
-#if 0
-    // TODO: MACHE RICHTIGE FUNKTION
-    if (mpiRank == 0) {
-      std::cout << "RANK OWNED DOFS: " << std::endl;
-      for (DofContainer::iterator dofit = rankDofs.begin();
-           dofit != rankDofs.end(); ++dofit) {
-        std::cout << **dofit << std::endl;
-        WorldVector<double> coords;
-        mesh->getDofIndexCoords(*dofit, feSpace, coords);
-        coords.print();
-      }
-
-      std::cout << "RANK ALL DOFS: " << std::endl;
-      for (DofContainer::iterator dofit = rankAllDofs.begin();
-           dofit != rankAllDofs.end(); ++dofit) {
-        std::cout << **dofit << std::endl;
-        WorldVector<double> coords;
-        mesh->getDofIndexCoords(*dofit, feSpace, coords);
-        coords.print();
-      }
-    }
-#endif
-
-    // === BEGIN EXP
-
-    DofIndexToBool rankAllDofIndices;
-    for (DofContainer::iterator dofIt = rankAllDofs.begin();
-         dofIt != rankAllDofs.end(); ++dofIt) {
-      rankAllDofIndices[**dofIt] = true;
-    }
-
-    for (std::map<BoundaryType, VertexVector*>::iterator it = mesh->getPeriodicAssociations().begin();
-         it != mesh->getPeriodicAssociations().end(); ++it) {
-      VertexVector *tmp = it->second;
-      DOFIteratorBase it(const_cast<DOFAdmin*>(tmp->getAdmin()), USED_DOFS);
-      for (it.reset(); !it.end(); ++it) {
-        if (!it.isDOFFree()) {
-          if (!rankAllDofIndices[(*tmp)[it.getDOFIndex()]] &&
-              rankAllDofIndices[it.getDOFIndex()])
-            // (*tmp)[it.getDOFIndex()] = -1;
-            (*tmp)[it.getDOFIndex()] = it.getDOFIndex();
-        }
-      }
-    }
-
-    // === ENDE EXP
-
     nRankDofs = rankDofs.size();
     nOverallDOFs = partitionDOFs.size();
 
@@ -1434,7 +1392,7 @@ namespace AMDiS {
 
     bool found = false;
 
-    // Iterate over all boundary dofs to find the dof, which index we have to change.
+    // Iterate over all boundary dofs to find the dof whose index we have to change.
     for (DofToRank::iterator dofIt = boundaryDofs.begin();
          dofIt != boundaryDofs.end(); ++dofIt) {
@@ -1457,22 +1415,6 @@ namespace AMDiS {
       delete [] recvBuffers[i];
     }
 
-    // ====== BEGIN TEST
-
-    std::map<int, int> dofIndexMap;
-    for (DofIndexMap::iterator dofIt = rankDofsNewLocalIndex.begin();
-         dofIt != rankDofsNewLocalIndex.end(); ++dofIt) {
-      dofIndexMap[*(dofIt->first)] = dofIt->second;
-    }
-
-    for (std::map<BoundaryType, VertexVector*>::iterator it = mesh->getPeriodicAssociations().begin();
-         it != mesh->getPeriodicAssociations().end(); ++it) {
-      it->second->changeDofIndices(dofIndexMap);
-    }
-
-
-    // ====== ENDE TEST
-
 
     // === Create now the local to global index and local to dof index mappings. ===
 
@@ -1931,12 +1873,16 @@ namespace AMDiS {
   {
     FUNCNAME("ParallelDomainBase::createPeriodicMap()");
 
+    // Clear all periodic dof mappings calculated before. They are recreated from scratch.
     periodicDof.clear();
 
     MPI::Request request[periodicBoundary.boundary.size() * 2];
     int requestCounter = 0;
     std::vector<int*> sendBuffers, recvBuffers;
 
+    // === Each rank traverses its periodic boundaries and sends the dof indices ===
+    // === to the rank "on the other side" of the periodic boundary.             ===
+
     for (RankToBoundMap::iterator it = periodicBoundary.boundary.begin();
          it != periodicBoundary.boundary.end(); ++it) {
 
@@ -1945,34 +1891,29 @@ namespace AMDiS {
 
       DofContainer dofs;
 
+      // Create dof indices on the boundary.
+
       for (std::vector<AtomicBoundary>::iterator boundIt = it->second.begin();
            boundIt != it->second.end(); ++boundIt) {
         addVertexDofs(boundIt->rankObject.el, boundIt->rankObject.ithObjAtBoundary,
                       dofs, true);
-        // std::cout << "-------: " << dofs.size() << std::endl;
-        addEdgeDofs(boundIt->rankObject.el, boundIt->rankObject.ithObjAtBoundary,
+        addEdgeDofs(boundIt->rankObject.el, boundIt->rankObject.ithObjAtBoundary,
                     dofs);
       }
 
-      int *sendbuf = new int[dofs.size()];
-      for (int i = 0; i < static_cast<int>(dofs.size()); i++) {
-        if (mpiRank == 0) {
-//        std::cout << "[" << mpiRank << "] SEND DOF "
-//                  << *(dofs[i]) << " " << mapLocalGlobalDOFs[*(dofs[i])] << std::endl;
-
-//        WorldVector<double> coords;
-//        mesh->getDofIndexCoords(dofs[i], feSpace, coords);
-//        coords.print();
-        }
-        sendbuf[i] = mapLocalGlobalDOFs[*(dofs[i])];
-      }
+      // Send the global indices to the rank on the other side.
+
+      int *sendbuf = new int[dofs.size()];
+      for (int i = 0; i < static_cast<int>(dofs.size()); i++)
+        sendbuf[i] = mapLocalGlobalDOFs[*(dofs[i])];
 
       request[requestCounter++] = mpiComm.Isend(sendbuf, dofs.size(), MPI_INT, it->first, 0);
 
       sendBuffers.push_back(sendbuf);
 
+      // Receive the same number of dofs from this rank.
+
       int *recvbuf = new int[dofs.size()];
       request[requestCounter++] = mpiComm.Irecv(recvbuf, dofs.size(), MPI_INT, it->first, 0);
 
@@ -1984,11 +1925,17 @@ namespace AMDiS {
 
     MPI::Request::Waitall(requestCounter, request);
 
+    // === The rank has received the dofs from the rank on the other side of ===
+    // === the boundary. Now it can use them to create the mapping between   ===
+    // === the periodic dofs in this rank and the corresponding periodic     ===
+    // === dofs from the other ranks.                                        ===
+
     int i = 0;
     for (RankToBoundMap::iterator it = periodicBoundary.boundary.begin();
          it != periodicBoundary.boundary.end(); ++it) {
 
       DofContainer dofs;
 
+      // Create the dofs on the boundary in reverse order.
      for (std::vector<AtomicBoundary>::iterator boundIt = it->second.begin();
           boundIt != it->second.end(); ++boundIt) {
        DofContainer tmpdofs;
        addEdgeDofs(boundIt->rankObject.el, boundIt->rankObject.ithObjAtBoundary,
                    tmpdofs);
        addVertexDofs(boundIt->rankObject.el, boundIt->rankObject.ithObjAtBoundary,
                      tmpdofs, true);
-        // std::cout << "-------: " << tmpdofs.size() << std::endl;
-        for (int j = static_cast<int>(tmpdofs.size()) - 1; j >= 0; j--) {
-          dofs.push_back(tmpdofs[j]);
-        }
+        for (int j = static_cast<int>(tmpdofs.size()) - 1; j >= 0; j--)
+          dofs.push_back(tmpdofs[j]);
      }
 
-      for (int j = 0; j < static_cast<int>(dofs.size()); j++) {
-        if (mpiRank == 1) {
-//        std::cout << "[" << mpiRank << "] RECV DOF "
-//                  << *(dofs[j]) << " " << mapLocalGlobalDOFs[*(dofs[j])] << std::endl;
-
-//        WorldVector<double> coords;
-//        mesh->getDofIndexCoords(dofs[j], feSpace, coords);
-//        coords.print();
-        }
-
-        periodicDof[mapLocalGlobalDOFs[*(dofs[j])]].insert(recvBuffers[i][j]);
-      }
+      // Add the received dofs to the mapping.
+      for (int j = 0; j < static_cast<int>(dofs.size()); j++)
+        periodicDof[mapLocalGlobalDOFs[*(dofs[j])]].insert(recvBuffers[i][j]);
 
      delete [] sendBuffers[i];
      delete [] recvBuffers[i];
 
      i++;
    }
 
-    for (PeriodicDofMap::iterator it = periodicDof.begin();
-         it != periodicDof.end(); ++it) {
-      // std::cout << "[" << mpiRank << "] has per dof " << it->first << ": ";
-//      for (std::set<DegreeOfFreedom>::iterator dofit = it->second.begin();
-//           dofit != it->second.end(); ++dofit)
-//        std::cout << *dofit << " ";
-//      std::cout << std::endl;
-
-      int localdof = 0;
-
-      for (DofMapping::iterator dofit = mapLocalGlobalDOFs.begin();
-           dofit != mapLocalGlobalDOFs.end(); ++dofit)
-        if (dofit->second == it->first) {
-          localdof = dofit->first;
-          break;
-        }
-
-//      WorldVector<double> coords;
-//      mesh->getDofIndexCoords(localdof, feSpace, coords);
-//      coords.print();
-    }
-
-    for (PeriodicDofMap::iterator it = periodicDof.begin();
-         it != periodicDof.end(); ++it) {
-      if (it->second.size() == 2) {
-        std::cout << "IN RANK " << mpiRank << " and dof " << it->first
-                  << ": " << *(it->second.begin()) << " " << *(++(it->second.begin())) << std::endl;
-      }
-    }
+    // TODO: Search for 2 dof mappings.
-
    switch (mpiRank) {
    case 0:
      periodicDof[0].insert(3136);
@@ -2425,7 +2332,7 @@ namespace AMDiS {
  }
 
 
-  void ParallelDomainBase::writeMapLocalGlobal(int rank)
+  void ParallelDomainBase::printMapLocalGlobal(int rank)
  {
    if (rank == -1 || mpiRank == rank) {
      std::cout << "====== DOF MAP LOCAL -> GLOBAL ====== " << std::endl;
@@ -2462,7 +2369,7 @@ namespace AMDiS {
  }
 
 
-  void ParallelDomainBase::writeMapPeriodic(int rank)
+  void ParallelDomainBase::printMapPeriodic(int rank)
  {
    if (rank == -1 || mpiRank == rank) {
      std::cout << "====== DOF MAP PERIODIC ====== " << std::endl;
@@ -2486,7 +2393,33 @@ namespace AMDiS {
        coords.print();
      }
    }
+  }
+
+
+  void ParallelDomainBase::printRankDofs(int rank, DofContainer& rankDofs,
+                                         DofContainer& rankAllDofs)
+  {
+    if (rank == -1 || mpiRank == rank) {
+      std::cout << "====== RANK DOF INFORMATION ====== " << std::endl;
+
+      std::cout << "  RANK OWNED DOFS: " << std::endl;
+      for (DofContainer::iterator dofit = rankDofs.begin();
+           dofit != rankDofs.end(); ++dofit) {
+        std::cout << "    " << **dofit << std::endl;
+        WorldVector<double> coords;
+        mesh->getDofIndexCoords(*dofit, feSpace, coords);
+        coords.print();
+      }
+
+      std::cout << "  RANK ALL DOFS: " << std::endl;
+      for (DofContainer::iterator dofit = rankAllDofs.begin();
+           dofit != rankAllDofs.end(); ++dofit) {
+        std::cout << "    " << **dofit << std::endl;
+        WorldVector<double> coords;
+        mesh->getDofIndexCoords(*dofit, feSpace, coords);
+        coords.print();
+      }
+    }
  }
diff --git a/AMDiS/src/ParallelDomainBase.h b/AMDiS/src/ParallelDomainBase.h
index 71c073368bb895b3222af3c4c6e5f9bae1811007..84e0c19874ab270da4e17786a4878bc79f064716 100644
--- a/AMDiS/src/ParallelDomainBase.h
+++ b/AMDiS/src/ParallelDomainBase.h
@@ -214,6 +214,11 @@ namespace AMDiS {
 
     void updateLocalGlobalNumbering(int& nRankDOFs, int& nOverallDOFs);
 
+    /** \brief
+     * Creates, for all dofs in the rank's partition that are on a periodic boundary,
+     * the mapping from the dof index to the other periodic dof indices. This
+     * information is stored in \ref periodicDof.
+     */
     void createPeriodicMap();
 
     /** \brief
@@ -311,7 +316,7 @@ namespace AMDiS {
      *
      * \param rank  If specified, only the information from the given rank is printed.
      */
-    void writeMapLocalGlobal(int rank = -1);
+    void printMapLocalGlobal(int rank = -1);
 
     /** \brief
      * This function is used for debugging only. It prints all information about
@@ -319,7 +324,18 @@ namespace AMDiS {
      * the periodic mapping of dofs, that are on periodic boundaries.
      *
      * \param rank  If specified, only the information from the given rank is printed.
      */
-    void writeMapPeriodic(int rank = -1);
+    void printMapPeriodic(int rank = -1);
+
+    /** \brief
+     * This function is used for debugging only. It prints information about the dofs
+     * in the rank's partition.
+     *
+     * \param rank         If specified, only the information from the given
+     *                     rank is printed.
+     * \param rankDofs     List of all dofs in the rank's partition that are owned by the rank.
+     * \param rankAllDofs  List of all dofs in the rank's partition.
+     */
+    void printRankDofs(int rank, DofContainer& rankDofs, DofContainer& rankAllDofs);
 
     /** \brief
      * This functions create a Paraview file with the macro mesh where the elements
@@ -547,6 +563,11 @@ namespace AMDiS {
      */
     DofToBool vertexDof;
 
+    /** \brief
+     * If periodic boundaries are used, this map stores, for each dof in the rank's
+     * partition that is on a periodic boundary, the corresponding periodic dofs.
+     * The mapping is defined on global dof indices.
+     */
     PeriodicDofMap periodicDof;
 
     /// Is the index of the first row of the linear system, which is owned by the rank.
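
The core of createPeriodicMap() above is a symmetric Isend/Irecv exchange: both sides of a periodic boundary enumerate the same dofs (the receiving side rebuilds them in reverse order so that positions match), send their global indices, and insert the received indices into periodicDof. The following is a minimal, self-contained sketch of that pattern, not the actual AMDiS implementation; the function name exchangePeriodicDofs and the flattened per-rank index buffers are assumptions for illustration, while the MPI C++ binding calls mirror those used in the patch.

#include <map>
#include <set>
#include <vector>
#include <mpi.h>

typedef int DegreeOfFreedom;
typedef std::map<DegreeOfFreedom, std::set<DegreeOfFreedom> > PeriodicDofMap;

// Exchange the global indices of periodic boundary dofs with all neighbouring
// ranks and collect the results in periodicDof. 'boundaryDofIndices' maps a
// neighbouring rank to the global dof indices on the shared periodic boundary,
// already brought into the order that matches the other side.
void exchangePeriodicDofs(MPI::Intracomm &mpiComm,
                          std::map<int, std::vector<int> > &boundaryDofIndices,
                          PeriodicDofMap &periodicDof)
{
  std::vector<MPI::Request> requests;
  std::map<int, std::vector<int> > recvBuffers;

  for (std::map<int, std::vector<int> >::iterator it = boundaryDofIndices.begin();
       it != boundaryDofIndices.end(); ++it) {
    std::vector<int> &sendBuf = it->second;
    if (sendBuf.empty())
      continue;

    // Both sides of a periodic boundary hold the same number of dofs, so the
    // receive buffer can be sized exactly like the send buffer.
    recvBuffers[it->first].resize(sendBuf.size());

    requests.push_back(mpiComm.Isend(&(sendBuf[0]), sendBuf.size(),
                                     MPI_INT, it->first, 0));
    requests.push_back(mpiComm.Irecv(&(recvBuffers[it->first][0]), sendBuf.size(),
                                     MPI_INT, it->first, 0));
  }

  if (!requests.empty())
    MPI::Request::Waitall(requests.size(), &(requests[0]));

  // The j-th received index is the periodic counterpart of the j-th sent dof.
  for (std::map<int, std::vector<int> >::iterator it = boundaryDofIndices.begin();
       it != boundaryDofIndices.end(); ++it)
    for (unsigned int j = 0; j < it->second.size(); j++)
      periodicDof[it->second[j]].insert(recvBuffers[it->first][j]);
}

Because the pairing is purely positional, the whole correctness of the map rests on both ranks agreeing on the traversal order of the boundary dofs, which is exactly why the patch first has to bring the atomic boundaries of neighbouring ranks into a consistent order.
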
diff --git a/AMDiS/src/ParallelDomainVec.cc b/AMDiS/src/ParallelDomainVec.cc
index c9e661b9d38e17638717d38981ac9ac303ddc02a..1cd925470b4d902b58ac5978245ab3c8d424d54d 100644
--- a/AMDiS/src/ParallelDomainVec.cc
+++ b/AMDiS/src/ParallelDomainVec.cc
@@ -68,15 +68,24 @@ namespace AMDiS {
        probVec->getSolution()->getDOFVector(i)->setRankDofs(isRankDof);
     }
 
+    // === Remove periodic boundary conditions from the sequential problem definition. ===
+
+    // Remove periodic boundaries from the boundary managers of all matrices and vectors.
     for (int i = 0; i < nComponents; i++) {
-      for (int j = 0; j < nComponents; j++)
-        if (probVec->getSystemMatrix(i, j) && probVec->getSystemMatrix(i, j)->getBoundaryManager())
-          removeBoundaryCondition(const_cast<BoundaryIndexMap&>(probVec->getSystemMatrix(i, j)->getBoundaryManager()->getBoundaryConditionMap()));
+      for (int j = 0; j < nComponents; j++) {
+        DOFMatrix* mat = probVec->getSystemMatrix(i, j);
+        if (mat && mat->getBoundaryManager())
+          removeBoundaryCondition(const_cast<BoundaryIndexMap&>(mat->getBoundaryManager()->getBoundaryConditionMap()));
+      }
 
       if (probVec->getSolution()->getDOFVector(i)->getBoundaryManager())
        removeBoundaryCondition(const_cast<BoundaryIndexMap&>(probVec->getSolution()->getDOFVector(i)->getBoundaryManager()->getBoundaryConditionMap()));
+
+      if (probVec->getRHS()->getDOFVector(i)->getBoundaryManager())
+        removeBoundaryCondition(const_cast<BoundaryIndexMap&>(probVec->getRHS()->getDOFVector(i)->getBoundaryManager()->getBoundaryConditionMap()));
     }
 
+    // Remove periodic boundaries from the mesh elements.
     TraverseStack stack;
     ElInfo *elInfo = stack.traverseFirst(mesh, -1, Mesh::CALL_EVERY_EL_PREORDER);
     while (elInfo) {
diff --git a/AMDiS/src/ParallelDomainVec.h b/AMDiS/src/ParallelDomainVec.h
index 40b4b603f25c389ee7292e432929e5f9b25428da..f1d42e622a53372d9f5966d3e892328a59a340af 100644
--- a/AMDiS/src/ParallelDomainVec.h
+++ b/AMDiS/src/ParallelDomainVec.h
@@ -63,6 +63,7 @@ namespace AMDiS {
     /// Starts the solution of the linear system using Petsc.
     void solve();
 
+    /// Removes all periodic boundaries from a given boundary map.
     void removeBoundaryCondition(BoundaryIndexMap& boundaryMap);
 
   protected:
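
Since removeBoundaryCondition() erases entries from a map it is iterating over, the erase-while-iterating pattern matters. The following sketch shows one safe way to write it under stated assumptions: it assumes BoundaryIndexMap maps boundary types to BoundaryCondition pointers and that periodicity can be queried from the condition object; the isPeriodic() method and the stub types here are hypothetical stand-ins, not the real AMDiS API, which may use a different criterion (e.g. the sign of the boundary type).

#include <map>

typedef int BoundaryType;

// Hypothetical stand-in for the AMDiS boundary condition class.
class BoundaryCondition
{
public:
  virtual ~BoundaryCondition() {}
  // Assumed query for periodicity; the real criterion may differ.
  virtual bool isPeriodic() const { return false; }
};

typedef std::map<BoundaryType, BoundaryCondition*> BoundaryIndexMap;

// Remove all periodic boundary conditions from the given map.
void removeBoundaryCondition(BoundaryIndexMap& boundaryMap)
{
  BoundaryIndexMap::iterator it = boundaryMap.begin();
  while (it != boundaryMap.end()) {
    if (it->second && it->second->isPeriodic())
      boundaryMap.erase(it++);  // post-increment: erase invalidates only 'it'
    else
      ++it;
  }
}

The post-increment inside erase() is the classic pre-C++11 idiom: the iterator is advanced before the erased node is destroyed, so iteration continues safely over the remaining entries.
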