Commit 988aefe0 authored by Thomas Witkowski

Fixed FETI-DP problem related to last changes in MeshDistributor

parent 46cf564b
@@ -166,11 +166,26 @@ namespace AMDiS {
return feSpaces;
}
/// Returns the number of DOFs in rank's domain for a given FE space.
inline int getNumberRankDofs(const FiniteElemSpace *feSpace)
/** \brief
* Returns the number of DOFs in rank's domain for a given FE space.
*
* \param[in] feSpace If the FE space is defined, the function returns
* the number of DOFs for this FE space. If this
* parameter is not specified, the function assumes
* that there is only one FE space and returns the
* number of DOFs for this one.
*/
inline int getNumberRankDofs(const FiniteElemSpace *feSpace = NULL)
{
FUNCNAME("MeshDistributor::getNumberRankDofs()");
if (feSpace == NULL) {
TEST_EXIT_DBG(dofFeData.size() == 1)
("More than one FE space defined!\n");
return dofFeData.begin()->second.nRankDofs;
}
TEST_EXIT_DBG(dofFeData.count(feSpace))("Should not happen!\n");
return dofFeData[feSpace].nRankDofs;
@@ -189,7 +204,7 @@ namespace AMDiS {
return result;
}
/// Returns the first global DOF index of an FE space, owned by rank.
inline int getStartDofs(const FiniteElemSpace *feSpace)
{
@@ -215,11 +230,26 @@ namespace AMDiS {
return result;
}
/// Returns the global number of DOFs for a given FE space.
inline int getNumberOverallDofs(const FiniteElemSpace *feSpace)
/** \brief
* Returns the global number of DOFs for a given FE space.
*
* \param[in] feSpace If the FE space is defined, the function returns
* the number of DOFs for this FE space. If this
* parameter is not specified, the function assumes
* that there is only one FE space and returns the
* number of DOFs for this one.
*/
inline int getNumberOverallDofs(const FiniteElemSpace *feSpace = NULL)
{
FUNCNAME("MeshDistributor::getNumberOverallDofs()");
if (feSpace == NULL) {
TEST_EXIT_DBG(dofFeData.size() == 1)
("More than one FE space defined!\n");
return dofFeData.begin()->second.nOverallDofs;
}
TEST_EXIT_DBG(dofFeData.count(feSpace))("Should not happen!\n");
return dofFeData[feSpace].nOverallDofs;
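A minimal usage sketch for the new default arguments (the MeshDistributor calls are the ones from the hunks above; the surrounding variable names are purely illustrative):

// With exactly one FE space registered, the FE space argument may be omitted.
int nRankDofs    = meshDistributor->getNumberRankDofs();
int nOverallDofs = meshDistributor->getNumberOverallDofs();

// With several FE spaces the argument is still required; in debug builds the
// TEST_EXIT_DBG checks above abort if it is omitted.
int nRankVelocityDofs = meshDistributor->getNumberRankDofs(velocityFeSpace);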
@@ -220,7 +220,7 @@ namespace AMDiS {
createDuals();
createLagrange();
createIndexB();
}
@@ -459,17 +459,18 @@ namespace AMDiS {
StdMpi<vector<int> > stdMpi(meshDistributor->getMpiComm());
for (DofComm::Iterator it(meshDistributor->getSendDofs(), feSpace);
!it.end(); it.nextRank())
!it.end(); it.nextRank()) {
for (; !it.endDofIter(); it.nextDof())
if (globalPrimalIndex.count(it.getDofIndex()) == 0) {
TEST_EXIT_DBG(dofFirstLagrange.count(it.getDofIndex()))
("Should not happen!\n");
stdMpi.getSendData(it.getRank()).push_back(dofFirstLagrange[it.getDofIndex()]);
}
}
stdMpi.updateSendDataSize();
for (DofComm::Iterator it(meshDistributor->getSendDofs(), feSpace);
for (DofComm::Iterator it(meshDistributor->getRecvDofs(), feSpace);
!it.end(); it.nextRank()) {
bool recvData = false;
for (; !it.endDofIter(); it.nextDof())
@@ -484,7 +485,7 @@ namespace AMDiS {
stdMpi.startCommunication();
for (DofComm::Iterator it(meshDistributor->getSendDofs(), feSpace);
for (DofComm::Iterator it(meshDistributor->getRecvDofs(), feSpace);
!it.end(); it.nextRank()) {
int counter = 0;
for (; !it.endDofIter(); it.nextDof())
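The essence of the fix above is the pairing of communication partners: data that is pushed while iterating getSendDofs() must be registered and unpacked while iterating getRecvDofs(). A condensed sketch of that pattern, using only calls visible in the diff (the receive registration and unpacking bodies are elided here just as in the hunks):

StdMpi<vector<int> > stdMpi(meshDistributor->getMpiComm());

// Owning ranks fill their send buffers with the first Lagrange index of
// every shared, non-primal DOF.
for (DofComm::Iterator it(meshDistributor->getSendDofs(), feSpace);
     !it.end(); it.nextRank())
  for (; !it.endDofIter(); it.nextDof())
    if (globalPrimalIndex.count(it.getDofIndex()) == 0)
      stdMpi.getSendData(it.getRank()).push_back(dofFirstLagrange[it.getDofIndex()]);
stdMpi.updateSendDataSize();

// The matching loops must run over the *receive* DOFs; with getSendDofs()
// here (the old code), ranks inspect their own send lists and the Lagrange
// indices of remotely owned DOFs are never filled in.
for (DofComm::Iterator it(meshDistributor->getRecvDofs(), feSpace);
     !it.end(); it.nextRank()) {
  // ... register the expected message from it.getRank() ...
}

stdMpi.startCommunication();

for (DofComm::Iterator it(meshDistributor->getRecvDofs(), feSpace);
     !it.end(); it.nextRank()) {
  // ... copy the received indices into dofFirstLagrange ...
}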
@@ -980,7 +981,6 @@ namespace AMDiS {
updateDofData();
// === Create matrices for the FETI-DP method. ===
int nRowsRankB = nRankB * nComponents;
@@ -33,6 +33,14 @@ namespace AMDiS {
VecCreateMPI(PETSC_COMM_WORLD, nRankRows, nOverallRows, &petscSolVec);
VecCreateMPI(PETSC_COMM_WORLD, nRankRows, nOverallRows, &petscTmpVec);
int testddd = 1;
Parameters::get("block size", testddd);
if (testddd > 1) {
VecSetBlockSize(petscSolVec, testddd);
VecSetBlockSize(petscTmpVec, testddd);
}
int recvAllValues = 0;
int sendValue =
static_cast<int>(meshDistributor->getLastMeshChangeIndex() != lastMeshNnz);
@@ -65,6 +73,11 @@ namespace AMDiS {
nOverallRows, nOverallRows,
0, d_nnz, 0, o_nnz, &petscMatrix);
if (testddd > 1) {
MatSetBlockSize(petscMatrix, testddd);
MSG("MAT SET BLOCK SIZE: %d\n", testddd);
}
#if (DEBUG != 0)
MSG("Fill petsc matrix 1 needed %.5f seconds\n", MPI::Wtime() - wtime);
#endif
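For reference, the block-size handling added above follows standard PETSc usage: VecSetBlockSize() and MatSetBlockSize() declare that consecutive rows belong together in blocks of the given size, which blocked insertion routines and some preconditioners can exploit. A minimal sketch of the same pattern (assuming petscSolVec, petscTmpVec and petscMatrix were created as in the hunks above; "block size" is the parameter read via Parameters::get in the diff):

int blockSize = 1;
Parameters::get("block size", blockSize);
if (blockSize > 1) {
  // Declare the block structure right after creation, as in the diff above.
  VecSetBlockSize(petscSolVec, blockSize);
  VecSetBlockSize(petscTmpVec, blockSize);
  MatSetBlockSize(petscMatrix, blockSize);
}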
@@ -124,6 +137,12 @@ namespace AMDiS {
VecCreateMPI(PETSC_COMM_WORLD, nRankRows, nOverallRows, &petscRhsVec);
int testddd = 1;
Parameters::get("block size", testddd);
if (testddd > 1)
VecSetBlockSize(petscRhsVec, testddd);
// === Transfer values from DOF vector to the PETSc vector. ===
for (int i = 0; i < vec->getSize(); i++)
setDofVector(petscRhsVec, vec->getDOFVector(i), i);
@@ -675,8 +694,12 @@ namespace AMDiS {
if (meshDistributor->getIsRankDof(feSpaces[i], **it)) {
int globalIndex =
meshDistributor->mapDofToGlobal(feSpaces[i], **it);
int globalMatIndex =
globalIndex - meshDistributor->getStartDofs(feSpaces[i]) + offset;
int globalMatIndex =
globalIndex - meshDistributor->getStartDofs(feSpaces[i]) + offset;
// int globalMatIndex =
// globalIndex * feSpaces.size() + i;
dofToMatIndex.add(i, globalIndex, globalMatIndex);
}
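A small worked example of the index mapping above, with invented numbers purely for illustration: if getStartDofs(feSpaces[i]) returns 100, the offset of this component block in the global matrix is 250, and the rank owns a DOF with globalIndex 130, then globalMatIndex = 130 - 100 + 250 = 280. Each component thus occupies a contiguous index range per rank, in contrast to the interleaved numbering globalIndex * feSpaces.size() + i that is left commented out above.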