From c2460445c0e487c70aa5a06526a7435b6953c258 Mon Sep 17 00:00:00 2001
From: Thomas Witkowski <thomas.witkowski@gmx.de>
Date: Tue, 20 Jul 2010 06:46:31 +0000
Subject: [PATCH] Fixed parallel Dirichlet boundary condition problem.

---
 AMDiS/src/DOFMatrix.cc                   |  9 ++-------
 AMDiS/src/parallel/GlobalMatrixSolver.cc | 14 +++++++++++++-
 AMDiS/src/parallel/MeshDistributor.cc    |  2 +-
 3 files changed, 16 insertions(+), 9 deletions(-)

diff --git a/AMDiS/src/DOFMatrix.cc b/AMDiS/src/DOFMatrix.cc
index 4533d159..519aa4bb 100644
--- a/AMDiS/src/DOFMatrix.cc
+++ b/AMDiS/src/DOFMatrix.cc
@@ -210,8 +210,8 @@ namespace AMDiS {
       if (condition && condition->isDirichlet()) {
 	if (condition->applyBoundaryCondition()) {
 #ifdef HAVE_PARALLEL_DOMAIN_AMDIS
- 	  if ((*rankDofs)[rowIndices[i]]) 
- 	    applyDBCs.insert(static_cast<int>(row));
+	  if ((*rankDofs)[rowIndices[i]]) 
+	    applyDBCs.insert(static_cast<int>(row));
 #else
 	  applyDBCs.insert(static_cast<int>(row));
 #endif
@@ -219,11 +219,6 @@ namespace AMDiS {
       } else {
 	for (int j = 0; j < nCol; j++) {
 	  DegreeOfFreedom col = colIndices[j];
-
-// 	  if (MPI::COMM_WORLD.Get_rank() == 0  && row <= 10 && col <= 10) {
-// 	    MSG("%d/%d entry: %e\n", row, col, elMat[i][j]);
-// 	  }
-
 	  ins[row][col] += elMat[i][j];
 	}
       }
diff --git a/AMDiS/src/parallel/GlobalMatrixSolver.cc b/AMDiS/src/parallel/GlobalMatrixSolver.cc
index 66336ba9..21b3953a 100644
--- a/AMDiS/src/parallel/GlobalMatrixSolver.cc
+++ b/AMDiS/src/parallel/GlobalMatrixSolver.cc
@@ -247,6 +247,15 @@ namespace AMDiS {
     // owner of the row DOFs.
     std::map<int, MatrixNnzEntry> sendMatrixEntry;
 
+
+    // First, create a MatrixNnzEntry object with zero entries for every rank we send data to.
+    typedef std::map<int, DofContainer> RankToDofContainer;
+    RankToDofContainer& recvDofs = meshDistributor->getRecvDofs();
+    for (RankToDofContainer::iterator it = recvDofs.begin();
+	 it != recvDofs.end(); ++it)
+      sendMatrixEntry[it->first].resize(0);
+
+
     for (int i = 0; i < nComponents; i++) {
       for (int j = 0; j < nComponents; j++) {
  	if ((*mat)[i][j]) {
@@ -347,6 +356,7 @@ namespace AMDiS {
     stdMpi.recv(meshDistributor->getSendDofs());
     stdMpi.startCommunication<int>(MPI_INT);
 
+
     // === Evaluate the nnz structure this rank got from other ranks and add it to ===
     // === the PETSc nnz data structure.                                           ===
 
@@ -412,10 +422,12 @@ namespace AMDiS {
       lastMeshNnz = meshDistributor->getLastMeshChangeIndex();
     }
 
+
     // === Create PETSc matrix with the computed nnz data structure. ===
 
     MatCreateMPIAIJ(PETSC_COMM_WORLD, nRankRows, nRankRows, nOverallRows, nOverallRows,
-  		    0, d_nnz, 0, o_nnz, &petscMatrix);
+		    //		    0, PETSC_NULL, 0, PETSC_NULL, &petscMatrix);
+		    0, d_nnz, 0, o_nnz, &petscMatrix);
     
 #if (DEBUG != 0)
     INFO(info, 8)("Fill petsc matrix 1 needed %.5f seconds\n", TIME_USED(first, clock()));
diff --git a/AMDiS/src/parallel/MeshDistributor.cc b/AMDiS/src/parallel/MeshDistributor.cc
index 40c26d09..59e5c49c 100644
--- a/AMDiS/src/parallel/MeshDistributor.cc
+++ b/AMDiS/src/parallel/MeshDistributor.cc
@@ -432,7 +432,7 @@ namespace AMDiS {
 	for (int k = 0; k < nComponents; k++) {
 	  DOFMatrix* mat = probStat[i]->getSystemMatrix(j, k);
 	  if (mat && mat->getBoundaryManager())
-	    removePeriodicBoundaryConditions(const_cast<BoundaryIndexMap&>(mat->getBoundaryManager()->getBoundaryConditionMap()));	  
+	    removePeriodicBoundaryConditions(const_cast<BoundaryIndexMap&>(mat->getBoundaryManager()->getBoundaryConditionMap()));
 	}
 	
 	if (probStat[i]->getSolution()->getDOFVector(j)->getBoundaryManager())
-- 
GitLab