From 23da31142573ba00983f3bb5ea3e1823a5eb02a5 Mon Sep 17 00:00:00 2001
From: Thomas Witkowski <thomas.witkowski@gmx.de>
Date: Mon, 5 Oct 2009 13:04:39 +0000
Subject: [PATCH] Bugfix in parallel domain decomposition with higher order
 elements.

---
 AMDiS/src/ParallelDomainBase.cc | 36 +++++++++++++++++++++------------
 AMDiS/src/ParallelDomainBase.h  |  3 ++-
 2 files changed, 25 insertions(+), 14 deletions(-)

diff --git a/AMDiS/src/ParallelDomainBase.cc b/AMDiS/src/ParallelDomainBase.cc
index 7a5adc29..4733416c 100644
--- a/AMDiS/src/ParallelDomainBase.cc
+++ b/AMDiS/src/ParallelDomainBase.cc
@@ -241,7 +241,7 @@ namespace AMDiS {
   }
 
 
-  void ParallelDomainBase::setDofVector(DOFVector<double>* vec, 
+  void ParallelDomainBase::setDofVector(Vec& petscVec, DOFVector<double>* vec, 
 					int dispMult, int dispAdd)
   {
     DOFVector<double>::Iterator dofIt(vec, USED_DOFS);
@@ -249,7 +249,7 @@ namespace AMDiS {
       int index = mapLocalGlobalDOFs[dofIt.getDOFIndex()] * dispMult + dispAdd;
       double value = *dofIt;
 
-      VecSetValues(petscRhsVec, 1, &index, &value, ADD_VALUES);
+      VecSetValues(petscVec, 1, &index, &value, ADD_VALUES);
     }    
   }
 
@@ -277,7 +277,7 @@ namespace AMDiS {
     MatAssemblyBegin(petscMatrix, MAT_FINAL_ASSEMBLY);
     MatAssemblyEnd(petscMatrix, MAT_FINAL_ASSEMBLY);
 
-    setDofVector(vec);
+    setDofVector(petscRhsVec, vec);
 
     VecAssemblyBegin(petscRhsVec);
     VecAssemblyEnd(petscRhsVec);
@@ -512,7 +512,7 @@ namespace AMDiS {
     MatAssemblyEnd(petscMatrix, MAT_FINAL_ASSEMBLY);
 
     for (int i = 0; i < nComponents; i++)
-      setDofVector(vec->getDOFVector(i), nComponents, i);
+      setDofVector(petscRhsVec, vec->getDOFVector(i), nComponents, i);
 
     VecAssemblyBegin(petscRhsVec);
     VecAssemblyEnd(petscRhsVec);
@@ -602,6 +602,15 @@ namespace AMDiS {
   {
     FUNCNAME("ParallelDomainBase::solvePetscMatrix()");
 
+#if 0
+    // Set old solution to be initial guess for petsc solver.
+    for (int i = 0; i < nComponents; i++)
+      setDofVector(petscSolVec, vec->getDOFVector(i), nComponents, i);
+
+    VecAssemblyBegin(petscSolVec);
+    VecAssemblyEnd(petscSolVec);
+#endif
+
     KSP solver;
 
     KSPCreate(PETSC_COMM_WORLD, &solver);
@@ -612,6 +621,8 @@ namespace AMDiS {
     KSPSetType(solver, KSPBCGS);
     KSPMonitorSet(solver, myKSPMonitor, PETSC_NULL, 0);
     KSPSetFromOptions(solver);
+    // Do not delete the solution vector, use it for the initial guess.
+    //    KSPSetInitialGuessNonzero(solver, PETSC_TRUE);
 
     KSPSolve(solver, petscRhsVec, petscSolVec);
 
@@ -1188,10 +1199,11 @@ namespace AMDiS {
 
     ElementDofIterator elDofIt(feSpace);
     DofSet rankDofSet;
-
+    
     // The vertexDof list must be recreated from the scratch. Otherwise, it is possible
     // that it maps dofs, that were removed (this is also possible, if the mesh was
     // refined, e.g., center dofs of an element are not dofs of the children).
+    DofToBool oldVertexDof = vertexDof;
     vertexDof.clear();
 
     TraverseStack stack;
@@ -1237,7 +1249,7 @@ namespace AMDiS {
 
 	for (DofContainer::iterator iit = oldSendDofs[it->first].begin(); 
 	     iit != oldSendDofs[it->first].end(); ++iit)
-	  if (vertexDof[*iit])
+	  if (oldVertexDof[*iit])
 	    dofsToSend.push_back(*iit);
 
 	DofContainer dofs;	
@@ -1265,7 +1277,7 @@ namespace AMDiS {
 
 	for (DofContainer::iterator iit = oldRecvDofs[it->first].begin(); 
 	     iit != oldRecvDofs[it->first].end(); ++iit)
-	  if (vertexDof[*iit]) {
+	  if (oldVertexDof[*iit]) {
 	    dofsToRecv.push_back(*iit);
 
 	    DofContainer::iterator eraseIt = find(rankDofs.begin(), rankDofs.end(), *iit);
@@ -1334,7 +1346,6 @@ namespace AMDiS {
       i++;
     }
 
-
     // === Send new DOF indices. ===
 
     std::vector<int*> sendBuffers(sendDofs.size());
@@ -1350,12 +1361,11 @@ namespace AMDiS {
       sendBuffers[i] = new int[nSendDofs];
       int c = 0;
       for (DofContainer::iterator dofIt = sendIt->second.begin();
-	   dofIt != sendIt->second.end(); ++dofIt) {
+	   dofIt != sendIt->second.end(); ++dofIt)
 	sendBuffers[i][c++] = rankDofsNewGlobalIndex[*dofIt];
-      }
 
       request[requestCounter++] = 
-	mpiComm.Isend(sendBuffers[i], nSendDofs, MPI_INT, sendIt->first, 0);
+ 	mpiComm.Isend(sendBuffers[i], nSendDofs, MPI_INT, sendIt->first, 0);
     }
 
     i = 0;
@@ -1363,9 +1373,9 @@ namespace AMDiS {
 	 recvIt != recvDofs.end(); ++recvIt, i++) {
       int nRecvDofs = recvIt->second.size();
       recvBuffers[i] = new int[nRecvDofs];
-	
+
       request[requestCounter++] = 
-	mpiComm.Irecv(recvBuffers[i], nRecvDofs, MPI_INT, recvIt->first, 0);
+ 	mpiComm.Irecv(recvBuffers[i], nRecvDofs, MPI_INT, recvIt->first, 0);
     }
 
     MPI::Request::Waitall(requestCounter, request);
diff --git a/AMDiS/src/ParallelDomainBase.h b/AMDiS/src/ParallelDomainBase.h
index 76916221..6e9ff028 100644
--- a/AMDiS/src/ParallelDomainBase.h
+++ b/AMDiS/src/ParallelDomainBase.h
@@ -254,7 +254,8 @@ namespace AMDiS {
     void setDofMatrix(DOFMatrix* mat, int dispMult = 1, 
 		      int dispAddRow = 0, int dispAddCol = 0);
 
-    void setDofVector(DOFVector<double>* vec, int disMult = 1, int dispAdd = 0);
+    void setDofVector(Vec& petscVec, DOFVector<double>* vec, 
+		      int disMult = 1, int dispAdd = 0);
 
     void DbgCreateElementMap(ElementIdxToDofs &elMap);
     
-- 
GitLab