From a1d4588901a8e653d5b157b55fec4020260523dc Mon Sep 17 00:00:00 2001
From: Thomas Witkowski <thomas.witkowski@gmx.de>
Date: Thu, 11 Oct 2012 12:07:14 +0000
Subject: [PATCH] Fix augmented coarse space, add initial support for
 symmetric FETI-DP problems.

---
 AMDiS/src/parallel/PetscSolver.cc             |  1 +
 AMDiS/src/parallel/PetscSolver.h              |  7 ++
 AMDiS/src/parallel/PetscSolverFeti.cc         | 78 ++++++++-----------
 .../src/parallel/PetscSolverFetiOperators.cc  | 16 +---
 AMDiS/src/parallel/PetscSolverFetiStructs.h   |  2 -
 AMDiS/src/parallel/PetscSolverGlobalMatrix.cc | 13 +++-
 6 files changed, 53 insertions(+), 64 deletions(-)

diff --git a/AMDiS/src/parallel/PetscSolver.cc b/AMDiS/src/parallel/PetscSolver.cc
index a554c166..918685ed 100644
--- a/AMDiS/src/parallel/PetscSolver.cc
+++ b/AMDiS/src/parallel/PetscSolver.cc
@@ -24,6 +24,7 @@ namespace AMDiS {
     : ParallelCoarseSpaceMatVec(),      
       kspPrefix(""),
       removeRhsNullspace(false),
+      isSymmetric(false),
       hasConstantNullspace(false)
   {
     string kspStr = "";
diff --git a/AMDiS/src/parallel/PetscSolver.h b/AMDiS/src/parallel/PetscSolver.h
index 759f076e..2dd50a34 100644
--- a/AMDiS/src/parallel/PetscSolver.h
+++ b/AMDiS/src/parallel/PetscSolver.h
@@ -110,6 +110,11 @@ namespace AMDiS {
       removeRhsNullspace = b;
     }
 
+    void setSymmetric(bool b)
+    {
+      isSymmetric = b;
+    }
+
     /// Adds a new vector to the basis of the operator's nullspace.
     void addNullspaceVector(SystemVector *vec)
     {
@@ -165,6 +170,8 @@ namespace AMDiS {
 
     bool hasConstantNullspace;
 
+    bool isSymmetric;
+
     vector<int> constNullspaceComponent;
   };
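Note: the isSymmetric flag follows the same pattern as removeRhsNullspace
above: a member defaulted to false in the constructor, a trivial setter, and
an optional parameter-file override. A minimal sketch of the plumbing,
assuming the AMDiS header path (createSubdomainSolver is illustrative, not
part of the patch):

#include "parallel/PetscSolverGlobalMatrix.h"

// The FETI-DP solver forwards its symmetry flag to the subdomain solver
// right after construction, before any PETSc objects are created, so the
// later PC setup (Cholesky vs. LU) sees the final value.
void createSubdomainSolver(bool isSymmetric, AMDiS::PetscSolver *&subdomain)
{
  subdomain = new AMDiS::PetscSolverGlobalMatrix();
  subdomain->setSymmetric(isSymmetric);
}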
 
diff --git a/AMDiS/src/parallel/PetscSolverFeti.cc b/AMDiS/src/parallel/PetscSolverFeti.cc
index 93b6631e..3dd47123 100644
--- a/AMDiS/src/parallel/PetscSolverFeti.cc
+++ b/AMDiS/src/parallel/PetscSolverFeti.cc
@@ -72,6 +72,8 @@ namespace AMDiS {
     Parameters::get("parallel->print timings", printTimings);
 
     Parameters::get("parallel->feti->augmented lagrange", augmentedLagrange);
+
+    Parameters::get("parallel->feti->symmetric", isSymmetric);
   }
 
 
@@ -98,6 +100,7 @@ namespace AMDiS {
 			   
     if (subdomain == NULL) {
       subdomain = new PetscSolverGlobalMatrix();
+      subdomain->setSymmetric(isSymmetric);
 
       if (meshLevel == 0) {
 	subdomain->setMeshDistributor(meshDistributor, 
@@ -699,6 +702,7 @@ namespace AMDiS {
     }
   }
 
+
   bool PetscSolverFeti::testWirebasketEdge(BoundaryObject &edge, const FiniteElemSpace *feSpace)
   {
     Element *el = edge.el;
@@ -716,6 +720,7 @@ namespace AMDiS {
     return (counter == 2);
   }
 
+
   void PetscSolverFeti::createMatAugmentedLagrange(vector<const FiniteElemSpace*> &feSpaces)
   {
     FUNCNAME("PetscSolverFeti::createMatAugmentedLagrange()");
@@ -728,18 +733,33 @@ namespace AMDiS {
     nOverallEdges = 0;
     InteriorBoundary &intBound = meshDistributor->getIntBoundary();
     std::set<BoundaryObject> allEdges;
-    for (InteriorBoundary::iterator it(intBound.getOwn()); !it.end(); ++it)
+    for (InteriorBoundary::iterator it(intBound.getOwn()); !it.end(); ++it) {
       if (it->rankObj.subObj == EDGE && 
 	  testWirebasketEdge(it->rankObj, feSpaces[0]) && 
-	  allEdges.count(it->rankObj) == 0)
-	allEdges.insert(it->rankObj);
+	  allEdges.count(it->rankObj) == 0) {
+	bool dirichletOnlyEdge = true;
+
+	DofContainer edgeDofs;
+	it->rankObj.el->getAllDofs(feSpaces[0], it->rankObj, edgeDofs);
+	for (DofContainer::iterator dit = edgeDofs.begin();
+	     dit != edgeDofs.end(); ++dit) {
+	  if (dirichletRows[feSpaces[0]].count(**dit) == 0) {
+	    dirichletOnlyEdge = false;
+	    break;
+	  }
+	}
+
+	if (!dirichletOnlyEdge)
+	  allEdges.insert(it->rankObj);
+      }
+    }
 
     nRankEdges = allEdges.size();    
     int rStartEdges = 0;
     mpi::getDofNumbering(mpiCommGlobal, nRankEdges, rStartEdges, nOverallEdges);
 
     MSG("nRankEdges = %d, nOverallEdges = %d\n", nRankEdges, nOverallEdges);
-
+    
     nRankEdges *= feSpaces.size();
     rStartEdges *= feSpaces.size();
     nOverallEdges *= feSpaces.size();
@@ -754,13 +774,10 @@ namespace AMDiS {
     int rowCounter = rStartEdges;
     for (std::set<BoundaryObject>::iterator edgeIt = allEdges.begin(); 
 	 edgeIt != allEdges.end(); ++edgeIt) {
-
       for (int i = 0; i < feSpaces.size(); i++) {
 	DofContainer edgeDofs;
 	edgeIt->el->getAllDofs(feSpaces[i], *edgeIt, edgeDofs);
 	
-	MSG("SIZE = %d\n", edgeDofs.size());
-	
 	for (DofContainer::iterator it = edgeDofs.begin();
 	     it != edgeDofs.end(); ++it) {
 	  TEST_EXIT_DBG(isPrimal(feSpaces[i], **it) == false)
@@ -1032,38 +1049,7 @@ namespace AMDiS {
       MatShellSetOperation(tmp, MATOP_MULT, 
 			   (void(*)(void))petscMultMatSchurPrimalAugmented);
 
-      schurPrimalAugmentedData.testMode = false;
       MatComputeExplicitOperator(tmp, &mat_schur_primal);
-      schurPrimalAugmentedData.testMode = true;
-
-      {
-	Vec tvec0, tvec1;
-	VecCreateMPI(PETSC_COMM_WORLD, 
-		     primalDofMap.getRankDofs() + nRankEdges,
-		     primalDofMap.getOverallDofs() + nOverallEdges, 
-		     &tvec0);
-	VecDuplicate(tvec0, &tvec1);
-
-	if (meshDistributor->getMpiRank() == 0)
-	  VecSetValue(tvec0, 12, 0.0, INSERT_VALUES);
-
-	VecAssemblyBegin(tvec0);
-	VecAssemblyEnd(tvec1);
-
-	MatMult(tmp, tvec0, tvec1);
-	
-	//	VecView(tvec1, PETSC_VIEWER_STDOUT_WORLD);
-
-	PetscReal n, a, b;
-	VecNorm(tvec1, NORM_2, &n);
-	VecMax(tvec1, PETSC_NULL, &a);
-	VecMin(tvec1, PETSC_NULL, &b);
-
-	MSG("DIE WERTE SIND %e %e %e\n", n, a, b);
-      }
-
-
-      //      MatView(mat_schur_primal, PETSC_VIEWER_STDOUT_WORLD);
 
       MatDestroy(&tmp);
       schurPrimalAugmentedData.subSolver = NULL;
@@ -1180,8 +1166,13 @@ namespace AMDiS {
       KSPSetType(ksp_interior, KSPPREONLY);
       PC pc_interior;
       KSPGetPC(ksp_interior, &pc_interior);
-      PCSetType(pc_interior, PCLU);
-      PCFactorSetMatSolverPackage(pc_interior, MATSOLVERUMFPACK);
+      if (isSymmetric) {
+	PCSetType(pc_interior, PCCHOLESKY);
+	PCFactorSetMatSolverPackage(pc_interior, MATSOLVERMUMPS);
+      } else {
+	PCSetType(pc_interior, PCLU);
+	PCFactorSetMatSolverPackage(pc_interior, MATSOLVERUMFPACK);
+      }
       KSPSetFromOptions(ksp_interior);
             
       fetiDirichletPreconData.mat_lagrange_scaled = &mat_lagrange_scaled;
@@ -1744,10 +1735,7 @@ namespace AMDiS {
     createDirichletData(*mat);
 
     createFetiData();
-
-    //    dirichletRows.clear();
-
-   
+  
     // === Create matrices for the FETI-DP method. ===
 
     if (printTimings)
@@ -1756,6 +1744,8 @@ namespace AMDiS {
   
     subdomain->fillPetscMatrix(mat);
 
+    dbgMatrix();
+
     if (printTimings) {
       MPI::COMM_WORLD.Barrier();
       MSG("FETI-DP timing 02: %.5f seconds (creation of interior matrices)\n",
diff --git a/AMDiS/src/parallel/PetscSolverFetiOperators.cc b/AMDiS/src/parallel/PetscSolverFetiOperators.cc
index f035848b..cd9c94c3 100644
--- a/AMDiS/src/parallel/PetscSolverFetiOperators.cc
+++ b/AMDiS/src/parallel/PetscSolverFetiOperators.cc
@@ -66,11 +66,6 @@ namespace AMDiS {
       PetscInt muLocalSize = allLocalSize - primalLocalSize;
       PetscInt muSize = allSize - primalSize;
 
-      if (data->testMode) {
-	VecView(x, PETSC_VIEWER_STDOUT_WORLD);
-	MSG("LOCAL SIZE: %d %d %d\n", allLocalSize, primalLocalSize, muLocalSize);
-      }
-
       VecCreateMPI(PETSC_COMM_WORLD, muLocalSize, muSize, &x_mu);
       VecCreateMPI(PETSC_COMM_WORLD, muLocalSize, muSize, &y_mu);
 
@@ -81,17 +76,10 @@ namespace AMDiS {
       VecGetArray(x_primal, &primalValue);
       VecGetArray(x_mu, &muValue);
 
-      for (int i = 0; i < primalLocalSize; i++) {
-	if (data->testMode && allValue[i] != 1.0)
-	  MSG("ONE IS in %d ith local primal value!\n", i);
+      for (int i = 0; i < primalLocalSize; i++)
 	primalValue[i] = allValue[i];
-      }
-      for (int i = 0; i < muLocalSize; i++) {
-	if (data->testMode && allValue[primalLocalSize + i] != 0.0)
-	  MSG("ONE IS in %d ith local mu value!\n", primalLocalSize + i);
-
+      for (int i = 0; i < muLocalSize; i++)
 	muValue[i] = allValue[primalLocalSize + i];
-      }
 
       VecRestoreArray(x, &allValue);
       VecRestoreArray(x_primal, &primalValue);
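Note: the multiplication routine above splits the combined coarse vector
into its primal and Lagrange multiplier (mu) parts by plain local array
copies, assuming each rank stores its primal entries first. A standalone
sketch of that split (function and parameter names are illustrative):

#include <petscvec.h>

// Split x into a leading primal part and a trailing mu part. The local
// and global sizes of each part must be consistent with the layout of x;
// the caller is responsible for destroying the two output vectors.
void splitPrimalMu(Vec x,
                   PetscInt primalLocal, PetscInt primalGlobal,
                   PetscInt muLocal, PetscInt muGlobal,
                   Vec *xPrimal, Vec *xMu)
{
  VecCreateMPI(PETSC_COMM_WORLD, primalLocal, primalGlobal, xPrimal);
  VecCreateMPI(PETSC_COMM_WORLD, muLocal, muGlobal, xMu);

  PetscScalar *allValue, *primalValue, *muValue;
  VecGetArray(x, &allValue);
  VecGetArray(*xPrimal, &primalValue);
  VecGetArray(*xMu, &muValue);

  for (PetscInt i = 0; i < primalLocal; i++)
    primalValue[i] = allValue[i];
  for (PetscInt i = 0; i < muLocal; i++)
    muValue[i] = allValue[primalLocal + i];

  VecRestoreArray(x, &allValue);
  VecRestoreArray(*xPrimal, &primalValue);
  VecRestoreArray(*xMu, &muValue);
}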
diff --git a/AMDiS/src/parallel/PetscSolverFetiStructs.h b/AMDiS/src/parallel/PetscSolverFetiStructs.h
index ff116a68..8fa97340 100644
--- a/AMDiS/src/parallel/PetscSolverFetiStructs.h
+++ b/AMDiS/src/parallel/PetscSolverFetiStructs.h
@@ -65,8 +65,6 @@ namespace AMDiS {
     PetscSolver* subSolver;
 
     bool nestedVec;
-
-    bool testMode;
   };
 
 
diff --git a/AMDiS/src/parallel/PetscSolverGlobalMatrix.cc b/AMDiS/src/parallel/PetscSolverGlobalMatrix.cc
index beecd450..3fc15695 100644
--- a/AMDiS/src/parallel/PetscSolverGlobalMatrix.cc
+++ b/AMDiS/src/parallel/PetscSolverGlobalMatrix.cc
@@ -239,11 +239,16 @@ namespace AMDiS {
     KSPSetOptionsPrefix(kspInterior, "interior_");
     KSPSetType(kspInterior, KSPPREONLY);
     KSPGetPC(kspInterior, &pcInterior);
-    PCSetType(pcInterior, PCLU);
-    if (subdomainLevel == 0)
-      PCFactorSetMatSolverPackage(pcInterior, MATSOLVERUMFPACK);
-    else
+    if (isSymmetric) {
+      PCSetType(pcInterior, PCCHOLESKY);
       PCFactorSetMatSolverPackage(pcInterior, MATSOLVERMUMPS);
+    } else {
+      PCSetType(pcInterior, PCLU);
+      if (subdomainLevel == 0)
+	PCFactorSetMatSolverPackage(pcInterior, MATSOLVERUMFPACK);
+      else
+	PCFactorSetMatSolverPackage(pcInterior, MATSOLVERMUMPS);
+    }
     KSPSetFromOptions(kspInterior);  
   }
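Note: both interior solvers now derive their factorization from the
symmetry flag: Cholesky via MUMPS for symmetric problems, LU otherwise
(UMFPACK on the finest subdomain level, MUMPS on coarser ones). A compact
sketch combining both variants (configureInteriorSolver is illustrative;
the PETSc calls are the ones used in the patch):

#include <petscksp.h>

// Configure a KSP as a pure direct solve whose factorization matches the
// operator's structure.
void configureInteriorSolver(KSP ksp, bool isSymmetric, int subdomainLevel)
{
  PC pc;
  KSPSetType(ksp, KSPPREONLY);    // factor once, no Krylov iterations
  KSPGetPC(ksp, &pc);

  if (isSymmetric) {
    PCSetType(pc, PCCHOLESKY);
    PCFactorSetMatSolverPackage(pc, MATSOLVERMUMPS);
  } else {
    PCSetType(pc, PCLU);
    PCFactorSetMatSolverPackage(pc, (subdomainLevel == 0 ?
                                     MATSOLVERUMFPACK : MATSOLVERMUMPS));
  }

  KSPSetFromOptions(ksp);
}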
 
-- 
GitLab