Project: iwr/amdis
Commit: da4acc42 (parent f99a87c8)
Author: Thomas Witkowski, Oct 16, 2012

    Add support for BDDCML 2.0 and work on scalable FETI-DP in 3d using
    arithmetic constraints.

Changes: 17 files

AMDiS/src/DOFMatrix.cc
@@ -26,9 +26,6 @@
 #include "BoundaryManager.h"
 #include "Assembler.h"
 #include "Serializer.h"
-#ifdef HAVE_PARALLEL_DOMAIN_AMDIS
-#include "parallel/ParallelDofMapping.h"
-#endif
 
 namespace AMDiS {
@@ -209,21 +206,6 @@ namespace AMDiS {
     using namespace mtl;
 
-#if 0
-    if (MPI::COMM_WORLD.Get_rank() == 0) {
-      std::cout << "----- PRINT MAT " << rowElInfo->getElement()->getIndex() << "--------" << std::endl;
-      std::cout << elMat << std::endl;
-      std::cout << "rows: ";
-      for (int i = 0; i < rowIndices.size(); i++)
-        std::cout << rowIndices[i] << " ";
-      std::cout << std::endl;
-      std::cout << "cols: ";
-      for (int i = 0; i < colIndices.size(); i++)
-        std::cout << colIndices[i] << " ";
-      std::cout << std::endl;
-    }
-#endif
 
     for (int i = 0; i < nRow; i++) {
       DegreeOfFreedom row = rowIndices[i];
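
The loop that begins here scatters the local element matrix into the global one; the hunk is cut off after its first line. Purely for orientation, a generic finite-element scatter-add looks like the sketch below. This is NOT the actual AMDiS continuation: nCol, elMat, colIndices and the inserter semantics are assumptions for illustration only.

    // Generic scatter-add assembly sketch (hypothetical names, not AMDiS code).
    for (int i = 0; i < nRow; i++) {
      DegreeOfFreedom row = rowIndices[i];
      for (int j = 0; j < nCol; j++)
        ins[row][colIndices[j]] += elMat[i][j];   // accumulate element entry
    }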
@@ -552,15 +534,12 @@ namespace AMDiS {
   // Do the following only in sequential code. In parallel mode, the specific
   // solver method must care about dirichlet boundary conditions.
 #ifndef HAVE_PARALLEL_DOMAIN_AMDIS
   inserter_type &ins = *inserter;
   for (std::set<int>::iterator it = dirichletDofs.begin();
        it != dirichletDofs.end(); ++it)
-#ifdef HAVE_PARALLEL_DOMAIN_AMDIS
-    // if (dofMap->isRankDof(*it))
-    if (dofMap->isSet(*it))
-      ins[*it][*it] = 1.0;
-#endif
     ins[*it][*it] = 1.0;
 }
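
For a reader unfamiliar with the pattern: each Dirichlet row is replaced by a unit diagonal row so that, together with the boundary value in the rhs, the solved system enforces u_i = g_i. A self-contained sketch of the same operation, with a map-of-maps matrix standing in for MTL's inserter (not AMDiS code):

    #include <map>
    #include <set>

    typedef std::map<int, std::map<int, double> > SparseMatrix;

    // Replace every Dirichlet row by a unit diagonal row.
    void applyDirichletRows(SparseMatrix &A, const std::set<int> &dirichletDofs)
    {
      for (std::set<int>::const_iterator it = dirichletDofs.begin();
           it != dirichletDofs.end(); ++it) {
        A[*it].clear();      // drop all couplings of the constrained row
        A[*it][*it] = 1.0;   // the row now encodes u_i = rhs_i
      }
    }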
@@ -583,12 +562,4 @@ namespace AMDiS {
     dirichletDofs.clear();
   }
 
-#ifdef HAVE_PARALLEL_DOMAIN_AMDIS
-  void DOFMatrix::setDofMap(FeSpaceDofMap& m)
-  {
-    dofMap = &m;
-  }
-#endif
 
 }

AMDiS/src/DOFMatrix.h
@@ -415,10 +415,6 @@ namespace AMDiS {
     ///
     int memsize();
 
-#ifdef HAVE_PARALLEL_DOMAIN_AMDIS
-    void setDofMap(FeSpaceDofMap& m);
-#endif
 
   protected:
     /// Pointer to a FiniteElemSpace with information about corresponding row DOFs
     /// and basis functions
@@ -480,13 +476,6 @@ namespace AMDiS {
     /// elemnts during assembling.
     int nnzPerRow;
 
-#ifdef HAVE_PARALLEL_DOMAIN_AMDIS
-    /// Is used in parallel computations to check whether specific DOFs in the
-    /// row FE spaces are owned by the rank or not. This is used to ensure that
-    /// Dirichlet BC is handled correctly in parallel computations.
-    FeSpaceDofMap *dofMap;
-#endif
 
     /// Inserter object: implemented as pointer, allocated and deallocated as needed
     inserter_type *inserter;

AMDiS/src/DOFVector.cc
@@ -285,10 +285,10 @@ namespace AMDiS {
       elInfo = stack.traverseNext(elInfo);
     }
 
 #ifdef HAVE_PARALLEL_DOMAIN_AMDIS
     double localResult = result;
     MPI::COMM_WORLD.Allreduce(&localResult, &result, 1, MPI_DOUBLE, MPI_SUM);
 #endif
 
     return result;
   }
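
The Allreduce above is the standard pattern for turning a per-rank integral into a global one: every rank contributes its local sum and every rank receives the total. A minimal self-contained version, assuming MPI is already initialized as in any parallel AMDiS run (the MPI C++ bindings used here match the source; they were deprecated in later MPI standards):

    #include <mpi.h>

    // Each rank passes its local contribution; afterwards all ranks hold the sum.
    double globalSum(double localValue)
    {
      double result = 0.0;
      MPI::COMM_WORLD.Allreduce(&localValue, &result, 1, MPI_DOUBLE, MPI_SUM);
      return result;
    }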

AMDiS/src/DOFVector.h
@@ -42,10 +42,6 @@
 #include "BasisFunction.h"
 #include "FiniteElemSpace.h"
 #include "SurfaceQuadrature.h"
-#ifdef HAVE_PARALLEL_DOMAIN_AMDIS
-#include <mpi.h>
-#include "parallel/ParallelDofMapping.h"
-#endif
 
 namespace AMDiS {
@@ -61,11 +57,7 @@ namespace AMDiS {
       elementVector(3),
       boundaryManager(NULL),
       nBasFcts(0)
-    {
-#ifdef HAVE_PARALLEL_DOMAIN_AMDIS
-      dofMap = NULL;
-#endif
-    }
+    {}
 
     DOFVectorBase(const FiniteElemSpace *f, string n);
@@ -248,21 +240,6 @@ namespace AMDiS {
       return dirichletDofValues;
     }
 
-#ifdef HAVE_PARALLEL_DOMAIN_AMDIS
-    void setDofMap(FeSpaceDofMap& m)
-    {
-      dofMap = &m;
-    }
-
-    inline bool isRankDof(DegreeOfFreedom dof)
-    {
-      TEST_EXIT_DBG(dofMap)("No rank dofs set!\n");
-
-      return dofMap->isRankDof(dof);
-    }
-#endif
 
   protected:
     ///
     const FiniteElemSpace *feSpace;
@@ -292,10 +269,6 @@ namespace AMDiS {
     int dim;
 
     map<DegreeOfFreedom, T> dirichletDofValues;
 
-#ifdef HAVE_PARALLEL_DOMAIN_AMDIS
-    FeSpaceDofMap *dofMap;
-#endif
   };

AMDiS/src/DOFVector.hh
@@ -1060,10 +1060,6 @@ namespace AMDiS {
       this->boundaryManager = NULL;
     }
 
-#ifdef HAVE_PARALLEL_DOMAIN_AMDIS
-    this->dofMap = rhs.dofMap;
-#endif
 
     return *this;
   }

AMDiS/src/DirichletBC.cc
@@ -64,24 +64,20 @@ namespace AMDiS {
     const BasisFunction *basFcts = rowFeSpace->getBasisFcts();
 
     for (int i = 0; i < nBasFcts; i++) {
-#ifdef HAVE_PARALLEL_DOMAIN_AMDIS
-      if (vector->isRankDof(dofIndices[i]))
-#endif
       if (localBound[i] == boundaryType) {
         double value = 0.0;
         if (f) {
           elInfo->coordToWorld(*(basFcts->getCoords(i)), worldCoords);
           value = (*f)(worldCoords);
         } else {
           if (dofVec)
             value = (*dofVec)[dofIndices[i]];
           else
             ERROR_EXIT("There is something wrong!\n");
         }
 
         vector->setDirichletDofValue(dofIndices[i], value);
         (*vector)[dofIndices[i]] = value;
       }
     }
   }
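
For context, the guard removed here implemented per-rank ownership of Dirichlet DOFs: only the owning rank wrote the value, and the other ranks received it through the usual DOF synchronization. A self-contained sketch of that idea, with OwnershipMap as a stand-in for FeSpaceDofMap (this commit moves the responsibility into the PETSc solvers, see removeDirichletRows below):

    #include <set>

    // Stand-in for FeSpaceDofMap: a rank "owns" the DOFs in this set.
    struct OwnershipMap {
      std::set<int> owned;
      bool isRankDof(int dof) const { return owned.count(dof) > 0; }
    };

    // Old behaviour: only the owning rank writes the Dirichlet value.
    void setDirichletValue(double *vec, int dof, double value,
                           const OwnershipMap &map)
    {
      if (map.isRankDof(dof))
        vec[dof] = value;
    }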

AMDiS/src/parallel/BddcMlSolver.cc
@@ -232,6 +232,7 @@ namespace AMDiS {
     // matrix type (set here to unsymmetric)
     int matrixtype = 0;
+    Parameters::get("parallel->bddcml->matrix type", matrixtype);
 
     // Non zero structure of matrix
     vector<int> i_sparse;
@@ -352,12 +353,14 @@ namespace AMDiS {
                            &luser_constraints2);
 
-    int use_defaults_int = 1;
+    int use_defaults_int = 0;
     int parallel_division_int = 1;
-    int use_arithmetic_int = 0;
+    int use_arithmetic_int = 1;
     int use_adaptive_int = 0;
     int use_user_constraint_int = 0;
+    Parameters::get("parallel->bddcml->arithmetic constraints", use_arithmetic_int);
 
     // MSG("call to \"bddcml_setup_preconditioner\" with the following arguments (each in one line):\n");
     // MSG("  %d\n", matrixtype);
     // MSG("  %d\n", use_defaults_int);
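
With these Parameters::get calls, both BDDCML options become runtime-configurable: the defaults above (arithmetic constraints on, BDDCML defaults off) apply unless the init file overrides them. Assuming the usual AMDiS "key: value" init-file syntax, a fragment setting them could look like this; the keys are taken verbatim from the calls above, but the file syntax itself is an assumption, not part of this commit:

    % hypothetical init-file fragment
    parallel->bddcml->matrix type: 0
    parallel->bddcml->arithmetic constraints: 1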

AMDiS/src/parallel/MeshDistributor.cc
@@ -189,8 +189,6 @@ namespace AMDiS {
     updateLocalGlobalNumbering();
 
-    setRankDofs();
-
     elObjDb.setData(partitionMap, levelData);
 
 #if (DEBUG != 0)
@@ -351,9 +349,6 @@ namespace AMDiS {
 #endif
     }
 
-    // Set DOF rank information to all matrices and vectors.
-    setRankDofs();
-
     // And delete some data, we there is no mesh adaptivty.
     if (!meshAdaptivity)
       elObjDb.clear();
@@ -475,10 +470,8 @@ namespace AMDiS {
     // DOFs object to the matrices and vectors of the added stationary problem,
     // and to remove the periodic boundary conditions on these objects.
-    if (initialized) {
-      setRankDofs(probStat, dofMap);
+    if (initialized)
       removePeriodicBoundaryConditions(probStat);
-    }
   }
@@ -731,49 +724,6 @@ namespace AMDiS {
   }
 
-  void MeshDistributor::setRankDofs(ProblemStatSeq *probStat,
-                                    ParallelDofMapping &rankDofMap)
-  {
-    FUNCNAME("MeshDistributor::setRankDofs()");
-
-    int nComponents = probStat->getNumComponents();
-    for (int i = 0; i < nComponents; i++) {
-      for (int j = 0; j < nComponents; j++)
-        if (probStat->getSystemMatrix(i, j)) {
-          const FiniteElemSpace *rowFeSpace =
-            probStat->getSystemMatrix(i, j)->getRowFeSpace();
-
-          TEST_EXIT(find(feSpaces.begin(), feSpaces.end(), rowFeSpace) !=
-                    feSpaces.end())("Should not happen!\n");
-
-          probStat->getSystemMatrix(i, j)->setDofMap(rankDofMap[rowFeSpace]);
-        }
-
-      TEST_EXIT_DBG(probStat->getRhs()->getDOFVector(i))("No RHS vector!\n");
-      TEST_EXIT_DBG(probStat->getSolution()->getDOFVector(i))
-        ("No solution vector!\n");
-      TEST_EXIT_DBG(probStat->getRhsVector(i)->getFeSpace() ==
-                    probStat->getSolution(i)->getFeSpace())
-        ("Should not happen!\n");
-
-      const FiniteElemSpace *feSpace = probStat->getRhsVector(i)->getFeSpace();
-      TEST_EXIT(find(feSpaces.begin(), feSpaces.end(), feSpace) !=
-                feSpaces.end())("Should not happen!\n");
-
-      probStat->getRhsVector(i)->setDofMap(rankDofMap[feSpace]);
-      probStat->getSolution(i)->setDofMap(rankDofMap[feSpace]);
-    }
-  }
-
-  void MeshDistributor::setRankDofs()
-  {
-    for (unsigned int i = 0; i < problemStat.size(); i++)
-      setRankDofs(problemStat[i], dofMap);
-  }
-
   void MeshDistributor::removePeriodicBoundaryConditions()
   {
     FUNCNAME("MeshDistributor::removePeriodicBoundaryConditions()");

AMDiS/src/parallel/MeshDistributor.h
@@ -347,14 +347,6 @@ namespace AMDiS {
                       DofComm &dcom,
                       const FiniteElemSpace *feSpace);
 
-    /// Sets \ref isRankDof to all matrices and rhs vectors in a given
-    /// stationary problem.
-    void setRankDofs(ProblemStatSeq *probStat, ParallelDofMapping &rankDofMap);
-
-    /// Sets \ref isRankDof to all matrices and rhs vectors in all
-    /// stationary problems.
-    void setRankDofs();
-
   protected:
     void addProblemStat(ProblemStatSeq *probStat);

AMDiS/src/parallel/PetscProblemStat.cc
@@ -76,24 +76,6 @@ namespace AMDiS {
   }
 
-  void PetscProblemStat::buildAfterCoarsen(AdaptInfo *adaptInfo, Flag flag,
-                                           bool assembleMatrix,
-                                           bool assembleVector)
-  {
-    FUNCNAME("ParallelProblemStatBase::buildAfterCoarsen()");
-
-    TEST_EXIT_DBG(MeshDistributor::globalMeshDistributor != NULL)
-      ("Should not happen!\n");
-
-    MeshDistributor::globalMeshDistributor->checkMeshChange();
-    MeshDistributor::globalMeshDistributor->setRankDofs(this,
-      petscSolver->getInteriorMap());
-
-    ProblemStatSeq::buildAfterCoarsen(adaptInfo, flag,
-                                      assembleMatrix, assembleVector);
-  }
-
   void PetscProblemStat::solve(AdaptInfo *adaptInfo,
                                bool createMatrixData,
                                bool storeMatrixData)

AMDiS/src/parallel/PetscProblemStat.h
@@ -49,10 +49,6 @@ namespace AMDiS {
       delete petscSolver;
     }
 
-    void buildAfterCoarsen(AdaptInfo *adaptInfo, Flag flag,
-                           bool assembleMatrix,
-                           bool assembleVector);
-
     void initialize(Flag initFlag,
                     ProblemStatSeq *adoptProblem = NULL,
                     Flag adoptFlag = INIT_NOTHING);

AMDiS/src/parallel/PetscSolver.cc
@@ -23,9 +23,10 @@ namespace AMDiS {
   PetscSolver::PetscSolver()
     : ParallelCoarseSpaceMatVec(),
       kspPrefix(""),
-      removeRhsNullspace(false),
-      isSymmetric(false),
-      hasConstantNullspace(false)
+      removeRhsNullspace(false),
+      hasConstantNullspace(false),
+      isSymmetric(false),
+      handleDirichletRows(true)
   {
     string kspStr = "";
     Parameters::get("parallel->solver->petsc->ksp", kspStr);

AMDiS/src/parallel/PetscSolver.h
@@ -62,7 +62,8 @@ namespace AMDiS {
      */
     virtual void fillPetscMatrix(Matrix<DOFMatrix*> *mat) = 0;
 
-    ///
+    /// This function is just a small wrapper that creates a 1x1 matrix that
+    /// contains exactly one DOFMatrix and than calls \ref fillPetscMatrix
     void fillPetscMatrix(DOFMatrix *mat);
 
     /** \brief
@@ -85,11 +86,6 @@ namespace AMDiS {
     /// Detroys all vector data structures.
     virtual void destroyVectorData() = 0;
 
-    virtual ParallelDofMapping& getInteriorMap()
-    {
-      return meshDistributor->getDofMap();
-    }
-
     virtual Flag getBoundaryDofRequirement()
     {
       return 0;
@@ -139,6 +135,12 @@ namespace AMDiS {
       constNullspaceComponent.push_back(component);
     }
 
+    /// Informs the solver whether is has to handle dirichlet rows or not.
+    void setHandleDirichletRows(bool b)
+    {
+      handleDirichletRows = b;
+    }
+
   protected:
     /** \brief
      * Copies between to PETSc vectors by using different index sets for the
@@ -177,6 +179,12 @@ namespace AMDiS {
     bool isSymmetric;
 
+    /// If true, dirichlet rows are handled by the solver correspondently. To
+    /// set this value to false makes only sense, of this solver is just used
+    /// as a subsolver and the main solver above alread handles dirichlet rows
+    /// in some way.
+    bool handleDirichletRows;
+
     vector<int> constNullspaceComponent;
   };
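
The new flag matters when solvers nest: the outer solver fixes the Dirichlet rows once, and the inner solver must not zero them again. A self-contained sketch of that protocol in plain C++ (toy classes, not the AMDiS hierarchy; in this commit PetscSolverFeti disables the flag on its PetscSolverGlobalMatrix subdomain solver, see below):

    #include <iostream>

    class ToySolver {
    public:
      ToySolver() : handleDirichletRows(true) {}
      void setHandleDirichletRows(bool b) { handleDirichletRows = b; }
      void removeDirichletRows() {
        if (!handleDirichletRows)
          return;                              // an outer solver owns these rows
        std::cout << "zeroing Dirichlet rows\n";
      }
    private:
      bool handleDirichletRows;
    };

    int main() {
      ToySolver outer, subdomain;
      subdomain.setHandleDirichletRows(false); // as FETI-DP does below
      outer.removeDirichletRows();             // acts on the rows
      subdomain.removeDirichletRows();         // deliberate no-op
      return 0;
    }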

AMDiS/src/parallel/PetscSolverFeti.cc
@@ -101,6 +101,7 @@ namespace AMDiS {
     if (subdomain == NULL) {
       subdomain = new PetscSolverGlobalMatrix();
       subdomain->setSymmetric(isSymmetric);
+      subdomain->setHandleDirichletRows(false);
 
       if (meshLevel == 0) {
         subdomain->setMeshDistributor(meshDistributor,
@@ -134,11 +135,6 @@ namespace AMDiS {
   {
     FUNCNAME("PetscSolverFeti::createDirichletData()");
 
-    bool removeDirichletRows = false;
-    Parameters::get("parallel->feti->remove dirichlet", removeDirichletRows);
-    if (!removeDirichletRows)
-      return;
-
     int nComponents = mat.getSize();
     for (int i = 0; i < nComponents; i++) {
       DOFMatrix *dofMat = mat[i][i];
@@ -735,13 +731,15 @@ namespace AMDiS {
     InteriorBoundary &intBound = meshDistributor->getIntBoundary();
     std::set<BoundaryObject> allEdges;
     for (InteriorBoundary::iterator it(intBound.getOwn()); !it.end(); ++it) {
-      if (it->rankObj.subObj == EDGE &&
-          testWirebasketEdge(it->rankObj, feSpaces[0]) &&
+      if ((it->rankObj.subObj == FACE ||
+           (it->rankObj.subObj == EDGE &&
+            testWirebasketEdge(it->rankObj, feSpaces[0]))) &&
           allEdges.count(it->rankObj) == 0) {
         bool dirichletOnlyEdge = true;
 
         DofContainer edgeDofs;
         it->rankObj.el->getAllDofs(feSpaces[0], it->rankObj, edgeDofs);
 
         for (DofContainer::iterator dit = edgeDofs.begin();
              dit != edgeDofs.end(); ++dit) {
           if (dirichletRows[feSpaces[0]].count(**dit) == 0) {
@@ -919,12 +917,10 @@ namespace AMDiS {
     if (creationMode == 0) {
       // matK = inv(A_BB) A_BPi
       Mat matK;
-      MSG("START SOLVE!\n");
       petsc_helper::blockMatMatSolve(subdomain->getSolver(),
                                      subdomain->getMatInteriorCoarse(),
                                      matK);
-      MSG("END SOLVE!\n");
 
       // mat_schur_primal = A_PiPi - A_PiB inv(A_BB) A_BPi
       //                  = A_PiPi - A_PiB matK
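
The two comment lines above spell out the FETI-DP coarse problem. Splitting the DOFs into interior/dual ones (subscript $B$) and primal ones (subscript $\Pi$), the primal Schur complement is $S_{\Pi\Pi} = A_{\Pi\Pi} - A_{\Pi B} A_{BB}^{-1} A_{B\Pi}$. The code builds it in two steps: blockMatMatSolve solves $A_{BB}\,\mathrm{matK} = A_{B\Pi}$ with the subdomain solver, and the product and subtraction $A_{\Pi\Pi} - A_{\Pi B}\,\mathrm{matK}$ follow after this hunk.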

AMDiS/src/parallel/PetscSolverFeti.h
@@ -236,11 +236,6 @@ namespace AMDiS {
     /// order of: 1/h^2
     void createH2vec(DOFVector<double> &vec);
 
-    virtual ParallelDofMapping& getInteriorMap()
-    {
-      return localDofMap;
-    }
-
   protected:
     /// Mapping from primal DOF indices to a global index of primals.
     ParallelDofMapping primalDofMap;

AMDiS/src/parallel/PetscSolverGlobalMatrix.cc
@@ -82,17 +82,24 @@ namespace AMDiS {
     if (!zeroStartVector)
       KSPSetInitialGuessNonzero(kspInterior, PETSC_TRUE);
 
+    removeDirichletRows(seqMat, meshDistributor->getDofMap(), getMatInterior());
+
 #if (DEBUG != 0)
     MSG("Fill petsc matrix 3 needed %.5f seconds\n", MPI::Wtime() - wtime);
 #endif
   }
 
   void PetscSolverGlobalMatrix::fillPetscMatrix(DOFMatrix *mat)
   {
     Matrix<DOFMatrix*> m(1, 1);
     m[0][0] = mat;
     fillPetscMatrix(&m);
+
+    // === For debugging allow to write the matrix to a file. ===
+
+    bool dbgWriteMatrix = false;
+    Parameters::get("parallel->debug->write matrix", dbgWriteMatrix);
+    if (dbgWriteMatrix) {
+      PetscViewer matView;
+      PetscViewerBinaryOpen(PETSC_COMM_WORLD, "mpi.mat", FILE_MODE_WRITE, &matView);
+      MatView(getMatInterior(), matView);
+      PetscViewerDestroy(&matView);
+    }
   }
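
The dump can be inspected offline with plain PETSc. A sketch for reading "mpi.mat" back, assuming PetscInitialize has been called (MatLoad has carried this Mat/PetscViewer signature since PETSc 3.2, which matches the era of this commit):

    #include <petscmat.h>

    Mat loadDumpedMatrix()
    {
      Mat A;
      PetscViewer view;
      PetscViewerBinaryOpen(PETSC_COMM_WORLD, "mpi.mat", FILE_MODE_READ, &view);
      MatCreate(PETSC_COMM_WORLD, &A);
      MatLoad(A, view);
      PetscViewerDestroy(&view);
      return A;
    }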
@@ -249,7 +256,7 @@ namespace AMDiS {
       else
         PCFactorSetMatSolverPackage(pcInterior, MATSOLVERMUMPS);
     }
 
-    KSPSetFromOptions(kspInterior);
+    KSPSetFromOptions(kspInterior);
   }
@@ -271,6 +278,20 @@ namespace AMDiS {
     }
 
     vecRhsAssembly();
 
+    removeDirichletRows(vec, meshDistributor->getDofMap(), getVecRhsInterior());
+
+    // === For debugging allow to write the rhs vector to a file. ===
+
+    bool dbgWriteRhs = false;
+    Parameters::get("parallel->debug->write rhs", dbgWriteRhs);
+    if (dbgWriteRhs) {
+      PetscViewer vecView;
+      PetscViewerBinaryOpen(PETSC_COMM_WORLD, "mpi.vec", FILE_MODE_WRITE, &vecView);
+      VecView(getVecRhsInterior(), vecView);
+      PetscViewerDestroy(&vecView);
+    }
   }
@@ -449,6 +470,77 @@ namespace AMDiS {
   }
 
+  void PetscSolverGlobalMatrix::removeDirichletRows(Matrix<DOFMatrix*> *seqMat,
+                                                    ParallelDofMapping &dofMap,
+                                                    Mat mpiMat)
+  {
+    FUNCNAME("PetscSolverGlobalMatrix::removeDirichletRows()");
+
+    if (!handleDirichletRows)
+      return;
+
+    int nComponents = seqMat->getSize();
+    vector<PetscInt> zeroRows;
+    for (int i = 0; i < nComponents; i++) {
+      DOFMatrix *dofMat = (*seqMat)[i][i];
+      if (!dofMat)
+        continue;
+
+      const FiniteElemSpace *feSpace = dofMat->getRowFeSpace();
+      std::set<DegreeOfFreedom> &dRows = dofMat->getDirichletRows();
+      for (std::set<DegreeOfFreedom>::iterator dofIt = dRows.begin();
+           dofIt != dRows.end(); ++dofIt) {
+        MultiIndex multiIndex;
+        if (dofMap[feSpace].find(*dofIt, multiIndex) &&
+            dofMap[feSpace].isRankDof(*dofIt))
+          zeroRows.push_back(dofMap.getMatIndex(i, multiIndex.global));
+      }
+    }
+
+    MatZeroRows(mpiMat, zeroRows.size(), &(zeroRows[0]), 1.0,
+                PETSC_NULL, PETSC_NULL);
+
+    PetscViewer view;
+    PetscViewerBinaryOpen(PETSC_COMM_WORLD, "interior.mat", FILE_MODE_WRITE, &view);
+    MatView(mpiMat, view);
+    PetscViewerDestroy(&view);
+  }
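
MatZeroRows with diagonal value 1.0 is the PETSc idiom behind this function: each listed row is cleared and given a unit diagonal, so with the Dirichlet value already in the rhs the solve returns x_i = g_i exactly. A minimal standalone wrapper around the same call (plain PETSc; the two trailing Vec arguments, passed as PETSC_NULL here as in the code above, could instead let PETSc fix up the rhs itself):

    #include <petscmat.h>
    #include <vector>

    // Turn the listed global rows of A into identity rows.
    void zeroDirichletRows(Mat A, const std::vector<PetscInt> &rows)
    {
      MatZeroRows(A, rows.size(), rows.empty() ? PETSC_NULL : &rows[0],
                  1.0, PETSC_NULL, PETSC_NULL);
    }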
+  void PetscSolverGlobalMatrix::removeDirichletRows(SystemVector *seqVec,
+                                                    ParallelDofMapping &dofMap,
+                                                    Vec mpiVec)
+  {
+    FUNCNAME("PetscSolverGlobalMatrix::removeDirichletRows()");
+
+    if (!handleDirichletRows)
+      return;
+
+    int nComponents = seqVec->getSize();
+    for (int i = 0; i < nComponents; i++) {
+      DOFVector<double> *dofVec = seqVec->getDOFVector(i);
+      const FiniteElemSpace *feSpace = dofVec->getFeSpace();
+      map<DegreeOfFreedom, double>& dValues = dofVec->getDirichletValues();
+
+      for (map<DegreeOfFreedom, double>::iterator dofIt = dValues.begin();
+           dofIt != dValues.end(); ++dofIt) {
+        MultiIndex multiIndex;
+        if (dofMap[feSpace].find(dofIt->first, multiIndex) &&
+            dofMap[feSpace].isRankDof(dofIt->first))