Backofen, Rainer / amdis / Commits

Commit 7c30c9c8, authored Dec 07, 2012 by Thomas Witkowski

Fixed memory issue in Navier-Stokes solver, added new mode for checker partitioner.

Parent: 74d7b9b5
Changes: 22 files

AMDiS/src/DOFVector.cc
@@ -101,8 +101,14 @@ namespace AMDiS {
     for (int i = 0; i < nBasFcts; i++)
       uh[i] = operator[](localIndices[i]);
     value = basFcts->evalUh(lambda, uh);
-  } else
+  } else {
+#ifdef HAVE_PARALLEL_DOMAIN_AMDIS
+    return 0.0;
+#else
     throw(std::runtime_error("Can not eval DOFVector at point p, because point is outside geometry."));
+#endif
+  }
 
   if (oldElInfo == NULL)
     delete elInfo;

AMDiS/src/parallel/CheckerPartitioner.cc
@@ -15,7 +15,35 @@
 namespace AMDiS {
 
+  CheckerPartitioner::CheckerPartitioner(MPI::Intracomm *comm)
+    : MeshPartitioner(comm),
+      mpiRank(mpiComm->Get_rank()),
+      mpiSize(mpiComm->Get_size()),
+      mode(0),
+      multilevel(false)
+  {
+    string modestr = "";
+    Parameters::get("parallel->partitioner->mode", modestr);
+
+    if (modestr == "x-stripes")
+      mode = 1;
+    else if (modestr == "y-stripes")
+      mode = 2;
+    else if (modestr == "z-stripes")
+      mode = 3;
+    else if (modestr == "tetrahedron-stripes")
+      mode = 4;
+    else if (modestr == "multilevel")
+      multilevel = true;
+    else {
+      if (modestr != "") {
+        ERROR_EXIT("No partitioner mode \"%s\"!\n", modestr.c_str());
+      }
+    }
+  }
+
   bool CheckerPartitioner::createInitialPartitioning()
   {
     FUNCNAME("CheckerPartitioner::createInitialPartitioning()");
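
The new mode is selected through the init file. A minimal sketch using the parameter read in the constructor above (the `parallel->partitioner: checker` line is an assumption about how the checker partitioner itself is chosen; only the mode parameter appears in this diff):

    parallel->partitioner: checker
    parallel->partitioner->mode: tetrahedron-stripes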
@@ -47,6 +75,13 @@ namespace AMDiS {
       TEST_EXIT(MPI::COMM_WORLD.Get_size() == 16)
         ("Multilevel partitioning is implemented for 16 nodes only!\n");
     }
 
+    if (mode == 4) {
+      TEST_EXIT(mesh->getDim() == 3)("Works only in 3D!\n");
+      createTetrahedronStripes();
+    }
+
     int dim = mesh->getDim();
     TraverseStack stack;
@@ -114,6 +149,31 @@ namespace AMDiS {
       break;
 
+    case 4:
+      // tetrahedron-stripes
+      {
+        int inStripe = -1;
+        int stripePos = -1;
+        for (int stripe = 0; stripe < elInStripe.size(); stripe++) {
+          for (int pos = 0; pos < elInStripe[stripe].size(); pos++) {
+            if (elInStripe[stripe][pos] == elIndex) {
+              inStripe = stripe;
+              stripePos = pos;
+              break;
+            }
+          }
+          if (inStripe >= 0)
+            break;
+        }
+
+        TEST_EXIT(inStripe >= 0)("Should not happen!\n");
+
+        elInRank = inStripe;
+      }
+      break;
+
     default:
       ERROR_EXIT("Mode %d does not exists for checker based mesh partitioning!\n", mode);
@@ -131,4 +191,115 @@ namespace AMDiS {
     return true;
   }
 
+  void CheckerPartitioner::createTetrahedronStripes()
+  {
+    FUNCNAME("CheckerPartitioner::createTetrahedronStripes()");
+
+    vector<vector<MacroElement*> > stripes;
+
+    int nElements = 0;
+    TraverseStack stack;
+    ElInfo *elInfo = stack.traverseFirst(mesh, 0, Mesh::CALL_EL_LEVEL | Mesh::FILL_COORDS);
+    while (elInfo) {
+      TEST_EXIT(elInfo->getLevel() == 0)("Should not happen!\n");
+
+      Element *el = elInfo->getElement();
+      int elIndex = el->getIndex();
+
+      int zeroCoordCounter = 0;
+      for (int i = 0; i < mesh->getGeo(VERTEX); i++)
+        if (fabs(elInfo->getCoord(i)[2]) < 1e-10)
+          zeroCoordCounter++;
+
+      if (zeroCoordCounter == 3) {
+        vector<MacroElement*> tmp;
+        tmp.push_back(elInfo->getMacroElement());
+        stripes.push_back(tmp);
+
+        vector<int> tmpIndex;
+        tmpIndex.push_back(elInfo->getMacroElement()->getIndex());
+        elInStripe.push_back(tmpIndex);
+      }
+
+      nElements++;
+      elInfo = stack.traverseNext(elInfo);
+    }
+
+    TEST_EXIT(mpiSize % stripes.size() == 0)
+      ("Should not happen! mpiSize = %d but %d bottom elements found!\n",
+       mpiSize, stripes.size());
+
+    int testElementCounter = 0;
+    for (int stripe = 0; stripe < stripes.size(); stripe++) {
+      MacroElement *mel = stripes[stripe][0];
+
+      set<int> localDofs;
+      for (int i = 0; i < mesh->getGeo(VERTEX); i++)
+        if (fabs(mel->getCoord(i)[2]) < 1e-10)
+          localDofs.insert(i);
+      TEST_EXIT(localDofs.size() == 3)("Should not happen!\n");
+
+      while (mel != NULL) {
+        int replaceDof = -1;
+        for (int i = 0; i < mesh->getGeo(VERTEX); i++)
+          if (localDofs.count(i) == 0)
+            replaceDof = i;
+
+        bool found = false;
+        for (std::set<int>::iterator dit = localDofs.begin();
+             dit != localDofs.end(); ++dit) {
+          WorldVector<double> c0 = mel->getCoord(*dit);
+          WorldVector<double> c1 = mel->getCoord(replaceDof);
+
+          if (fabs(c0[0] - c1[0]) < 1e-10 &&
+              fabs(c0[1] - c1[1]) < 1e-10 &&
+              fabs(c0[2] - c1[2]) > 1e-10) {
+            found = true;
+            localDofs.erase(dit);
+            localDofs.insert(replaceDof);
+            break;
+          }
+        }
+        TEST_EXIT(found)("Should not happen!\n");
+
+        set<DegreeOfFreedom> faceDofs;
+        for (std::set<int>::iterator dit = localDofs.begin();
+             dit != localDofs.end(); ++dit)
+          faceDofs.insert(mel->getElement()->getDof(*dit, 0));
+        TEST_EXIT(faceDofs.size() == 3)("Should not happen!\n");
+
+        int localFace = -1;
+        for (int i = 0; i < mesh->getGeo(FACE); i++) {
+          DofFace face = mel->getElement()->getFace(i);
+          bool allVertexInFace =
+            faceDofs.count(face.get<0>()) &&
+            faceDofs.count(face.get<1>()) &&
+            faceDofs.count(face.get<2>());
+          if (allVertexInFace) {
+            localFace = i;
+            break;
+          }
+        }
+        TEST_EXIT(localFace >= 0)("Should not happen!\n");
+
+        MacroElement *mel_neigh = mel->getNeighbour(localFace);
+        if (mel_neigh) {
+          stripes[stripe].push_back(mel_neigh);
+          elInStripe[stripe].push_back(mel_neigh->getIndex());
+        }
+
+        mel = mel_neigh;
+      }
+
+      testElementCounter += stripes[stripe].size();
+    }
+
+    TEST_EXIT(testElementCounter == nElements)("Should not happen!\n");
+  }
 }

AMDiS/src/parallel/CheckerPartitioner.h
@@ -35,35 +35,14 @@ namespace AMDiS {
   class CheckerPartitioner : public MeshPartitioner
   {
   public:
-    CheckerPartitioner(MPI::Intracomm *comm)
-      : MeshPartitioner(comm),
-        mpiRank(mpiComm->Get_rank()),
-        mpiSize(mpiComm->Get_size()),
-        mode(0),
-        multilevel(false)
-    {
-      string modestr = "";
-      Parameters::get("parallel->partitioner->mode", modestr);
-
-      if (modestr == "x-stripes")
-        mode = 1;
-      else if (modestr == "y-stripes")
-        mode = 2;
-      else if (modestr == "z-stripes")
-        mode = 3;
-      else if (modestr == "multilevel")
-        multilevel = true;
-      else {
-        if (modestr != "") {
-          ERROR_EXIT("No partitioner mode \"%s\"!\n", modestr.c_str());
-        }
-      }
-    }
+    CheckerPartitioner(MPI::Intracomm *comm);
 
     ~CheckerPartitioner()
     {}
 
     bool createInitialPartitioning();
 
+    void createTetrahedronStripes();
+
     /// \ref MeshPartitioner::partition
     bool partition(map<int, double> &elemWeights, PartitionMode mode = INITIAL)
     {
@@ -81,8 +60,13 @@ namespace AMDiS {
     /// 0: standard mode: each node gets one box
     /// 1: x-stripes: each node gets one x-stripe of boxes
     /// 2: y-stripes: each node gets one y-stripe of boxes
     /// 3: z-stripes: each node gets one z-stripe of boxes
+    /// 4: tetrahedron-stripes: alias Hieram mode :)
     int mode;
 
+    /// Only used in mode 4.
+    vector<vector<int> > elInStripe;
+
     bool multilevel;
   };
 }

AMDiS/src/parallel/MeshDistributor.cc
@@ -579,10 +579,8 @@ namespace AMDiS {
     vector<ParallelDofMapping*>::iterator it =
       find(dofMaps.begin(), dofMaps.end(), &dofMap);
-    TEST_EXIT(it != dofMaps.end())
-      ("Cannot find Parallel DOF mapping object which should be removed!\n");
-    dofMaps.erase(it);
+    if (it != dofMaps.end())
+      dofMaps.erase(it);
   }
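
Removing a mapping that was never registered is now a silent no-op instead of a hard TEST_EXIT abort. A usage sketch, assuming this hunk sits in MeshDistributor::removeDofMap (the enclosing function name is not visible in the diff):

    meshDistributor->removeDofMap(dofMap);  // erases the mapping if it is registered
    meshDistributor->removeDofMap(dofMap);  // second call: no-op instead of an abort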
@@ -908,7 +906,7 @@ namespace AMDiS {
   {
     FUNCNAME("MeshDistributor::createMeshLevelStructure()");
 
-    int levelMode = -1;
+    int levelMode = 0;
     Parameters::get("parallel->level mode", levelMode);
     TEST_EXIT(levelMode >= 0)("Wrong level mode %d!\n", levelMode);
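
With the default changed from -1 to 0, the TEST_EXIT now passes even when the init file sets nothing, so single-level runs need no entry. For multilevel runs the parameter is still read; a sketch (the value 1 is an assumption, the diff only shows that values >= 0 are accepted):

    parallel->level mode: 1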
@@ -1305,7 +1303,8 @@ namespace AMDiS {
   {
     int mapSize = data.size();
     SerUtil::serialize(out, mapSize);
     for (map<int, map<const FiniteElemSpace*, DofContainer> >::iterator it =
            data.begin(); it != data.end(); ++it)
     {
       int rank = it->first;
       SerUtil::serialize(out, rank);

AMDiS/src/parallel/ParallelCoarseSpaceSolver.cc
@@ -69,16 +69,7 @@ namespace AMDiS {
     meshLevel = level;
     meshDistributor = md;
 
-#if 0
-    mpiCommGlobal = meshDistributor->getMpiComm(meshLevel);
-    if (meshLevel + 1 <
-        meshDistributor->getMeshLevelData().getNumberOfLevels())
-      mpiCommLocal = meshDistributor->getMpiComm(meshLevel + 1);
-    else
-      mpiCommLocal = MPI::COMM_SELF;
-#endif
-
     domainComm = meshDistributor->getMpiComm(meshLevel);
     if (meshLevel >= 1)
       coarseSpaceComm = meshDistributor->getMpiComm(meshLevel - 1);
   }
@@ -316,6 +307,7 @@ namespace AMDiS {
     FUNCNAME("ParallelCoarseSpaceSolver::vecDestroy()");
 
     int nVec = vecSol.size();
     for (int i = 0; i < nVec; i++) {
       VecDestroy(&vecSol[i]);
       VecDestroy(&vecRhs[i]);
@@ -391,7 +383,7 @@ namespace AMDiS {
     int groupRowsInterior = 0;
     if (domainComm.Get_rank() == 0)
       groupRowsInterior = interiorMap->getOverallDofs();
 
     mpi::getDofNumbering(coarseSpaceComm, groupRowsInterior,
                          rStartInterior, nGlobalOverallInterior);
@@ -400,7 +392,7 @@ namespace AMDiS {
         tmp = rStartInterior;
 
         domainComm.Allreduce(&tmp, &rStartInterior, 1, MPI_INT, MPI_SUM);
       }
     }
   }
 }

AMDiS/src/parallel/ParallelCoarseSpaceSolver.h
@@ -99,6 +99,20 @@ namespace AMDiS {
     /// Destroys PETSc vector objects.
     void vecDestroy();
 
+    /// Just for super trick
+    vector<vector<Mat> >& getMat()
+    {
+      return mat;
+    }
+
+    /// Just for super trick
+    vector<Vec>& getVecRhs()
+    {
+      return vecRhs;
+    }
+
     /// Get interior matrix.
     inline Mat& getMatInterior()
     {
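
A sketch of how a derived solver might use these raw accessors (hypothetical caller; only the mat[0][0] layout is documented in this class, see the comment on the mat member below):

    // mat[0][0] is the interior discretization matrix.
    vector<vector<Mat> > &mats = solver.getMat();
    Mat interior = mats[0][0];
    Vec rhs0 = solver.getVecRhs()[0];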
@@ -243,7 +257,7 @@ namespace AMDiS {
     /// zero structure.
     bool checkMeshChange();
 
-  private:
+  protected:
 
     /// Matrix of PETSc matrices. mat[0][0] is the interior discretization
     /// matrix, mat[1][1] corresponds to the first coarse space and so on.
     /// mat[i][j], with i not equal to j, are the coupling between the interior
@@ -291,7 +305,6 @@ namespace AMDiS {
     /// some phase fields.
     bool alwaysCreateNnzStructure;
 
-  protected:
-
     /// Prefix string for parameters in init file.
     string initFileStr;

AMDiS/src/parallel/ParallelDebug.cc
@@ -766,10 +766,10 @@ namespace AMDiS {
   {
     FUNCNAME("ParallelDebug::printBoundaryInfo()");
 
-    // int tmp = 0;
-    // Parameters::get("parallel->debug->print boundary info", tmp);
-    // if (tmp <= 0 && force == false)
-    //   return;
+    int tmp = 0;
+    Parameters::get("parallel->debug->print boundary info", tmp);
+    if (tmp <= 0 && force == false)
+      return;
 
     MSG("Interior boundary info:\n");
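
With this gating active again, the printout must be requested explicitly unless the call is forced. A minimal init-file line (the value 1 is an assumption; any positive value passes the check):

    parallel->debug->print boundary info: 1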
@@ -1018,6 +1018,8 @@ namespace AMDiS {
   {
     FUNCNAME("ParallelDebug::writeCsvElementMap()");
 
+    return;
+
     MSG("writing local Element map to CSV File\n");
 
     Mesh *mesh = feSpace->getMesh();

AMDiS/src/parallel/ParallelDofMapping.cc
@@ -74,7 +74,7 @@ namespace AMDiS {
     nOverallDofs = 0;
     rStartDofs = 0;
     mpi::getDofNumbering(mpiComm, nRankDofs, rStartDofs, nOverallDofs);
 
     // === If required, compute also the global indices. ===
     if (globalMapping) {

AMDiS/src/parallel/PetscHelper.cc
@@ -74,7 +74,7 @@ namespace AMDiS {
   }
 
-  void blockMatMatSolve(KSP ksp, Mat mat0, Mat &mat1)
+  void blockMatMatSolve(MPI::Intracomm mpiComm, KSP ksp, Mat mat0, Mat &mat1)
   {
     // === We have to calculate mat1 = ksp mat0: ===
     // ===  - get all local column vectors from mat0 ===
@@ -86,7 +86,7 @@ namespace AMDiS {
     MatGetLocalSize(mat0, &localRows, &localCols);
     MatGetSize(mat0, &globalRows, &globalCols);
 
-    MatCreateAIJ(PETSC_COMM_WORLD,
+    MatCreateAIJ(mpiComm,
                  localRows, localCols, globalRows, globalCols,
                  150, PETSC_NULL, 150, PETSC_NULL, &mat1);
     MatSetOption(mat1, MAT_NEW_NONZERO_ALLOCATION_ERR, PETSC_FALSE);
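
A hypothetical call site for the new signature (all names here are assumptions; the point of the change is that the communicator now comes from the caller instead of the hard-coded PETSC_COMM_WORLD, so the solve can run on a sub-communicator):

    petsc_helper::blockMatMatSolve(domainComm, kspInterior, matCoupling, matResult);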
@@ -253,7 +253,7 @@ namespace AMDiS {
                        const char *kspPrefix,
                        KSPType kspType,
                        PCType pcType,
-                       MatSolverPackage matSolverPackage,
+                       const MatSolverPackage matSolverPackage,
                        PetscReal rtol,
                        PetscReal atol,
                        PetscInt maxIt)

AMDiS/src/parallel/PetscHelper.h
@@ -77,11 +77,15 @@ namespace AMDiS {
    * task. The overall number of rows of local matrices A must be the
    * number of distributed rows in B.
    *
-   * \param[in]  ksp   inv(A) matrix given by a PETSc solver object.
-   * \param[in]  mat0  matrix B
-   * \param[out] mat1  resulting matrix C, is created inside the function
+   * \param[in]  mpiComm  MPI Communicator object (must fit with ksp)
+   * \param[in]  ksp      inv(A) matrix given by a PETSc solver object.
+   * \param[in]  mat0     matrix B
+   * \param[out] mat1     resulting matrix C, is created inside the function
    */
-  void blockMatMatSolve(KSP ksp, Mat mat0, Mat &mat1);
+  void blockMatMatSolve(MPI::Intracomm mpiComm, KSP ksp, Mat mat0, Mat &mat1);
 
   /** \brief
    * Converts a 2x2 nested matrix to a MATAIJ matrix (thus not nested).
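
The comment describes computing C = inv(A) * B by pulling columns out of B and solving each with the KSP. A self-contained illustration of that pattern in plain PETSc (a sketch under the assumptions that A is square and mat1 is already preallocated with a matching layout, cf. the MatCreateAIJ call in the .cc hunk; it is not the AMDiS implementation):

    // For each column j of B: b = B e_j, solve A x = b, write x into column j of C.
    Vec b, x;
    MatCreateVecs(mat0, PETSC_NULL, &b);        // b matches B's row layout
    VecDuplicate(b, &x);
    PetscInt globalRows, globalCols;
    MatGetSize(mat0, &globalRows, &globalCols);
    for (PetscInt j = 0; j < globalCols; j++) {
      MatGetColumnVector(mat0, b, j);           // extract column j of B
      KSPSolve(ksp, b, x);                      // x = inv(A) b
      PetscInt rStart, rEnd;
      VecGetOwnershipRange(x, &rStart, &rEnd);
      PetscScalar *vals;
      VecGetArray(x, &vals);
      for (PetscInt r = rStart; r < rEnd; r++)  // insert local part of x as column j
        MatSetValue(mat1, r, j, vals[r - rStart], INSERT_VALUES);
      VecRestoreArray(x, &vals);
    }
    MatAssemblyBegin(mat1, MAT_FINAL_ASSEMBLY);
    MatAssemblyEnd(mat1, MAT_FINAL_ASSEMBLY);
    VecDestroy(&b);
    VecDestroy(&x);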
@@ -92,10 +96,10 @@ namespace AMDiS {
   void matNestConvert(Mat matNest, Mat &mat);
 
   void setSolverWithLu(KSP ksp,
                        const char *kspPrefix,
                        KSPType kspType,
                        PCType pcType,
-                       MatSolverPackage matSolverPackage,
+                       const MatSolverPackage matSolverPackage,
                        PetscReal rtol = PETSC_DEFAULT,
                        PetscReal atol = PETSC_DEFAULT,
                        PetscInt maxIt = PETSC_DEFAULT);

AMDiS/src/parallel/PetscSolver.cc
@@ -23,7 +23,7 @@ namespace AMDiS {
   PetscSolver::PetscSolver(string name)
     : ParallelCoarseSpaceSolver(name),
       dofMap(FESPACE_WISE, true),
-      dofMapSd(FESPACE_WISE, true),
+      dofMapSubDomain(FESPACE_WISE, true),
       parallelDofMappingsRegistered(false),
       kspPrefix(""),
       removeRhsNullspace(false),
@@ -73,19 +73,21 @@ namespace AMDiS {
       parallelDofMappingsRegistered = true;
 
       dofMap.init(componentSpaces, feSpaces);
-      dofMap.setMpiComm(levelData.getMpiComm(0));
-      dofMap.setDofComm(meshDistributor->getDofComm(0));
+      dofMap.setMpiComm(levelData.getMpiComm(meshLevel));
+      dofMap.setDofComm(meshDistributor->getDofComm(meshLevel));
       dofMap.clear();
       meshDistributor->registerDofMap(dofMap);
 
-      if (nLevels > 1 && levelData.getMpiComm(1) != MPI::COMM_SELF) {
-        MSG("WARNING: MAKE GENERAL!\n");
-        dofMapSd.init(componentSpaces, feSpaces);
-        dofMapSd.setMpiComm(levelData.getMpiComm(1));
-        dofMapSd.setDofComm(meshDistributor->getDofComm(1));
-        dofMapSd.clear();
-        meshDistributor->registerDofMap(dofMapSd);
+      if (meshLevel + 1 < nLevels &&
+          levelData.getMpiComm(meshLevel + 1) != MPI::COMM_SELF) {
+        dofMapSubDomain.init(componentSpaces, feSpaces);
+        dofMapSubDomain.setMpiComm(levelData.getMpiComm(meshLevel + 1));
+        dofMapSubDomain.setDofComm(meshDistributor->getDofComm(meshLevel + 1));
+        dofMapSubDomain.clear();
+        meshDistributor->registerDofMap(dofMapSubDomain);
       }
 
       meshDistributor->updateParallelDofMappings();
     }
   }
@@ -114,6 +116,12 @@ namespace AMDiS {
   {
     FUNCNAME("PetscSolver::solveGlobal()");
 
+    int s, ls;
+    VecGetSize(rhs, &s);
+    VecGetLocalSize(rhs, &ls);
+    MSG("Solve global %d %d\n", ls, s);
+
     ERROR_EXIT("Not implemented!\n");
   }

AMDiS/src/parallel/PetscSolver.h
@@ -181,7 +181,10 @@ namespace AMDiS {
     vector<const FiniteElemSpace*> feSpaces;
 
     ///
-    ParallelDofMapping dofMap, dofMapSd;
+    ParallelDofMapping dofMap;
+
+    ///
+    ParallelDofMapping dofMapSubDomain;
 
     /// If the parallel DOF mappings of this solver are registered to the
     /// mesh distributor object, this variable is set to true to remove them

AMDiS/src/parallel/PetscSolverFeti.cc

(diff collapsed)

AMDiS/src/parallel/PetscSolverFeti.h
@@ -72,6 +72,9 @@ namespace AMDiS {
     /// Solve the system using FETI-DP method.
     void solvePetscMatrix(SystemVector &vec, AdaptInfo *adaptInfo);
 
+    /// Just for the super trick
+    void solveGlobal(Vec &rhs, Vec &sol);
+
     /// Destroys all matrix data structures.
     void destroyMatrixData();
@@ -150,7 +153,8 @@ namespace AMDiS {
     void createMatAugmentedLagrange();
 
-    bool testWirebasketEdge(BoundaryObject &edge, const FiniteElemSpace *feSpace);
+    bool testWirebasketEdge(BoundaryObject &edge,
+                            const FiniteElemSpace *feSpace);
 
     ///
     void createPreconditionerMatrix(Matrix<DOFMatrix*> *mat);
@@ -171,9 +175,33 @@ namespace AMDiS {
     /// Creates PETSc KSP solver object for the FETI-DP operator, \ref ksp_feti
     void createFetiKsp();
 
+    ///
+    void createFetiExactKsp();
+
+    ///
+    void createFetiInexactKsp();
+
+    ///
+    void createFetiInexactReducedKsp();
+
+    ///
+    void createFetiPreconLumped(PC pc);
+
+    ///
+    void createFetiPreconDirichlet(PC pc);
+
     /// Destroys FETI-DP operator, \ref ksp_feti
     void destroyFetiKsp();
 
+    ///
+    void destroyFetiExactKsp();
+
+    ///
+    void destroyFetiInexactKsp();
+
+    ///
+    void destroyFetiInexactReducedKsp();
+
     /// Create the null space of the FETI-DP operator (if there is one) and
     /// attaches it to the corresponding matrices and KSP objects.
     void createNullSpace();
@@ -204,25 +232,26 @@ namespace AMDiS {
     void recoverInterfaceSolution(Vec &vecInterface, SystemVector &vec);
 
     /** \brief
      * Solves the FETI-DP system globally, thus without reducing it to the
      * Lagrange multipliers. This should be used for debugging only to test
      * if the FETI-DP system is set up correctly.
      *
      * \param[out] vec  Solution DOF vectors.
      */
     void solveFetiMatrix(SystemVector &vec);
 
     ///