Commit f1724e2f in amdis
Authored May 15, 2012 by Thomas Witkowski

Before merging SubDomainSolver and PetscSolver together.
Parent: 5671aaf5
Showing 12 changed files with 437 additions and 333 deletions:
AMDiS/src/parallel/PetscProblemStat.cc              +4   -1
AMDiS/src/parallel/PetscSolver.cc                   +46  -19
AMDiS/src/parallel/PetscSolver.h                    +93  -18
AMDiS/src/parallel/PetscSolverFeti.cc               +28  -27
AMDiS/src/parallel/PetscSolverGlobalBlockMatrix.cc  +32  -32
AMDiS/src/parallel/PetscSolverGlobalBlockMatrix.h   +2   -0
AMDiS/src/parallel/PetscSolverGlobalMatrix.cc       +66  -77
AMDiS/src/parallel/PetscSolverGlobalMatrix.h        +2   -0
AMDiS/src/parallel/PetscSolverSchur.cc              +50  -57
AMDiS/src/parallel/PetscSolverSchur.h               +2   -0
AMDiS/src/parallel/SubDomainSolver.cc               +70  -85
AMDiS/src/parallel/SubDomainSolver.h                +42  -17
AMDiS/src/parallel/PetscProblemStat.cc

@@ -87,7 +87,10 @@ namespace AMDiS {
     double wtime = MPI::Wtime();

     if (createMatrixData) {
-      petscSolver->setMeshDistributor(meshDistributor);
+      petscSolver->setMeshDistributor(meshDistributor,
+                                      meshDistributor->getMpiComm(),
+                                      PETSC_COMM_SELF);
+      petscSolver->setDofMapping(&(meshDistributor->getDofMap()));
       petscSolver->fillPetscMatrix(systemMatrix);
     }
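The hunk above is the caller-side view of the new PetscSolver interface: the solver now receives an explicit pair of communicators and a DOF mapping before the matrix is filled. The fragment below just spells that sequence out with comments; it reuses the AMDiS objects from the hunk (petscSolver, meshDistributor, systemMatrix) and is therefore a sketch of the call order, not standalone code.

    // Caller-side setup after this commit (AMDiS context assumed).
    petscSolver->setMeshDistributor(meshDistributor,
                                    meshDistributor->getMpiComm(), // "global" communicator
                                    PETSC_COMM_SELF);              // "local" group: this rank alone
    // Interior DOF mapping; the coarse-space mapping defaults to NULL here.
    petscSolver->setDofMapping(&(meshDistributor->getDofMap()));
    petscSolver->fillPetscMatrix(systemMatrix);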
AMDiS/src/parallel/PetscSolver.cc

@@ -10,17 +10,21 @@
 // See also license.opensource.txt in the distribution.

 #include "AMDiS.h"
 #include "parallel/PetscSolver.h"
 #include "parallel/StdMpi.h"
 #include "parallel/MpiHelper.h"

 namespace AMDiS {

   using namespace std;

   PetscSolver::PetscSolver()
     : meshDistributor(NULL),
       dofMap(NULL),
       subdomainLevel(0),
       interiorMap(NULL),
       coarseSpaceMap(NULL),
       mpiRank(-1),
       kspPrefix(""),
       removeRhsNullSpace(false)
@@ -34,26 +38,49 @@ namespace AMDiS {
   }

-  void PetscSolver::printSolutionInfo(AdaptInfo *adaptInfo,
-                                      bool iterationCounter,
-                                      bool residual)
-  {
-    FUNCNAME("PetscSolver::printSolutionInfo()");
-
-    if (iterationCounter) {
-      int iterations = 0;
-      KSPGetIterationNumber(solver, &iterations);
-      MSG("  Number of iterations: %d\n", iterations);
-      adaptInfo->setSolverIterations(iterations);
-    }
-
-    if (residual) {
-      double norm = 0.0;
-      MatMult(petscMatrix, petscSolVec, petscTmpVec);
-      VecAXPY(petscTmpVec, -1.0, petscRhsVec);
-      VecNorm(petscTmpVec, NORM_2, &norm);
-      MSG("  Residual norm: %e\n", norm);
-    }
-  }
+  void PetscSolver::setDofMapping(ParallelDofMapping *interiorDofs,
+                                  ParallelDofMapping *coarseDofs)
+  {
+    interiorMap = interiorDofs;
+    coarseSpaceMap = coarseDofs;
+
+    if (mpiCommLocal.Get_size() == 1) {
+      rStartInterior = 0;
+      nGlobalOverallInterior = interiorMap->getOverallDofs();
+    } else {
+      int groupRowsInterior = 0;
+      if (mpiCommLocal.Get_rank() == 0)
+        groupRowsInterior = interiorMap->getOverallDofs();
+
+      mpi::getDofNumbering(mpiCommGlobal, groupRowsInterior,
+                           rStartInterior, nGlobalOverallInterior);
+
+      int tmp = 0;
+      if (mpiCommLocal.Get_rank() == 0)
+        tmp = rStartInterior;
+
+      mpiCommLocal.Allreduce(&tmp, &rStartInterior, 1, MPI_INT, MPI_SUM);
+    }
+  }
+
+  void PetscSolver::solve(Vec &rhs, Vec &sol)
+  {
+    FUNCNAME("PetscSolver::solve()");
+
+    PetscErrorCode solverError = KSPSolve(kspInterior, rhs, sol);
+    if (solverError != 0) {
+      AMDiS::finalize();
+      exit(-1);
+    }
+  }
+
+  void PetscSolver::solveGlobal(Vec &rhs, Vec &sol)
+  {
+    FUNCNAME("PetscSolver::solveGlobal()");
+
+    ERROR_EXIT("Not implemented!\n");
+  }
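The else-branch of setDofMapping above is the multilevel path: only rank 0 of the local group contributes the group's interior size to the global numbering, and the resulting start offset is then spread to the rest of the group by a SUM-Allreduce in which every other rank contributes zero. The standalone MPI sketch below (using the MPI C++ bindings that AMDiS itself uses here) reproduces that pattern so it can be run in isolation. It is an illustration only: the grouping into subdomains and the per-group size are made up, and mpi::getDofNumbering's internals are not part of this diff, so the prefix sum used here is just one plausible way to obtain rStartInterior and nGlobalOverallInterior.

    #include <mpi.h>
    #include <cstdio>

    int main(int argc, char **argv)
    {
      MPI::Init(argc, argv);

      MPI::Intracomm mpiCommGlobal = MPI::COMM_WORLD;
      int worldRank = mpiCommGlobal.Get_rank();

      // Illustrative grouping: two consecutive ranks form one "local" group.
      MPI::Intracomm mpiCommLocal = mpiCommGlobal.Split(worldRank / 2, worldRank);

      // Only the local root reports the group's interior size; the others send 0.
      int groupRowsInterior = (mpiCommLocal.Get_rank() == 0) ? 100 : 0;

      // Assumed stand-in for mpi::getDofNumbering: an inclusive prefix sum gives the
      // end of this group's range, subtracting the own contribution gives the start;
      // a global sum gives the overall number of interior rows.
      int inclusiveEnd = 0, nGlobalOverallInterior = 0;
      mpiCommGlobal.Scan(&groupRowsInterior, &inclusiveEnd, 1, MPI_INT, MPI_SUM);
      int rStartInterior = inclusiveEnd - groupRowsInterior;
      mpiCommGlobal.Allreduce(&groupRowsInterior, &nGlobalOverallInterior, 1,
                              MPI_INT, MPI_SUM);

      // The trick from the diff: the offset is only correct on the local root, so a
      // SUM-Allreduce inside the group hands it to all group members.
      int tmp = (mpiCommLocal.Get_rank() == 0) ? rStartInterior : 0;
      mpiCommLocal.Allreduce(&tmp, &rStartInterior, 1, MPI_INT, MPI_SUM);

      std::printf("rank %d: rStartInterior = %d of %d\n",
                  worldRank, rStartInterior, nGlobalOverallInterior);

      MPI::Finalize();
      return 0;
    }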
@@ -63,13 +90,13 @@ namespace AMDiS {
     FUNCNAME("PetscSolver::copyVec()");

     IS originIs, destIs;
-    ISCreateGeneral(mpiComm,
+    ISCreateGeneral(mpiCommGlobal,
                     originIndex.size(),
                     &(originIndex[0]),
                     PETSC_USE_POINTER,
                     &originIs);

-    ISCreateGeneral(mpiComm,
+    ISCreateGeneral(mpiCommGlobal,
                     destIndex.size(),
                     &(destIndex[0]),
                     PETSC_USE_POINTER,
AMDiS/src/parallel/PetscSolver.h

@@ -50,15 +50,24 @@ namespace AMDiS {
     virtual ~PetscSolver() {}

-    void setMeshDistributor(MeshDistributor *m)
+    void setMeshDistributor(MeshDistributor *m,
+                            MPI::Intracomm mpiComm0,
+                            MPI::Intracomm mpiComm1)
     {
       meshDistributor = m;
       dofMap = &(meshDistributor->getDofMap());
-      mpiRank = meshDistributor->getMpiRank();
-      mpiComm = meshDistributor->getMpiComm();
-      mpiSelfComm = PETSC_COMM_SELF;
+      mpiCommGlobal = mpiComm0;
+      mpiCommLocal = mpiComm1;
+      mpiRank = mpiCommGlobal.Get_rank();
     }

+    void setLevel(int l)
+    {
+      subdomainLevel = l;
+    }
+
+    void setDofMapping(ParallelDofMapping *interiorDofs,
+                       ParallelDofMapping *coarseDofs = NULL);
+
     /** \brief
      * Create a PETSc matrix. The given DOF matrices are used to create the nnz
      * structure of the PETSc matrix and the values are transfered to it.
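setMeshDistributor now takes a pair of intracommunicators instead of deriving everything from the MeshDistributor: mpiComm0 becomes mpiCommGlobal (all ranks participating in the interior problem) and mpiComm1 becomes mpiCommLocal (the ranks forming one subdomain group; PETSC_COMM_SELF in the single-level call shown earlier). The runnable sketch below shows how such a global/local pair can relate to each other via a communicator split. It illustrates only the relationship; how AMDiS's MeshLevelData actually builds its level communicators is not part of this diff, and the group size of four is an arbitrary choice.

    #include <mpi.h>
    #include <cstdio>

    int main(int argc, char **argv)
    {
      MPI::Init(argc, argv);

      // "Global" communicator: every rank of the interior problem.
      MPI::Intracomm mpiComm0 = MPI::COMM_WORLD;
      int rank = mpiComm0.Get_rank();

      // "Local" communicator: split the global ranks into groups of four
      // (stand-in for one mesh-level subdomain group).
      MPI::Intracomm mpiComm1 = mpiComm0.Split(rank / 4, rank);

      std::printf("global rank %d -> local rank %d of %d\n",
                  rank, mpiComm1.Get_rank(), mpiComm1.Get_size());

      MPI::Finalize();
      return 0;
    }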
@@ -77,6 +86,10 @@ namespace AMDiS {
     /// Use PETSc to solve the linear system of equations
     virtual void solvePetscMatrix(SystemVector &vec, AdaptInfo *adaptInfo) = 0;

+    virtual void solve(Vec &rhs, Vec &sol);
+
+    virtual void solveGlobal(Vec &rhs, Vec &sol);
+
     /// Destroys all matrix data structures.
     virtual void destroyMatrixData() = 0;
@@ -90,12 +103,12 @@ namespace AMDiS {
     KSP getSolver()
     {
-      return solver;
+      return kspInterior;
     }

     PC getPc()
     {
-      return pc;
+      return pcInterior;
     }

     void setKspPrefix(std::string s)
@@ -108,11 +121,63 @@ namespace AMDiS {
       removeRhsNullSpace = b;
     }

   protected:
-    void printSolutionInfo(AdaptInfo *adaptInfo,
-                           bool iterationCounter = true,
-                           bool residual = true);
-
+    inline bool isCoarseSpace(const FiniteElemSpace *feSpace,
+                              DegreeOfFreedom dof)
+    {
+      FUNCNAME("SubDomainSolver::isCoarseSpace()");
+      TEST_EXIT_DBG(coarseSpaceMap)
+        ("Subdomain solver does not contain a coarse space!\n");
+
+      return (*coarseSpaceMap)[feSpace].isSet(dof);
+    }
+
+    inline Vec& getRhsCoarseSpace()
+    {
+      FUNCNAME("SubDomainSolver::getRhsCoarseSpace()");
+      TEST_EXIT_DBG(coarseSpaceMap)
+        ("Subdomain solver does not contain a coarse space!\n");
+
+      return rhsCoarseSpace;
+    }
+
+    inline Vec& getRhsInterior()
+    {
+      return rhsInterior;
+    }
+
+    inline Mat& getMatIntInt()
+    {
+      return matIntInt;
+    }
+
+    inline Mat& getMatCoarseCoarse()
+    {
+      FUNCNAME("SubDomainSolver::getMatCoarseCoarse()");
+      TEST_EXIT_DBG(coarseSpaceMap)
+        ("Subdomain solver does not contain a coarse space!\n");
+
+      return matCoarseCoarse;
+    }
+
+    inline Mat& getMatIntCoarse()
+    {
+      FUNCNAME("SubDomainSolver::getMatIntCoarse()");
+      TEST_EXIT_DBG(coarseSpaceMap)
+        ("Subdomain solver does not contain a coarse space!\n");
+
+      return matIntCoarse;
+    }
+
+    inline Mat& getMatCoarseInt()
+    {
+      FUNCNAME("SubDomainSolver::getMatCoarseInt()");
+      TEST_EXIT_DBG(coarseSpaceMap)
+        ("Subdomain solver does not contain a coarse space!\n");
+
+      return matCoarseInt;
+    }
+
   protected:
     /** \brief
      * Copies between to PETSc vectors by using different index sets for the
     * origin and the destination vectors.
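These accessors hand derived solvers the coarse-space test plus the four matrix blocks (interior/interior, coarse/coarse and the two coupling blocks) that replace the single petscMatrix. As a rough illustration of how they fit together, the fragment below routes one matrix entry to the block matching its row and column DOF classes. This is not code from the commit (the real assembly lives in SubDomainSolver.cc, whose hunks are not listed here), and it assumes the AMDiS types and the protected members declared above, so it is a sketch of the intended use rather than standalone code.

    // Hypothetical helper inside a PetscSolver subclass (illustration only).
    void addEntry(const FiniteElemSpace *feSpace,
                  DegreeOfFreedom rowDof, DegreeOfFreedom colDof,
                  int petscRow, int petscCol, double value)
    {
      bool rowCoarse = isCoarseSpace(feSpace, rowDof);
      bool colCoarse = isCoarseSpace(feSpace, colDof);

      // Pick the block that couples the two DOF classes.
      Mat &block = rowCoarse
        ? (colCoarse ? getMatCoarseCoarse() : getMatCoarseInt())
        : (colCoarse ? getMatIntCoarse()    : getMatIntInt());

      MatSetValue(block, petscRow, petscCol, value, ADD_VALUES);
    }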
@@ -140,26 +205,36 @@ namespace AMDiS {
   protected:
     MeshDistributor *meshDistributor;

     ParallelDofMapping *dofMap;

+    int subdomainLevel;
+
+    int rStartInterior;
+
+    int nGlobalOverallInterior;
+
+    ParallelDofMapping *interiorMap;
+
+    ParallelDofMapping *coarseSpaceMap;
+
     int mpiRank;

-    MPI::Intracomm mpiComm;
+    MPI::Intracomm mpiCommGlobal;

-    MPI::Intracomm mpiSelfComm;
+    MPI::Intracomm mpiCommLocal;

     /// Petsc's matrix structure.
-    Mat petscMatrix;
+    Mat matIntInt, matCoarseCoarse, matIntCoarse, matCoarseInt;

     /// PETSc's vector structures for the rhs vector, the solution vector and a
     /// temporary vector for calculating the final residuum.
-    Vec petscRhsVec, petscSolVec, petscTmpVec;
+    Vec rhsInterior;
+
+    Vec rhsCoarseSpace;

     /// PETSc solver object
-    KSP solver;
+    KSP kspInterior;

     /// PETSc preconditioner object
-    PC pc;
+    PC pcInterior;

     /// KSP database prefix
     string kspPrefix;
AMDiS/src/parallel/PetscSolverFeti.cc

@@ -233,12 +233,13 @@ namespace AMDiS {
     MeshLevelData& levelData = meshDistributor->getMeshLevelData();

     if (subDomainSolver == NULL) {
+      subDomainSolver = new SubDomainSolver();
+
       if (meshLevel == 0) {
-        subDomainSolver = new SubDomainSolver(meshDistributor, mpiComm, mpiSelfComm);
+        subDomainSolver->setMeshDistributor(meshDistributor, mpiCommGlobal, mpiCommLocal);
       } else {
-        subDomainSolver = new SubDomainSolver(meshDistributor,
-                                              levelData.getMpiComm(meshLevel - 1),
-                                              levelData.getMpiComm(meshLevel));
+        subDomainSolver->setMeshDistributor(meshDistributor,
+                                            levelData.getMpiComm(meshLevel - 1),
+                                            levelData.getMpiComm(meshLevel));
         subDomainSolver->setLevel(meshLevel);
       }
@@ -350,7 +351,7 @@ namespace AMDiS {
     if (levelData.getMpiComm(1).Get_rank() == 0)
       groupRowsInterior = localDofMap.getOverallDofs();

-    mpi::getDofNumbering(mpiComm, groupRowsInterior,
+    mpi::getDofNumbering(mpiCommGlobal, groupRowsInterior,
                          rStartInterior, nGlobalOverallInterior);

     int tmp = 0;

@@ -383,7 +384,7 @@ namespace AMDiS {
     }

     // If multi level test, inform sub domain solver about coarse space.
-    subDomainSolver->setDofMapping(&primalDofMap, &localDofMap);
+    subDomainSolver->setDofMapping(&localDofMap, &primalDofMap);
   }

@@ -463,7 +464,7 @@ namespace AMDiS {
     map<int, std::set<DegreeOfFreedom> > sdRankDofs;

     if (meshLevel > 0) {
-      StdMpi<vector<int> > stdMpi(mpiComm);
+      StdMpi<vector<int> > stdMpi(mpiCommGlobal);

       for (DofComm::Iterator it(meshDistributor->getDofComm().getRecvDofs(),
                                 meshLevel, feSpace);

@@ -645,7 +646,7 @@ namespace AMDiS {
     // === Create distributed matrix for Lagrange constraints. ===

-    MatCreateMPIAIJ(mpiComm,
+    MatCreateMPIAIJ(mpiCommGlobal,
                     lagrangeMap.getRankDofs(), localDofMap.getRankDofs(),
                     lagrangeMap.getOverallDofs(), nGlobalOverallInterior,
                     2, PETSC_NULL, 2, PETSC_NULL,

@@ -705,16 +706,16 @@ namespace AMDiS {
     schurPrimalData.subSolver = subDomainSolver;

-    VecCreateMPI(mpiComm,
+    VecCreateMPI(mpiCommGlobal,
                  localDofMap.getRankDofs(), nGlobalOverallInterior,
                  &(schurPrimalData.tmp_vec_b));
-    VecCreateMPI(mpiComm,
+    VecCreateMPI(mpiCommGlobal,
                  primalDofMap.getRankDofs(), primalDofMap.getOverallDofs(),
                  &(schurPrimalData.tmp_vec_primal));

-    MatCreateShell(mpiComm,
+    MatCreateShell(mpiCommGlobal,
                    primalDofMap.getRankDofs(), primalDofMap.getRankDofs(),
                    primalDofMap.getOverallDofs(),

@@ -724,7 +725,7 @@ namespace AMDiS {
     MatShellSetOperation(mat_schur_primal, MATOP_MULT,
                          (void(*)(void)) petscMultMatSchurPrimal);
-    KSPCreate(mpiComm, &ksp_schur_primal);
+    KSPCreate(mpiCommGlobal, &ksp_schur_primal);
     KSPSetOperators(ksp_schur_primal, mat_schur_primal, mat_schur_primal,
                     SAME_NONZERO_PATTERN);
     KSPSetOptionsPrefix(ksp_schur_primal, "schur_primal_");
     KSPSetType(ksp_schur_primal, KSPGMRES);
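The Schur-primal and FETI operators above are matrix-free: MatCreateShell builds a Mat whose action is supplied by a callback (petscMultMatSchurPrimal, petscMultMatFeti), and the shell is then handed to a KSP like any assembled matrix. The self-contained PETSc sketch below shows that wiring with a trivial callback that merely scales its input; the callback bodies used by the FETI solver are not part of this diff, and the sizes, the scaling factor and the ShellCtx type are invented for the illustration. A matrix-free operator also needs a preconditioner that does not require matrix entries, which is why the sketch sets PCNONE explicitly.

    #include <petscksp.h>

    // Context attached to the shell matrix (the FETI code stores its
    // SchurPrimalData / FetiData structs in the same slot).
    struct ShellCtx {
      PetscScalar scale;
    };

    // MATOP_MULT callback: y = scale * x.  Shell mult callbacks have this
    // (Mat, Vec, Vec) signature.
    PetscErrorCode shellMult(Mat mat, Vec x, Vec y)
    {
      void *ptr;
      MatShellGetContext(mat, &ptr);
      ShellCtx *ctx = static_cast<ShellCtx*>(ptr);
      VecCopy(x, y);
      VecScale(y, ctx->scale);
      return 0;
    }

    int main(int argc, char **argv)
    {
      PetscInitialize(&argc, &argv, PETSC_NULL, PETSC_NULL);

      ShellCtx ctx;
      ctx.scale = 2.0;

      // Matrix-free operator: 100 local rows/columns per rank.
      Mat A;
      MatCreateShell(PETSC_COMM_WORLD, 100, 100, PETSC_DETERMINE, PETSC_DETERMINE,
                     &ctx, &A);
      MatShellSetOperation(A, MATOP_MULT, (void(*)(void)) shellMult);

      // Same KSP setup pattern as for ksp_schur_primal / ksp_feti above.
      KSP ksp;
      PC pc;
      KSPCreate(PETSC_COMM_WORLD, &ksp);
      KSPSetOperators(ksp, A, A, SAME_NONZERO_PATTERN);
      KSPSetType(ksp, KSPGMRES);
      KSPGetPC(ksp, &pc);
      PCSetType(pc, PCNONE);
      KSPSetFromOptions(ksp);

      Vec b, sol;
      VecCreateMPI(PETSC_COMM_WORLD, 100, PETSC_DETERMINE, &b);
      VecDuplicate(b, &sol);
      VecSet(b, 1.0);
      KSPSolve(ksp, b, sol);   // solves (2 I) sol = b

      VecDestroy(&b);
      VecDestroy(&sol);
      MatDestroy(&A);
      KSPDestroy(&ksp);
      PetscFinalize();
      return 0;
    }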
@@ -742,7 +743,7 @@ namespace AMDiS {
     int nRowsRankB = localDofMap.getRankDofs();

     Mat matBPi;
-    MatCreateMPIAIJ(mpiComm,
+    MatCreateMPIAIJ(mpiCommGlobal,
                     nRowsRankB, nRowsRankPrimal,
                     nGlobalOverallInterior, nRowsOverallPrimal,
                     30, PETSC_NULL, 30, PETSC_NULL, &matBPi);

@@ -810,7 +811,7 @@ namespace AMDiS {
     MatGetInfo(mat_schur_primal, MAT_GLOBAL_SUM, &minfo);
     MSG("Schur primal matrix nnz = %f\n", minfo.nz_used);

-    KSPCreate(mpiComm, &ksp_schur_primal);
+    KSPCreate(mpiCommGlobal, &ksp_schur_primal);
     KSPSetOperators(ksp_schur_primal, mat_schur_primal, mat_schur_primal,
                     SAME_NONZERO_PATTERN);
     KSPSetOptionsPrefix(ksp_schur_primal, "schur_primal_");

@@ -853,20 +854,20 @@ namespace AMDiS {
     fetiData.subSolver = subDomainSolver;
     fetiData.ksp_schur_primal = &ksp_schur_primal;

-    VecCreateMPI(mpiComm,
+    VecCreateMPI(mpiCommGlobal,
                  localDofMap.getRankDofs(), nGlobalOverallInterior,
                  &(fetiData.tmp_vec_b));
-    VecCreateMPI(mpiComm,
+    VecCreateMPI(mpiCommGlobal,
                  lagrangeMap.getRankDofs(), lagrangeMap.getOverallDofs(),
                  &(fetiData.tmp_vec_lagrange));
-    VecCreateMPI(mpiComm,
+    VecCreateMPI(mpiCommGlobal,
                  primalDofMap.getRankDofs(), primalDofMap.getOverallDofs(),
                  &(fetiData.tmp_vec_primal));

-    MatCreateShell(mpiComm,
+    MatCreateShell(mpiCommGlobal,
                    lagrangeMap.getRankDofs(), lagrangeMap.getRankDofs(),
                    lagrangeMap.getOverallDofs(),

@@ -875,7 +876,7 @@ namespace AMDiS {
     MatShellSetOperation(mat_feti, MATOP_MULT, (void(*)(void)) petscMultMatFeti);

-    KSPCreate(mpiComm, &ksp_feti);
+    KSPCreate(mpiCommGlobal, &ksp_feti);
     KSPSetOperators(ksp_feti, mat_feti, mat_feti, SAME_NONZERO_PATTERN);
     KSPSetOptionsPrefix(ksp_feti, "feti_");
     KSPSetType(ksp_feti, KSPGMRES);

@@ -913,7 +914,7 @@ namespace AMDiS {
     fetiDirichletPreconData.mat_duals_interior = &mat_duals_interior;
     fetiDirichletPreconData.ksp_interior = &ksp_interior;

-    VecCreateMPI(mpiComm,
+    VecCreateMPI(mpiCommGlobal,
                  localDofMap.getRankDofs(), nGlobalOverallInterior,
                  &(fetiDirichletPreconData.tmp_vec_b));

@@ -958,7 +959,7 @@ namespace AMDiS {
       }
     }

-    VecCreateMPI(mpiComm,
+    VecCreateMPI(mpiCommGlobal,
                  localDofMap.getRankDofs(), localDofMap.getOverallDofs(),
                  &(fetiLumpedPreconData.tmp_vec_b));

@@ -1343,18 +1344,18 @@ namespace AMDiS {
     // Some temporary vectors.
     Vec tmp_b0, tmp_b1, tmp_lagrange0, tmp_primal0, tmp_primal1;
-    VecCreateMPI(mpiComm,
+    VecCreateMPI(mpiCommGlobal,
                  localDofMap.getRankDofs(), nGlobalOverallInterior, &tmp_b0);
-    VecCreateMPI(mpiComm,
+    VecCreateMPI(mpiCommGlobal,
                  localDofMap.getRankDofs(), nGlobalOverallInterior, &tmp_b1);
-    VecCreateMPI(mpiComm,
+    VecCreateMPI(mpiCommGlobal,
                  primalDofMap.getRankDofs(), primalDofMap.getOverallDofs(),
                  &tmp_primal0);
-    VecCreateMPI(mpiComm,
+    VecCreateMPI(mpiCommGlobal,
                  primalDofMap.getRankDofs(), primalDofMap.getOverallDofs(),
                  &tmp_primal1);
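Every vector that used to live on mpiComm is now created on mpiCommGlobal, always with the pattern VecCreateMPI(comm, rank-local size, overall size, &vec). The self-contained PETSc sketch below shows what those two size arguments mean and how the communicator determines which ranks share the vector; the per-rank sizes are invented for the example and have nothing to do with the FETI DOF maps.

    #include <petscksp.h>

    int main(int argc, char **argv)
    {
      PetscInitialize(&argc, &argv, PETSC_NULL, PETSC_NULL);

      int rank;
      MPI_Comm_rank(PETSC_COMM_WORLD, &rank);

      // Local size n = rank + 1 entries on this rank; PETSC_DETERMINE lets PETSc
      // sum the local sizes into the global size (the FETI code instead passes
      // both numbers explicitly, e.g. localDofMap.getRankDofs() and
      // nGlobalOverallInterior).
      Vec v;
      VecCreateMPI(PETSC_COMM_WORLD, rank + 1, PETSC_DETERMINE, &v);

      PetscInt low, high, N;
      VecGetOwnershipRange(v, &low, &high);
      VecGetSize(v, &N);
      PetscPrintf(PETSC_COMM_SELF, "[%d] owns rows %d..%d of %d\n",
                  rank, (int) low, (int) high - 1, (int) N);

      VecDestroy(&v);
      PetscFinalize();
      return 0;
    }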
AMDiS/src/parallel/PetscSolverGlobalBlockMatrix.cc

@@ -21,14 +21,14 @@ namespace AMDiS {
     FUNCNAME("PetscSolverGlobalBlockMatrix::fillPetscMatrix()");

     TEST_EXIT_DBG(meshDistributor)("No mesh distributor object defined!\n");
-    TEST_EXIT_DBG(dofMap)("No parallel mapping object defined!\n");
+    TEST_EXIT_DBG(interiorMap)("No parallel mapping object defined!\n");
     TEST_EXIT_DBG(mat)("No DOF matrix defined!\n");

     double wtime = MPI::Wtime();

     const FiniteElemSpace *feSpace = meshDistributor->getFeSpace(0);
     nComponents = mat->getNumRows();
-    int nRankRows = (*dofMap)[feSpace].nRankDofs;
-    int nOverallRows = (*dofMap)[feSpace].nOverallDofs;
+    int nRankRows = (*interiorMap)[feSpace].nRankDofs;
+    int nOverallRows = (*interiorMap)[feSpace].nOverallDofs;

 #if (DEBUG != 0)
     MSG("Fill petsc matrix 1 needed %.5f seconds\n", MPI::Wtime() - wtime);

@@ -54,7 +54,7 @@ namespace AMDiS {
     for (int i = 0; i < nBlocks; i++)
       for (int j = 0; j < nBlocks; j++)
-        MatCreateMPIAIJ(mpiComm,
+        MatCreateMPIAIJ(mpiCommGlobal,
                         nRankRows * blockSize[i], nRankRows * blockSize[j],
                         nOverallRows * blockSize[i], nOverallRows * blockSize[j],
                         30 * blockSize[i], PETSC_NULL,

@@ -80,21 +80,21 @@ namespace AMDiS {
     }

-    MatCreateNest(mpiComm,
-                  nBlocks, PETSC_NULL, nBlocks, PETSC_NULL,
-                  &(nestMat[0]), &petscMatrix);
+    MatCreateNest(mpiCommGlobal,
+                  nBlocks, PETSC_NULL, nBlocks, PETSC_NULL,
+                  &(nestMat[0]), &matIntInt);

 #if (DEBUG != 0)
     MSG("Fill petsc matrix 2 needed %.5f seconds\n", MPI::Wtime() - wtime);
 #endif

-    MatAssemblyBegin(petscMatrix, MAT_FINAL_ASSEMBLY);
-    MatAssemblyEnd(petscMatrix, MAT_FINAL_ASSEMBLY);
+    MatAssemblyBegin(matIntInt, MAT_FINAL_ASSEMBLY);
+    MatAssemblyEnd(matIntInt, MAT_FINAL_ASSEMBLY);

     // === Init PETSc solver. ===
-    KSPCreate(mpiComm, &solver);
-    KSPSetOperators(solver, petscMatrix, petscMatrix, SAME_NONZERO_PATTERN);
-    KSPSetFromOptions(solver);
+    KSPCreate(mpiCommGlobal, &kspInterior);
+    KSPSetOperators(kspInterior, matIntInt, matIntInt, SAME_NONZERO_PATTERN);
+    KSPSetFromOptions(kspInterior);

     MSG("Fill petsc matrix needed %.5f seconds\n", MPI::Wtime() - wtime);
   }

@@ -108,13 +108,13 @@ namespace AMDiS {
     nComponents = vec->getSize();
     const FiniteElemSpace *feSpace = meshDistributor->getFeSpace(0);
-    int nRankRows = (*dofMap)[feSpace].nRankDofs;
-    int nOverallRows = (*dofMap)[feSpace].nOverallDofs;
+    int nRankRows = (*interiorMap)[feSpace].nRankDofs;
+    int nOverallRows = (*interiorMap)[feSpace].nOverallDofs;

     nestVec.resize(nComponents);

     for (int i = 0; i < nComponents; i++) {
-      VecCreateMPI(mpiComm, nRankRows, nOverallRows, &(nestVec[i]));
+      VecCreateMPI(mpiCommGlobal, nRankRows, nOverallRows, &(nestVec[i]));

       setDofVector(nestVec[i], vec->getDOFVector(i));

@@ -122,11 +122,11 @@ namespace AMDiS {
       VecAssemblyEnd(nestVec[i]);
     }

-    VecCreateNest(mpiComm, nComponents, PETSC_NULL,
-                  &(nestVec[0]), &petscRhsVec);
+    VecCreateNest(mpiCommGlobal, nComponents, PETSC_NULL,
+                  &(nestVec[0]), &rhsInterior);

-    VecAssemblyBegin(petscRhsVec);
-    VecAssemblyEnd(petscRhsVec);
+    VecAssemblyBegin(rhsInterior);
+    VecAssemblyEnd(rhsInterior);
   }

@@ -135,14 +135,14 @@ namespace AMDiS {
   {
     FUNCNAME("PetscSolverGlobalBlockMatrix::solvePetscMatrix()");

-    KSPGetPC(solver, &pc);
-    setBlockPreconditioner(pc);
+    KSPGetPC(kspInterior, &pcInterior);
+    setBlockPreconditioner(pcInterior);

     const FiniteElemSpace *feSpace = meshDistributor->getFeSpace(0);
-    VecDuplicate(petscRhsVec, &petscSolVec);
+    VecDuplicate(rhsInterior, &petscSolVec);

     // PETSc.
-    KSPSolve(solver, petscRhsVec, petscSolVec);
+    solve(rhsInterior, petscSolVec);

     // === Transfere values from PETSc's solution vectors to the DOF vectors. ===
     for (int i = 0; i < nComponents; i++) {

@@ -151,11 +151,11 @@ namespace AMDiS {
       Vec tmp;
       VecNestGetSubVec(petscSolVec, i, &tmp);

-      int nRankDofs = (*dofMap)[feSpace].nRankDofs;
+      int nRankDofs = (*interiorMap)[feSpace].nRankDofs;
       PetscScalar *vecPointer;
       VecGetArray(tmp, &vecPointer);

-      DofMap& d = (*dofMap)[feSpace].getMap();
+      DofMap& d = (*interiorMap)[feSpace].getMap();
       for (DofMap::iterator it = d.begin(); it != d.end(); ++it)
         if (it->second.local != -1)
           dofvec[it->first] = vecPointer[it->second.local];

@@ -178,8 +178,8 @@ namespace AMDiS {
       if (nestMat[i] != PETSC_NULL)
         MatDestroy(&(nestMat[i]));

-    MatDestroy(&petscMatrix);
-    KSPDestroy(&solver);
+    MatDestroy(&matIntInt);
+    KSPDestroy(&kspInterior);
   }

@@ -187,7 +187,7 @@ namespace AMDiS {
   {
     FUNCNAME("PetscSolverGlobalBlockMatrix::destroyVectorData()");

-    VecDestroy(&petscRhsVec);
+    VecDestroy(&rhsInterior);

     for (int i = 0; i < nComponents; i++)
       VecDestroy(&(nestVec[i]));

@@ -217,8 +217,8 @@ namespace AMDiS {
     typedef traits::range_generator<row, Matrix>::type cursor_type;
     typedef traits::range_generator<nz, cursor_type>::type icursor_type;

-    int dispRowIndex = (*dofMap)[feSpace].nRankDofs * dispRowBlock;
-    int dispColIndex = (*dofMap)[feSpace].nRankDofs * dispColBlock;
+    int dispRowIndex = (*interiorMap)[feSpace].nRankDofs * dispRowBlock;
+    int dispColIndex = (*interiorMap)[feSpace].nRankDofs * dispColBlock;

     vector<int> cols;
     vector<double> values;

@@ -232,7 +232,7 @@ namespace AMDiS {
          cend = end<row>(mat->getBaseMatrix()); cursor != cend; ++cursor) {
       // Global index of the current row DOF.
-      int rowIndex = (*dofMap)[feSpace][*cursor].global + dispRowIndex;
+      int rowIndex = (*interiorMap)[feSpace][*cursor].global + dispRowIndex;

       cols.clear();
       values.clear();

@@ -240,7 +240,7 @@ namespace AMDiS {
       for (icursor_type icursor = begin<nz>(cursor),
              icend = end<nz>(cursor); icursor != icend; ++icursor) {
         // Global index of the current column index.
-        int colIndex = (*dofMap)[feSpace][col(*icursor)].global + dispColIndex;