Commit 0a2b884b, authored Apr 10, 2012 by Thomas Witkowski
Removed most COMM_WORLD in parallel code and replaced it with a communicator variable.
parent 0e4b89a6
Changes 7
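The change follows one pattern throughout: MeshDistributor fixes the communicator once in a member variable, and everything else (rank, size, PETSc object creation) derives from that variable instead of the hard-wired MPI::COMM_WORLD / PETSC_COMM_WORLD. Presumably this makes a later switch to a sub-communicator a one-line change. A minimal sketch of the pattern, assuming mpiComm is an MPI::Intracomm member as the MeshDistributor hunk below suggests; the real AMDiS class layout may differ:

#include <mpi.h>
#include <cstdio>

class MeshDistributor
{
public:
  MeshDistributor()
  {
    // Fix the communicator once; rank and size derive from it, so
    // swapping in a sub-communicator later touches only this line.
    mpiComm = MPI::COMM_WORLD;
    mpiRank = mpiComm.Get_rank();
    mpiSize = mpiComm.Get_size();
  }

  MPI::Intracomm mpiComm;   // hypothetical member layout for illustration
  int mpiRank, mpiSize;
};

int main(int argc, char** argv)
{
  MPI::Init(argc, argv);
  MeshDistributor md;
  std::printf("rank %d of %d\n", md.mpiRank, md.mpiSize);
  MPI::Finalize();
  return 0;
}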
AMDiS/src/parallel/MeshDistributor.cc
@@ -91,9 +91,9 @@ namespace AMDiS {
   {
     FUNCNAME("MeshDistributor::ParalleDomainBase()");
 
-    mpiRank = MPI::COMM_WORLD.Get_rank();
-    mpiSize = MPI::COMM_WORLD.Get_size();
-    mpiComm = MPI::COMM_WORLD;
+    mpiComm = MPI::COMM_WORLD;
+    mpiRank = mpiComm.Get_rank();
+    mpiSize = mpiComm.Get_size();
 
     Parameters::get(name + "->repartitioning", repartitioningAllowed);
     Parameters::get(name + "->debug output dir", debugOutputDir);

AMDiS/src/parallel/ParallelDebug.cc
@@ -874,9 +874,9 @@ namespace AMDiS {
       oss << "-" << counter;
     oss << ".vtu";
 
-    DOFVector<double> tmpa(feSpace, "tmp");
-    tmpa.set(MPI::COMM_WORLD.Get_rank());
-    VtkWriter::writeFile(&tmpa, oss.str());
+    DOFVector<double> tmp(feSpace, "tmp");
+    tmp.set(MPI::COMM_WORLD.Get_rank());
+    VtkWriter::writeFile(&tmp, oss.str());
   }

AMDiS/src/parallel/PetscSolver.cc
@@ -62,13 +62,13 @@ namespace AMDiS {
     FUNCNAME("PetscSolver::copyVec()");
 
     IS originIs, destIs;
-    ISCreateGeneral(PETSC_COMM_WORLD,
+    ISCreateGeneral(*mpiComm,
                     originIndex.size(),
                     &(originIndex[0]),
                     PETSC_USE_POINTER,
                     &originIs);
-    ISCreateGeneral(PETSC_COMM_WORLD,
+    ISCreateGeneral(*mpiComm,
                     destIndex.size(),
                     &(destIndex[0]),
                     PETSC_USE_POINTER,

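In the solver classes the communicator is evidently held by pointer, hence the *mpiComm in the replacements here and below. A short sketch of why that works with PETSc's C API, assuming the solver stores MPI::Intracomm *mpiComm pointing at the MeshDistributor's communicator (the helper name below is illustrative, not AMDiS code): the MPI C++ bindings give MPI::Intracomm an implicit conversion to MPI_Comm, so the dereferenced object slots directly into any PETSc routine that expects a communicator.

#include <petscvec.h>
#include <mpi.h>

// Hypothetical helper: create a distributed PETSc vector on the
// communicator the solver was handed, instead of PETSC_COMM_WORLD.
static Vec createRankVector(MPI::Intracomm *mpiComm,
                            int nRankRows, int nOverallRows)
{
  Vec v;
  // MPI::Intracomm converts implicitly to MPI_Comm, so *mpiComm is a
  // valid communicator argument for PETSc's C interface.
  VecCreateMPI(*mpiComm, nRankRows, nOverallRows, &v);
  return v;
}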
AMDiS/src/parallel/PetscSolverFeti.cc
@@ -422,7 +422,7 @@ namespace AMDiS {
     // === Create distributed matrix for Lagrange constraints. ===
 
-    MatCreateMPIAIJ(PETSC_COMM_WORLD,
+    MatCreateMPIAIJ(*mpiComm,
                     lagrangeMap.getRankDofs(), localDofMap.getRankDofs(),
                     lagrangeMap.getOverallDofs(), localDofMap.getOverallDofs(),
                     2, PETSC_NULL, 2, PETSC_NULL,

@@ -483,14 +483,14 @@ namespace AMDiS {
     schurPrimalData.mat_b_primal = &mat_b_primal;
     schurPrimalData.fetiSolver = this;
 
-    VecCreateMPI(PETSC_COMM_WORLD,
+    VecCreateMPI(*mpiComm,
                  localDofMap.getRankDofs(), localDofMap.getOverallDofs(),
                  &(schurPrimalData.tmp_vec_b));
-    VecCreateMPI(PETSC_COMM_WORLD,
+    VecCreateMPI(*mpiComm,
                  primalDofMap.getRankDofs(), primalDofMap.getOverallDofs(),
                  &(schurPrimalData.tmp_vec_primal));
 
-    MatCreateShell(PETSC_COMM_WORLD,
+    MatCreateShell(*mpiComm,
                    primalDofMap.getRankDofs(), primalDofMap.getRankDofs(),
                    primalDofMap.getOverallDofs(), primalDofMap.getOverallDofs(),
                    &schurPrimalData,

@@ -498,7 +498,7 @@ namespace AMDiS {
     MatShellSetOperation(mat_schur_primal, MATOP_MULT,
                          (void(*)(void))petscMultMatSchurPrimal);
 
-    KSPCreate(PETSC_COMM_WORLD, &ksp_schur_primal);
+    KSPCreate(*mpiComm, &ksp_schur_primal);
     KSPSetOperators(ksp_schur_primal, mat_schur_primal, mat_schur_primal, SAME_NONZERO_PATTERN);
     KSPSetOptionsPrefix(ksp_schur_primal, "schur_primal_");
     KSPSetType(ksp_schur_primal, KSPGMRES);

@@ -516,7 +516,7 @@ namespace AMDiS {
       int nRowsOverallB = localDofMap.getOverallDofs();
 
       Mat matBPi;
-      MatCreateMPIAIJ(PETSC_COMM_WORLD,
+      MatCreateMPIAIJ(*mpiComm,
                       nRowsRankB, nRowsRankPrimal,
                       nRowsOverallB, nRowsOverallPrimal,
                       30, PETSC_NULL, 30, PETSC_NULL, &matBPi);

@@ -539,9 +539,6 @@ namespace AMDiS {
         MatRestoreRow(mat_b_primal, row, &nCols, &cols, &values);
       }
 
-      int maxLocalPrimal = mat_b_primal_cols.size();
-      mpi::globalMax(maxLocalPrimal);
-
       TEST_EXIT(static_cast<int>(mat_b_primal_cols.size()) ==
                 primalDofMap.getLocalDofs())
         ("Should not happen!\n");

@@ -585,7 +582,7 @@ namespace AMDiS {
       MatGetInfo(mat_primal_primal, MAT_GLOBAL_SUM, &minfo);
       MSG("Schur primal matrix nnz = %f\n", minfo.nz_used);
 
-      KSPCreate(PETSC_COMM_WORLD, &ksp_schur_primal);
+      KSPCreate(*mpiComm, &ksp_schur_primal);
       KSPSetOperators(ksp_schur_primal, mat_primal_primal,
                       mat_primal_primal, SAME_NONZERO_PATTERN);
       KSPSetOptionsPrefix(ksp_schur_primal, "schur_primal_");

@@ -635,24 +632,24 @@ namespace AMDiS {
     fetiData.fetiSolver = this;
     fetiData.ksp_schur_primal = &ksp_schur_primal;
 
-    VecCreateMPI(PETSC_COMM_WORLD,
+    VecCreateMPI(*mpiComm,
                  localDofMap.getRankDofs(), localDofMap.getOverallDofs(),
                  &(fetiData.tmp_vec_b));
-    VecCreateMPI(PETSC_COMM_WORLD,
+    VecCreateMPI(*mpiComm,
                  lagrangeMap.getRankDofs(), lagrangeMap.getOverallDofs(),
                  &(fetiData.tmp_vec_lagrange));
-    VecCreateMPI(PETSC_COMM_WORLD,
+    VecCreateMPI(*mpiComm,
                  primalDofMap.getRankDofs(), primalDofMap.getOverallDofs(),
                  &(fetiData.tmp_vec_primal));
 
-    MatCreateShell(PETSC_COMM_WORLD,
+    MatCreateShell(*mpiComm,
                    lagrangeMap.getRankDofs(), lagrangeMap.getRankDofs(),
                    lagrangeMap.getOverallDofs(), lagrangeMap.getOverallDofs(),
                    &fetiData, &mat_feti);
     MatShellSetOperation(mat_feti, MATOP_MULT, (void(*)(void))petscMultMatFeti);
 
-    KSPCreate(PETSC_COMM_WORLD, &ksp_feti);
+    KSPCreate(*mpiComm, &ksp_feti);
     KSPSetOperators(ksp_feti, mat_feti, mat_feti, SAME_NONZERO_PATTERN);
     KSPSetOptionsPrefix(ksp_feti, "feti_");
     KSPSetType(ksp_feti, KSPGMRES);

@@ -687,7 +684,7 @@ namespace AMDiS {
       fetiDirichletPreconData.mat_duals_interior = &mat_duals_interior;
       fetiDirichletPreconData.ksp_interior = &ksp_interior;
 
-      VecCreateMPI(PETSC_COMM_WORLD,
+      VecCreateMPI(*mpiComm,
                    localDofMap.getRankDofs(), localDofMap.getOverallDofs(),
                    &(fetiDirichletPreconData.tmp_vec_b));
       MatGetVecs(mat_duals_duals, PETSC_NULL,

@@ -732,7 +729,7 @@ namespace AMDiS {
        }
      }
 
-      VecCreateMPI(PETSC_COMM_WORLD,
+      VecCreateMPI(*mpiComm,
                    localDofMap.getRankDofs(),
                    localDofMap.getOverallDofs(),
                    &(fetiLumpedPreconData.tmp_vec_b));

@@ -927,17 +924,17 @@ namespace AMDiS {
     MatCreateSeqAIJ(PETSC_COMM_SELF, nRowsRankB, nRowsRankB, 60, PETSC_NULL,
                     &mat_b_b);
 
-    MatCreateMPIAIJ(PETSC_COMM_WORLD,
+    MatCreateMPIAIJ(*mpiComm,
                     nRowsRankPrimal, nRowsRankPrimal,
                     nRowsOverallPrimal, nRowsOverallPrimal,
                     60, PETSC_NULL, 60, PETSC_NULL, &mat_primal_primal);
 
-    MatCreateMPIAIJ(PETSC_COMM_WORLD,
+    MatCreateMPIAIJ(*mpiComm,
                     nRowsRankB, nRowsRankPrimal,
                     nRowsOverallB, nRowsOverallPrimal,
                     60, PETSC_NULL, 60, PETSC_NULL, &mat_b_primal);
 
-    MatCreateMPIAIJ(PETSC_COMM_WORLD,
+    MatCreateMPIAIJ(*mpiComm,
                     nRowsRankPrimal, nRowsRankB,
                     nRowsOverallPrimal, nRowsOverallB,
                     30, PETSC_NULL, 30, PETSC_NULL, &mat_primal_b);

@@ -1222,9 +1219,9 @@ namespace AMDiS {
     vector<const FiniteElemSpace*> feSpaces = getFeSpaces(vec);
 
-    VecCreateMPI(PETSC_COMM_WORLD,
+    VecCreateMPI(*mpiComm,
                  localDofMap.getRankDofs(), localDofMap.getOverallDofs(), &f_b);
-    VecCreateMPI(PETSC_COMM_WORLD,
+    VecCreateMPI(*mpiComm,
                  primalDofMap.getRankDofs(), primalDofMap.getOverallDofs(),
                  &f_primal);

AMDiS/src/parallel/PetscSolverGlobalBlockMatrix.cc
@@ -53,7 +53,7 @@ namespace AMDiS {
     for (int i = 0; i < nBlocks; i++)
       for (int j = 0; j < nBlocks; j++)
-        MatCreateMPIAIJ(PETSC_COMM_WORLD,
+        MatCreateMPIAIJ(*mpiComm,
                         nRankRows * blockSize[i], nRankRows * blockSize[j],
                         nOverallRows * blockSize[i], nOverallRows * blockSize[j],
                         30 * blockSize[i], PETSC_NULL,

@@ -79,7 +79,7 @@ namespace AMDiS {
     }
 
-    MatCreateNest(PETSC_COMM_WORLD,
+    MatCreateNest(*mpiComm,
                   nBlocks, PETSC_NULL, nBlocks, PETSC_NULL,
                   &(nestMat[0]), &petscMatrix);

@@ -91,7 +91,7 @@ namespace AMDiS {
     MatAssemblyEnd(petscMatrix, MAT_FINAL_ASSEMBLY);
 
     // === Init PETSc solver. ===
-    KSPCreate(PETSC_COMM_WORLD, &solver);
+    KSPCreate(*mpiComm, &solver);
     KSPSetOperators(solver, petscMatrix, petscMatrix, SAME_NONZERO_PATTERN);
     KSPSetFromOptions(solver);

@@ -113,7 +113,7 @@ namespace AMDiS {
     nestVec.resize(nComponents);
 
     for (int i = 0; i < nComponents; i++) {
-      VecCreateMPI(PETSC_COMM_WORLD, nRankRows, nOverallRows, &(nestVec[i]));
+      VecCreateMPI(*mpiComm, nRankRows, nOverallRows, &(nestVec[i]));
 
       setDofVector(nestVec[i], vec->getDOFVector(i));

@@ -121,7 +121,7 @@ namespace AMDiS {
       VecAssemblyEnd(nestVec[i]);
     }
 
-    VecCreateNest(PETSC_COMM_WORLD, nComponents, PETSC_NULL,
+    VecCreateNest(*mpiComm, nComponents, PETSC_NULL,
                   &(nestVec[0]), &petscRhsVec);
 
     VecAssemblyBegin(petscRhsVec);

AMDiS/src/parallel/PetscSolverGlobalMatrix.cc
@@ -31,8 +31,8 @@ namespace AMDiS {
     // === Create PETSc vector (solution and a temporary vector). ===
-    VecCreateMPI(PETSC_COMM_WORLD, nRankRows, nOverallRows, &petscSolVec);
-    VecCreateMPI(PETSC_COMM_WORLD, nRankRows, nOverallRows, &petscTmpVec);
+    VecCreateMPI(*mpiComm, nRankRows, nOverallRows, &petscSolVec);
+    VecCreateMPI(*mpiComm, nRankRows, nOverallRows, &petscTmpVec);
 
     int testddd = 1;
     Parameters::get("block size", testddd);

@@ -70,7 +70,7 @@ namespace AMDiS {
     // === Create PETSc matrix with the computed nnz data structure. ===
-    MatCreateMPIAIJ(PETSC_COMM_WORLD, nRankRows, nRankRows,
+    MatCreateMPIAIJ(*mpiComm, nRankRows, nRankRows,
                     nOverallRows, nOverallRows,
                     0, d_nnz, 0, o_nnz, &petscMatrix);

@@ -109,7 +109,7 @@ namespace AMDiS {
     MatAssemblyEnd(petscMatrix, MAT_FINAL_ASSEMBLY);
 
     // === Init PETSc solver. ===
-    KSPCreate(PETSC_COMM_WORLD, &solver);
+    KSPCreate(*mpiComm, &solver);
     KSPGetPC(solver, &pc);
     KSPSetOperators(solver, petscMatrix, petscMatrix, SAME_NONZERO_PATTERN);
     KSPSetTolerances(solver, 0.0, 1e-8, PETSC_DEFAULT, PETSC_DEFAULT);

@@ -137,7 +137,7 @@ namespace AMDiS {
     int nRankRows = meshDistributor->getNumberRankDofs(feSpaces);
     int nOverallRows = meshDistributor->getNumberOverallDofs(feSpaces);
 
-    VecCreateMPI(PETSC_COMM_WORLD, nRankRows, nOverallRows, &petscRhsVec);
+    VecCreateMPI(*mpiComm, nRankRows, nOverallRows, &petscRhsVec);
 
     int testddd = 1;
     Parameters::get("block size", testddd);

@@ -155,7 +155,7 @@ namespace AMDiS {
     if (removeRhsNullSpace) {
       MSG("Remove constant null space from the RHS!\n");
       MatNullSpace sp;
-      MatNullSpaceCreate(PETSC_COMM_WORLD, PETSC_TRUE, 0, PETSC_NULL, &sp);
+      MatNullSpaceCreate(*mpiComm, PETSC_TRUE, 0, PETSC_NULL, &sp);
       MatNullSpaceRemove(sp, petscRhsVec, PETSC_NULL);
       MatNullSpaceDestroy(&sp);
     }

AMDiS/src/parallel/PetscSolverSchur.cc
@@ -163,12 +163,12 @@ namespace AMDiS {
     // === Create PETSc IS structurs for interior and boundary DOFs. ===
 
-    ISCreateStride(PETSC_COMM_WORLD,
+    ISCreateStride(*mpiComm,
                    nInteriorDofs * nComponents,
                    (rStartInteriorDofs + rStartBoundaryDofs) * nComponents,
                    1, &interiorIs);
-    ISCreateStride(PETSC_COMM_WORLD,
+    ISCreateStride(*mpiComm,
                    nBoundaryDofs * nComponents,
                    (rStartInteriorDofs + rStartBoundaryDofs + nInteriorDofs) * nComponents,
                    1, &boundaryIs);

@@ -189,22 +189,22 @@ namespace AMDiS {
     int nOverallBoundaryRows = nOverallBoundaryDofs * nComponents;
 
-    MatCreateMPIAIJ(PETSC_COMM_WORLD,
+    MatCreateMPIAIJ(*mpiComm,
                     nInteriorRows, nInteriorRows,
                     nOverallInteriorRows, nOverallInteriorRows,
                     100, PETSC_NULL, 100, PETSC_NULL, &matA11);
 
-    MatCreateMPIAIJ(PETSC_COMM_WORLD,
+    MatCreateMPIAIJ(*mpiComm,
                     nBoundaryRows, nBoundaryRows,
                     nOverallBoundaryRows, nOverallBoundaryRows,
                     100, PETSC_NULL, 100, PETSC_NULL, &matA22);
 
-    MatCreateMPIAIJ(PETSC_COMM_WORLD,
+    MatCreateMPIAIJ(*mpiComm,
                     nInteriorRows, nBoundaryRows,
                     nOverallInteriorRows, nOverallBoundaryRows,
                     100, PETSC_NULL, 100, PETSC_NULL, &matA12);
 
-    MatCreateMPIAIJ(PETSC_COMM_WORLD,
+    MatCreateMPIAIJ(*mpiComm,
                     nBoundaryRows, nInteriorRows,
                     nOverallBoundaryRows, nOverallInteriorRows,
                     100, PETSC_NULL, 100, PETSC_NULL, &matA21);

@@ -237,7 +237,7 @@ namespace AMDiS {
     tmpIS[0] = interiorIs;
     tmpIS[1] = boundaryIs;
 
-    MatCreateNest(PETSC_COMM_WORLD, 2, &tmpIS[0], 2, &tmpIS[0], &tmpMat[0][0], &petscMatrix);
+    MatCreateNest(*mpiComm, 2, &tmpIS[0], 2, &tmpIS[0], &tmpMat[0][0], &petscMatrix);
     MatNestSetVecType(petscMatrix, VECNEST);
     MatAssemblyBegin(petscMatrix, MAT_FINAL_ASSEMBLY);
     MatAssemblyEnd(petscMatrix, MAT_FINAL_ASSEMBLY);

@@ -246,8 +246,8 @@ namespace AMDiS {
     int nRankRows = meshDistributor->getNumberRankDofs(feSpace) * nComponents;
     int nOverallRows = meshDistributor->getNumberOverallDofs(feSpace) * nComponents;
 
-    VecCreateMPI(PETSC_COMM_WORLD, nRankRows, nOverallRows, &petscSolVec);
-    VecCreateMPI(PETSC_COMM_WORLD, nRankRows, nOverallRows, &petscTmpVec);
+    VecCreateMPI(*mpiComm, nRankRows, nOverallRows, &petscSolVec);
+    VecCreateMPI(*mpiComm, nRankRows, nOverallRows, &petscTmpVec);
   }

@@ -260,7 +260,7 @@ namespace AMDiS {
     int nRankRows = meshDistributor->getNumberRankDofs(feSpace) * nComponents;
     int nOverallRows = meshDistributor->getNumberOverallDofs(feSpace) * nComponents;
 
-    VecCreateMPI(PETSC_COMM_WORLD, nRankRows, nOverallRows, &petscRhsVec);
+    VecCreateMPI(*mpiComm, nRankRows, nOverallRows, &petscRhsVec);
 
     for (int i = 0; i < nComponents; i++)
       setDofVector(petscRhsVec, vec->getDOFVector(i), nComponents, i);

@@ -278,7 +278,7 @@ namespace AMDiS {
     const FiniteElemSpace *feSpace = meshDistributor->getFeSpace(0);
     int nComponents = vec.getSize();
 
-    KSPCreate(PETSC_COMM_WORLD, &solver);
+    KSPCreate(*mpiComm, &solver);
     KSPSetOperators(solver, petscMatrix, petscMatrix, SAME_NONZERO_PATTERN);
     KSPSetTolerances(solver, 0.0, 1e-8, PETSC_DEFAULT, PETSC_DEFAULT);