Project: iwr / amdis

Commit a33fd12b
Author: Thomas Witkowski
Date:   Apr 12, 2012

    More work on FETI-DP for multilevel test.

Parent: 5161a040
Changes: 11 files
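The commit applies one pattern across the solver classes: the MPI communicator is no longer held as a pointer (MPI::Intracomm *mpiComm) but by value, so every PETSc call site drops the dereference (*mpiComm becomes mpiComm). On top of that, the FETI-DP multilevel test gets a SubDomainSolver and a second communicator handle, mpiSelfComm, for rank-local objects. The sketch below (OldStyle and NewStyle are illustrative names, not AMDiS classes) shows why the call sites simplify: MPI::Intracomm is a thin handle that converts implicitly to MPI_Comm and can therefore be passed straight to PETSc routines.

// Sketch only, assuming the MPI-2 C++ bindings and PETSc headers are available.
#include <mpi.h>
#include <petscvec.h>

struct OldStyle {
  MPI::Intracomm *mpiComm;                       // before: pointer to the communicator
  void createVec(PetscInt nLocal, PetscInt nGlobal, Vec *v) {
    VecCreateMPI(*mpiComm, nLocal, nGlobal, v);  // dereference needed at every call
  }
};

struct NewStyle {
  MPI::Intracomm mpiComm;                        // after: communicator handle held by value
  void createVec(PetscInt nLocal, PetscInt nGlobal, Vec *v) {
    VecCreateMPI(mpiComm, nLocal, nGlobal, v);   // implicit conversion to MPI_Comm
  }
};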
AMDiS/CMakeLists.txt

@@ -252,7 +252,8 @@ if(ENABLE_PARALLEL_DOMAIN)
 		${SOURCE_DIR}/parallel/PetscSolverFeti.cc
 		${SOURCE_DIR}/parallel/PetscSolverGlobalMatrix.cc
 		${SOURCE_DIR}/parallel/PetscSolverGlobalBlockMatrix.cc
-		${SOURCE_DIR}/parallel/PetscSolverSchur.cc)
+		${SOURCE_DIR}/parallel/PetscSolverSchur.cc
+		${SOURCE_DIR}/parallel/SubDomainSolver.cc)
 	elseif(ENABLE_PARALLEL_DOMAIN STREQUAL "PMTL")
 		set(MTL_INCLUDE_DIR "")
 		find_package(MTL REQUIRED)
AMDiS/src/parallel/ParallelDofMapping.cc

@@ -40,7 +40,7 @@ namespace AMDiS {
     nOverallDofs = 0;
     rStartDofs = 0;
-    mpi::getDofNumbering(*mpiComm, nRankDofs, rStartDofs, nOverallDofs);
+    mpi::getDofNumbering(mpiComm, nRankDofs, rStartDofs, nOverallDofs);

     // === If required, compute also the global indices. ===

@@ -69,7 +69,7 @@ namespace AMDiS {
     // === Send all global indices of DOFs that are owned by the rank to all ===
     // === other ranks that also include this DOF.                           ===
-    StdMpi<vector<int> > stdMpi(*mpiComm);
+    StdMpi<vector<int> > stdMpi(mpiComm);
     for (DofComm::Iterator it(*sendDofs, feSpace); !it.end(); it.nextRank())
       for (; !it.endDofIter(); it.nextDof())
        if (dofMap.count(it.getDofIndex()) && !nonRankDofs.count(it.getDofIndex()))

@@ -111,7 +111,7 @@ namespace AMDiS {
   }

-  void ParallelDofMapping::init(MPI::Intracomm *m,
+  void ParallelDofMapping::init(MPI::Intracomm m,
                                 vector<const FiniteElemSpace*> &fe,
                                 vector<const FiniteElemSpace*> &uniqueFe,
                                 bool needGlobalMapping,

@@ -297,7 +297,7 @@ namespace AMDiS {
     // === Communicate the matrix indices for all DOFs that are on some ===
     // === interior boundaries.                                         ===
-    StdMpi<vector<DegreeOfFreedom> > stdMpi(*mpiComm);
+    StdMpi<vector<DegreeOfFreedom> > stdMpi(mpiComm);
     for (DofComm::Iterator it(*sendDofs, feSpaces[i]);
          !it.end(); it.nextRank()) {
       vector<DegreeOfFreedom> sendGlobalDofs;
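With the by-value communicator, mpi::getDofNumbering and the StdMpi constructor now take the handle directly. The diff does not show the helper's internals; as a hedged illustration only, a numbering of this shape (per-rank count in, rank offset and global total out) is typically an exclusive prefix sum plus a global sum:

// Minimal sketch, not the AMDiS implementation of mpi::getDofNumbering().
#include <mpi.h>

void getDofNumberingSketch(MPI::Intracomm &comm, int nRankDofs,
                           int &rStartDofs, int &nOverallDofs)
{
  MPI_Comm c = comm;                     // MPI::Intracomm converts to MPI_Comm
  MPI_Exscan(&nRankDofs, &rStartDofs, 1, MPI_INT, MPI_SUM, c);
  if (comm.Get_rank() == 0)
    rStartDofs = 0;                      // MPI_Exscan leaves the result on rank 0 undefined
  MPI_Allreduce(&nRankDofs, &nOverallDofs, 1, MPI_INT, MPI_SUM, c);
}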
AMDiS/src/parallel/ParallelDofMapping.h

@@ -109,7 +109,7 @@ namespace AMDiS {
     }

     /// This is the only valid constructur to be used.
-    FeSpaceDofMap(MPI::Intracomm *m)
+    FeSpaceDofMap(MPI::Intracomm m)
       : mpiComm(m),
        sendDofs(NULL),
        recvDofs(NULL),

@@ -120,11 +120,7 @@ namespace AMDiS {
        nLocalDofs(0),
        nOverallDofs(0),
        rStartDofs(0)
-    {
-      FUNCNAME("FeSpaceDofMap::FeSpaceDofMap()");
-
-      TEST_EXIT(mpiComm)("No MPI Communicator specified!\n");
-    }
+    {}

     /// Clears all data of the mapping.
     void clear();

@@ -230,7 +226,7 @@ namespace AMDiS {
   private:
     /// MPI communicator object;
-    MPI::Intracomm *mpiComm;
+    MPI::Intracomm mpiComm;

     /// DOF communicators for all DOFs on interior boundaries.
     DofComm *sendDofs, *recvDofs;

@@ -291,7 +287,7 @@ namespace AMDiS {
     * \param[in]  bNonLocalDofs      If true, at least one rank's mapping con-
     *                                taines DOFs that are not owend by the rank.
     */
-    void init(MPI::Intracomm *m,
+    void init(MPI::Intracomm m,
              vector<const FiniteElemSpace*> &fe,
              vector<const FiniteElemSpace*> &uniqueFe,
              bool needGlobalMapping,

@@ -390,7 +386,7 @@ namespace AMDiS {
   private:
     /// MPI communicator object;
-    MPI::Intracomm *mpiComm;
+    MPI::Intracomm mpiComm;

     /// DOF communicators for all DOFs on interior boundaries.
     DofComm *sendDofs, *recvDofs;
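The header-side change also removes the constructor body that guarded against a null communicator pointer (TEST_EXIT(mpiComm)), since a by-value handle cannot be a null pointer. A comparable guard, if one were wanted, would compare the handle against MPI::COMM_NULL; the class below is a hypothetical sketch of that idea, not the AMDiS header.

// Hypothetical sketch; DofMapSketch is not an AMDiS class.
#include <mpi.h>
#include <cassert>

class DofMapSketch {
public:
  explicit DofMapSketch(MPI::Intracomm comm) : mpiComm(comm) {
    assert(mpiComm != MPI::COMM_NULL);   // optional sanity check for a value handle
  }
private:
  MPI::Intracomm mpiComm;                // copying this copies the handle, not the communicator
};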
AMDiS/src/parallel/PetscSolver.cc

@@ -62,13 +62,13 @@ namespace AMDiS {
     FUNCNAME("PetscSolver::copyVec()");

     IS originIs, destIs;
-    ISCreateGeneral(*mpiComm,
+    ISCreateGeneral(mpiComm,
                     originIndex.size(),
                     &(originIndex[0]),
                     PETSC_USE_POINTER,
                     &originIs);

-    ISCreateGeneral(*mpiComm,
+    ISCreateGeneral(mpiComm,
                     destIndex.size(),
                     &(destIndex[0]),
                     PETSC_USE_POINTER,
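copyVec() builds PETSc index sets directly on the caller's index arrays. A reduced sketch of that pattern, assuming PetscInt-sized indices (makeIndexSet is an illustrative helper name): PETSC_USE_POINTER makes the IS reference the array instead of copying it, so the vector must outlive the IS.

// Sketch of the ISCreateGeneral pattern used in copyVec().
#include <vector>
#include <petscis.h>

static IS makeIndexSet(MPI_Comm comm, std::vector<PetscInt> &indices)
{
  IS is;
  ISCreateGeneral(comm, (PetscInt) indices.size(), &(indices[0]),
                  PETSC_USE_POINTER, &is);    // the IS references, does not copy, the array
  return is;
}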
AMDiS/src/parallel/PetscSolver.h

@@ -54,7 +54,8 @@ namespace AMDiS {
     {
       meshDistributor = m;
       mpiRank = meshDistributor->getMpiRank();
-      mpiComm = &(meshDistributor->getMpiComm());
+      mpiComm = meshDistributor->getMpiComm();
+      mpiSelfComm = PETSC_COMM_SELF;
     }

     /** \brief

@@ -137,7 +138,9 @@ namespace AMDiS {
     int mpiRank;

-    MPI::Intracomm *mpiComm;
+    MPI::Intracomm mpiComm;
+
+    MPI::Intracomm mpiSelfComm;

     /// Petsc's matrix structure.
     Mat petscMatrix;
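The base class now keeps two communicator handles: mpiComm, copied by value from the mesh distributor, and the new mpiSelfComm, initialised from PETSC_COMM_SELF (which is simply MPI_COMM_SELF) for rank-local, sequential PETSc objects. A stripped-down sketch of that wiring, with hypothetical names:

// Sketch only; SolverCommsSketch and init() are illustrative names.
#include <mpi.h>
#include <petscsys.h>

struct SolverCommsSketch {
  MPI::Intracomm mpiComm;       // communicator of the (sub)domain
  MPI::Intracomm mpiSelfComm;   // rank-local communicator for sequential objects

  void init(MPI::Intracomm &distributorComm) {
    mpiComm = distributorComm;       // copies only the handle
    mpiSelfComm = PETSC_COMM_SELF;   // MPI_COMM_SELF wrapped in the C++ handle
  }
};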
AMDiS/src/parallel/PetscSolverFeti.cc

@@ -214,8 +214,8 @@ namespace AMDiS {
     Parameters::get("parallel->multi level test", multiLevelTest);
     if (multiLevelTest) {
-      //      subDomainSolver = new SubDomainSolver(meshDistributor, mpiComm, &PETSC_COMM_SELF);
+      subDomainSolver = new SubDomainSolver(meshDistributor, mpiComm, mpiSelfComm);
     }
   }

@@ -436,7 +436,7 @@ namespace AMDiS {
     // === Create distributed matrix for Lagrange constraints. ===

-    MatCreateMPIAIJ(*mpiComm,
+    MatCreateMPIAIJ(mpiComm,
                     lagrangeMap.getRankDofs(), localDofMap.getRankDofs(),
                     lagrangeMap.getOverallDofs(), localDofMap.getOverallDofs(),
                     2, PETSC_NULL, 2, PETSC_NULL,

@@ -497,14 +497,14 @@ namespace AMDiS {
     schurPrimalData.mat_b_primal = &mat_b_primal;
     schurPrimalData.fetiSolver = this;

-    VecCreateMPI(*mpiComm,
+    VecCreateMPI(mpiComm,
                  localDofMap.getRankDofs(), localDofMap.getOverallDofs(),
                  &(schurPrimalData.tmp_vec_b));
-    VecCreateMPI(*mpiComm,
+    VecCreateMPI(mpiComm,
                  primalDofMap.getRankDofs(), primalDofMap.getOverallDofs(),
                  &(schurPrimalData.tmp_vec_primal));

-    MatCreateShell(*mpiComm,
+    MatCreateShell(mpiComm,
                    primalDofMap.getRankDofs(), primalDofMap.getRankDofs(),
                    primalDofMap.getOverallDofs(), primalDofMap.getOverallDofs(),
                    &schurPrimalData,

@@ -512,7 +512,7 @@ namespace AMDiS {
     MatShellSetOperation(mat_schur_primal, MATOP_MULT,
                          (void(*)(void))petscMultMatSchurPrimal);

-    KSPCreate(*mpiComm, &ksp_schur_primal);
+    KSPCreate(mpiComm, &ksp_schur_primal);
     KSPSetOperators(ksp_schur_primal, mat_schur_primal, mat_schur_primal, SAME_NONZERO_PATTERN);
     KSPSetOptionsPrefix(ksp_schur_primal, "schur_primal_");
     KSPSetType(ksp_schur_primal, KSPGMRES);

@@ -530,7 +530,7 @@ namespace AMDiS {
     int nRowsOverallB = localDofMap.getOverallDofs();

     Mat matBPi;
-    MatCreateMPIAIJ(*mpiComm,
+    MatCreateMPIAIJ(mpiComm,
                     nRowsRankB, nRowsRankPrimal,
                     nRowsOverallB, nRowsOverallPrimal,
                     30, PETSC_NULL, 30, PETSC_NULL, &matBPi);

@@ -596,7 +596,7 @@ namespace AMDiS {
     MatGetInfo(mat_primal_primal, MAT_GLOBAL_SUM, &minfo);
     MSG("Schur primal matrix nnz = %f\n", minfo.nz_used);

-    KSPCreate(*mpiComm, &ksp_schur_primal);
+    KSPCreate(mpiComm, &ksp_schur_primal);
     KSPSetOperators(ksp_schur_primal, mat_primal_primal,
                     mat_primal_primal, SAME_NONZERO_PATTERN);
     KSPSetOptionsPrefix(ksp_schur_primal, "schur_primal_");

@@ -646,24 +646,24 @@ namespace AMDiS {
     fetiData.fetiSolver = this;
     fetiData.ksp_schur_primal = &ksp_schur_primal;

-    VecCreateMPI(*mpiComm,
+    VecCreateMPI(mpiComm,
                  localDofMap.getRankDofs(), localDofMap.getOverallDofs(),
                  &(fetiData.tmp_vec_b));
-    VecCreateMPI(*mpiComm,
+    VecCreateMPI(mpiComm,
                  lagrangeMap.getRankDofs(), lagrangeMap.getOverallDofs(),
                  &(fetiData.tmp_vec_lagrange));
-    VecCreateMPI(*mpiComm,
+    VecCreateMPI(mpiComm,
                  primalDofMap.getRankDofs(), primalDofMap.getOverallDofs(),
                  &(fetiData.tmp_vec_primal));

-    MatCreateShell(*mpiComm,
+    MatCreateShell(mpiComm,
                    lagrangeMap.getRankDofs(), lagrangeMap.getRankDofs(),
                    lagrangeMap.getOverallDofs(), lagrangeMap.getOverallDofs(),
                    &fetiData, &mat_feti);
     MatShellSetOperation(mat_feti, MATOP_MULT, (void(*)(void))petscMultMatFeti);

-    KSPCreate(*mpiComm, &ksp_feti);
+    KSPCreate(mpiComm, &ksp_feti);
     KSPSetOperators(ksp_feti, mat_feti, mat_feti, SAME_NONZERO_PATTERN);
     KSPSetOptionsPrefix(ksp_feti, "feti_");
     KSPSetType(ksp_feti, KSPGMRES);

@@ -698,7 +698,7 @@ namespace AMDiS {
     fetiDirichletPreconData.mat_duals_interior = &mat_duals_interior;
     fetiDirichletPreconData.ksp_interior = &ksp_interior;

-    VecCreateMPI(*mpiComm,
+    VecCreateMPI(mpiComm,
                  localDofMap.getRankDofs(), localDofMap.getOverallDofs(),
                  &(fetiDirichletPreconData.tmp_vec_b));
     MatGetVecs(mat_duals_duals, PETSC_NULL,

@@ -743,7 +743,7 @@ namespace AMDiS {
       }
     }

-    VecCreateMPI(*mpiComm,
+    VecCreateMPI(mpiComm,
                  localDofMap.getRankDofs(), localDofMap.getOverallDofs(),
                  &(fetiLumpedPreconData.tmp_vec_b));

@@ -938,17 +938,17 @@ namespace AMDiS {
     MatCreateSeqAIJ(PETSC_COMM_SELF, nRowsRankB, nRowsRankB, 60, PETSC_NULL,
                     &mat_b_b);

-    MatCreateMPIAIJ(*mpiComm,
+    MatCreateMPIAIJ(mpiComm,
                     nRowsRankPrimal, nRowsRankPrimal,
                     nRowsOverallPrimal, nRowsOverallPrimal,
                     60, PETSC_NULL, 60, PETSC_NULL, &mat_primal_primal);

-    MatCreateMPIAIJ(*mpiComm,
+    MatCreateMPIAIJ(mpiComm,
                     nRowsRankB, nRowsRankPrimal,
                     nRowsOverallB, nRowsOverallPrimal,
                     60, PETSC_NULL, 60, PETSC_NULL, &mat_b_primal);

-    MatCreateMPIAIJ(*mpiComm,
+    MatCreateMPIAIJ(mpiComm,
                     nRowsRankPrimal, nRowsRankB,
                     nRowsOverallPrimal, nRowsOverallB,
                     30, PETSC_NULL, 30, PETSC_NULL, &mat_primal_b);

@@ -1233,9 +1233,9 @@ namespace AMDiS {
     vector<const FiniteElemSpace*> feSpaces = getFeSpaces(vec);

-    VecCreateMPI(*mpiComm,
+    VecCreateMPI(mpiComm,
                  localDofMap.getRankDofs(), localDofMap.getOverallDofs(), &f_b);
-    VecCreateMPI(*mpiComm,
+    VecCreateMPI(mpiComm,
                  primalDofMap.getRankDofs(), primalDofMap.getOverallDofs(),
                  &f_primal);
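Besides the communicator change, these hunks all follow the same matrix-free construction for the Schur-primal and FETI operators: a shell matrix carries a user context and a MATOP_MULT callback, and a KSP is set up on top of it. A condensed sketch of that pattern (MyCtx and myMult are illustrative names; the SAME_NONZERO_PATTERN flag matches the PETSc version this code targets):

// Sketch of the MatCreateShell/MatShellSetOperation/KSP pattern used above.
#include <petscksp.h>

struct MyCtx { /* data the operator application needs */ };

PetscErrorCode myMult(Mat A, Vec x, Vec y)
{
  MyCtx *ctx;
  MatShellGetContext(A, (void **) &ctx);
  // ... apply the operator: y = op(ctx) * x ...
  return 0;
}

void buildShellSolver(MPI_Comm comm, PetscInt nLocal, PetscInt nGlobal,
                      MyCtx *ctx, Mat *A, KSP *ksp)
{
  MatCreateShell(comm, nLocal, nLocal, nGlobal, nGlobal, ctx, A);
  MatShellSetOperation(*A, MATOP_MULT, (void (*)(void)) myMult);

  KSPCreate(comm, ksp);
  KSPSetOperators(*ksp, *A, *A, SAME_NONZERO_PATTERN);
  KSPSetType(*ksp, KSPGMRES);
}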
AMDiS/src/parallel/PetscSolverGlobalBlockMatrix.cc

@@ -53,7 +53,7 @@ namespace AMDiS {
     for (int i = 0; i < nBlocks; i++)
       for (int j = 0; j < nBlocks; j++)
-        MatCreateMPIAIJ(*mpiComm,
+        MatCreateMPIAIJ(mpiComm,
                         nRankRows * blockSize[i], nRankRows * blockSize[j],
                         nOverallRows * blockSize[i], nOverallRows * blockSize[j],
                         30 * blockSize[i], PETSC_NULL, 30 * blockSize[i], PETSC_NULL,

@@ -79,7 +79,7 @@ namespace AMDiS {
     }

-    MatCreateNest(*mpiComm,
+    MatCreateNest(mpiComm,
                   nBlocks, PETSC_NULL, nBlocks, PETSC_NULL,
                   &(nestMat[0]), &petscMatrix);

@@ -91,7 +91,7 @@ namespace AMDiS {
     MatAssemblyEnd(petscMatrix, MAT_FINAL_ASSEMBLY);

     // === Init PETSc solver. ===
-    KSPCreate(*mpiComm, &solver);
+    KSPCreate(mpiComm, &solver);
     KSPSetOperators(solver, petscMatrix, petscMatrix, SAME_NONZERO_PATTERN);
     KSPSetFromOptions(solver);

@@ -113,7 +113,7 @@ namespace AMDiS {
     nestVec.resize(nComponents);

     for (int i = 0; i < nComponents; i++) {
-      VecCreateMPI(*mpiComm, nRankRows, nOverallRows, &(nestVec[i]));
+      VecCreateMPI(mpiComm, nRankRows, nOverallRows, &(nestVec[i]));

       setDofVector(nestVec[i], vec->getDOFVector(i));

@@ -121,7 +121,7 @@ namespace AMDiS {
       VecAssemblyEnd(nestVec[i]);
     }

-    VecCreateNest(*mpiComm, nComponents, PETSC_NULL,
+    VecCreateNest(mpiComm, nComponents, PETSC_NULL,
                   &(nestVec[0]), &petscRhsVec);
     VecAssemblyBegin(petscRhsVec);
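The block-matrix solver wraps an nBlocks x nBlocks array of MPIAIJ blocks into a single MATNEST operator and the per-component vectors into a VECNEST right-hand side. A reduced sketch of the wrapping step (wrapBlocks is an illustrative helper; block creation and assembly are omitted):

// Sketch of the MatCreateNest/VecCreateNest wrapping used above.
#include <vector>
#include <petscmat.h>
#include <petscvec.h>

void wrapBlocks(MPI_Comm comm, PetscInt nBlocks,
                std::vector<Mat> &blockMats,   // row-major, nBlocks * nBlocks entries
                std::vector<Vec> &blockVecs,   // nBlocks entries
                Mat *A, Vec *b)
{
  MatCreateNest(comm, nBlocks, PETSC_NULL, nBlocks, PETSC_NULL,
                &(blockMats[0]), A);
  VecCreateNest(comm, nBlocks, PETSC_NULL, &(blockVecs[0]), b);
}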
AMDiS/src/parallel/PetscSolverGlobalMatrix.cc

@@ -31,8 +31,8 @@ namespace AMDiS {
     // === Create PETSc vector (solution and a temporary vector). ===

-    VecCreateMPI(*mpiComm, nRankRows, nOverallRows, &petscSolVec);
-    VecCreateMPI(*mpiComm, nRankRows, nOverallRows, &petscTmpVec);
+    VecCreateMPI(mpiComm, nRankRows, nOverallRows, &petscSolVec);
+    VecCreateMPI(mpiComm, nRankRows, nOverallRows, &petscTmpVec);

     int testddd = 1;
     Parameters::get("block size", testddd);

@@ -70,7 +70,7 @@ namespace AMDiS {
     // === Create PETSc matrix with the computed nnz data structure. ===

-    MatCreateMPIAIJ(*mpiComm, nRankRows, nRankRows,
+    MatCreateMPIAIJ(mpiComm, nRankRows, nRankRows,
                     nOverallRows, nOverallRows,
                     0, d_nnz, 0, o_nnz, &petscMatrix);

@@ -109,7 +109,7 @@ namespace AMDiS {
     MatAssemblyEnd(petscMatrix, MAT_FINAL_ASSEMBLY);

     // === Init PETSc solver. ===
-    KSPCreate(*mpiComm, &solver);
+    KSPCreate(mpiComm, &solver);
     KSPGetPC(solver, &pc);
     KSPSetOperators(solver, petscMatrix, petscMatrix, SAME_NONZERO_PATTERN);
     KSPSetTolerances(solver, 0.0, 1e-8, PETSC_DEFAULT, PETSC_DEFAULT);

@@ -137,7 +137,7 @@ namespace AMDiS {
     int nRankRows = meshDistributor->getNumberRankDofs(feSpaces);
     int nOverallRows = meshDistributor->getNumberOverallDofs(feSpaces);

-    VecCreateMPI(*mpiComm, nRankRows, nOverallRows, &petscRhsVec);
+    VecCreateMPI(mpiComm, nRankRows, nOverallRows, &petscRhsVec);

     int testddd = 1;
     Parameters::get("block size", testddd);

@@ -155,7 +155,7 @@ namespace AMDiS {
     if (removeRhsNullSpace) {
       MSG("Remove constant null space from the RHS!\n");
       MatNullSpace sp;
-      MatNullSpaceCreate(*mpiComm, PETSC_TRUE, 0, PETSC_NULL, &sp);
+      MatNullSpaceCreate(mpiComm, PETSC_TRUE, 0, PETSC_NULL, &sp);
       MatNullSpaceRemove(sp, petscRhsVec, PETSC_NULL);
       MatNullSpaceDestroy(&sp);
     }
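The last hunk projects the constant null space out of the right-hand side before the solve, the usual treatment for pure-Neumann-type problems. A condensed sketch of that step (the three-argument MatNullSpaceRemove matches the PETSc version this diff targets; newer releases drop the third argument):

// Sketch of the null-space removal used above.
#include <petscmat.h>
#include <petscvec.h>

void removeConstantNullSpace(MPI_Comm comm, Vec rhs)
{
  MatNullSpace sp;
  MatNullSpaceCreate(comm, PETSC_TRUE, 0, PETSC_NULL, &sp);  // constant null space only
  MatNullSpaceRemove(sp, rhs, PETSC_NULL);                   // project constants out of rhs
  MatNullSpaceDestroy(&sp);
}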
AMDiS/src/parallel/PetscSolverSchur.cc

@@ -163,12 +163,12 @@ namespace AMDiS {
     // === Create PETSc IS structurs for interior and boundary DOFs. ===

-    ISCreateStride(*mpiComm,
+    ISCreateStride(mpiComm,
                    nInteriorDofs * nComponents,
                    (rStartInteriorDofs + rStartBoundaryDofs) * nComponents,
                    1, &interiorIs);

-    ISCreateStride(*mpiComm,
+    ISCreateStride(mpiComm,
                    nBoundaryDofs * nComponents,
                    (rStartInteriorDofs + rStartBoundaryDofs + nInteriorDofs) * nComponents,
                    1, &boundaryIs);

@@ -189,22 +189,22 @@ namespace AMDiS {
     int nOverallBoundaryRows = nOverallBoundaryDofs * nComponents;

-    MatCreateMPIAIJ(*mpiComm,
+    MatCreateMPIAIJ(mpiComm,
                     nInteriorRows, nInteriorRows,
                     nOverallInteriorRows, nOverallInteriorRows,
                     100, PETSC_NULL, 100, PETSC_NULL, &matA11);

-    MatCreateMPIAIJ(*mpiComm,
+    MatCreateMPIAIJ(mpiComm,
                     nBoundaryRows, nBoundaryRows,
                     nOverallBoundaryRows, nOverallBoundaryRows,
                     100, PETSC_NULL, 100, PETSC_NULL, &matA22);

-    MatCreateMPIAIJ(*mpiComm,
+    MatCreateMPIAIJ(mpiComm,
                     nInteriorRows, nBoundaryRows,
                     nOverallInteriorRows, nOverallBoundaryRows,
                     100, PETSC_NULL, 100, PETSC_NULL, &matA12);
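The Schur solver's index sets are plain strides: each rank owns a contiguous block of interior rows followed by a contiguous block of boundary rows, so both IS objects use step 1. A condensed sketch of the two ISCreateStride calls (the helper name is illustrative; the start indices mirror the expressions in the diff):

// Sketch of the stride-based index sets built above.
#include <petscis.h>

void createSchurIndexSets(MPI_Comm comm, PetscInt nInteriorDofs, PetscInt nBoundaryDofs,
                          PetscInt nComponents, PetscInt rStartInteriorDofs,
                          PetscInt rStartBoundaryDofs, IS *interiorIs, IS *boundaryIs)
{
  PetscInt rStart = (rStartInteriorDofs + rStartBoundaryDofs) * nComponents;
  ISCreateStride(comm, nInteriorDofs * nComponents, rStart, 1, interiorIs);
  ISCreateStride(comm, nBoundaryDofs * nComponents,
                 rStart + nInteriorDofs * nComponents, 1, boundaryIs);
}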