iwr / amdis · Commits · 479286fc

Commit 479286fc, authored Mar 17, 2011 by Thomas Witkowski:

    Added parallel field split preconditioner for the Schur complement approach, part II.

Parent: 7d1db034
Changes: 4 files
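For orientation: a PETSc field split preconditioner (PCFIELDSPLIT) partitions the unknowns of a linear system into named index sets and preconditions each block separately. With the Schur composite type, a two-block system [[A00, A01], [A10, A11]] is preconditioned through the Schur complement S = A11 - A10 * inv(A00) * A01, which is what this commit series targets. The sketch below is illustrative only, not code from this commit; configureSchurFieldSplit and its interior/boundary arguments are hypothetical names, and error-code checking is omitted, as it is in the diffs below.

    #include <vector>
    #include <petscksp.h>

    // Illustrative sketch: configure a KSP's preconditioner as a two-way
    // field split of Schur type. 'interior' and 'boundary' hold the global
    // matrix row indices of the two blocks; in AMDiS they would come from
    // the mesh distributor.
    void configureSchurFieldSplit(KSP ksp,
                                  const std::vector<PetscInt>& interior,
                                  const std::vector<PetscInt>& boundary)
    {
      PC pc;
      KSPGetPC(ksp, &pc);
      PCSetType(pc, PCFIELDSPLIT);
      PCFieldSplitSetType(pc, PC_COMPOSITE_SCHUR);  // precondition via the Schur complement

      // Wrap the index lists into PETSc index sets and name the two splits.
      IS interiorIs, boundaryIs;
      ISCreateGeneral(PETSC_COMM_WORLD, (PetscInt) interior.size(), &interior[0],
                      PETSC_COPY_VALUES, &interiorIs);
      ISCreateGeneral(PETSC_COMM_WORLD, (PetscInt) boundary.size(), &boundary[0],
                      PETSC_COPY_VALUES, &boundaryIs);
      PCFieldSplitSetIS(pc, "interior", interiorIs);
      PCFieldSplitSetIS(pc, "boundary", boundaryIs);
    }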
AMDiS/src/Makefile.am

@@ -21,6 +21,7 @@ if USE_PARALLEL_DOMAIN_AMDIS
 	parallel/ParallelProblemStatBase.cc \
 	parallel/ParMetisPartitioner.cc \
 	parallel/PetscSolver.cc \
+	parallel/PetscSolverSchur.cc \
 	parallel/StdMpi.cc \
 	parallel/ZoltanPartitioner.cc
 libamdis_la_CXXFLAGS += -DHAVE_PARALLEL_DOMAIN_AMDIS=1
...
@@ -247,6 +248,7 @@ parallel/ParallelDebug.h \
 parallel/ParallelProblemStatBase.h \
 parallel/ParMetisPartitioner.h \
 parallel/PetscSolver.h \
+parallel/PetscSolverSchur.h \
 parallel/StdMpi.h \
 parallel/ZoltanPartitioner.h \
 reinit/BoundaryElementDist.h \
...
AMDiS/src/Makefile.in

@@ -47,6 +47,7 @@ host_triplet = @host@
 @USE_PARALLEL_DOMAIN_AMDIS_TRUE@	parallel/ParallelProblemStatBase.cc \
 @USE_PARALLEL_DOMAIN_AMDIS_TRUE@	parallel/ParMetisPartitioner.cc \
 @USE_PARALLEL_DOMAIN_AMDIS_TRUE@	parallel/PetscSolver.cc \
+@USE_PARALLEL_DOMAIN_AMDIS_TRUE@	parallel/PetscSolverSchur.cc \
 @USE_PARALLEL_DOMAIN_AMDIS_TRUE@	parallel/StdMpi.cc \
 @USE_PARALLEL_DOMAIN_AMDIS_TRUE@	parallel/ZoltanPartitioner.cc
...
@@ -90,10 +91,11 @@ am__libamdis_la_SOURCES_DIST = parallel/ElementObjectData.cc \
 	parallel/MeshPartitioner.cc parallel/MpiHelper.cc \
 	parallel/ParallelDebug.cc parallel/ParallelProblemStatBase.cc \
 	parallel/ParMetisPartitioner.cc parallel/PetscSolver.cc \
-	parallel/StdMpi.cc parallel/ZoltanPartitioner.cc AdaptBase.cc \
-	AdaptInfo.cc AdaptInstationary.cc AdaptStationary.cc \
-	Assembler.cc BasisFunction.cc Boundary.cc BoundaryManager.cc \
-	Cholesky.cc CoarseningManager.cc CoarseningManager1d.cc \
+	parallel/PetscSolverSchur.cc parallel/StdMpi.cc \
+	parallel/ZoltanPartitioner.cc AdaptBase.cc AdaptInfo.cc \
+	AdaptInstationary.cc AdaptStationary.cc Assembler.cc \
+	BasisFunction.cc Boundary.cc BoundaryManager.cc Cholesky.cc \
+	CoarseningManager.cc CoarseningManager1d.cc \
 	CoarseningManager2d.cc CoarseningManager3d.cc \
 	ComponentTraverseInfo.cc CreatorMap.cc DOFAdmin.cc \
 	DOFIndexed.cc DOFMatrix.cc DOFVector.cc Debug.cc \
...
@@ -135,6 +137,7 @@ am__libamdis_la_SOURCES_DIST = parallel/ElementObjectData.cc \
 @USE_PARALLEL_DOMAIN_AMDIS_TRUE@	libamdis_la-ParallelProblemStatBase.lo \
 @USE_PARALLEL_DOMAIN_AMDIS_TRUE@	libamdis_la-ParMetisPartitioner.lo \
 @USE_PARALLEL_DOMAIN_AMDIS_TRUE@	libamdis_la-PetscSolver.lo \
+@USE_PARALLEL_DOMAIN_AMDIS_TRUE@	libamdis_la-PetscSolverSchur.lo \
 @USE_PARALLEL_DOMAIN_AMDIS_TRUE@	libamdis_la-StdMpi.lo \
 @USE_PARALLEL_DOMAIN_AMDIS_TRUE@	libamdis_la-ZoltanPartitioner.lo
 am__objects_2 = $(am__objects_1)
...
@@ -570,6 +573,7 @@ parallel/ParallelDebug.h \
 parallel/ParallelProblemStatBase.h \
 parallel/ParMetisPartitioner.h \
 parallel/PetscSolver.h \
+parallel/PetscSolverSchur.h \
 parallel/StdMpi.h \
 parallel/ZoltanPartitioner.h \
 reinit/BoundaryElementDist.h \
...
@@ -874,6 +878,7 @@ distclean-compile:
 @AMDEP_TRUE@@am__include@ @am__quote@./$(DEPDIR)/libamdis_la-Parametric.Plo@am__quote@
 @AMDEP_TRUE@@am__include@ @am__quote@./$(DEPDIR)/libamdis_la-PeriodicBC.Plo@am__quote@
 @AMDEP_TRUE@@am__include@ @am__quote@./$(DEPDIR)/libamdis_la-PetscSolver.Plo@am__quote@
+@AMDEP_TRUE@@am__include@ @am__quote@./$(DEPDIR)/libamdis_la-PetscSolverSchur.Plo@am__quote@
 @AMDEP_TRUE@@am__include@ @am__quote@./$(DEPDIR)/libamdis_la-PngWriter.Plo@am__quote@
 @AMDEP_TRUE@@am__include@ @am__quote@./$(DEPDIR)/libamdis_la-PovrayWriter.Plo@am__quote@
 @AMDEP_TRUE@@am__include@ @am__quote@./$(DEPDIR)/libamdis_la-ProblemImplicit.Plo@am__quote@
...
@@ -1029,6 +1034,13 @@ libamdis_la-PetscSolver.lo: parallel/PetscSolver.cc
 @AMDEP_TRUE@@am__fastdepCXX_FALSE@	DEPDIR=$(DEPDIR) $(CXXDEPMODE) $(depcomp) @AMDEPBACKSLASH@
 @am__fastdepCXX_FALSE@	$(LIBTOOL) --tag=CXX --mode=compile $(CXX) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(libamdis_la_CXXFLAGS) $(CXXFLAGS) -c -o libamdis_la-PetscSolver.lo `test -f 'parallel/PetscSolver.cc' || echo '$(srcdir)/'`parallel/PetscSolver.cc
 
+libamdis_la-PetscSolverSchur.lo: parallel/PetscSolverSchur.cc
+@am__fastdepCXX_TRUE@	if $(LIBTOOL) --tag=CXX --mode=compile $(CXX) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(libamdis_la_CXXFLAGS) $(CXXFLAGS) -MT libamdis_la-PetscSolverSchur.lo -MD -MP -MF "$(DEPDIR)/libamdis_la-PetscSolverSchur.Tpo" -c -o libamdis_la-PetscSolverSchur.lo `test -f 'parallel/PetscSolverSchur.cc' || echo '$(srcdir)/'`parallel/PetscSolverSchur.cc; \
+@am__fastdepCXX_TRUE@	then mv -f "$(DEPDIR)/libamdis_la-PetscSolverSchur.Tpo" "$(DEPDIR)/libamdis_la-PetscSolverSchur.Plo"; else rm -f "$(DEPDIR)/libamdis_la-PetscSolverSchur.Tpo"; exit 1; fi
+@AMDEP_TRUE@@am__fastdepCXX_FALSE@	source='parallel/PetscSolverSchur.cc' object='libamdis_la-PetscSolverSchur.lo' libtool=yes @AMDEPBACKSLASH@
+@AMDEP_TRUE@@am__fastdepCXX_FALSE@	DEPDIR=$(DEPDIR) $(CXXDEPMODE) $(depcomp) @AMDEPBACKSLASH@
+@am__fastdepCXX_FALSE@	$(LIBTOOL) --tag=CXX --mode=compile $(CXX) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(libamdis_la_CXXFLAGS) $(CXXFLAGS) -c -o libamdis_la-PetscSolverSchur.lo `test -f 'parallel/PetscSolverSchur.cc' || echo '$(srcdir)/'`parallel/PetscSolverSchur.cc
+
 libamdis_la-StdMpi.lo: parallel/StdMpi.cc
 @am__fastdepCXX_TRUE@	if $(LIBTOOL) --tag=CXX --mode=compile $(CXX) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(libamdis_la_CXXFLAGS) $(CXXFLAGS) -MT libamdis_la-StdMpi.lo -MD -MP -MF "$(DEPDIR)/libamdis_la-StdMpi.Tpo" -c -o libamdis_la-StdMpi.lo `test -f 'parallel/StdMpi.cc' || echo '$(srcdir)/'`parallel/StdMpi.cc; \
 @am__fastdepCXX_TRUE@	then mv -f "$(DEPDIR)/libamdis_la-StdMpi.Tpo" "$(DEPDIR)/libamdis_la-StdMpi.Plo"; else rm -f "$(DEPDIR)/libamdis_la-StdMpi.Tpo"; exit 1; fi
...
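(Makefile.in is generated from Makefile.am by automake, which is why the PetscSolverSchur entries appear once per list in both files; only Makefile.am is edited by hand.)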
AMDiS/src/parallel/PetscSolver.cc

@@ -108,9 +108,8 @@ namespace AMDiS {
 	int colIndex = globalColDof * dispMult + dispAddCol;
 
 	// Ignore all zero entries, expect it is a diagonal entry.
-// 	if (value(*icursor) == 0.0 && globalRowDof != globalColDof)
-	if (value(*icursor) == 0.0 && rowIndex != colIndex)
-	  continue;
+// 	if (value(*icursor) == 0.0 && rowIndex != colIndex)
+// 	  continue;
 
 	if (!periodicCol) {
 	  // Calculate the exact position of the column index in the PETSc matrix.
...
@@ -182,8 +181,8 @@ namespace AMDiS {
 	    int globalColDof = meshDistributor->mapLocalToGlobal(col(*icursor));
 
 	    // Ignore all zero entries, expect it is a diagonal entry.
-// 	    if (value(*icursor) == 0.0 && globalRowDof != globalColDof)
-// 	      continue;
+	    if (value(*icursor) == 0.0 && globalRowDof != globalColDof)
+	      continue;
 
 	    // === Add all periodic associations of both, the row and the column ===
...
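Both hunks toggle the same guard: entries whose value is exactly zero are skipped during matrix assembly unless they sit on the diagonal, which PETSc expects to be allocated in every row. The first hunk comments the guard out on the local-index path (rowIndex/colIndex); the second activates it on the path that works with global DOF indices (globalRowDof/globalColDof). The recurring comment's "expect" is evidently meant as "except".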
@@ -410,7 +409,7 @@ namespace AMDiS {
 	      int petscColIdx = 
 		meshDistributor->mapLocalToGlobal(col(*icursor)) * nComponents + j;
 
-// 	      if (value(*icursor) != 0.0 || petscRowIdx == petscColIdx) {
+	      if (value(*icursor) != 0.0 || petscRowIdx == petscColIdx) {
 		// The row DOF is a rank DOF, if also the column is a rank DOF,
 		// increment the d_nnz values for this row, otherwise the o_nnz value.
 		if (petscColIdx >= meshDistributor->getRstart() * nComponents &&
...
@@ -418,7 +417,7 @@ namespace AMDiS {
 		  d_nnz[localPetscRowIdx]++;
 		else
 		  o_nnz[localPetscRowIdx]++;
-// 	      }
+	      }
 	    }
 	  } else {
 	    // === The current row DOF is not a rank dof, i.e., it will be created ===
...
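The d_nnz and o_nnz arrays maintained in the two hunks above are PETSc's per-row preallocation counts for a distributed AIJ matrix: for every locally owned row, d_nnz counts the nonzeros whose column also falls into this rank's row range (the diagonal block), and o_nnz counts the rest. Getting these counts right is what makes the later MatSetValues calls cheap. A minimal sketch of how such counts are consumed (hypothetical helper and sizes; MatCreateAIJ is the current PETSc spelling, older releases call the same routine MatCreateMPIAIJ):

    #include <petscmat.h>

    // Create a distributed AIJ matrix with nLocal rows/columns on this rank
    // and nGlobal overall, preallocated from per-row counts. d_nnz[i] and
    // o_nnz[i] are the diagonal-block and off-block nonzeros of local row i.
    Mat createPreallocated(PetscInt nLocal, PetscInt nGlobal,
                           const PetscInt *d_nnz, const PetscInt *o_nnz)
    {
      Mat mat;
      MatCreateAIJ(PETSC_COMM_WORLD, nLocal, nLocal, nGlobal, nGlobal,
                   0, d_nnz, 0, o_nnz, &mat);  // scalar fallbacks unused (0)
      return mat;
    }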
@@ -431,13 +430,13 @@ namespace AMDiS {
 	    for (icursor_type icursor = begin<nz>(cursor), 
 		   icend = end<nz>(cursor); icursor != icend; ++icursor) {
-// 	      if (value(*icursor) != 0.0) {
+	      if (value(*icursor) != 0.0) {
 		int petscColIdx = 
 		  meshDistributor->mapLocalToGlobal(col(*icursor)) * nComponents + j;
 
 		sendMatrixEntry[sendToRank].
 		  push_back(make_pair(petscRowIdx, petscColIdx));
-// 	      }
+	      }
 	    }
 	  } // if (isRankDof[*cursor]) ... else ...
...
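This hunk applies the same un-commenting of the zero-entry guard to rows the current rank does not own: instead of being counted into d_nnz/o_nnz locally, each (row, column) pair is queued in sendMatrixEntry and, judging from the StdMpi sources listed in the Makefiles above, later shipped to the owning rank so that it can complete its own preallocation counts.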
@@ -603,55 +602,7 @@ namespace AMDiS {
 
     // Do not delete the solution vector, use it for the initial guess.
     //    KSPSetInitialGuessNonzero(solver, PETSC_TRUE);
-    KSPCreate(PETSC_COMM_WORLD, &solver);
-    KSPGetPC(solver, &pc);
-    PCSetType(pc, PCFIELDSPLIT);
-
-#if 0
-    typedef map<int, DofContainer> RankToDofContainer;
-    typedef map<DegreeOfFreedom, bool> DofIndexToBool;
-
-    std::set<DegreeOfFreedom> boundaryDofsSet;
-    std::set<DegreeOfFreedom> boundaryLocalDofs;
-    RankToDofContainer& sendDofs = meshDistributor->getSendDofs();
-    for (RankToDofContainer::iterator rankIt = sendDofs.begin(); rankIt != sendDofs.end(); ++rankIt)
-      for (DofContainer::iterator dofIt = rankIt->second.begin(); dofIt != rankIt->second.end(); ++dofIt) {
-	boundaryLocalDofs.insert(**dofIt);
-	for (int i = 0; i < nComponents; i++)
-	  boundaryDofsSet.insert(meshDistributor->mapLocalToGlobal(**dofIt) * nComponents + i);
-      }
-    vector<DegreeOfFreedom> boundaryDofs(boundaryDofsSet.begin(), boundaryDofsSet.end());
-
-    std::set<DegreeOfFreedom> otherBoundaryLocalDofs;
-    RankToDofContainer& recvDofs = meshDistributor->getRecvDofs();
-    for (RankToDofContainer::iterator rankIt = recvDofs.begin(); rankIt != recvDofs.end(); ++rankIt)
-      for (DofContainer::iterator dofIt = rankIt->second.begin(); dofIt != rankIt->second.end(); ++dofIt)
-	otherBoundaryLocalDofs.insert(**dofIt);
-
-    std::set<DegreeOfFreedom> interiorDofsSet;
-    DofIndexToBool& isRankDof = meshDistributor->getIsRankDof();
-    for (DofIndexToBool::iterator dofIt = isRankDof.begin(); dofIt != isRankDof.end(); ++dofIt)
-      if (dofIt->second &&
-	  boundaryLocalDofs.count(dofIt->first) == 0 &&
-	  otherBoundaryLocalDofs.count(dofIt->first) == 0)
-	for (int i = 0; i < nComponents; i++)
-	  interiorDofsSet.insert(meshDistributor->mapLocalToGlobal(dofIt->first) * nComponents + i);
-    vector<DegreeOfFreedom> interiorDofs(interiorDofsSet.begin(), interiorDofsSet.end());
-
-    IS interiorIs;
-    ISCreateGeneral(PETSC_COMM_WORLD, interiorDofs.size(), &(interiorDofs[0]), PETSC_COPY_VALUES, &interiorIs);
-    PCFieldSplitSetIS(pc, "interior", interiorIs);
-
-    IS boundaryIs;
-    ISCreateGeneral(PETSC_COMM_WORLD, boundaryDofs.size(), &(boundaryDofs[0]), PETSC_COPY_VALUES, &boundaryIs);
-    PCFieldSplitSetIS(pc, "boundary", boundaryIs);
-#endif
 
     // === Run PETSc. ===
 
     KSPSolve(solver, petscRhsVec, petscSolVec);
...
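The block deleted here is recognizably the "part I" prototype of the field split setup: it gathered the globally numbered boundary DOFs (those exchanged with other ranks via the mesh distributor's send and receive lists) and the remaining interior DOFs into two index sets and registered them with PCFieldSplitSetIS under the names "interior" and "boundary". Part II evidently moves that logic out of the generic solver and behind the new providePetscSolver() hook, implemented in the added parallel/PetscSolverSchur.{h,cc} files.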
AMDiS/src/parallel/PetscSolver.h

@@ -77,6 +77,7 @@ namespace AMDiS {
     /// Use PETSc to solve the linear system of equations
     void solvePetscMatrix(SystemVector &vec, AdaptInfo *adaptInfo);
 
+  private:
     /// Is used to create a PETSc solver and preconditioner object. This function may
     /// be overridden to create some special PETSc solver.
     virtual void providePetscSolver(KSP &solver, PC &pc);
...
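Given this declaration, a derived solver supplies its own KSP/PC setup by overriding the hook; C++ allows a derived class to override a virtual function even when the base declares it private. A plausible sketch of the shape of the new subclass (PetscSolverSchur.cc itself is not shown on this page, so the body below is assumed from the block removed in PetscSolver.cc above):

    #include <petscksp.h>

    // Assumed outline of the new solver class; only the hook is sketched.
    class PetscSolverSchur : public PetscSolver
    {
    protected:
      virtual void providePetscSolver(KSP &solver, PC &pc)
      {
        KSPCreate(PETSC_COMM_WORLD, &solver);
        KSPGetPC(solver, &pc);
        PCSetType(pc, PCFIELDSPLIT);
        // ... build the "interior"/"boundary" index sets and register them
        // with PCFieldSplitSetIS(), as in the block removed from PetscSolver.cc ...
      }
    };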