
Commit 479286fc authored by Thomas Witkowski

Added parallel field split preconditioner for the Schur complement approach, part II.

parent 7d1db034
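
This commit prepares the parallel PETSc interface for a field split preconditioner: the unknowns are split into interior DOFs and interprocess-boundary DOFs, and the boundary block is treated via its Schur complement S = A_BB - A_BI * inv(A_II) * A_IB. The new PetscSolverSchur class is added to the build in the Makefile hunks below, and PetscSolver gains a virtual hook for providing the solver and preconditioner. The following is only a minimal, illustrative sketch of how such a PCFIELDSPLIT/Schur setup is typically wired up with the PETSc API (assuming PETSc-3.5-style call signatures); it is not the code added by this commit, and the contents of the two index sets are assumed to be collected elsewhere, as in the disabled block further down.

// Illustrative sketch, not the commit's code: configure a field split
// preconditioner over an "interior"/"boundary" splitting and solve with it.
// mat, rhs and sol are assumed to be assembled PETSc objects; interiorDofs
// and boundaryDofs hold the global row indices of the two blocks.
#include <petscksp.h>
#include <vector>

void solveWithFieldSplit(Mat mat, Vec rhs, Vec sol,
                         std::vector<PetscInt> &interiorDofs,
                         std::vector<PetscInt> &boundaryDofs)
{
  KSP solver;
  PC pc;

  KSPCreate(PETSC_COMM_WORLD, &solver);
  KSPSetOperators(solver, mat, mat);   // older PETSc versions take an extra MatStructure argument
  KSPGetPC(solver, &pc);
  PCSetType(pc, PCFIELDSPLIT);

  // Register the two blocks by explicit index sets, as in the (currently
  // disabled) code path of this commit.
  IS interiorIs, boundaryIs;
  ISCreateGeneral(PETSC_COMM_WORLD, static_cast<PetscInt>(interiorDofs.size()),
                  interiorDofs.data(), PETSC_COPY_VALUES, &interiorIs);
  ISCreateGeneral(PETSC_COMM_WORLD, static_cast<PetscInt>(boundaryDofs.size()),
                  boundaryDofs.data(), PETSC_COPY_VALUES, &boundaryIs);
  PCFieldSplitSetIS(pc, "interior", interiorIs);
  PCFieldSplitSetIS(pc, "boundary", boundaryIs);

  // Use a Schur complement factorization of the resulting 2x2 block system.
  PCFieldSplitSetType(pc, PC_COMPOSITE_SCHUR);

  KSPSetFromOptions(solver);   // inner solvers selectable via -fieldsplit_interior_* / -fieldsplit_boundary_*
  KSPSolve(solver, rhs, sol);

  ISDestroy(&interiorIs);
  ISDestroy(&boundaryIs);
  KSPDestroy(&solver);
}
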
@@ -21,6 +21,7 @@ if USE_PARALLEL_DOMAIN_AMDIS
parallel/ParallelProblemStatBase.cc \
parallel/ParMetisPartitioner.cc \
parallel/PetscSolver.cc \
parallel/PetscSolverSchur.cc \
parallel/StdMpi.cc \
parallel/ZoltanPartitioner.cc
libamdis_la_CXXFLAGS += -DHAVE_PARALLEL_DOMAIN_AMDIS=1
@@ -247,6 +248,7 @@ parallel/ParallelDebug.h \
parallel/ParallelProblemStatBase.h \
parallel/ParMetisPartitioner.h \
parallel/PetscSolver.h \
parallel/PetscSolverSchur.h \
parallel/StdMpi.h \
parallel/ZoltanPartitioner.h \
reinit/BoundaryElementDist.h \
@@ -47,6 +47,7 @@ host_triplet = @host@
@USE_PARALLEL_DOMAIN_AMDIS_TRUE@ parallel/ParallelProblemStatBase.cc \
@USE_PARALLEL_DOMAIN_AMDIS_TRUE@ parallel/ParMetisPartitioner.cc \
@USE_PARALLEL_DOMAIN_AMDIS_TRUE@ parallel/PetscSolver.cc \
@USE_PARALLEL_DOMAIN_AMDIS_TRUE@ parallel/PetscSolverSchur.cc \
@USE_PARALLEL_DOMAIN_AMDIS_TRUE@ parallel/StdMpi.cc \
@USE_PARALLEL_DOMAIN_AMDIS_TRUE@ parallel/ZoltanPartitioner.cc
@@ -90,10 +91,11 @@ am__libamdis_la_SOURCES_DIST = parallel/ElementObjectData.cc \
parallel/MeshPartitioner.cc parallel/MpiHelper.cc \
parallel/ParallelDebug.cc parallel/ParallelProblemStatBase.cc \
parallel/ParMetisPartitioner.cc parallel/PetscSolver.cc \
parallel/StdMpi.cc parallel/ZoltanPartitioner.cc AdaptBase.cc \
AdaptInfo.cc AdaptInstationary.cc AdaptStationary.cc \
Assembler.cc BasisFunction.cc Boundary.cc BoundaryManager.cc \
Cholesky.cc CoarseningManager.cc CoarseningManager1d.cc \
parallel/PetscSolverSchur.cc parallel/StdMpi.cc \
parallel/ZoltanPartitioner.cc AdaptBase.cc AdaptInfo.cc \
AdaptInstationary.cc AdaptStationary.cc Assembler.cc \
BasisFunction.cc Boundary.cc BoundaryManager.cc Cholesky.cc \
CoarseningManager.cc CoarseningManager1d.cc \
CoarseningManager2d.cc CoarseningManager3d.cc \
ComponentTraverseInfo.cc CreatorMap.cc DOFAdmin.cc \
DOFIndexed.cc DOFMatrix.cc DOFVector.cc Debug.cc \
@@ -135,6 +137,7 @@ am__libamdis_la_SOURCES_DIST = parallel/ElementObjectData.cc \
@USE_PARALLEL_DOMAIN_AMDIS_TRUE@ libamdis_la-ParallelProblemStatBase.lo \
@USE_PARALLEL_DOMAIN_AMDIS_TRUE@ libamdis_la-ParMetisPartitioner.lo \
@USE_PARALLEL_DOMAIN_AMDIS_TRUE@ libamdis_la-PetscSolver.lo \
@USE_PARALLEL_DOMAIN_AMDIS_TRUE@ libamdis_la-PetscSolverSchur.lo \
@USE_PARALLEL_DOMAIN_AMDIS_TRUE@ libamdis_la-StdMpi.lo \
@USE_PARALLEL_DOMAIN_AMDIS_TRUE@ libamdis_la-ZoltanPartitioner.lo
am__objects_2 = $(am__objects_1)
@@ -570,6 +573,7 @@ parallel/ParallelDebug.h \
parallel/ParallelProblemStatBase.h \
parallel/ParMetisPartitioner.h \
parallel/PetscSolver.h \
parallel/PetscSolverSchur.h \
parallel/StdMpi.h \
parallel/ZoltanPartitioner.h \
reinit/BoundaryElementDist.h \
@@ -874,6 +878,7 @@ distclean-compile:
@AMDEP_TRUE@@am__include@ @am__quote@./$(DEPDIR)/libamdis_la-Parametric.Plo@am__quote@
@AMDEP_TRUE@@am__include@ @am__quote@./$(DEPDIR)/libamdis_la-PeriodicBC.Plo@am__quote@
@AMDEP_TRUE@@am__include@ @am__quote@./$(DEPDIR)/libamdis_la-PetscSolver.Plo@am__quote@
@AMDEP_TRUE@@am__include@ @am__quote@./$(DEPDIR)/libamdis_la-PetscSolverSchur.Plo@am__quote@
@AMDEP_TRUE@@am__include@ @am__quote@./$(DEPDIR)/libamdis_la-PngWriter.Plo@am__quote@
@AMDEP_TRUE@@am__include@ @am__quote@./$(DEPDIR)/libamdis_la-PovrayWriter.Plo@am__quote@
@AMDEP_TRUE@@am__include@ @am__quote@./$(DEPDIR)/libamdis_la-ProblemImplicit.Plo@am__quote@
@@ -1029,6 +1034,13 @@ libamdis_la-PetscSolver.lo: parallel/PetscSolver.cc
@AMDEP_TRUE@@am__fastdepCXX_FALSE@ DEPDIR=$(DEPDIR) $(CXXDEPMODE) $(depcomp) @AMDEPBACKSLASH@
@am__fastdepCXX_FALSE@ $(LIBTOOL) --tag=CXX --mode=compile $(CXX) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(libamdis_la_CXXFLAGS) $(CXXFLAGS) -c -o libamdis_la-PetscSolver.lo `test -f 'parallel/PetscSolver.cc' || echo '$(srcdir)/'`parallel/PetscSolver.cc
libamdis_la-PetscSolverSchur.lo: parallel/PetscSolverSchur.cc
@am__fastdepCXX_TRUE@ if $(LIBTOOL) --tag=CXX --mode=compile $(CXX) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(libamdis_la_CXXFLAGS) $(CXXFLAGS) -MT libamdis_la-PetscSolverSchur.lo -MD -MP -MF "$(DEPDIR)/libamdis_la-PetscSolverSchur.Tpo" -c -o libamdis_la-PetscSolverSchur.lo `test -f 'parallel/PetscSolverSchur.cc' || echo '$(srcdir)/'`parallel/PetscSolverSchur.cc; \
@am__fastdepCXX_TRUE@ then mv -f "$(DEPDIR)/libamdis_la-PetscSolverSchur.Tpo" "$(DEPDIR)/libamdis_la-PetscSolverSchur.Plo"; else rm -f "$(DEPDIR)/libamdis_la-PetscSolverSchur.Tpo"; exit 1; fi
@AMDEP_TRUE@@am__fastdepCXX_FALSE@ source='parallel/PetscSolverSchur.cc' object='libamdis_la-PetscSolverSchur.lo' libtool=yes @AMDEPBACKSLASH@
@AMDEP_TRUE@@am__fastdepCXX_FALSE@ DEPDIR=$(DEPDIR) $(CXXDEPMODE) $(depcomp) @AMDEPBACKSLASH@
@am__fastdepCXX_FALSE@ $(LIBTOOL) --tag=CXX --mode=compile $(CXX) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(libamdis_la_CXXFLAGS) $(CXXFLAGS) -c -o libamdis_la-PetscSolverSchur.lo `test -f 'parallel/PetscSolverSchur.cc' || echo '$(srcdir)/'`parallel/PetscSolverSchur.cc
libamdis_la-StdMpi.lo: parallel/StdMpi.cc
@am__fastdepCXX_TRUE@ if $(LIBTOOL) --tag=CXX --mode=compile $(CXX) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(libamdis_la_CXXFLAGS) $(CXXFLAGS) -MT libamdis_la-StdMpi.lo -MD -MP -MF "$(DEPDIR)/libamdis_la-StdMpi.Tpo" -c -o libamdis_la-StdMpi.lo `test -f 'parallel/StdMpi.cc' || echo '$(srcdir)/'`parallel/StdMpi.cc; \
@am__fastdepCXX_TRUE@ then mv -f "$(DEPDIR)/libamdis_la-StdMpi.Tpo" "$(DEPDIR)/libamdis_la-StdMpi.Plo"; else rm -f "$(DEPDIR)/libamdis_la-StdMpi.Tpo"; exit 1; fi
@@ -108,9 +108,8 @@ namespace AMDiS {
int colIndex = globalColDof * dispMult + dispAddCol;
// Ignore all zero entries, except if it is a diagonal entry.
// if (value(*icursor) == 0.0 && globalRowDof != globalColDof)
// if (value(*icursor) == 0.0 && rowIndex != colIndex)
// continue;
if (value(*icursor) == 0.0 && rowIndex != colIndex)
continue;
if (!periodicCol) {
// Calculate the exact position of the column index in the PETSc matrix.
@@ -182,8 +181,8 @@ namespace AMDiS {
int globalColDof = meshDistributor->mapLocalToGlobal(col(*icursor));
// Ignore all zero entries, except if it is a diagonal entry.
// if (value(*icursor) == 0.0 && globalRowDof != globalColDof)
// continue;
if (value(*icursor) == 0.0 && globalRowDof != globalColDof)
continue;
// === Add all periodic associations of both the row and the column ===
@@ -410,7 +409,7 @@ namespace AMDiS {
int petscColIdx =
meshDistributor->mapLocalToGlobal(col(*icursor)) * nComponents + j;
// if (value(*icursor) != 0.0 || petscRowIdx == petscColIdx) {
if (value(*icursor) != 0.0 || petscRowIdx == petscColIdx) {
// The row DOF is a rank DOF; if the column is also a rank DOF, increment
// the d_nnz value for this row, otherwise the o_nnz value
// (see the preallocation sketch below).
if (petscColIdx >= meshDistributor->getRstart() * nComponents &&
@@ -418,7 +417,7 @@
d_nnz[localPetscRowIdx]++;
else
o_nnz[localPetscRowIdx]++;
// }
}
}
} else {
// === The current row DOF is not a rank dof, i.e., it will be created ===
@@ -431,13 +430,13 @@
for (icursor_type icursor = begin<nz>(cursor),
icend = end<nz>(cursor); icursor != icend; ++icursor) {
// if (value(*icursor) != 0.0) {
if (value(*icursor) != 0.0) {
int petscColIdx =
meshDistributor->mapLocalToGlobal(col(*icursor)) * nComponents + j;
sendMatrixEntry[sendToRank].
push_back(make_pair(petscRowIdx, petscColIdx));
// }
}
}
} // if (isRankDof[*cursor]) ... else ...
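
The d_nnz and o_nnz arrays counted in the hunks above hold, for every locally owned matrix row, the number of nonzeros whose column is owned by this rank (the diagonal block) and the number owned by other ranks (the off-diagonal block). A minimal sketch of how such counts typically feed PETSc's parallel matrix preallocation; this is illustrative only, and the variable names are chosen here rather than taken from the file:

#include <petscmat.h>

// Illustrative sketch: create and preallocate a parallel AIJ matrix from the
// per-row diagonal/off-diagonal nonzero counts computed beforehand.
Mat createPreallocatedMatrix(PetscInt nRankRows, PetscInt nOverallRows,
                             const PetscInt d_nnz[], const PetscInt o_nnz[])
{
  Mat mat;
  MatCreate(PETSC_COMM_WORLD, &mat);
  MatSetSizes(mat, nRankRows, nRankRows, nOverallRows, nOverallRows);
  MatSetType(mat, MATMPIAIJ);
  MatMPIAIJSetPreallocation(mat, 0, d_nnz, 0, o_nnz);
  // Entries are inserted afterwards with MatSetValues() and the matrix is
  // finalized with MatAssemblyBegin()/MatAssemblyEnd() before the solve.
  return mat;
}
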
@@ -603,55 +602,7 @@ namespace AMDiS {
// Do not delete the solution vector, use it for the initial guess.
// KSPSetInitialGuessNonzero(solver, PETSC_TRUE);
KSPCreate(PETSC_COMM_WORLD, &solver);
KSPGetPC(solver, &pc);
PCSetType(pc, PCFIELDSPLIT);
#if 0
typedef map<int, DofContainer> RankToDofContainer;
typedef map<DegreeOfFreedom, bool> DofIndexToBool;
std::set<DegreeOfFreedom> boundaryDofsSet;
std::set<DegreeOfFreedom> boundaryLocalDofs;
RankToDofContainer& sendDofs = meshDistributor->getSendDofs();
for (RankToDofContainer::iterator rankIt = sendDofs.begin(); rankIt != sendDofs.end(); ++rankIt)
for (DofContainer::iterator dofIt = rankIt->second.begin(); dofIt != rankIt->second.end(); ++dofIt) {
boundaryLocalDofs.insert(**dofIt);
for (int i = 0; i < nComponents; i++)
boundaryDofsSet.insert(meshDistributor->mapLocalToGlobal(**dofIt) * nComponents + i);
}
vector<DegreeOfFreedom> boundaryDofs(boundaryDofsSet.begin(), boundaryDofsSet.end());
std::set<DegreeOfFreedom> otherBoundaryLocalDofs;
RankToDofContainer& recvDofs = meshDistributor->getRecvDofs();
for (RankToDofContainer::iterator rankIt = recvDofs.begin(); rankIt != recvDofs.end(); ++rankIt)
for (DofContainer::iterator dofIt = rankIt->second.begin(); dofIt != rankIt->second.end(); ++dofIt)
otherBoundaryLocalDofs.insert(**dofIt);
std::set<DegreeOfFreedom> interiorDofsSet;
DofIndexToBool& isRankDof = meshDistributor->getIsRankDof();
for (DofIndexToBool::iterator dofIt = isRankDof.begin(); dofIt != isRankDof.end(); ++dofIt)
if (dofIt->second &&
boundaryLocalDofs.count(dofIt->first) == 0 &&
otherBoundaryLocalDofs.count(dofIt->first) == 0)
for (int i = 0; i < nComponents; i++)
interiorDofsSet.insert(meshDistributor->mapLocalToGlobal(dofIt->first) * nComponents + i);
vector<DegreeOfFreedom> interiorDofs(interiorDofsSet.begin(), interiorDofsSet.end());
IS interiorIs;
ISCreateGeneral(PETSC_COMM_WORLD, interiorDofs.size(), &(interiorDofs[0]), PETSC_COPY_VALUES, &interiorIs);
PCFieldSplitSetIS(pc, "interior", interiorIs);
IS boundaryIs;
ISCreateGeneral(PETSC_COMM_WORLD, boundaryDofs.size(), &(boundaryDofs[0]), PETSC_COPY_VALUES, &boundaryIs);
PCFieldSplitSetIS(pc, "boundary", boundaryIs);
#endif
// === Run PETSc. ===
KSPSolve(solver, petscRhsVec, petscSolVec);
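
The block between #if 0 and #endif above gathers the interprocess-boundary DOFs from the mesh distributor's send and receive lists, takes the remaining rank-owned DOFs as interior DOFs, and registers both sets with the field split preconditioner; it is disabled here, consistent with the commit message that moves this work into the dedicated PetscSolverSchur class. Once such splits are registered and the solver calls KSPSetFromOptions(), the concrete Schur solver can usually be chosen at run time via PETSc options; the inner solver choices below are only an illustration, not something prescribed by this commit:

  -pc_fieldsplit_type schur
  -fieldsplit_interior_ksp_type preonly -fieldsplit_interior_pc_type bjacobi
  -fieldsplit_boundary_ksp_type gmres -fieldsplit_boundary_pc_type jacobi
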
@@ -77,6 +77,7 @@ namespace AMDiS {
/// Use PETSc to solve the linear system of equations
void solvePetscMatrix(SystemVector &vec, AdaptInfo *adaptInfo);
private:
/// Creates the PETSc solver and preconditioner objects. This function may be
/// overridden to create a specialized PETSc solver.
virtual void providePetscSolver(KSP &solver, PC &pc);
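
The new virtual hook lets a derived solver decide which KSP and PC objects are created before the system is solved. The PetscSolverSchur sources added by this commit are not part of the hunks shown on this page, so the following is only a hypothetical sketch of how such an override could look, with the field split setup factored into the hook; the constructor and the remaining class body are omitted.

// Hypothetical sketch of an override in a derived solver class; not the
// actual PetscSolverSchur implementation added by this commit.
#include "parallel/PetscSolver.h"   // assumed include path

namespace AMDiS {

  class PetscSolverSchur : public PetscSolver
  {
  protected:
    /// Set up a field split preconditioner instead of the default KSP/PC pair.
    virtual void providePetscSolver(KSP &solver, PC &pc)
    {
      KSPCreate(PETSC_COMM_WORLD, &solver);
      KSPGetPC(solver, &pc);
      PCSetType(pc, PCFIELDSPLIT);
      // Register the "interior"/"boundary" index sets and select the Schur
      // factorization here (compare the sketch near the top of this page).
    }
  };

}
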