Commit 713cf136 authored by Thomas Witkowski's avatar Thomas Witkowski

Split PETSc solver.

parent 8881aea2
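In outline: the monolithic PETSc solver is split up. The assembly and solve code moves out of parallel/PetscSolver.cc into the new files parallel/PetscSolverGlobalMatrix.cc and parallel/PetscSolverSchur.cc, both build systems pick up the new sources, and the MeshDistributor learns to prepare boundary DOFs in the order a given solver requires. Below is a minimal standalone sketch of the resulting shape (hypothetical names and signatures; the full base-class interface is not visible in this excerpt):

    #include <iostream>

    // Hypothetical sketch, not the AMDiS API: one abstract PETSc-solver
    // interface with the global-matrix and Schur-complement variants as
    // subclasses, each living in its own translation unit.
    struct Matrix {};         // stand-in for AMDiS' Matrix<DOFMatrix*>
    struct SystemVector {};   // stand-in for AMDiS' SystemVector

    struct PetscSolverIface {
      virtual ~PetscSolverIface() {}
      virtual void fillPetscMatrix(Matrix *mat, SystemVector *vec) = 0;
      virtual void solvePetscMatrix(SystemVector &vec) = 0;
    };

    // Would live in parallel/PetscSolverGlobalMatrix.cc: one global matrix.
    struct GlobalMatrixVariant : PetscSolverIface {
      void fillPetscMatrix(Matrix *, SystemVector *)
      { std::cout << "assemble global matrix\n"; }
      void solvePetscMatrix(SystemVector &)
      { std::cout << "KSP solve on global matrix\n"; }
    };

    // Would live in parallel/PetscSolverSchur.cc: Schur-complement approach
    // (its body is not part of this excerpt).
    struct SchurVariant : PetscSolverIface {
      void fillPetscMatrix(Matrix *, SystemVector *) {}
      void solvePetscMatrix(SystemVector &) {}
    };

    int main() {
      Matrix m;
      SystemVector v;
      GlobalMatrixVariant solver;
      solver.fillPetscMatrix(&m, &v);
      solver.solvePetscMatrix(v);
      return 0;
    }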
@@ -206,6 +206,8 @@ if(ENABLE_PARALLEL_DOMAIN)
 		${SOURCE_DIR}/parallel/ParMetisPartitioner.cc
 		${SOURCE_DIR}/parallel/PetscProblemStat.cc
 		${SOURCE_DIR}/parallel/PetscSolver.cc
+		${SOURCE_DIR}/parallel/PetscSolverGlobalMatrix.cc
+		${SOURCE_DIR}/parallel/PetscSolverSchur.cc
 		${SOURCE_DIR}/parallel/StdMpi.cc
 		${SOURCE_DIR}/parallel/ZoltanPartitioner.cc)
 	SET(COMPILEFLAGS "${COMPILEFLAGS} -DHAVE_PARALLEL_DOMAIN_AMDIS=1")
......
@@ -44,7 +44,7 @@ available_tags=" CXX F77"
 # ### BEGIN LIBTOOL CONFIG
-# Libtool was configured on host deimos104:
+# Libtool was configured on host deimos102:
 # Shell to use when invoking shell scripts.
 SHELL="/bin/sh"
@@ -6760,7 +6760,7 @@ build_old_libs=`case $build_libtool_libs in yes) $echo no;; *) $echo yes;; esac`
 # End:
 # ### BEGIN LIBTOOL TAG CONFIG: CXX
-# Libtool was configured on host deimos104:
+# Libtool was configured on host deimos102:
 # Shell to use when invoking shell scripts.
 SHELL="/bin/sh"
@@ -7065,7 +7065,7 @@ include_expsyms=""
 # ### BEGIN LIBTOOL TAG CONFIG: F77
-# Libtool was configured on host deimos104:
+# Libtool was configured on host deimos102:
 # Shell to use when invoking shell scripts.
 SHELL="/bin/sh"
......
@@ -23,6 +23,8 @@ if USE_PARALLEL_DOMAIN_AMDIS
 	parallel/ParMetisPartitioner.cc \
 	parallel/PetscProblemStat.cc \
 	parallel/PetscSolver.cc \
+	parallel/PetscSolverGlobalMatrix.cc \
+	parallel/PetscSolverSchur.cc \
 	parallel/StdMpi.cc
 	libamdis_la_CXXFLAGS += -DHAVE_PARALLEL_DOMAIN_AMDIS=1
 	AMDIS_INCLUDES += -I$(PARMETIS_DIR)
@@ -257,6 +259,8 @@ parallel/ParallelProblemStatBase.h \
 parallel/ParMetisPartitioner.h \
 parallel/PetscProblemStat.h \
 parallel/PetscSolver.h \
+parallel/PetscSolverGlobalMatrix.h \
+parallel/PetscSolverSchur.h \
 parallel/StdMpi.h \
 parallel/ZoltanPartitioner.h \
 reinit/BoundaryElementDist.h \
......
@@ -49,6 +49,8 @@ host_triplet = @host@
 @USE_PARALLEL_DOMAIN_AMDIS_TRUE@	parallel/ParMetisPartitioner.cc \
 @USE_PARALLEL_DOMAIN_AMDIS_TRUE@	parallel/PetscProblemStat.cc \
 @USE_PARALLEL_DOMAIN_AMDIS_TRUE@	parallel/PetscSolver.cc \
+@USE_PARALLEL_DOMAIN_AMDIS_TRUE@	parallel/PetscSolverGlobalMatrix.cc \
+@USE_PARALLEL_DOMAIN_AMDIS_TRUE@	parallel/PetscSolverSchur.cc \
 @USE_PARALLEL_DOMAIN_AMDIS_TRUE@	parallel/StdMpi.cc
 @USE_PARALLEL_DOMAIN_AMDIS_TRUE@am__append_2 = -DHAVE_PARALLEL_DOMAIN_AMDIS=1
@@ -95,7 +97,8 @@ am__libamdis_la_SOURCES_DIST = parallel/CheckerPartitioner.cc \
 	parallel/MpiHelper.cc parallel/ParallelDebug.cc \
 	parallel/ParallelProblemStatBase.cc \
 	parallel/ParMetisPartitioner.cc parallel/PetscProblemStat.cc \
-	parallel/PetscSolver.cc parallel/StdMpi.cc \
+	parallel/PetscSolver.cc parallel/PetscSolverGlobalMatrix.cc \
+	parallel/PetscSolverSchur.cc parallel/StdMpi.cc \
 	parallel/ZoltanPartitioner.cc AdaptBase.cc AdaptInfo.cc \
 	AdaptInstationary.cc AdaptStationary.cc Assembler.cc \
 	BasisFunction.cc Boundary.cc BoundaryManager.cc Cholesky.cc \
@@ -142,6 +145,8 @@ am__libamdis_la_SOURCES_DIST = parallel/CheckerPartitioner.cc \
 @USE_PARALLEL_DOMAIN_AMDIS_TRUE@	libamdis_la-ParMetisPartitioner.lo \
 @USE_PARALLEL_DOMAIN_AMDIS_TRUE@	libamdis_la-PetscProblemStat.lo \
 @USE_PARALLEL_DOMAIN_AMDIS_TRUE@	libamdis_la-PetscSolver.lo \
+@USE_PARALLEL_DOMAIN_AMDIS_TRUE@	libamdis_la-PetscSolverGlobalMatrix.lo \
+@USE_PARALLEL_DOMAIN_AMDIS_TRUE@	libamdis_la-PetscSolverSchur.lo \
 @USE_PARALLEL_DOMAIN_AMDIS_TRUE@	libamdis_la-StdMpi.lo
 @USE_ZOLTAN_TRUE@am__objects_2 = libamdis_la-ZoltanPartitioner.lo
 am__objects_3 = $(am__objects_1) $(am__objects_2)
@@ -577,6 +582,8 @@ parallel/ParallelProblemStatBase.h \
 parallel/ParMetisPartitioner.h \
 parallel/PetscProblemStat.h \
 parallel/PetscSolver.h \
+parallel/PetscSolverGlobalMatrix.h \
+parallel/PetscSolverSchur.h \
 parallel/StdMpi.h \
 parallel/ZoltanPartitioner.h \
 reinit/BoundaryElementDist.h \
@@ -881,6 +888,8 @@ distclean-compile:
 @AMDEP_TRUE@@am__include@ @am__quote@./$(DEPDIR)/libamdis_la-PeriodicBC.Plo@am__quote@
 @AMDEP_TRUE@@am__include@ @am__quote@./$(DEPDIR)/libamdis_la-PetscProblemStat.Plo@am__quote@
 @AMDEP_TRUE@@am__include@ @am__quote@./$(DEPDIR)/libamdis_la-PetscSolver.Plo@am__quote@
+@AMDEP_TRUE@@am__include@ @am__quote@./$(DEPDIR)/libamdis_la-PetscSolverGlobalMatrix.Plo@am__quote@
+@AMDEP_TRUE@@am__include@ @am__quote@./$(DEPDIR)/libamdis_la-PetscSolverSchur.Plo@am__quote@
 @AMDEP_TRUE@@am__include@ @am__quote@./$(DEPDIR)/libamdis_la-PngWriter.Plo@am__quote@
 @AMDEP_TRUE@@am__include@ @am__quote@./$(DEPDIR)/libamdis_la-PovrayWriter.Plo@am__quote@
 @AMDEP_TRUE@@am__include@ @am__quote@./$(DEPDIR)/libamdis_la-ProblemImplicit.Plo@am__quote@
@@ -1048,6 +1057,20 @@ libamdis_la-PetscSolver.lo: parallel/PetscSolver.cc
 @AMDEP_TRUE@@am__fastdepCXX_FALSE@	DEPDIR=$(DEPDIR) $(CXXDEPMODE) $(depcomp) @AMDEPBACKSLASH@
 @am__fastdepCXX_FALSE@	$(LIBTOOL) --tag=CXX --mode=compile $(CXX) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(libamdis_la_CXXFLAGS) $(CXXFLAGS) -c -o libamdis_la-PetscSolver.lo `test -f 'parallel/PetscSolver.cc' || echo '$(srcdir)/'`parallel/PetscSolver.cc
+
+libamdis_la-PetscSolverGlobalMatrix.lo: parallel/PetscSolverGlobalMatrix.cc
+@am__fastdepCXX_TRUE@	if $(LIBTOOL) --tag=CXX --mode=compile $(CXX) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(libamdis_la_CXXFLAGS) $(CXXFLAGS) -MT libamdis_la-PetscSolverGlobalMatrix.lo -MD -MP -MF "$(DEPDIR)/libamdis_la-PetscSolverGlobalMatrix.Tpo" -c -o libamdis_la-PetscSolverGlobalMatrix.lo `test -f 'parallel/PetscSolverGlobalMatrix.cc' || echo '$(srcdir)/'`parallel/PetscSolverGlobalMatrix.cc; \
+@am__fastdepCXX_TRUE@	then mv -f "$(DEPDIR)/libamdis_la-PetscSolverGlobalMatrix.Tpo" "$(DEPDIR)/libamdis_la-PetscSolverGlobalMatrix.Plo"; else rm -f "$(DEPDIR)/libamdis_la-PetscSolverGlobalMatrix.Tpo"; exit 1; fi
+@AMDEP_TRUE@@am__fastdepCXX_FALSE@	source='parallel/PetscSolverGlobalMatrix.cc' object='libamdis_la-PetscSolverGlobalMatrix.lo' libtool=yes @AMDEPBACKSLASH@
+@AMDEP_TRUE@@am__fastdepCXX_FALSE@	DEPDIR=$(DEPDIR) $(CXXDEPMODE) $(depcomp) @AMDEPBACKSLASH@
+@am__fastdepCXX_FALSE@	$(LIBTOOL) --tag=CXX --mode=compile $(CXX) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(libamdis_la_CXXFLAGS) $(CXXFLAGS) -c -o libamdis_la-PetscSolverGlobalMatrix.lo `test -f 'parallel/PetscSolverGlobalMatrix.cc' || echo '$(srcdir)/'`parallel/PetscSolverGlobalMatrix.cc
+
+libamdis_la-PetscSolverSchur.lo: parallel/PetscSolverSchur.cc
+@am__fastdepCXX_TRUE@	if $(LIBTOOL) --tag=CXX --mode=compile $(CXX) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(libamdis_la_CXXFLAGS) $(CXXFLAGS) -MT libamdis_la-PetscSolverSchur.lo -MD -MP -MF "$(DEPDIR)/libamdis_la-PetscSolverSchur.Tpo" -c -o libamdis_la-PetscSolverSchur.lo `test -f 'parallel/PetscSolverSchur.cc' || echo '$(srcdir)/'`parallel/PetscSolverSchur.cc; \
+@am__fastdepCXX_TRUE@	then mv -f "$(DEPDIR)/libamdis_la-PetscSolverSchur.Tpo" "$(DEPDIR)/libamdis_la-PetscSolverSchur.Plo"; else rm -f "$(DEPDIR)/libamdis_la-PetscSolverSchur.Tpo"; exit 1; fi
+@AMDEP_TRUE@@am__fastdepCXX_FALSE@	source='parallel/PetscSolverSchur.cc' object='libamdis_la-PetscSolverSchur.lo' libtool=yes @AMDEPBACKSLASH@
+@AMDEP_TRUE@@am__fastdepCXX_FALSE@	DEPDIR=$(DEPDIR) $(CXXDEPMODE) $(depcomp) @AMDEPBACKSLASH@
+@am__fastdepCXX_FALSE@	$(LIBTOOL) --tag=CXX --mode=compile $(CXX) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(libamdis_la_CXXFLAGS) $(CXXFLAGS) -c -o libamdis_la-PetscSolverSchur.lo `test -f 'parallel/PetscSolverSchur.cc' || echo '$(srcdir)/'`parallel/PetscSolverSchur.cc
+
 libamdis_la-StdMpi.lo: parallel/StdMpi.cc
 @am__fastdepCXX_TRUE@	if $(LIBTOOL) --tag=CXX --mode=compile $(CXX) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(libamdis_la_CXXFLAGS) $(CXXFLAGS) -MT libamdis_la-StdMpi.lo -MD -MP -MF "$(DEPDIR)/libamdis_la-StdMpi.Tpo" -c -o libamdis_la-StdMpi.lo `test -f 'parallel/StdMpi.cc' || echo '$(srcdir)/'`parallel/StdMpi.cc; \
 @am__fastdepCXX_TRUE@	then mv -f "$(DEPDIR)/libamdis_la-StdMpi.Tpo" "$(DEPDIR)/libamdis_la-StdMpi.Plo"; else rm -f "$(DEPDIR)/libamdis_la-StdMpi.Tpo"; exit 1; fi
......
@@ -55,6 +55,9 @@ namespace AMDiS {
   using namespace boost::filesystem;
   using namespace std;
 
+  const Flag MeshDistributor::BOUNDARY_SUBOBJ_SORTED = 0X01L;
+  const Flag MeshDistributor::BOUNDARY_EDGE_SCHUR = 0X02L;
+
   inline bool cmpDofsByValue(const DegreeOfFreedom* dof1, const DegreeOfFreedom* dof2)
   {
     return (*dof1 < *dof2);
@@ -80,7 +83,8 @@ namespace AMDiS {
       nMeshChangesAfterLastRepartitioning(0),
       repartitioningCounter(0),
       debugOutputDir(""),
-      lastMeshChangeIndex(0)
+      lastMeshChangeIndex(0),
+      createBoundaryDofFlag(0)
   {
     FUNCNAME("MeshDistributor::ParalleDomainBase()");
@@ -1637,6 +1641,50 @@ namespace AMDiS {
   }
+  void MeshDistributor::createBoundaryDofs()
+  {
+    FUNCNAME("MeshDistributor::createBoundaryDofs()");
+
+    sendDofs.clear();
+    recvDofs.clear();
+
+    if (createBoundaryDofFlag.isSet(BOUNDARY_SUBOBJ_SORTED)) {
+      MSG("WITH BOUNDARY SUBOBJ SORTED!\n");
+      // Collect boundary DOFs grouped by geometric sub-object: faces
+      // first, then edges, then vertices.
+      DofContainer dofs;
+
+      for (int geo = FACE; geo >= VERTEX; geo--) {
+        boundaryDofInfo.nGeoDofs[static_cast<GeoIndex>(geo)] = 0;
+
+        for (InteriorBoundary::iterator it(myIntBoundary); !it.end(); ++it) {
+          if (it->rankObj.subObj == geo) {
+            dofs.clear();
+            it->rankObj.el->getAllDofs(feSpace, it->rankObj, dofs);
+
+            boundaryDofInfo.nGeoDofs[static_cast<GeoIndex>(geo)] += dofs.size();
+
+            DofContainer &tmp = sendDofs[it.getRank()];
+            tmp.insert(tmp.end(), dofs.begin(), dofs.end());
+          }
+        }
+      }
+
+      for (int geo = FACE; geo >= VERTEX; geo--)
+        for (InteriorBoundary::iterator it(otherIntBoundary); !it.end(); ++it)
+          if (it->rankObj.subObj == geo)
+            it->rankObj.el->getAllDofs(feSpace, it->rankObj,
+                                       recvDofs[it.getRank()]);
+    } else {
+      MSG("TOTALLY NORMAL!\n");
+      for (InteriorBoundary::iterator it(myIntBoundary); !it.end(); ++it)
+        it->rankObj.el->getAllDofs(feSpace, it->rankObj,
+                                   sendDofs[it.getRank()]);
+
+      for (InteriorBoundary::iterator it(otherIntBoundary); !it.end(); ++it)
+        it->rankObj.el->getAllDofs(feSpace, it->rankObj,
+                                   recvDofs[it.getRank()]);
+    }
+  }
+
   void MeshDistributor::removeMacroElements()
   {
     FUNCNAME("MeshDistributor::removeMacroElements()");
@@ -1662,9 +1710,6 @@
     debug::createSortedDofs(mesh, elMap);
 #endif
 
-    sendDofs.clear();
-    recvDofs.clear();
-
     // === Get all DOFs in ranks partition. ===
@@ -1677,31 +1722,22 @@
     // === Traverse interior boundaries and get all DOFs on them. ===
 
+    createBoundaryDofs();
 
-    for (InteriorBoundary::iterator it(myIntBoundary); !it.end(); ++it) {
-      DofContainer dofs;
-      it->rankObj.el->getAllDofs(feSpace, it->rankObj, dofs);
-
-      for (unsigned int i = 0; i < dofs.size(); i++)
-        sendDofs[it.getRank()].push_back(dofs[i]);
-    }
-
-    for (InteriorBoundary::iterator it(otherIntBoundary); !it.end(); ++it) {
-      DofContainer dofs;
-      it->rankObj.el->getAllDofs(feSpace, it->rankObj, dofs);
-
-      for (unsigned int i = 0; i < dofs.size(); i++) {
+    // All DOFs that must be received are DOFs not owned by rank and have
+    // therefore to be removed from the set 'rankDofs'.
+    for (RankToDofContainer::iterator recvIt = recvDofs.begin();
+         recvIt != recvDofs.end(); ++recvIt) {
+      for (DofContainer::iterator dofIt = recvIt->second.begin();
+           dofIt != recvIt->second.end(); ++dofIt) {
         DofContainer::iterator eraseIt =
-          find(rankDofs.begin(), rankDofs.end(), dofs[i]);
+          find(rankDofs.begin(), rankDofs.end(), *dofIt);
         if (eraseIt != rankDofs.end())
          rankDofs.erase(eraseIt);
-
-        recvDofs[it.getRank()].push_back(dofs[i]);
       }
     }
 
     // Get displacement for global rank DOF ordering and global DOF number.
     nRankDofs = rankDofs.size();
     mpi::getDofNumbering(mpiComm, nRankDofs, rstart, nOverallDofs);
......
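The new createBoundaryDofs() above has two modes: with BOUNDARY_SUBOBJ_SORTED set, send DOFs are gathered grouped by geometric sub-object, faces first, then edges, then vertices, with per-entity counts recorded in boundaryDofInfo.nGeoDofs; otherwise DOFs are taken in plain boundary-iteration order. A compact standalone model of the two collection orders (hypothetical types, not the AMDiS containers):

    #include <iostream>
    #include <map>
    #include <vector>

    // Standalone model (hypothetical types) of the two code paths in
    // MeshDistributor::createBoundaryDofs(): sorted mode walks the boundary
    // once per geometric entity type; plain mode walks it once and takes the
    // DOFs as they come.
    enum GeoIndex { VERTEX = 0, EDGE = 1, FACE = 2 };

    struct BoundaryObj { GeoIndex subObj; std::vector<int> dofs; };

    int main() {
      std::vector<BoundaryObj> boundary = {
        {VERTEX, {7}}, {FACE, {1, 2, 3}}, {EDGE, {4, 5}}, {FACE, {6}}
      };

      bool subObjSorted = true;           // plays BOUNDARY_SUBOBJ_SORTED
      std::vector<int> sendDofs;
      std::map<GeoIndex, int> nGeoDofs;   // plays BoundaryDofInfo::nGeoDofs

      if (subObjSorted) {
        // Faces, then edges, then vertices; count DOFs per entity type.
        for (int geo = FACE; geo >= VERTEX; geo--)
          for (const BoundaryObj &b : boundary)
            if (b.subObj == geo) {
              nGeoDofs[static_cast<GeoIndex>(geo)] += (int) b.dofs.size();
              sendDofs.insert(sendDofs.end(), b.dofs.begin(), b.dofs.end());
            }
      } else {
        // Plain mode: boundary-iteration order.
        for (const BoundaryObj &b : boundary)
          sendDofs.insert(sendDofs.end(), b.dofs.begin(), b.dofs.end());
      }

      for (size_t i = 0; i < sendDofs.size(); i++)
        std::cout << sendDofs[i] << ' ';
      std::cout << '\n';   // sorted mode prints: 1 2 3 6 4 5 7
      return 0;
    }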
@@ -44,7 +44,14 @@
 namespace AMDiS {
 
   using namespace std;
 
+  struct BoundaryDofInfo
+  {
+    map<GeoIndex, int> nGeoDofs;
+  };
+
   class MeshDistributor
   {
   protected:
@@ -293,6 +300,11 @@
     void check3dValidMesh();
 
+    void setBoundaryDofRequirement(Flag flag)
+    {
+      createBoundaryDofFlag = flag;
+    }
+
   protected:
     /** \brief
     * Determines the interior boundaries, i.e. boundaries between ranks, and stores
@@ -306,6 +318,8 @@
     void createBoundaryData();
 
+    void createBoundaryDofs();
+
     /// Removes all macro elements from the mesh that are not part of ranks partition.
     void removeMacroElements();
@@ -599,6 +613,15 @@
     /// redistributed for the first time.
     vector<MacroElement*> allMacroElements;
 
+    Flag createBoundaryDofFlag;
+
+    BoundaryDofInfo boundaryDofInfo;
+
+  public:
+    /// Flags a solver can set to request a specific boundary DOF ordering.
+    static const Flag BOUNDARY_SUBOBJ_SORTED;
+    static const Flag BOUNDARY_EDGE_SCHUR;
+
     friend class ParallelDebug;
   };
 }
......
@@ -37,12 +37,13 @@ namespace AMDiS {
       meshDistributor(NULL)
     {}
 
+    virtual ~ParallelProblemStatBase() {}
+
     void buildAfterCoarsen(AdaptInfo *adaptInfo, Flag flag,
                            bool assembleMatrix,
                            bool assembleVector);
 
-    void addToMeshDistributor(MeshDistributor& m);
+    virtual void addToMeshDistributor(MeshDistributor& m);
 
   protected:
     void processMemUsage(double& vm_usage, double& resident_set);
......
@@ -27,6 +27,8 @@
 #include "Global.h"
 #include "parallel/ParallelProblemStatBase.h"
 #include "parallel/PetscSolver.h"
+#include "parallel/PetscSolverGlobalMatrix.h"
+#include "parallel/PetscSolverSchur.h"
 
 namespace AMDiS {
@@ -58,6 +60,13 @@ namespace AMDiS {
       delete petscSolver;
     }
 
+    void addToMeshDistributor(MeshDistributor& m)
+    {
+      ParallelProblemStatBase::addToMeshDistributor(m);
+      meshDistributor->setBoundaryDofRequirement(petscSolver->getBoundaryDofRequirement());
+    }
+
     void solve(AdaptInfo *adaptInfo, bool fixedMatrix = false);
 
   protected:
......
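Taken together, the two header changes wire the requirement through: addToMeshDistributor() is now virtual in ParallelProblemStatBase, and the PETSc variant first runs the base-class registration, then forwards petscSolver->getBoundaryDofRequirement() to the distributor, so the flag is in place before createBoundaryDofs() runs. A minimal standalone model of that handshake (hypothetical names; the declaration of getBoundaryDofRequirement() is not shown in this excerpt):

    #include <iostream>

    // Hypothetical model of the registration handshake, not the AMDiS classes.
    typedef unsigned long Flag;

    struct Distributor {
      Flag createBoundaryDofFlag;
      Distributor() : createBoundaryDofFlag(0) {}
      void setBoundaryDofRequirement(Flag f) { createBoundaryDofFlag = f; }
    };

    struct Solver {
      virtual ~Solver() {}
      // Default solver: no special boundary-DOF preparation needed.
      virtual Flag getBoundaryDofRequirement() { return 0; }
    };

    struct SchurLikeSolver : Solver {
      // A Schur-type solver would ask for sorted boundary DOFs (bit values
      // mirror BOUNDARY_SUBOBJ_SORTED / BOUNDARY_EDGE_SCHUR).
      Flag getBoundaryDofRequirement() { return 0x01L | 0x02L; }
    };

    struct ProblemStat {
      Solver *solver;
      ProblemStat(Solver *s) : solver(s) {}
      virtual ~ProblemStat() {}
      virtual void addToMeshDistributor(Distributor &m) {
        // Base-class registration would run first; then the solver's
        // requirement is forwarded, before any boundary DOFs are created.
        m.setBoundaryDofRequirement(solver->getBoundaryDofRequirement());
      }
    };

    int main() {
      SchurLikeSolver s;
      ProblemStat p(&s);
      Distributor d;
      p.addToMeshDistributor(d);
      std::cout << "flag = " << d.createBoundaryDofFlag << "\n";  // prints 3
      return 0;
    }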
@@ -18,15 +18,6 @@ namespace AMDiS {
   using namespace std;
 
-  PetscErrorCode myKSPMonitor(KSP ksp, PetscInt iter, PetscReal rnorm, void *)
-  {
-    if (iter % 100 == 0 && MPI::COMM_WORLD.Get_rank() == 0)
-      cout << "[0] Petsc-Iteration " << iter << ": " << rnorm << endl;
-
-    return 0;
-  }
-
   void PetscSolver::printSolutionInfo(AdaptInfo *adaptInfo,
                                       bool iterationCounter,
                                       bool residual)
@@ -49,1001 +40,4 @@ namespace AMDiS {
     }
   }
-  void PetscSolverGlobalMatrix::fillPetscMatrix(Matrix<DOFMatrix*> *mat, SystemVector *vec)
-  {
-    FUNCNAME("PetscSolverGlobalMatrix::fillPetscMatrix()");
-
-    TEST_EXIT_DBG(meshDistributor)("No mesh distributor object defined!\n");
-    TEST_EXIT_DBG(mat)("No DOF matrix defined!\n");
-    TEST_EXIT_DBG(vec)("No DOF vector defined!\n");
-
-    double wtime = MPI::Wtime();
-    int nComponents = mat->getNumRows();
-    int nRankRows = meshDistributor->getNumberRankDofs() * nComponents;
-    int nOverallRows = meshDistributor->getNumberOverallDofs() * nComponents;
-
-    // === Create PETSc vector (rhs, solution and a temporary vector). ===
-
-    VecCreate(PETSC_COMM_WORLD, &petscRhsVec);
-    VecSetSizes(petscRhsVec, nRankRows, nOverallRows);
-    VecSetType(petscRhsVec, VECMPI);
-
-    VecCreate(PETSC_COMM_WORLD, &petscSolVec);
-    VecSetSizes(petscSolVec, nRankRows, nOverallRows);
-    VecSetType(petscSolVec, VECMPI);
-
-    VecCreate(PETSC_COMM_WORLD, &petscTmpVec);
-    VecSetSizes(petscTmpVec, nRankRows, nOverallRows);
-    VecSetType(petscTmpVec, VECMPI);
-
-    int recvAllValues = 0;
-    int sendValue = static_cast<int>(meshDistributor->getLastMeshChangeIndex() != lastMeshNnz);
-    meshDistributor->getMpiComm().Allreduce(&sendValue, &recvAllValues, 1, MPI_INT, MPI_SUM);
-
-    if (!d_nnz || recvAllValues != 0) {
-      if (d_nnz) {
-        delete [] d_nnz;
-        d_nnz = NULL;
-        delete [] o_nnz;
-        o_nnz = NULL;
-      }
-
-      createPetscNnzStructure(mat);
-      lastMeshNnz = meshDistributor->getLastMeshChangeIndex();
-    }
-
-    // === Create PETSc matrix with the computed nnz data structure. ===
-
-    MatCreateMPIAIJ(PETSC_COMM_WORLD, nRankRows, nRankRows, nOverallRows, nOverallRows,
-                    0, d_nnz, 0, o_nnz, &petscMatrix);
-
-#if (DEBUG != 0)
-    MSG("Fill petsc matrix 1 needed %.5f seconds\n", MPI::Wtime() - wtime);
-#endif
-
-#if (DEBUG != 0)
-    int a, b;
-    MatGetOwnershipRange(petscMatrix, &a, &b);
-    TEST_EXIT(a == meshDistributor->getRstart() * nComponents)
-      ("Wrong matrix ownership range!\n");
-    TEST_EXIT(b == meshDistributor->getRstart() * nComponents + nRankRows)
-      ("Wrong matrix ownership range!\n");
-#endif
-
-    // === Transfer values from DOF matrices to the PETSc matrix. ===
-
-    for (int i = 0; i < nComponents; i++)
-      for (int j = 0; j < nComponents; j++)
-        if ((*mat)[i][j])
-          setDofMatrix((*mat)[i][j], nComponents, i, j);
-
-#if (DEBUG != 0)
-    MSG("Fill petsc matrix 2 needed %.5f seconds\n", MPI::Wtime() - wtime);
-#endif
-
-    MatAssemblyBegin(petscMatrix, MAT_FINAL_ASSEMBLY);
-    MatAssemblyEnd(petscMatrix, MAT_FINAL_ASSEMBLY);
-
-#if (DEBUG != 0)
-    MSG("Fill petsc matrix 3 needed %.5f seconds\n",
-        TIME_USED(MPI::Wtime(), wtime));
-#endif
-
-    // === Transfer values from DOF vector to the PETSc vector. ===
-
-    for (int i = 0; i < nComponents; i++)
-      setDofVector(petscRhsVec, vec->getDOFVector(i), nComponents, i);
-
-    VecAssemblyBegin(petscRhsVec);
-    VecAssemblyEnd(petscRhsVec);
-
-    MSG("Fill petsc matrix needed %.5f seconds\n", MPI::Wtime() - wtime);
-  }
-  void PetscSolverGlobalMatrix::solvePetscMatrix(SystemVector &vec, AdaptInfo *adaptInfo)
-  {
-    FUNCNAME("PetscSolverGlobalMatrix::solvePetscMatrix()");
-
-    int nComponents = vec.getSize();
-
-    // === Set old solution to be initial guess for PETSc solver. ===
-    if (!zeroStartVector) {
-      VecSet(petscSolVec, 0.0);
-
-      for (int i = 0; i < nComponents; i++)
-        setDofVector(petscSolVec, vec.getDOFVector(i), nComponents, i, true);
-
-      VecAssemblyBegin(petscSolVec);
-      VecAssemblyEnd(petscSolVec);
-    }
-
-    // === Init PETSc solver. ===
-
-    KSPCreate(PETSC_COMM_WORLD, &solver);
-    KSPGetPC(solver, &pc);
-    KSPSetOperators(solver, petscMatrix, petscMatrix, SAME_NONZERO_PATTERN);
-    KSPSetTolerances(solver, 0.0, 1e-8, PETSC_DEFAULT, PETSC_DEFAULT);
-    KSPSetType(solver, KSPBCGS);
-    KSPMonitorSet(solver, myKSPMonitor, PETSC_NULL, 0);
-    KSPSetFromOptions(solver);
-    PCSetFromOptions(pc);
-
-    // Do not delete the solution vector, use it for the initial guess.
-    if (!zeroStartVector)
-      KSPSetInitialGuessNonzero(solver, PETSC_TRUE);
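......

A detail in the removed fillPetscMatrix() worth spelling out is the collective guard around the nnz preallocation: every rank reports whether its mesh changed since the structure was last built, and the Allreduce sum guarantees all ranks take the same rebuild decision. A standalone model with plain MPI (hypothetical variables, not the AMDiS members):

    #include <mpi.h>
    #include <stdio.h>

    // Model of the "rebuild nnz only if any rank saw a mesh change" guard:
    // the sum over all ranks is the same everywhere, so every rank takes
    // the same branch and the subsequent collective calls stay matched.
    int main(int argc, char **argv) {
      MPI_Init(&argc, &argv);

      long lastMeshChangeIndex = 42;  // current mesh state on this rank
      long lastMeshNnz = 41;          // mesh state when nnz was last built
      int sendValue = (lastMeshChangeIndex != lastMeshNnz) ? 1 : 0;

      int recvAllValues = 0;
      MPI_Allreduce(&sendValue, &recvAllValues, 1, MPI_INT, MPI_SUM,
                    MPI_COMM_WORLD);

      if (recvAllValues != 0) {
        // At least one rank saw a mesh change: rebuild everywhere.
        printf("rebuilding nnz preallocation structure\n");
      }

      MPI_Finalize();
      return 0;
    }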