Commit 713cf136 authored by Thomas Witkowski

Split the PETSc solver.

parent 8881aea2
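
In short, this commit moves the two concrete PETSc back-ends out of the monolithic parallel/PetscSolver.h into their own header/source pairs (PetscSolverGlobalMatrix and PetscSolverSchur) and registers the new sources in the CMake and Automake build files, as the hunks below show. The following is a minimal standalone sketch of the resulting class layout; the class and file names follow the diff, but the method signatures are simplified placeholders rather than the real AMDiS interfaces.

// Sketch only: the real classes live in parallel/PetscSolver.h,
// parallel/PetscSolverGlobalMatrix.{h,cc} and parallel/PetscSolverSchur.{h,cc}.

// Abstract base class, still declared in parallel/PetscSolver.h.
struct PetscSolver {
  virtual ~PetscSolver() {}
  virtual void fillPetscMatrix()  = 0;   // real signature takes Matrix<DOFMatrix*>* and SystemVector*
  virtual void solvePetscMatrix() = 0;   // real signature takes SystemVector& and AdaptInfo*
};

// Formerly defined inside PetscSolver.h, now moved to its own files.
struct PetscSolverGlobalMatrix : PetscSolver {
  void fillPetscMatrix()  {}
  void solvePetscMatrix() {}
};

// Formerly defined inside PetscSolver.h behind #ifdef HAVE_PETSC_DEV,
// now moved to its own files (the guard stays in PetscSolverSchur.cc).
struct PetscSolverSchur : PetscSolver {
  void fillPetscMatrix()  {}
  void solvePetscMatrix() {}
};

int main() {
  PetscSolverGlobalMatrix globalMatrixSolver;
  PetscSolverSchur schurSolver;
  (void)globalMatrixSolver; (void)schurSolver;   // construction only; nothing to solve here
  return 0;
}
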
@@ -206,6 +206,8 @@ if(ENABLE_PARALLEL_DOMAIN)
${SOURCE_DIR}/parallel/ParMetisPartitioner.cc
${SOURCE_DIR}/parallel/PetscProblemStat.cc
${SOURCE_DIR}/parallel/PetscSolver.cc
${SOURCE_DIR}/parallel/PetscSolverGlobalMatrix.cc
${SOURCE_DIR}/parallel/PetscSolverSchur.cc
${SOURCE_DIR}/parallel/StdMpi.cc
${SOURCE_DIR}/parallel/ZoltanPartitioner.cc)
SET(COMPILEFLAGS "${COMPILEFLAGS} -DHAVE_PARALLEL_DOMAIN_AMDIS=1")
@@ -44,7 +44,7 @@ available_tags=" CXX F77"
# ### BEGIN LIBTOOL CONFIG
# Libtool was configured on host deimos104:
# Libtool was configured on host deimos102:
# Shell to use when invoking shell scripts.
SHELL="/bin/sh"
@@ -6760,7 +6760,7 @@ build_old_libs=`case $build_libtool_libs in yes) $echo no;; *) $echo yes;; esac`
# End:
# ### BEGIN LIBTOOL TAG CONFIG: CXX
# Libtool was configured on host deimos104:
# Libtool was configured on host deimos102:
# Shell to use when invoking shell scripts.
SHELL="/bin/sh"
@@ -7065,7 +7065,7 @@ include_expsyms=""
# ### BEGIN LIBTOOL TAG CONFIG: F77
# Libtool was configured on host deimos104:
# Libtool was configured on host deimos102:
# Shell to use when invoking shell scripts.
SHELL="/bin/sh"
@@ -23,6 +23,8 @@ if USE_PARALLEL_DOMAIN_AMDIS
parallel/ParMetisPartitioner.cc \
parallel/PetscProblemStat.cc \
parallel/PetscSolver.cc \
parallel/PetscSolverGlobalMatrix.cc \
parallel/PetscSolverSchur.cc \
parallel/StdMpi.cc
libamdis_la_CXXFLAGS += -DHAVE_PARALLEL_DOMAIN_AMDIS=1
AMDIS_INCLUDES += -I$(PARMETIS_DIR)
@@ -257,6 +259,8 @@ parallel/ParallelProblemStatBase.h \
parallel/ParMetisPartitioner.h \
parallel/PetscProblemStat.h \
parallel/PetscSolver.h \
parallel/PetscSolverGlobalMatrix.h \
parallel/PetscSolverSchur.h \
parallel/StdMpi.h \
parallel/ZoltanPartitioner.h \
reinit/BoundaryElementDist.h \
@@ -49,6 +49,8 @@ host_triplet = @host@
@USE_PARALLEL_DOMAIN_AMDIS_TRUE@ parallel/ParMetisPartitioner.cc \
@USE_PARALLEL_DOMAIN_AMDIS_TRUE@ parallel/PetscProblemStat.cc \
@USE_PARALLEL_DOMAIN_AMDIS_TRUE@ parallel/PetscSolver.cc \
@USE_PARALLEL_DOMAIN_AMDIS_TRUE@ parallel/PetscSolverGlobalMatrix.cc \
@USE_PARALLEL_DOMAIN_AMDIS_TRUE@ parallel/PetscSolverSchur.cc \
@USE_PARALLEL_DOMAIN_AMDIS_TRUE@ parallel/StdMpi.cc
@USE_PARALLEL_DOMAIN_AMDIS_TRUE@am__append_2 = -DHAVE_PARALLEL_DOMAIN_AMDIS=1
@@ -95,7 +97,8 @@ am__libamdis_la_SOURCES_DIST = parallel/CheckerPartitioner.cc \
parallel/MpiHelper.cc parallel/ParallelDebug.cc \
parallel/ParallelProblemStatBase.cc \
parallel/ParMetisPartitioner.cc parallel/PetscProblemStat.cc \
parallel/PetscSolver.cc parallel/StdMpi.cc \
parallel/PetscSolver.cc parallel/PetscSolverGlobalMatrix.cc \
parallel/PetscSolverSchur.cc parallel/StdMpi.cc \
parallel/ZoltanPartitioner.cc AdaptBase.cc AdaptInfo.cc \
AdaptInstationary.cc AdaptStationary.cc Assembler.cc \
BasisFunction.cc Boundary.cc BoundaryManager.cc Cholesky.cc \
@@ -142,6 +145,8 @@ am__libamdis_la_SOURCES_DIST = parallel/CheckerPartitioner.cc \
@USE_PARALLEL_DOMAIN_AMDIS_TRUE@ libamdis_la-ParMetisPartitioner.lo \
@USE_PARALLEL_DOMAIN_AMDIS_TRUE@ libamdis_la-PetscProblemStat.lo \
@USE_PARALLEL_DOMAIN_AMDIS_TRUE@ libamdis_la-PetscSolver.lo \
@USE_PARALLEL_DOMAIN_AMDIS_TRUE@ libamdis_la-PetscSolverGlobalMatrix.lo \
@USE_PARALLEL_DOMAIN_AMDIS_TRUE@ libamdis_la-PetscSolverSchur.lo \
@USE_PARALLEL_DOMAIN_AMDIS_TRUE@ libamdis_la-StdMpi.lo
@USE_ZOLTAN_TRUE@am__objects_2 = libamdis_la-ZoltanPartitioner.lo
am__objects_3 = $(am__objects_1) $(am__objects_2)
@@ -577,6 +582,8 @@ parallel/ParallelProblemStatBase.h \
parallel/ParMetisPartitioner.h \
parallel/PetscProblemStat.h \
parallel/PetscSolver.h \
parallel/PetscSolverGlobalMatrix.h \
parallel/PetscSolverSchur.h \
parallel/StdMpi.h \
parallel/ZoltanPartitioner.h \
reinit/BoundaryElementDist.h \
@@ -881,6 +888,8 @@ distclean-compile:
@AMDEP_TRUE@@am__include@ @am__quote@./$(DEPDIR)/libamdis_la-PeriodicBC.Plo@am__quote@
@AMDEP_TRUE@@am__include@ @am__quote@./$(DEPDIR)/libamdis_la-PetscProblemStat.Plo@am__quote@
@AMDEP_TRUE@@am__include@ @am__quote@./$(DEPDIR)/libamdis_la-PetscSolver.Plo@am__quote@
@AMDEP_TRUE@@am__include@ @am__quote@./$(DEPDIR)/libamdis_la-PetscSolverGlobalMatrix.Plo@am__quote@
@AMDEP_TRUE@@am__include@ @am__quote@./$(DEPDIR)/libamdis_la-PetscSolverSchur.Plo@am__quote@
@AMDEP_TRUE@@am__include@ @am__quote@./$(DEPDIR)/libamdis_la-PngWriter.Plo@am__quote@
@AMDEP_TRUE@@am__include@ @am__quote@./$(DEPDIR)/libamdis_la-PovrayWriter.Plo@am__quote@
@AMDEP_TRUE@@am__include@ @am__quote@./$(DEPDIR)/libamdis_la-ProblemImplicit.Plo@am__quote@
@@ -1048,6 +1057,20 @@ libamdis_la-PetscSolver.lo: parallel/PetscSolver.cc
@AMDEP_TRUE@@am__fastdepCXX_FALSE@ DEPDIR=$(DEPDIR) $(CXXDEPMODE) $(depcomp) @AMDEPBACKSLASH@
@am__fastdepCXX_FALSE@ $(LIBTOOL) --tag=CXX --mode=compile $(CXX) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(libamdis_la_CXXFLAGS) $(CXXFLAGS) -c -o libamdis_la-PetscSolver.lo `test -f 'parallel/PetscSolver.cc' || echo '$(srcdir)/'`parallel/PetscSolver.cc
libamdis_la-PetscSolverGlobalMatrix.lo: parallel/PetscSolverGlobalMatrix.cc
@am__fastdepCXX_TRUE@ if $(LIBTOOL) --tag=CXX --mode=compile $(CXX) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(libamdis_la_CXXFLAGS) $(CXXFLAGS) -MT libamdis_la-PetscSolverGlobalMatrix.lo -MD -MP -MF "$(DEPDIR)/libamdis_la-PetscSolverGlobalMatrix.Tpo" -c -o libamdis_la-PetscSolverGlobalMatrix.lo `test -f 'parallel/PetscSolverGlobalMatrix.cc' || echo '$(srcdir)/'`parallel/PetscSolverGlobalMatrix.cc; \
@am__fastdepCXX_TRUE@ then mv -f "$(DEPDIR)/libamdis_la-PetscSolverGlobalMatrix.Tpo" "$(DEPDIR)/libamdis_la-PetscSolverGlobalMatrix.Plo"; else rm -f "$(DEPDIR)/libamdis_la-PetscSolverGlobalMatrix.Tpo"; exit 1; fi
@AMDEP_TRUE@@am__fastdepCXX_FALSE@ source='parallel/PetscSolverGlobalMatrix.cc' object='libamdis_la-PetscSolverGlobalMatrix.lo' libtool=yes @AMDEPBACKSLASH@
@AMDEP_TRUE@@am__fastdepCXX_FALSE@ DEPDIR=$(DEPDIR) $(CXXDEPMODE) $(depcomp) @AMDEPBACKSLASH@
@am__fastdepCXX_FALSE@ $(LIBTOOL) --tag=CXX --mode=compile $(CXX) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(libamdis_la_CXXFLAGS) $(CXXFLAGS) -c -o libamdis_la-PetscSolverGlobalMatrix.lo `test -f 'parallel/PetscSolverGlobalMatrix.cc' || echo '$(srcdir)/'`parallel/PetscSolverGlobalMatrix.cc
libamdis_la-PetscSolverSchur.lo: parallel/PetscSolverSchur.cc
@am__fastdepCXX_TRUE@ if $(LIBTOOL) --tag=CXX --mode=compile $(CXX) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(libamdis_la_CXXFLAGS) $(CXXFLAGS) -MT libamdis_la-PetscSolverSchur.lo -MD -MP -MF "$(DEPDIR)/libamdis_la-PetscSolverSchur.Tpo" -c -o libamdis_la-PetscSolverSchur.lo `test -f 'parallel/PetscSolverSchur.cc' || echo '$(srcdir)/'`parallel/PetscSolverSchur.cc; \
@am__fastdepCXX_TRUE@ then mv -f "$(DEPDIR)/libamdis_la-PetscSolverSchur.Tpo" "$(DEPDIR)/libamdis_la-PetscSolverSchur.Plo"; else rm -f "$(DEPDIR)/libamdis_la-PetscSolverSchur.Tpo"; exit 1; fi
@AMDEP_TRUE@@am__fastdepCXX_FALSE@ source='parallel/PetscSolverSchur.cc' object='libamdis_la-PetscSolverSchur.lo' libtool=yes @AMDEPBACKSLASH@
@AMDEP_TRUE@@am__fastdepCXX_FALSE@ DEPDIR=$(DEPDIR) $(CXXDEPMODE) $(depcomp) @AMDEPBACKSLASH@
@am__fastdepCXX_FALSE@ $(LIBTOOL) --tag=CXX --mode=compile $(CXX) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(libamdis_la_CXXFLAGS) $(CXXFLAGS) -c -o libamdis_la-PetscSolverSchur.lo `test -f 'parallel/PetscSolverSchur.cc' || echo '$(srcdir)/'`parallel/PetscSolverSchur.cc
libamdis_la-StdMpi.lo: parallel/StdMpi.cc
@am__fastdepCXX_TRUE@ if $(LIBTOOL) --tag=CXX --mode=compile $(CXX) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(libamdis_la_CXXFLAGS) $(CXXFLAGS) -MT libamdis_la-StdMpi.lo -MD -MP -MF "$(DEPDIR)/libamdis_la-StdMpi.Tpo" -c -o libamdis_la-StdMpi.lo `test -f 'parallel/StdMpi.cc' || echo '$(srcdir)/'`parallel/StdMpi.cc; \
@am__fastdepCXX_TRUE@ then mv -f "$(DEPDIR)/libamdis_la-StdMpi.Tpo" "$(DEPDIR)/libamdis_la-StdMpi.Plo"; else rm -f "$(DEPDIR)/libamdis_la-StdMpi.Tpo"; exit 1; fi
@@ -55,6 +55,9 @@ namespace AMDiS {
using namespace boost::filesystem;
using namespace std;
const Flag MeshDistributor::BOUNDARY_SUBOBJ_SORTED = 0X01L;
const Flag MeshDistributor::BOUNDARY_EDGE_SCHUR = 0X02L;
inline bool cmpDofsByValue(const DegreeOfFreedom* dof1, const DegreeOfFreedom* dof2)
{
return (*dof1 < *dof2);
@@ -80,7 +83,8 @@ namespace AMDiS {
nMeshChangesAfterLastRepartitioning(0),
repartitioningCounter(0),
debugOutputDir(""),
lastMeshChangeIndex(0)
lastMeshChangeIndex(0),
createBoundaryDofFlag(0)
{
FUNCNAME("MeshDistributor::ParalleDomainBase()");
@@ -1637,6 +1641,50 @@ namespace AMDiS {
}
void MeshDistributor::createBoundaryDofs()
{
FUNCNAME("MeshDistributor::createBoundaryDofs()");
sendDofs.clear();
recvDofs.clear();
if (createBoundaryDofFlag.isSet(BOUNDARY_SUBOBJ_SORTED)) {
MSG("WITH BOUNDARY SUBOBJ SORTED!\n");
DofContainer dofs;
for (int geo = FACE; geo >= VERTEX; geo--) {
boundaryDofInfo.nGeoDofs[static_cast<GeoIndex>(geo)] = 0;
for (InteriorBoundary::iterator it(myIntBoundary); !it.end(); ++it) {
if (it->rankObj.subObj == geo) {
dofs.clear();
it->rankObj.el->getAllDofs(feSpace, it->rankObj, dofs);
boundaryDofInfo.nGeoDofs[static_cast<GeoIndex>(geo)] += dofs.size();
DofContainer &tmp = sendDofs[it.getRank()];
tmp.insert(tmp.end(), dofs.begin(), dofs.end());
}
}
}
for (int geo = FACE; geo >= VERTEX; geo--)
for (InteriorBoundary::iterator it(otherIntBoundary); !it.end(); ++it)
if (it->rankObj.subObj == geo)
it->rankObj.el->getAllDofs(feSpace, it->rankObj,
recvDofs[it.getRank()]);
} else {
MSG("GAAAAAAANZ NORMAL!\n");
for (InteriorBoundary::iterator it(myIntBoundary); !it.end(); ++it)
it->rankObj.el->getAllDofs(feSpace, it->rankObj,
sendDofs[it.getRank()]);
for (InteriorBoundary::iterator it(otherIntBoundary); !it.end(); ++it)
it->rankObj.el->getAllDofs(feSpace, it->rankObj,
recvDofs[it.getRank()]);
}
}
void MeshDistributor::removeMacroElements()
{
FUNCNAME("MeshDistributor::removeMacroElements()");
@@ -1662,9 +1710,6 @@ namespace AMDiS {
debug::createSortedDofs(mesh, elMap);
#endif
sendDofs.clear();
recvDofs.clear();
// === Get all DOFs in ranks partition. ===
@@ -1677,31 +1722,22 @@ namespace AMDiS {
// === Traverse interior boundaries and get all DOFs on them. ===
createBoundaryDofs();
for (InteriorBoundary::iterator it(myIntBoundary); !it.end(); ++it) {
DofContainer dofs;
it->rankObj.el->getAllDofs(feSpace, it->rankObj, dofs);
for (unsigned int i = 0; i < dofs.size(); i++)
sendDofs[it.getRank()].push_back(dofs[i]);
}
for (InteriorBoundary::iterator it(otherIntBoundary); !it.end(); ++it) {
DofContainer dofs;
it->rankObj.el->getAllDofs(feSpace, it->rankObj, dofs);
for (unsigned int i = 0; i < dofs.size(); i++) {
// All DOFs that must be received are DOFs not owned by rank and have
// therefore to be removed from the set 'rankDofs'.
for (RankToDofContainer::iterator recvIt = recvDofs.begin();
recvIt != recvDofs.end(); ++recvIt) {
for (DofContainer::iterator dofIt = recvIt->second.begin();
dofIt != recvIt->second.end(); ++dofIt) {
DofContainer::iterator eraseIt =
find(rankDofs.begin(), rankDofs.end(), dofs[i]);
find(rankDofs.begin(), rankDofs.end(), *dofIt);
if (eraseIt != rankDofs.end())
rankDofs.erase(eraseIt);
recvDofs[it.getRank()].push_back(dofs[i]);
}
}
// Get displacment for global rank DOF ordering and global DOF number.
nRankDofs = rankDofs.size();
mpi::getDofNumbering(mpiComm, nRankDofs, rstart, nOverallDofs);
@@ -44,7 +44,14 @@
namespace AMDiS {
using namespace std;
struct BoundaryDofInfo
{
map<GeoIndex, int> nGeoDofs;
};
class MeshDistributor
{
protected:
@@ -293,6 +300,11 @@ namespace AMDiS {
void check3dValidMesh();
void setBoundaryDofRequirement(Flag flag)
{
createBoundaryDofFlag = flag;
}
protected:
/** \brief
* Determines the interior boundaries, i.e. boundaries between ranks, and stores
@@ -306,6 +318,8 @@ namespace AMDiS {
void createBoundaryData();
void createBoundaryDofs();
/// Removes all macro elements from the mesh that are not part of ranks partition.
void removeMacroElements();
@@ -599,6 +613,15 @@ namespace AMDiS {
/// redistributed for the first time.
vector<MacroElement*> allMacroElements;
Flag createBoundaryDofFlag;
BoundaryDofInfo boundaryDofInfo;
public:
///
static const Flag BOUNDARY_SUBOBJ_SORTED;
static const Flag BOUNDARY_EDGE_SCHUR;
friend class ParallelDebug;
};
}
@@ -37,12 +37,13 @@ namespace AMDiS {
meshDistributor(NULL)
{}
virtual ~ParallelProblemStatBase() {}
void buildAfterCoarsen(AdaptInfo *adaptInfo, Flag flag,
bool assembleMatrix,
bool assembleVector);
void addToMeshDistributor(MeshDistributor& m);
virtual void addToMeshDistributor(MeshDistributor& m);
protected:
void processMemUsage(double& vm_usage, double& resident_set);
@@ -27,6 +27,8 @@
#include "Global.h"
#include "parallel/ParallelProblemStatBase.h"
#include "parallel/PetscSolver.h"
#include "parallel/PetscSolverGlobalMatrix.h"
#include "parallel/PetscSolverSchur.h"
namespace AMDiS {
@@ -58,6 +60,13 @@ namespace AMDiS {
delete petscSolver;
}
void addToMeshDistributor(MeshDistributor& m)
{
ParallelProblemStatBase::addToMeshDistributor(m);
meshDistributor->setBoundaryDofRequirement(petscSolver->getBoundaryDofRequirement());
}
void solve(AdaptInfo *adaptInfo, bool fixedMatrix = false);
protected:
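
The PetscProblemStat hunk above adds a small handshake between solver and mesh distributor: addToMeshDistributor() asks the PETSc solver what boundary-DOF bookkeeping it needs (PetscSolver::getBoundaryDofRequirement(), which the base class defaults to 0, as the following PetscSolver.h hunk shows) and forwards that flag to MeshDistributor::setBoundaryDofRequirement(); createBoundaryDofs() later branches on it. Below is a minimal standalone sketch of this flow; Flag is modeled here as a plain bitmask instead of the AMDiS Flag class, and the PetscSolverSchur override is an assumption for illustration, since that header is not shown expanded in this diff.

#include <iostream>

typedef unsigned long Flag;                   // simplified stand-in for the AMDiS Flag class

const Flag BOUNDARY_SUBOBJ_SORTED = 0x01L;    // values as declared in MeshDistributor.cc
const Flag BOUNDARY_EDGE_SCHUR    = 0x02L;

struct MeshDistributor {
  Flag createBoundaryDofFlag;
  MeshDistributor() : createBoundaryDofFlag(0) {}

  void setBoundaryDofRequirement(Flag flag) { createBoundaryDofFlag = flag; }

  void createBoundaryDofs() {
    // Mirrors the branch in MeshDistributor::createBoundaryDofs(): the
    // sub-object-sorted traversal (FACE down to VERTEX) only runs when requested.
    if (createBoundaryDofFlag & BOUNDARY_SUBOBJ_SORTED)
      std::cout << "collect boundary DOFs sorted by sub-object\n";
    else
      std::cout << "collect boundary DOFs in plain traversal order\n";
  }
};

struct PetscSolver {
  virtual ~PetscSolver() {}
  virtual Flag getBoundaryDofRequirement() { return 0; }   // base class: no special requirement
};

// Assumed override: the Schur back-end presumably asks for sorted boundary DOFs.
struct PetscSolverSchur : PetscSolver {
  Flag getBoundaryDofRequirement() {
    return BOUNDARY_SUBOBJ_SORTED | BOUNDARY_EDGE_SCHUR;
  }
};

int main() {
  MeshDistributor distributor;
  PetscSolverSchur solver;
  // What PetscProblemStat::addToMeshDistributor() now does after registering itself:
  distributor.setBoundaryDofRequirement(solver.getBoundaryDofRequirement());
  distributor.createBoundaryDofs();
  return 0;
}

With the base-class default of 0, a solver such as PetscSolverGlobalMatrix keeps the old plain traversal, so the sorted bookkeeping is only paid for when a back-end explicitly requests it.
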
(The diff of one file is collapsed here and not shown.)
@@ -69,6 +69,11 @@ namespace AMDiS {
/// Use PETSc to solve the linear system of equations
virtual void solvePetscMatrix(SystemVector &vec, AdaptInfo *adaptInfo) = 0;
virtual Flag getBoundaryDofRequirement()
{
return 0;
}
protected:
void printSolutionInfo(AdaptInfo* adaptInfo,
bool iterationCounter = true,
@@ -92,99 +97,6 @@ namespace AMDiS {
};
class PetscSolverGlobalMatrix : public PetscSolver
{
public:
PetscSolverGlobalMatrix()
: PetscSolver(),
d_nnz(NULL),
o_nnz(NULL),
lastMeshNnz(0),
zeroStartVector(false)
{
GET_PARAMETER(0, "parallel->use zero start vector", "%d", &zeroStartVector);
}
void fillPetscMatrix(Matrix<DOFMatrix*> *mat, SystemVector *vec);
void solvePetscMatrix(SystemVector &vec, AdaptInfo *adaptInfo);
protected:
/// Creates a new non zero pattern structure for the PETSc matrix.
void createPetscNnzStructure(Matrix<DOFMatrix*> *mat);
/// Takes a DOF matrix and sends the values to the global PETSc matrix.
void setDofMatrix(DOFMatrix* mat, int dispMult = 1,
int dispAddRow = 0, int dispAddCol = 0);
/// Takes a DOF vector and sends its values to a given PETSc vector.
void setDofVector(Vec& petscVec, DOFVector<double>* vec,
int disMult = 1, int dispAdd = 0, bool rankOnly = false);
protected:
/// Arrays definig the non zero pattern of Petsc's matrix.
int *d_nnz, *o_nnz;
/** \brief
* Stores the mesh change index of the mesh the nnz structure was created for.
* Therefore, if the mesh change index is higher than this value, we have to create
* a new nnz structure for PETSc matrices, because the mesh has been changed and
* therefore also the assembled matrix structure.
*/
int lastMeshNnz;
bool zeroStartVector;
};
#ifdef HAVE_PETSC_DEV
class PetscSolverSchur : public PetscSolver
{
public:
PetscSolverSchur()
: PetscSolver()
{}
void fillPetscMatrix(Matrix<DOFMatrix*> *mat, SystemVector *vec);
void solvePetscMatrix(SystemVector &vec, AdaptInfo *adaptInfo);
protected:
void updateDofData();
/// Takes a DOF matrix and sends the values to the global PETSc matrix.
void setDofMatrix(DOFMatrix* mat, int dispMult = 1,
int dispAddRow = 0, int dispAddCol = 0);
/// Takes a DOF vector and sends its values to a given PETSc vector.
void setDofVector(Vec& petscVec, DOFVector<double>* vec,
int disMult = 1, int dispAdd = 0, bool rankOnly = false);
protected:
int nBoundaryDofs;
int rStartBoundaryDofs;
int nOverallBoundaryDofs;
std::set<DegreeOfFreedom> boundaryDofs;
map<DegreeOfFreedom, DegreeOfFreedom> mapGlobalBoundaryDof;
int nInteriorDofs;
int rStartInteriorDofs;
int nOverallInteriorDofs;
std::set<DegreeOfFreedom> interiorDofs;
map<DegreeOfFreedom, DegreeOfFreedom> mapGlobalInteriorDof;
Mat matA11, matA12, matA21, matA22;
};
#endif
} // namespace AMDiS
#endif
(The diff of one file is collapsed here and not shown.)
// ============================================================================
// == ==
// == AMDiS - Adaptive multidimensional simulations ==
// == ==
// == http://www.amdis-fem.org ==
// == ==
// ============================================================================
//
// Software License for AMDiS
//
// Copyright (c) 2010 Dresden University of Technology
// All rights reserved.
// Authors: Simon Vey, Thomas Witkowski et al.
//
// This file is part of AMDiS
//
// See also license.opensource.txt in the distribution.
/** \file PetscSolverGlobalMatrix.h */
#ifndef AMDIS_PETSC_SOLVER_GLOBAL_MATRIX_H
#define AMDIS_PETSC_SOLVER_GLOBAL_MATRIX_H
#include "AMDiS_fwd.h"
#include "parallel/PetscSolver.h"
namespace AMDiS {
using namespace std;
class PetscSolverGlobalMatrix : public PetscSolver
{
public:
PetscSolverGlobalMatrix()
: PetscSolver(),
d_nnz(NULL),
o_nnz(NULL),
lastMeshNnz(0),
zeroStartVector(false)
{
GET_PARAMETER(0, "parallel->use zero start vector", "%d", &zeroStartVector);
}
void fillPetscMatrix(Matrix<DOFMatrix*> *mat, SystemVector *vec);
void solvePetscMatrix(SystemVector &vec, AdaptInfo *adaptInfo);
protected:
/// Creates a new non zero pattern structure for the PETSc matrix.
void createPetscNnzStructure(Matrix<DOFMatrix*> *mat);
/// Takes a DOF matrix and sends the values to the global PETSc matrix.
void setDofMatrix(DOFMatrix* mat, int dispMult = 1,
int dispAddRow = 0, int dispAddCol = 0);
/// Takes a DOF vector and sends its values to a given PETSc vector.
void setDofVector(Vec& petscVec, DOFVector<double>* vec,
int disMult = 1, int dispAdd = 0, bool rankOnly = false);
protected:
/// Arrays definig the non zero pattern of Petsc's matrix.
int *d_nnz, *o_nnz;
/** \brief
* Stores the mesh change index of the mesh the nnz structure was created for.
* Therefore, if the mesh change index is higher than this value, we have to create
* a new nnz structure for PETSc matrices, because the mesh has been changed and
* therefore also the assembled matrix structure.
*/
int lastMeshNnz;
bool zeroStartVector;
};
}
#endif
//
// Software License for AMDiS
//
// Copyright (c) 2010 Dresden University of Technology
// All rights reserved.
// Authors: Simon Vey, Thomas Witkowski et al.
//
// This file is part of AMDiS
//
// See also license.opensource.txt in the distribution.
#include "parallel/PetscSolverSchur.h"
#include "parallel/StdMpi.h"
#include "parallel/MpiHelper.h"
namespace AMDiS {
using namespace std;
#ifdef HAVE_PETSC_DEV
void PetscSolverSchur::updateDofData(int nComponents)
{
FUNCNAME("PetscSolverSchur::updateDofData()");
TEST_EXIT_DBG(meshDistributor)("No mesh distributor object defined!\n");
MPI::Intracomm& mpiComm = meshDistributor->getMpiComm();
typedef map<int, DofContainer> RankToDofContainer;
typedef map<DegreeOfFreedom, bool> DofIndexToBool;
boundaryDofs.clear();
std::set<DegreeOfFreedom> boundaryLocalDofs;
RankToDofContainer& sendDofs = meshDistributor->getSendDofs();
for (RankToDofContainer::iterator rankIt = sendDofs.begin();
rankIt != sendDofs.end(); ++rankIt) {
for (DofContainer::iterator dofIt = rankIt->second.begin();
dofIt != rankIt->second.end(); ++dofIt) {
boundaryLocalDofs.insert(**dofIt);
boundaryDofs.insert(meshDistributor->mapLocalToGlobal(**dofIt));
}
}
nBoundaryDofs = boundaryDofs.size();
mpi::getDofNumbering(mpiComm, nBoundaryDofs,
rStartBoundaryDofs, nOverallBoundaryDofs);
int counter = rStartBoundaryDofs;
mapGlobalBoundaryDof.clear();
for (std::set<DegreeOfFreedom>::iterator it = boundaryDofs.begin();
it != boundaryDofs.end(); ++it)
mapGlobalBoundaryDof[*it] = counter++;
std::set<DegreeOfFreedom> otherBoundaryLocalDofs;
RankToDofContainer& recvDofs = meshDistributor->getRecvDofs();
for (RankToDofContainer::iterator rankIt = recvDofs.begin();
rankIt != recvDofs.end(); ++rankIt)
for (DofContainer::iterator dofIt = rankIt->second.begin();
dofIt != rankIt->second.end(); ++dofIt)
otherBoundaryLocalDofs.insert(**dofIt);
interiorDofs.clear();
DofIndexToBool& isRankDof = meshDistributor->getIsRankDof();
for (DofIndexToBool::iterator dofIt = isRankDof.begin();
dofIt != isRankDof.end(); ++dofIt) {
if (dofIt->second &&
boundaryLocalDofs.count(dofIt->first) == 0 &&
otherBoundaryLocalDofs.count(dofIt->first) == 0)
interiorDofs.insert(meshDistributor->mapLocalToGlobal(dofIt->first));
}
nInteriorDofs = interiorDofs.size();
mpi::getDofNumbering(mpiComm, nInteriorDofs,
rStartInteriorDofs, nOverallInteriorDofs);
counter = rStartInteriorDofs;