Commit 713cf136 authored by Thomas Witkowski

Split the PETSc solver into PetscSolverGlobalMatrix and PetscSolverSchur.

parent 8881aea2
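
The commit splits the PETSc solver into two derived classes, PetscSolverGlobalMatrix and PetscSolverSchur, each compiled from its own source file (see the build-system changes below). A minimal standalone sketch of the resulting hierarchy, using simplified stand-in types: only the names PetscSolver, PetscSolverGlobalMatrix, PetscSolverSchur, getBoundaryDofRequirement and solvePetscMatrix are taken from this commit; the Flag typedef, the simplified signatures, the flag combination requested by the Schur solver and main() are illustrative assumptions.

#include <iostream>

typedef long Flag;                           // stand-in for AMDiS::Flag
const Flag BOUNDARY_SUBOBJ_SORTED = 0x01L;   // cf. the MeshDistributor constants below
const Flag BOUNDARY_EDGE_SCHUR    = 0x02L;

class PetscSolver {
public:
  virtual ~PetscSolver() {}
  // Tells the MeshDistributor which boundary DOF data the solver needs
  // (default of "none" assumed for this sketch).
  virtual Flag getBoundaryDofRequirement() { return 0; }
  virtual void solvePetscMatrix() = 0;       // signature simplified
};

// Assembles and solves one global PETSc matrix over all components.
class PetscSolverGlobalMatrix : public PetscSolver {
public:
  void solvePetscMatrix() { std::cout << "global matrix solve\n"; }
};

// Schur-complement variant; the flag combination it requests here is an
// assumption for illustration only.
class PetscSolverSchur : public PetscSolver {
public:
  Flag getBoundaryDofRequirement() {
    return BOUNDARY_SUBOBJ_SORTED | BOUNDARY_EDGE_SCHUR;
  }
  void solvePetscMatrix() { std::cout << "Schur complement solve\n"; }
};

int main() {
  PetscSolverSchur schur;
  std::cout << "boundary DOF flags requested: "
            << schur.getBoundaryDofRequirement() << std::endl;
  return 0;
}

In this commit the requirement flag is handed to the MeshDistributor in PetscProblemStat::addToMeshDistributor() via setBoundaryDofRequirement(), further down in this diff.
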
......@@ -206,6 +206,8 @@ if(ENABLE_PARALLEL_DOMAIN)
${SOURCE_DIR}/parallel/ParMetisPartitioner.cc
${SOURCE_DIR}/parallel/PetscProblemStat.cc
${SOURCE_DIR}/parallel/PetscSolver.cc
${SOURCE_DIR}/parallel/PetscSolverGlobalMatrix.cc
${SOURCE_DIR}/parallel/PetscSolverSchur.cc
${SOURCE_DIR}/parallel/StdMpi.cc
${SOURCE_DIR}/parallel/ZoltanPartitioner.cc)
SET(COMPILEFLAGS "${COMPILEFLAGS} -DHAVE_PARALLEL_DOMAIN_AMDIS=1")
......
......@@ -44,7 +44,7 @@ available_tags=" CXX F77"
# ### BEGIN LIBTOOL CONFIG
# Libtool was configured on host deimos104:
# Libtool was configured on host deimos102:
# Shell to use when invoking shell scripts.
SHELL="/bin/sh"
......@@ -6760,7 +6760,7 @@ build_old_libs=`case $build_libtool_libs in yes) $echo no;; *) $echo yes;; esac`
# End:
# ### BEGIN LIBTOOL TAG CONFIG: CXX
# Libtool was configured on host deimos104:
# Libtool was configured on host deimos102:
# Shell to use when invoking shell scripts.
SHELL="/bin/sh"
......@@ -7065,7 +7065,7 @@ include_expsyms=""
# ### BEGIN LIBTOOL TAG CONFIG: F77
# Libtool was configured on host deimos104:
# Libtool was configured on host deimos102:
# Shell to use when invoking shell scripts.
SHELL="/bin/sh"
......
......@@ -23,6 +23,8 @@ if USE_PARALLEL_DOMAIN_AMDIS
parallel/ParMetisPartitioner.cc \
parallel/PetscProblemStat.cc \
parallel/PetscSolver.cc \
parallel/PetscSolverGlobalMatrix.cc \
parallel/PetscSolverSchur.cc \
parallel/StdMpi.cc
libamdis_la_CXXFLAGS += -DHAVE_PARALLEL_DOMAIN_AMDIS=1
AMDIS_INCLUDES += -I$(PARMETIS_DIR)
......@@ -257,6 +259,8 @@ parallel/ParallelProblemStatBase.h \
parallel/ParMetisPartitioner.h \
parallel/PetscProblemStat.h \
parallel/PetscSolver.h \
parallel/PetscSolverGlobalMatrix.h \
parallel/PetscSolverSchur.h \
parallel/StdMpi.h \
parallel/ZoltanPartitioner.h \
reinit/BoundaryElementDist.h \
......
......@@ -49,6 +49,8 @@ host_triplet = @host@
@USE_PARALLEL_DOMAIN_AMDIS_TRUE@ parallel/ParMetisPartitioner.cc \
@USE_PARALLEL_DOMAIN_AMDIS_TRUE@ parallel/PetscProblemStat.cc \
@USE_PARALLEL_DOMAIN_AMDIS_TRUE@ parallel/PetscSolver.cc \
@USE_PARALLEL_DOMAIN_AMDIS_TRUE@ parallel/PetscSolverGlobalMatrix.cc \
@USE_PARALLEL_DOMAIN_AMDIS_TRUE@ parallel/PetscSolverSchur.cc \
@USE_PARALLEL_DOMAIN_AMDIS_TRUE@ parallel/StdMpi.cc
@USE_PARALLEL_DOMAIN_AMDIS_TRUE@am__append_2 = -DHAVE_PARALLEL_DOMAIN_AMDIS=1
......@@ -95,7 +97,8 @@ am__libamdis_la_SOURCES_DIST = parallel/CheckerPartitioner.cc \
parallel/MpiHelper.cc parallel/ParallelDebug.cc \
parallel/ParallelProblemStatBase.cc \
parallel/ParMetisPartitioner.cc parallel/PetscProblemStat.cc \
parallel/PetscSolver.cc parallel/StdMpi.cc \
parallel/PetscSolver.cc parallel/PetscSolverGlobalMatrix.cc \
parallel/PetscSolverSchur.cc parallel/StdMpi.cc \
parallel/ZoltanPartitioner.cc AdaptBase.cc AdaptInfo.cc \
AdaptInstationary.cc AdaptStationary.cc Assembler.cc \
BasisFunction.cc Boundary.cc BoundaryManager.cc Cholesky.cc \
......@@ -142,6 +145,8 @@ am__libamdis_la_SOURCES_DIST = parallel/CheckerPartitioner.cc \
@USE_PARALLEL_DOMAIN_AMDIS_TRUE@ libamdis_la-ParMetisPartitioner.lo \
@USE_PARALLEL_DOMAIN_AMDIS_TRUE@ libamdis_la-PetscProblemStat.lo \
@USE_PARALLEL_DOMAIN_AMDIS_TRUE@ libamdis_la-PetscSolver.lo \
@USE_PARALLEL_DOMAIN_AMDIS_TRUE@ libamdis_la-PetscSolverGlobalMatrix.lo \
@USE_PARALLEL_DOMAIN_AMDIS_TRUE@ libamdis_la-PetscSolverSchur.lo \
@USE_PARALLEL_DOMAIN_AMDIS_TRUE@ libamdis_la-StdMpi.lo
@USE_ZOLTAN_TRUE@am__objects_2 = libamdis_la-ZoltanPartitioner.lo
am__objects_3 = $(am__objects_1) $(am__objects_2)
......@@ -577,6 +582,8 @@ parallel/ParallelProblemStatBase.h \
parallel/ParMetisPartitioner.h \
parallel/PetscProblemStat.h \
parallel/PetscSolver.h \
parallel/PetscSolverGlobalMatrix.h \
parallel/PetscSolverSchur.h \
parallel/StdMpi.h \
parallel/ZoltanPartitioner.h \
reinit/BoundaryElementDist.h \
......@@ -881,6 +888,8 @@ distclean-compile:
@AMDEP_TRUE@@am__include@ @am__quote@./$(DEPDIR)/libamdis_la-PeriodicBC.Plo@am__quote@
@AMDEP_TRUE@@am__include@ @am__quote@./$(DEPDIR)/libamdis_la-PetscProblemStat.Plo@am__quote@
@AMDEP_TRUE@@am__include@ @am__quote@./$(DEPDIR)/libamdis_la-PetscSolver.Plo@am__quote@
@AMDEP_TRUE@@am__include@ @am__quote@./$(DEPDIR)/libamdis_la-PetscSolverGlobalMatrix.Plo@am__quote@
@AMDEP_TRUE@@am__include@ @am__quote@./$(DEPDIR)/libamdis_la-PetscSolverSchur.Plo@am__quote@
@AMDEP_TRUE@@am__include@ @am__quote@./$(DEPDIR)/libamdis_la-PngWriter.Plo@am__quote@
@AMDEP_TRUE@@am__include@ @am__quote@./$(DEPDIR)/libamdis_la-PovrayWriter.Plo@am__quote@
@AMDEP_TRUE@@am__include@ @am__quote@./$(DEPDIR)/libamdis_la-ProblemImplicit.Plo@am__quote@
......@@ -1048,6 +1057,20 @@ libamdis_la-PetscSolver.lo: parallel/PetscSolver.cc
@AMDEP_TRUE@@am__fastdepCXX_FALSE@ DEPDIR=$(DEPDIR) $(CXXDEPMODE) $(depcomp) @AMDEPBACKSLASH@
@am__fastdepCXX_FALSE@ $(LIBTOOL) --tag=CXX --mode=compile $(CXX) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(libamdis_la_CXXFLAGS) $(CXXFLAGS) -c -o libamdis_la-PetscSolver.lo `test -f 'parallel/PetscSolver.cc' || echo '$(srcdir)/'`parallel/PetscSolver.cc
libamdis_la-PetscSolverGlobalMatrix.lo: parallel/PetscSolverGlobalMatrix.cc
@am__fastdepCXX_TRUE@ if $(LIBTOOL) --tag=CXX --mode=compile $(CXX) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(libamdis_la_CXXFLAGS) $(CXXFLAGS) -MT libamdis_la-PetscSolverGlobalMatrix.lo -MD -MP -MF "$(DEPDIR)/libamdis_la-PetscSolverGlobalMatrix.Tpo" -c -o libamdis_la-PetscSolverGlobalMatrix.lo `test -f 'parallel/PetscSolverGlobalMatrix.cc' || echo '$(srcdir)/'`parallel/PetscSolverGlobalMatrix.cc; \
@am__fastdepCXX_TRUE@ then mv -f "$(DEPDIR)/libamdis_la-PetscSolverGlobalMatrix.Tpo" "$(DEPDIR)/libamdis_la-PetscSolverGlobalMatrix.Plo"; else rm -f "$(DEPDIR)/libamdis_la-PetscSolverGlobalMatrix.Tpo"; exit 1; fi
@AMDEP_TRUE@@am__fastdepCXX_FALSE@ source='parallel/PetscSolverGlobalMatrix.cc' object='libamdis_la-PetscSolverGlobalMatrix.lo' libtool=yes @AMDEPBACKSLASH@
@AMDEP_TRUE@@am__fastdepCXX_FALSE@ DEPDIR=$(DEPDIR) $(CXXDEPMODE) $(depcomp) @AMDEPBACKSLASH@
@am__fastdepCXX_FALSE@ $(LIBTOOL) --tag=CXX --mode=compile $(CXX) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(libamdis_la_CXXFLAGS) $(CXXFLAGS) -c -o libamdis_la-PetscSolverGlobalMatrix.lo `test -f 'parallel/PetscSolverGlobalMatrix.cc' || echo '$(srcdir)/'`parallel/PetscSolverGlobalMatrix.cc
libamdis_la-PetscSolverSchur.lo: parallel/PetscSolverSchur.cc
@am__fastdepCXX_TRUE@ if $(LIBTOOL) --tag=CXX --mode=compile $(CXX) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(libamdis_la_CXXFLAGS) $(CXXFLAGS) -MT libamdis_la-PetscSolverSchur.lo -MD -MP -MF "$(DEPDIR)/libamdis_la-PetscSolverSchur.Tpo" -c -o libamdis_la-PetscSolverSchur.lo `test -f 'parallel/PetscSolverSchur.cc' || echo '$(srcdir)/'`parallel/PetscSolverSchur.cc; \
@am__fastdepCXX_TRUE@ then mv -f "$(DEPDIR)/libamdis_la-PetscSolverSchur.Tpo" "$(DEPDIR)/libamdis_la-PetscSolverSchur.Plo"; else rm -f "$(DEPDIR)/libamdis_la-PetscSolverSchur.Tpo"; exit 1; fi
@AMDEP_TRUE@@am__fastdepCXX_FALSE@ source='parallel/PetscSolverSchur.cc' object='libamdis_la-PetscSolverSchur.lo' libtool=yes @AMDEPBACKSLASH@
@AMDEP_TRUE@@am__fastdepCXX_FALSE@ DEPDIR=$(DEPDIR) $(CXXDEPMODE) $(depcomp) @AMDEPBACKSLASH@
@am__fastdepCXX_FALSE@ $(LIBTOOL) --tag=CXX --mode=compile $(CXX) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(libamdis_la_CXXFLAGS) $(CXXFLAGS) -c -o libamdis_la-PetscSolverSchur.lo `test -f 'parallel/PetscSolverSchur.cc' || echo '$(srcdir)/'`parallel/PetscSolverSchur.cc
libamdis_la-StdMpi.lo: parallel/StdMpi.cc
@am__fastdepCXX_TRUE@ if $(LIBTOOL) --tag=CXX --mode=compile $(CXX) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(libamdis_la_CXXFLAGS) $(CXXFLAGS) -MT libamdis_la-StdMpi.lo -MD -MP -MF "$(DEPDIR)/libamdis_la-StdMpi.Tpo" -c -o libamdis_la-StdMpi.lo `test -f 'parallel/StdMpi.cc' || echo '$(srcdir)/'`parallel/StdMpi.cc; \
@am__fastdepCXX_TRUE@ then mv -f "$(DEPDIR)/libamdis_la-StdMpi.Tpo" "$(DEPDIR)/libamdis_la-StdMpi.Plo"; else rm -f "$(DEPDIR)/libamdis_la-StdMpi.Tpo"; exit 1; fi
......
......@@ -55,6 +55,9 @@ namespace AMDiS {
using namespace boost::filesystem;
using namespace std;
const Flag MeshDistributor::BOUNDARY_SUBOBJ_SORTED = 0X01L;
const Flag MeshDistributor::BOUNDARY_EDGE_SCHUR = 0X02L;
inline bool cmpDofsByValue(const DegreeOfFreedom* dof1, const DegreeOfFreedom* dof2)
{
return (*dof1 < *dof2);
......@@ -80,7 +83,8 @@ namespace AMDiS {
nMeshChangesAfterLastRepartitioning(0),
repartitioningCounter(0),
debugOutputDir(""),
lastMeshChangeIndex(0)
lastMeshChangeIndex(0),
createBoundaryDofFlag(0)
{
FUNCNAME("MeshDistributor::ParalleDomainBase()");
......@@ -1637,6 +1641,50 @@ namespace AMDiS {
}
void MeshDistributor::createBoundaryDofs()
{
FUNCNAME("MeshDistributor::createBoundaryDofs()");
sendDofs.clear();
recvDofs.clear();
if (createBoundaryDofFlag.isSet(BOUNDARY_SUBOBJ_SORTED)) {
MSG("WITH BOUNDARY SUBOBJ SORTED!\n");
DofContainer dofs;
for (int geo = FACE; geo >= VERTEX; geo--) {
boundaryDofInfo.nGeoDofs[static_cast<GeoIndex>(geo)] = 0;
for (InteriorBoundary::iterator it(myIntBoundary); !it.end(); ++it) {
if (it->rankObj.subObj == geo) {
dofs.clear();
it->rankObj.el->getAllDofs(feSpace, it->rankObj, dofs);
boundaryDofInfo.nGeoDofs[static_cast<GeoIndex>(geo)] += dofs.size();
DofContainer &tmp = sendDofs[it.getRank()];
tmp.insert(tmp.end(), dofs.begin(), dofs.end());
}
}
}
for (int geo = FACE; geo >= VERTEX; geo--)
for (InteriorBoundary::iterator it(otherIntBoundary); !it.end(); ++it)
if (it->rankObj.subObj == geo)
it->rankObj.el->getAllDofs(feSpace, it->rankObj,
recvDofs[it.getRank()]);
} else {
MSG("GAAAAAAANZ NORMAL!\n");
for (InteriorBoundary::iterator it(myIntBoundary); !it.end(); ++it)
it->rankObj.el->getAllDofs(feSpace, it->rankObj,
sendDofs[it.getRank()]);
for (InteriorBoundary::iterator it(otherIntBoundary); !it.end(); ++it)
it->rankObj.el->getAllDofs(feSpace, it->rankObj,
recvDofs[it.getRank()]);
}
}
void MeshDistributor::removeMacroElements()
{
FUNCNAME("MeshDistributor::removeMacroElements()");
......@@ -1662,9 +1710,6 @@ namespace AMDiS {
debug::createSortedDofs(mesh, elMap);
#endif
sendDofs.clear();
recvDofs.clear();
// === Get all DOFs in ranks partition. ===
......@@ -1677,31 +1722,22 @@ namespace AMDiS {
// === Traverse interior boundaries and get all DOFs on them. ===
createBoundaryDofs();
for (InteriorBoundary::iterator it(myIntBoundary); !it.end(); ++it) {
DofContainer dofs;
it->rankObj.el->getAllDofs(feSpace, it->rankObj, dofs);
for (unsigned int i = 0; i < dofs.size(); i++)
sendDofs[it.getRank()].push_back(dofs[i]);
}
for (InteriorBoundary::iterator it(otherIntBoundary); !it.end(); ++it) {
DofContainer dofs;
it->rankObj.el->getAllDofs(feSpace, it->rankObj, dofs);
for (unsigned int i = 0; i < dofs.size(); i++) {
// All DOFs that must be received are DOFs not owned by the rank and
// therefore have to be removed from the set 'rankDofs'.
for (RankToDofContainer::iterator recvIt = recvDofs.begin();
recvIt != recvDofs.end(); ++recvIt) {
for (DofContainer::iterator dofIt = recvIt->second.begin();
dofIt != recvIt->second.end(); ++dofIt) {
DofContainer::iterator eraseIt =
find(rankDofs.begin(), rankDofs.end(), dofs[i]);
find(rankDofs.begin(), rankDofs.end(), *dofIt);
if (eraseIt != rankDofs.end())
rankDofs.erase(eraseIt);
recvDofs[it.getRank()].push_back(dofs[i]);
}
}
// Get displacement for global rank DOF ordering and global DOF number.
nRankDofs = rankDofs.size();
mpi::getDofNumbering(mpiComm, nRankDofs, rstart, nOverallDofs);
......
......@@ -44,7 +44,14 @@
namespace AMDiS {
using namespace std;
struct BoundaryDofInfo
{
map<GeoIndex, int> nGeoDofs;
};
class MeshDistributor
{
protected:
......@@ -293,6 +300,11 @@ namespace AMDiS {
void check3dValidMesh();
void setBoundaryDofRequirement(Flag flag)
{
createBoundaryDofFlag = flag;
}
protected:
/** \brief
* Determines the interior boundaries, i.e. boundaries between ranks, and stores
......@@ -306,6 +318,8 @@ namespace AMDiS {
void createBoundaryData();
void createBoundaryDofs();
/// Removes all macro elements from the mesh that are not part of ranks partition.
void removeMacroElements();
......@@ -599,6 +613,15 @@ namespace AMDiS {
/// redistributed for the first time.
vector<MacroElement*> allMacroElements;
Flag createBoundaryDofFlag;
BoundaryDofInfo boundaryDofInfo;
public:
///
static const Flag BOUNDARY_SUBOBJ_SORTED;
static const Flag BOUNDARY_EDGE_SCHUR;
friend class ParallelDebug;
};
}
......
......@@ -37,12 +37,13 @@ namespace AMDiS {
meshDistributor(NULL)
{}
virtual ~ParallelProblemStatBase() {}
void buildAfterCoarsen(AdaptInfo *adaptInfo, Flag flag,
bool assembleMatrix,
bool assembleVector);
void addToMeshDistributor(MeshDistributor& m);
virtual void addToMeshDistributor(MeshDistributor& m);
protected:
void processMemUsage(double& vm_usage, double& resident_set);
......
......@@ -27,6 +27,8 @@
#include "Global.h"
#include "parallel/ParallelProblemStatBase.h"
#include "parallel/PetscSolver.h"
#include "parallel/PetscSolverGlobalMatrix.h"
#include "parallel/PetscSolverSchur.h"
namespace AMDiS {
......@@ -58,6 +60,13 @@ namespace AMDiS {
delete petscSolver;
}
void addToMeshDistributor(MeshDistributor& m)
{
ParallelProblemStatBase::addToMeshDistributor(m);
meshDistributor->setBoundaryDofRequirement(petscSolver->getBoundaryDofRequirement());
}
void solve(AdaptInfo *adaptInfo, bool fixedMatrix = false);
protected:
......
......@@ -18,15 +18,6 @@ namespace AMDiS {
using namespace std;
PetscErrorCode myKSPMonitor(KSP ksp, PetscInt iter, PetscReal rnorm, void *)
{
if (iter % 100 == 0 && MPI::COMM_WORLD.Get_rank() == 0)
cout << "[0] Petsc-Iteration " << iter << ": " << rnorm << endl;
return 0;
}
void PetscSolver::printSolutionInfo(AdaptInfo *adaptInfo,
bool iterationCounter,
bool residual)
......@@ -49,1001 +40,4 @@ namespace AMDiS {
}
}
void PetscSolverGlobalMatrix::fillPetscMatrix(Matrix<DOFMatrix*> *mat, SystemVector *vec)
{
FUNCNAME("PetscSolverGlobalMatrix::fillPetscMatrix()");
TEST_EXIT_DBG(meshDistributor)("No mesh distributor object defined!\n");
TEST_EXIT_DBG(mat)("No DOF matrix defined!\n");
TEST_EXIT_DBG(vec)("NO DOF vector defined!\n");
double wtime = MPI::Wtime();
int nComponents = mat->getNumRows();
int nRankRows = meshDistributor->getNumberRankDofs() * nComponents;
int nOverallRows = meshDistributor->getNumberOverallDofs() * nComponents;
// === Create PETSc vector (rhs, solution and a temporary vector). ===
VecCreate(PETSC_COMM_WORLD, &petscRhsVec);
VecSetSizes(petscRhsVec, nRankRows, nOverallRows);
VecSetType(petscRhsVec, VECMPI);
VecCreate(PETSC_COMM_WORLD, &petscSolVec);
VecSetSizes(petscSolVec, nRankRows, nOverallRows);
VecSetType(petscSolVec, VECMPI);
VecCreate(PETSC_COMM_WORLD, &petscTmpVec);
VecSetSizes(petscTmpVec, nRankRows, nOverallRows);
VecSetType(petscTmpVec, VECMPI);
int recvAllValues = 0;
int sendValue = static_cast<int>(meshDistributor->getLastMeshChangeIndex() != lastMeshNnz);
meshDistributor->getMpiComm().Allreduce(&sendValue, &recvAllValues, 1, MPI_INT, MPI_SUM);
if (!d_nnz || recvAllValues != 0) {
if (d_nnz) {
delete [] d_nnz;
d_nnz = NULL;
delete [] o_nnz;
o_nnz = NULL;
}
createPetscNnzStructure(mat);
lastMeshNnz = meshDistributor->getLastMeshChangeIndex();
}
// === Create PETSc matrix with the computed nnz data structure. ===
MatCreateMPIAIJ(PETSC_COMM_WORLD, nRankRows, nRankRows, nOverallRows, nOverallRows,
0, d_nnz, 0, o_nnz, &petscMatrix);
#if (DEBUG != 0)
MSG("Fill petsc matrix 1 needed %.5f seconds\n", MPI::Wtime() - wtime);
#endif
#if (DEBUG != 0)
int a, b;
MatGetOwnershipRange(petscMatrix, &a, &b);
TEST_EXIT(a == meshDistributor->getRstart() * nComponents)
("Wrong matrix ownership range!\n");
TEST_EXIT(b == meshDistributor->getRstart() * nComponents + nRankRows)
("Wrong matrix ownership range!\n");
#endif
// === Transfer values from DOF matrices to the PETSc matrix. ===
for (int i = 0; i < nComponents; i++)
for (int j = 0; j < nComponents; j++)
if ((*mat)[i][j])
setDofMatrix((*mat)[i][j], nComponents, i, j);
#if (DEBUG != 0)
MSG("Fill petsc matrix 2 needed %.5f seconds\n", MPI::Wtime() - wtime);
#endif
MatAssemblyBegin(petscMatrix, MAT_FINAL_ASSEMBLY);
MatAssemblyEnd(petscMatrix, MAT_FINAL_ASSEMBLY);
#if (DEBUG != 0)
MSG("Fill petsc matrix 3 needed %.5f seconds\n",
TIME_USED(MPI::Wtime(), wtime));
#endif
// === Transfer values from DOF vector to the PETSc vector. ===
for (int i = 0; i < nComponents; i++)
setDofVector(petscRhsVec, vec->getDOFVector(i), nComponents, i);
VecAssemblyBegin(petscRhsVec);
VecAssemblyEnd(petscRhsVec);
MSG("Fill petsc matrix needed %.5f seconds\n", MPI::Wtime() - wtime);
}
void PetscSolverGlobalMatrix::solvePetscMatrix(SystemVector &vec, AdaptInfo *adaptInfo)
{
FUNCNAME("PetscSolverGlobalMatrix::solvePetscMatrix()");
int nComponents = vec.getSize();
// === Set old solution to be initial guess for PETSc solver. ===
if (!zeroStartVector) {
VecSet(petscSolVec, 0.0);
for (int i = 0; i < nComponents; i++)
setDofVector(petscSolVec, vec.getDOFVector(i), nComponents, i, true);
VecAssemblyBegin(petscSolVec);
VecAssemblyEnd(petscSolVec);
}
// === Init PETSc solver. ===
KSPCreate(PETSC_COMM_WORLD, &solver);
KSPGetPC(solver, &pc);
KSPSetOperators(solver, petscMatrix, petscMatrix, SAME_NONZERO_PATTERN);
KSPSetTolerances(solver, 0.0, 1e-8, PETSC_DEFAULT, PETSC_DEFAULT);
KSPSetType(solver, KSPBCGS);
KSPMonitorSet(solver, myKSPMonitor, PETSC_NULL, 0);
KSPSetFromOptions(solver);
PCSetFromOptions(pc);
// Do not delete the solution vector, use it for the initial guess.
if (!zeroStartVector)
KSPSetInitialGuessNonzero(solver, PETSC_TRUE);
// Run the PETSc solver.
KSPSolve(solver, petscRhsVec, petscSolVec);
// === Transfer values from PETSc's solution vectors to the DOF vectors. ===
int nRankDofs = meshDistributor->getNumberRankDofs();
PetscScalar *vecPointer;
VecGetArray(petscSolVec, &vecPointer);
for (int i = 0; i < nComponents; i++) {
DOFVector<double> &dofvec = *(vec.getDOFVector(i));
for (int j = 0; j < nRankDofs; j++)
dofvec[meshDistributor->mapLocalToDofIndex(j)] =
vecPointer[j * nComponents + i];
}
VecRestoreArray(petscSolVec, &vecPointer);
// === Synchronize DOFs at common DOFs, i.e., DOFs that correspond to ===
// === more than one partition. ===
meshDistributor->synchVector(vec);
// Print iteration counter and residual norm of the solution.
printSolutionInfo(adaptInfo);
// === Destroy PETSc's variables. ===
MatDestroy(petscMatrix);
VecDestroy(petscRhsVec);
VecDestroy(petscSolVec);
VecDestroy(petscTmpVec);
KSPDestroy(solver);
}
void PetscSolverGlobalMatrix::setDofMatrix(DOFMatrix* mat, int dispMult,
int dispAddRow, int dispAddCol)
{
FUNCNAME("PetscSolverGlobalMatrix::setDofMatrix()");
TEST_EXIT(mat)("No DOFMatrix!\n");
using mtl::tag::row; using mtl::tag::nz; using mtl::begin; using mtl::end;
namespace traits= mtl::traits;
typedef DOFMatrix::base_matrix_type Matrix;
traits::col<Matrix>::type col(mat->getBaseMatrix());
traits::const_value<Matrix>::type value(mat->getBaseMatrix());
typedef traits::range_generator<row, Matrix>::type cursor_type;
typedef traits::range_generator<nz, cursor_type>::type icursor_type;
vector<int> cols;
vector<double> values;
cols.reserve(300);
values.reserve(300);
vector<int> globalCols;
// === Traverse all rows of the dof matrix and insert row wise the values ===
// === to the PETSc matrix. ===
for (cursor_type cursor = begin<row>(mat->getBaseMatrix()),
cend = end<row>(mat->getBaseMatrix()); cursor != cend; ++cursor) {
// Global index of the current row DOF.
int globalRowDof = meshDistributor->mapLocalToGlobal(*cursor);
// Test if the current row DOF is a periodic DOF.
bool periodicRow = meshDistributor->isPeriodicDof(globalRowDof);
if (!periodicRow) {
// === Row DOF index is not periodic. ===
// Calculate PETSc row index.
int rowIndex = globalRowDof * dispMult + dispAddRow;
cols.clear();
values.clear();
for (icursor_type icursor = begin<nz>(cursor), icend = end<nz>(cursor);
icursor != icend; ++icursor) {
// Global index of the current column index.
int globalColDof = meshDistributor->mapLocalToGlobal(col(*icursor));
// Test if the current col dof is a periodic dof.
bool periodicCol = meshDistributor->isPeriodicDof(globalColDof);
// Calculate PETSc col index.
int colIndex = globalColDof * dispMult + dispAddCol;
// Ignore all zero entries, except if it is a diagonal entry.
if (value(*icursor) == 0.0 && rowIndex != colIndex)
continue;
if (!periodicCol) {
// Calculate the exact position of the column index in the PETSc matrix.
cols.push_back(colIndex);
values.push_back(value(*icursor));
} else {
// === Row index is not periodic, but column index is. ===
// Create set of all periodic associations of the column index.
std::set<int> perAsc;
std::set<int>& perColAsc =
meshDistributor->getPerDofAssociations(globalColDof);
for (std::set<int>::iterator it = perColAsc.begin();
it != perColAsc.end(); ++it)
if (*it >= -3)
perAsc.insert(*it);
// Scale value to the number of periodic associations of the column index.
double scaledValue =
value(*icursor) * pow(0.5, static_cast<double>(perAsc.size()));
// === Create set of all matrix column indices due to the periodic ===
// === associations of the column DOF index. ===
vector<int> newCols;