
Commit f1724e2f authored by Thomas Witkowski

Before merging SubDomainSolver and PetscSolver together.

parent 5671aaf5
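
For orientation, the hunks below change the PetscSolver interface so that the caller passes communicators and DOF mappings explicitly: setMeshDistributor() now takes a global and a local MPI::Intracomm, setDofMapping() takes an interior map plus an optional coarse-space map, and several members are renamed (mpiComm/mpiSelfComm to mpiCommGlobal/mpiCommLocal, petscMatrix to matIntInt, petscRhsVec to rhsInterior, solver/pc to kspInterior/pcInterior, dofMap to interiorMap). A rough usage sketch of the reworked interface, pieced together from the hunks below; the surrounding variables and the concrete solver subclass are illustrative, not part of the commit:

// Illustrative wiring only; petscSolver points to some concrete subclass
// such as PetscSolverGlobalBlockMatrix or the FETI-DP solver.
petscSolver->setMeshDistributor(meshDistributor,
                                meshDistributor->getMpiComm(),   // global communicator
                                PETSC_COMM_SELF);                // local communicator
petscSolver->setDofMapping(&(meshDistributor->getDofMap()));     // coarse map defaults to NULL
petscSolver->fillPetscMatrix(systemMatrix);
petscSolver->solvePetscMatrix(solution, adaptInfo);              // hypothetical SystemVector / AdaptInfo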
@@ -87,7 +87,10 @@ namespace AMDiS {
double wtime = MPI::Wtime();
if (createMatrixData) {
petscSolver->setMeshDistributor(meshDistributor);
petscSolver->setMeshDistributor(meshDistributor,
meshDistributor->getMpiComm(),
PETSC_COMM_SELF);
petscSolver->setDofMapping(&(meshDistributor->getDofMap()));
petscSolver->fillPetscMatrix(systemMatrix);
}
@@ -10,17 +10,21 @@
// See also license.opensource.txt in the distribution.
#include "AMDiS.h"
#include "parallel/PetscSolver.h"
#include "parallel/StdMpi.h"
#include "parallel/MpiHelper.h"
namespace AMDiS {
using namespace std;
PetscSolver::PetscSolver()
: meshDistributor(NULL),
dofMap(NULL),
subdomainLevel(0),
interiorMap(NULL),
coarseSpaceMap(NULL),
mpiRank(-1),
kspPrefix(""),
removeRhsNullSpace(false)
@@ -34,26 +38,49 @@ namespace AMDiS {
}
void PetscSolver::printSolutionInfo(AdaptInfo *adaptInfo,
bool iterationCounter,
bool residual)
void PetscSolver::setDofMapping(ParallelDofMapping *interiorDofs,
ParallelDofMapping *coarseDofs)
{
FUNCNAME("PetscSolver::printSolutionInfo()");
interiorMap = interiorDofs;
coarseSpaceMap = coarseDofs;
if (mpiCommLocal.Get_size() == 1) {
rStartInterior = 0;
nGlobalOverallInterior = interiorMap->getOverallDofs();
} else {
int groupRowsInterior = 0;
if (mpiCommLocal.Get_rank() == 0)
groupRowsInterior = interiorMap->getOverallDofs();
mpi::getDofNumbering(mpiCommGlobal, groupRowsInterior,
rStartInterior, nGlobalOverallInterior);
int tmp = 0;
if (mpiCommLocal.Get_rank() == 0)
tmp = rStartInterior;
if (iterationCounter) {
int iterations = 0;
KSPGetIterationNumber(solver, &iterations);
MSG(" Number of iterations: %d\n", iterations);
adaptInfo->setSolverIterations(iterations);
mpiCommLocal.Allreduce(&tmp, &rStartInterior, 1, MPI_INT, MPI_SUM);
}
}
if (residual) {
double norm = 0.0;
MatMult(petscMatrix, petscSolVec, petscTmpVec);
VecAXPY(petscTmpVec, -1.0, petscRhsVec);
VecNorm(petscTmpVec, NORM_2, &norm);
MSG(" Residual norm: %e\n", norm);
void PetscSolver::solve(Vec &rhs, Vec &sol)
{
FUNCNAME("PetscSolver::solve()");
PetscErrorCode solverError = KSPSolve(kspInterior, rhs, sol);
if (solverError != 0) {
AMDiS::finalize();
exit(-1);
}
}
void PetscSolver::solveGlobal(Vec &rhs, Vec &sol)
{
FUNCNAME("PetscSolver::solveGlobal()");
ERROR_EXIT("Not implemented!\n");
}
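
In the else branch of setDofMapping() above, each local group determines the start offset of its interior rows: only rank 0 of mpiCommLocal contributes the group's DOF count to a numbering over mpiCommGlobal, and the resulting offset is then handed to every rank of the group through a SUM-Allreduce of a buffer that is zero except on the group root. A stand-alone sketch of that pattern, assuming the MPI C++ bindings used in this code and approximating mpi::getDofNumbering (whose exact semantics are not shown here) by a prefix Scan plus an Allreduce:

// Sketch only; groupRows is nonzero only on the local group's root rank.
int groupRows = (mpiCommLocal.Get_rank() == 0) ? interiorMap->getOverallDofs() : 0;

// Stand-in for mpi::getDofNumbering: exclusive prefix sum and global total.
int rStart = 0, nGlobalOverall = 0;
mpiCommGlobal.Scan(&groupRows, &rStart, 1, MPI_INT, MPI_SUM);
rStart -= groupRows;                      // first interior row owned by this group
mpiCommGlobal.Allreduce(&groupRows, &nGlobalOverall, 1, MPI_INT, MPI_SUM);

// Copy the root's offset to all ranks of the local group.
int tmp = (mpiCommLocal.Get_rank() == 0) ? rStart : 0;
mpiCommLocal.Allreduce(&tmp, &rStart, 1, MPI_INT, MPI_SUM);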
@@ -63,13 +90,13 @@ namespace AMDiS {
FUNCNAME("PetscSolver::copyVec()");
IS originIs, destIs;
ISCreateGeneral(mpiComm,
ISCreateGeneral(mpiCommGlobal,
originIndex.size(),
&(originIndex[0]),
PETSC_USE_POINTER,
&originIs);
ISCreateGeneral(mpiComm,
ISCreateGeneral(mpiCommGlobal,
destIndex.size(),
&(destIndex[0]),
PETSC_USE_POINTER,
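The copyVec() hunk above is cut off after the ISCreateGeneral() calls. For reference, the standard PETSc idiom for copying between two vectors through such index sets is a VecScatter; this is the generic pattern, not necessarily the exact remainder of the commit's implementation (vecOrigin and vecDest are placeholder names):

// Copy the entries selected by originIs in vecOrigin into the positions
// selected by destIs in vecDest.
VecScatter scatter;
VecScatterCreate(vecOrigin, originIs, vecDest, destIs, &scatter);
VecScatterBegin(scatter, vecOrigin, vecDest, INSERT_VALUES, SCATTER_FORWARD);
VecScatterEnd(scatter, vecOrigin, vecDest, INSERT_VALUES, SCATTER_FORWARD);
VecScatterDestroy(&scatter);
ISDestroy(&originIs);
ISDestroy(&destIs);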
@@ -50,15 +50,24 @@ namespace AMDiS {
virtual ~PetscSolver() {}
void setMeshDistributor(MeshDistributor *m)
void setMeshDistributor(MeshDistributor *m,
MPI::Intracomm mpiComm0,
MPI::Intracomm mpiComm1)
{
meshDistributor = m;
dofMap = &(meshDistributor->getDofMap());
mpiRank = meshDistributor->getMpiRank();
mpiComm = meshDistributor->getMpiComm();
mpiSelfComm = PETSC_COMM_SELF;
mpiCommGlobal = mpiComm0;
mpiCommLocal = mpiComm1;
mpiRank = mpiCommGlobal.Get_rank();
}
void setLevel(int l)
{
subdomainLevel = l;
}
void setDofMapping(ParallelDofMapping *interiorDofs,
ParallelDofMapping *coarseDofs = NULL);
/** \brief
* Create a PETSc matrix. The given DOF matrices are used to create the nnz
* structure of the PETSc matrix and the values are transferred to it.
@@ -77,6 +86,10 @@ namespace AMDiS {
/// Use PETSc to solve the linear system of equations
virtual void solvePetscMatrix(SystemVector &vec, AdaptInfo *adaptInfo) = 0;
virtual void solve(Vec &rhs, Vec &sol);
virtual void solveGlobal(Vec &rhs, Vec &sol);
/// Destroys all matrix data structures.
virtual void destroyMatrixData() = 0;
@@ -90,12 +103,12 @@ namespace AMDiS {
KSP getSolver()
{
return solver;
return kspInterior;
}
PC getPc()
{
return pc;
return pcInterior;
}
void setKspPrefix(std::string s)
@@ -108,11 +121,63 @@ namespace AMDiS {
removeRhsNullSpace = b;
}
protected:
void printSolutionInfo(AdaptInfo* adaptInfo,
bool iterationCounter = true,
bool residual = true);
inline bool isCoarseSpace(const FiniteElemSpace *feSpace,
DegreeOfFreedom dof)
{
FUNCNAME("SubDomainSolver::isCoarseSpace()");
TEST_EXIT_DBG(coarseSpaceMap)
("Subdomain solver does not contain a coarse space!\n");
return (*coarseSpaceMap)[feSpace].isSet(dof);
}
inline Vec& getRhsCoarseSpace()
{
FUNCNAME("SubDomainSolver::getRhsCoarseSpace()");
TEST_EXIT_DBG(coarseSpaceMap)
("Subdomain solver does not contain a coarse space!\n");
return rhsCoarseSpace;
}
inline Vec& getRhsInterior()
{
return rhsInterior;
}
inline Mat& getMatIntInt()
{
return matIntInt;
}
inline Mat& getMatCoarseCoarse()
{
FUNCNAME("SubDomainSolver::getMatCoarseCoarse()");
TEST_EXIT_DBG(coarseSpaceMap)
("Subdomain solver does not contain a coarse space!\n");
return matCoarseCoarse;
}
inline Mat& getMatIntCoarse()
{
FUNCNAME("SubDomainSolver::getMatIntCoarse()");
TEST_EXIT_DBG(coarseSpaceMap)
("Subdomain solver does not contain a coarse space!\n");
return matIntCoarse;
}
inline Mat& getMatCoarseInt()
{
FUNCNAME("SubDomainSolver::getMatCoarseInt()");
TEST_EXIT_DBG(coarseSpaceMap)
("Subdomain solver does not contain a coarse space!\n");
return matCoarseInt;
}
protected:
/** \brief
* Copies between two PETSc vectors by using different index sets for the
* origin and the destination vectors.
@@ -140,26 +205,36 @@ namespace AMDiS {
protected:
MeshDistributor *meshDistributor;
ParallelDofMapping *dofMap;
int subdomainLevel;
int rStartInterior;
int nGlobalOverallInterior;
ParallelDofMapping *interiorMap;
ParallelDofMapping* coarseSpaceMap;
int mpiRank;
MPI::Intracomm mpiComm;
MPI::Intracomm mpiCommGlobal;
MPI::Intracomm mpiSelfComm;
MPI::Intracomm mpiCommLocal;
/// PETSc's matrix structure.
Mat petscMatrix;
Mat matIntInt, matCoarseCoarse, matIntCoarse, matCoarseInt;
/// PETSc's vector structures for the rhs vector, the solution vector and a
/// temporary vector for calculating the final residual.
Vec petscRhsVec, petscSolVec, petscTmpVec;
Vec rhsInterior;
Vec rhsCoarseSpace;
/// PETSc solver object
KSP solver;
KSP kspInterior;
/// PETSc preconditioner object
PC pc;
PC pcInterior;
/// KSP database prefix
string kspPrefix;
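The coarse-space accessors added to PetscSolver above (isCoarseSpace(), getMatIntInt(), getMatCoarseCoarse(), getMatIntCoarse(), getMatCoarseInt()) suggest how assembly code is meant to route matrix entries once interior and coarse DOFs live in separate blocks. A hypothetical illustration of that routing; the row/column variables and the value are made up and not part of the commit:

// Pick the block according to whether the row and column DOFs are coarse.
bool rowCoarse = isCoarseSpace(rowFeSpace, rowDof);
bool colCoarse = isCoarseSpace(colFeSpace, colDof);
Mat &block = rowCoarse
  ? (colCoarse ? getMatCoarseCoarse() : getMatCoarseInt())
  : (colCoarse ? getMatIntCoarse()    : getMatIntInt());
MatSetValue(block, globalRowIndex, globalColIndex, value, ADD_VALUES);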
@@ -233,12 +233,13 @@ namespace AMDiS {
MeshLevelData& levelData = meshDistributor->getMeshLevelData();
if (subDomainSolver == NULL) {
subDomainSolver = new SubDomainSolver();
if (meshLevel == 0) {
subDomainSolver =
new SubDomainSolver(meshDistributor, mpiComm, mpiSelfComm);
subDomainSolver->setMeshDistributor(meshDistributor,
mpiCommGlobal, mpiCommLocal);
} else {
subDomainSolver =
new SubDomainSolver(meshDistributor,
subDomainSolver->setMeshDistributor(meshDistributor,
levelData.getMpiComm(meshLevel - 1),
levelData.getMpiComm(meshLevel));
subDomainSolver->setLevel(meshLevel);
@@ -350,7 +351,7 @@ namespace AMDiS {
if (levelData.getMpiComm(1).Get_rank() == 0)
groupRowsInterior = localDofMap.getOverallDofs();
mpi::getDofNumbering(mpiComm, groupRowsInterior,
mpi::getDofNumbering(mpiCommGlobal, groupRowsInterior,
rStartInterior, nGlobalOverallInterior);
int tmp = 0;
@@ -383,7 +384,7 @@ namespace AMDiS {
}
// In the multilevel case, inform the subdomain solver about the coarse space.
subDomainSolver->setDofMapping(&primalDofMap, &localDofMap);
subDomainSolver->setDofMapping(&localDofMap, &primalDofMap);
}
@@ -463,7 +464,7 @@ namespace AMDiS {
map<int, std::set<DegreeOfFreedom> > sdRankDofs;
if (meshLevel > 0) {
StdMpi<vector<int> > stdMpi(mpiComm);
StdMpi<vector<int> > stdMpi(mpiCommGlobal);
for (DofComm::Iterator it(meshDistributor->getDofComm().getRecvDofs(),
meshLevel, feSpace);
@@ -645,7 +646,7 @@ namespace AMDiS {
// === Create distributed matrix for Lagrange constraints. ===
MatCreateMPIAIJ(mpiComm,
MatCreateMPIAIJ(mpiCommGlobal,
lagrangeMap.getRankDofs(), localDofMap.getRankDofs(),
lagrangeMap.getOverallDofs(), nGlobalOverallInterior,
2, PETSC_NULL, 2, PETSC_NULL,
@@ -705,16 +706,16 @@ namespace AMDiS {
schurPrimalData.subSolver = subDomainSolver;
VecCreateMPI(mpiComm,
VecCreateMPI(mpiCommGlobal,
localDofMap.getRankDofs(),
nGlobalOverallInterior,
&(schurPrimalData.tmp_vec_b));
VecCreateMPI(mpiComm,
VecCreateMPI(mpiCommGlobal,
primalDofMap.getRankDofs(),
primalDofMap.getOverallDofs(),
&(schurPrimalData.tmp_vec_primal));
MatCreateShell(mpiComm,
MatCreateShell(mpiCommGlobal,
primalDofMap.getRankDofs(),
primalDofMap.getRankDofs(),
primalDofMap.getOverallDofs(),
@@ -724,7 +725,7 @@ namespace AMDiS {
MatShellSetOperation(mat_schur_primal, MATOP_MULT,
(void(*)(void))petscMultMatSchurPrimal);
KSPCreate(mpiComm, &ksp_schur_primal);
KSPCreate(mpiCommGlobal, &ksp_schur_primal);
KSPSetOperators(ksp_schur_primal, mat_schur_primal, mat_schur_primal, SAME_NONZERO_PATTERN);
KSPSetOptionsPrefix(ksp_schur_primal, "schur_primal_");
KSPSetType(ksp_schur_primal, KSPGMRES);
@@ -742,7 +743,7 @@ namespace AMDiS {
int nRowsRankB = localDofMap.getRankDofs();
Mat matBPi;
MatCreateMPIAIJ(mpiComm,
MatCreateMPIAIJ(mpiCommGlobal,
nRowsRankB, nRowsRankPrimal,
nGlobalOverallInterior, nRowsOverallPrimal,
30, PETSC_NULL, 30, PETSC_NULL, &matBPi);
@@ -810,7 +811,7 @@ namespace AMDiS {
MatGetInfo(mat_schur_primal, MAT_GLOBAL_SUM, &minfo);
MSG("Schur primal matrix nnz = %f\n", minfo.nz_used);
KSPCreate(mpiComm, &ksp_schur_primal);
KSPCreate(mpiCommGlobal, &ksp_schur_primal);
KSPSetOperators(ksp_schur_primal, mat_schur_primal, mat_schur_primal,
SAME_NONZERO_PATTERN);
KSPSetOptionsPrefix(ksp_schur_primal, "schur_primal_");
@@ -853,20 +854,20 @@ namespace AMDiS {
fetiData.subSolver = subDomainSolver;
fetiData.ksp_schur_primal = &ksp_schur_primal;
VecCreateMPI(mpiComm,
VecCreateMPI(mpiCommGlobal,
localDofMap.getRankDofs(),
nGlobalOverallInterior,
&(fetiData.tmp_vec_b));
VecCreateMPI(mpiComm,
VecCreateMPI(mpiCommGlobal,
lagrangeMap.getRankDofs(),
lagrangeMap.getOverallDofs(),
&(fetiData.tmp_vec_lagrange));
VecCreateMPI(mpiComm,
VecCreateMPI(mpiCommGlobal,
primalDofMap.getRankDofs(),
primalDofMap.getOverallDofs(),
&(fetiData.tmp_vec_primal));
MatCreateShell(mpiComm,
MatCreateShell(mpiCommGlobal,
lagrangeMap.getRankDofs(),
lagrangeMap.getRankDofs(),
lagrangeMap.getOverallDofs(),
@@ -875,7 +876,7 @@ namespace AMDiS {
MatShellSetOperation(mat_feti, MATOP_MULT, (void(*)(void))petscMultMatFeti);
KSPCreate(mpiComm, &ksp_feti);
KSPCreate(mpiCommGlobal, &ksp_feti);
KSPSetOperators(ksp_feti, mat_feti, mat_feti, SAME_NONZERO_PATTERN);
KSPSetOptionsPrefix(ksp_feti, "feti_");
KSPSetType(ksp_feti, KSPGMRES);
@@ -913,7 +914,7 @@ namespace AMDiS {
fetiDirichletPreconData.mat_duals_interior = &mat_duals_interior;
fetiDirichletPreconData.ksp_interior = &ksp_interior;
VecCreateMPI(mpiComm,
VecCreateMPI(mpiCommGlobal,
localDofMap.getRankDofs(),
nGlobalOverallInterior,
&(fetiDirichletPreconData.tmp_vec_b));
@@ -958,7 +959,7 @@ namespace AMDiS {
}
}
VecCreateMPI(mpiComm,
VecCreateMPI(mpiCommGlobal,
localDofMap.getRankDofs(),
localDofMap.getOverallDofs(),
&(fetiLumpedPreconData.tmp_vec_b));
@@ -1343,18 +1344,18 @@ namespace AMDiS {
// Some temporary vectors.
Vec tmp_b0, tmp_b1, tmp_lagrange0, tmp_primal0, tmp_primal1;
VecCreateMPI(mpiComm,
VecCreateMPI(mpiCommGlobal,
localDofMap.getRankDofs(),
nGlobalOverallInterior,
&tmp_b0);
VecCreateMPI(mpiComm,
VecCreateMPI(mpiCommGlobal,
localDofMap.getRankDofs(),
nGlobalOverallInterior,
&tmp_b1);
VecCreateMPI(mpiComm,
VecCreateMPI(mpiCommGlobal,
primalDofMap.getRankDofs(),
primalDofMap.getOverallDofs(), &tmp_primal0);
VecCreateMPI(mpiComm,
VecCreateMPI(mpiCommGlobal,
primalDofMap.getRankDofs(),
primalDofMap.getOverallDofs(), &tmp_primal1);
@@ -21,14 +21,14 @@ namespace AMDiS {
FUNCNAME("PetscSolverGlobalBlockMatrix::fillPetscMatrix()");
TEST_EXIT_DBG(meshDistributor)("No mesh distributor object defined!\n");
TEST_EXIT_DBG(dofMap)("No parallel mapping object defined!\n");
TEST_EXIT_DBG(interiorMap)("No parallel mapping object defined!\n");
TEST_EXIT_DBG(mat)("No DOF matrix defined!\n");
double wtime = MPI::Wtime();
const FiniteElemSpace *feSpace = meshDistributor->getFeSpace(0);
nComponents = mat->getNumRows();
int nRankRows = (*dofMap)[feSpace].nRankDofs;
int nOverallRows = (*dofMap)[feSpace].nOverallDofs;
int nRankRows = (*interiorMap)[feSpace].nRankDofs;
int nOverallRows = (*interiorMap)[feSpace].nOverallDofs;
#if (DEBUG != 0)
MSG("Fill petsc matrix 1 needed %.5f seconds\n", MPI::Wtime() - wtime);
@@ -54,7 +54,7 @@ namespace AMDiS {
for (int i = 0; i < nBlocks; i++)
for (int j = 0; j < nBlocks; j++)
MatCreateMPIAIJ(mpiComm,
MatCreateMPIAIJ(mpiCommGlobal,
nRankRows * blockSize[i], nRankRows * blockSize[j],
nOverallRows * blockSize[i], nOverallRows * blockSize[j],
30 * blockSize[i], PETSC_NULL,
@@ -80,21 +80,21 @@ namespace AMDiS {
}
MatCreateNest(mpiComm,
MatCreateNest(mpiCommGlobal,
nBlocks, PETSC_NULL, nBlocks, PETSC_NULL,
&(nestMat[0]), &petscMatrix);
&(nestMat[0]), &matIntInt);
#if (DEBUG != 0)
MSG("Fill petsc matrix 2 needed %.5f seconds\n", MPI::Wtime() - wtime);
#endif
MatAssemblyBegin(petscMatrix, MAT_FINAL_ASSEMBLY);
MatAssemblyEnd(petscMatrix, MAT_FINAL_ASSEMBLY);
MatAssemblyBegin(matIntInt, MAT_FINAL_ASSEMBLY);
MatAssemblyEnd(matIntInt, MAT_FINAL_ASSEMBLY);
// === Init PETSc solver. ===
KSPCreate(mpiComm, &solver);
KSPSetOperators(solver, petscMatrix, petscMatrix, SAME_NONZERO_PATTERN);
KSPSetFromOptions(solver);
KSPCreate(mpiCommGlobal, &kspInterior);
KSPSetOperators(kspInterior, matIntInt, matIntInt, SAME_NONZERO_PATTERN);
KSPSetFromOptions(kspInterior);
MSG("Fill petsc matrix needed %.5f seconds\n", MPI::Wtime() - wtime);
}
@@ -108,13 +108,13 @@ namespace AMDiS {
nComponents = vec->getSize();
const FiniteElemSpace *feSpace = meshDistributor->getFeSpace(0);
int nRankRows = (*dofMap)[feSpace].nRankDofs;
int nOverallRows = (*dofMap)[feSpace].nOverallDofs;
int nRankRows = (*interiorMap)[feSpace].nRankDofs;
int nOverallRows = (*interiorMap)[feSpace].nOverallDofs;
nestVec.resize(nComponents);
for (int i = 0; i < nComponents; i++) {
VecCreateMPI(mpiComm, nRankRows, nOverallRows, &(nestVec[i]));
VecCreateMPI(mpiCommGlobal, nRankRows, nOverallRows, &(nestVec[i]));
setDofVector(nestVec[i], vec->getDOFVector(i));
@@ -122,11 +122,11 @@ namespace AMDiS {
VecAssemblyEnd(nestVec[i]);
}
VecCreateNest(mpiComm, nComponents, PETSC_NULL,
&(nestVec[0]), &petscRhsVec);
VecCreateNest(mpiCommGlobal, nComponents, PETSC_NULL,
&(nestVec[0]), &rhsInterior);
VecAssemblyBegin(petscRhsVec);
VecAssemblyEnd(petscRhsVec);
VecAssemblyBegin(rhsInterior);
VecAssemblyEnd(rhsInterior);
}
@@ -135,14 +135,14 @@ namespace AMDiS {
{
FUNCNAME("PetscSolverGlobalBlockMatrix::solvePetscMatrix()");
KSPGetPC(solver, &pc);
setBlockPreconditioner(pc);
KSPGetPC(kspInterior, &pcInterior);
setBlockPreconditioner(pcInterior);
const FiniteElemSpace *feSpace = meshDistributor->getFeSpace(0);
VecDuplicate(petscRhsVec, &petscSolVec);
VecDuplicate(rhsInterior, &petscSolVec);
// PETSc.
KSPSolve(solver, petscRhsVec, petscSolVec);
solve(rhsInterior, petscSolVec);
// === Transfer values from PETSc's solution vectors to the DOF vectors. ===
for (int i = 0; i < nComponents; i++) {
@@ -151,11 +151,11 @@ namespace AMDiS {
Vec tmp;
VecNestGetSubVec(petscSolVec, i, &tmp);
int nRankDofs = (*dofMap)[feSpace].nRankDofs;
int nRankDofs = (*interiorMap)[feSpace].nRankDofs;
PetscScalar *vecPointer;
VecGetArray(tmp, &vecPointer);
DofMap& d = (*dofMap)[feSpace].getMap();
DofMap& d = (*interiorMap)[feSpace].getMap();
for (DofMap::iterator it = d.begin(); it != d.end(); ++it)
if (it->second.local != -1)
dofvec[it->first] = vecPointer[it->second.local];
@@ -178,8 +178,8 @@ namespace AMDiS {
if (nestMat[i] != PETSC_NULL)
MatDestroy(&(nestMat[i]));
MatDestroy(&petscMatrix);
KSPDestroy(&solver);
MatDestroy(&matIntInt);
KSPDestroy(&kspInterior);
}
@@ -187,7 +187,7 @@ namespace AMDiS {
{
FUNCNAME("PetscSolverGlobalBlockMatrix::destroyVectorData()");
VecDestroy(&petscRhsVec);
VecDestroy(&rhsInterior);
for (int i = 0; i < nComponents; i++)
VecDestroy(&(nestVec[i]));
@@ -217,8 +217,8 @@ namespace AMDiS {
typedef traits::range_generator<row, Matrix>::type cursor_type;
typedef traits::range_generator<nz, cursor_type>::type icursor_type;
int dispRowIndex = (*dofMap)[feSpace].nRankDofs * dispRowBlock;
int dispColIndex = (*dofMap)[feSpace].nRankDofs * dispColBlock;
int dispRowIndex = (*interiorMap)[feSpace].nRankDofs * dispRowBlock;
int dispColIndex = (*interiorMap)[feSpace].nRankDofs * dispColBlock;
vector<int> cols;
vector<double> values;
@@ -232,7 +232,7 @@ namespace AMDiS {
cend = end<row>(mat->getBaseMatrix()); cursor != cend; ++cursor) {
// Global index of the current row DOF.
int rowIndex = (*dofMap)[feSpace][*cursor].global + dispRowIndex;
int rowIndex = (*interiorMap)[feSpace][*cursor].global + dispRowIndex;
cols.clear();
values.clear();
@@ -240,7 +240,7 @@ namespace AMDiS {
for (icursor_type icursor = begin<nz>(cursor), icend = end<nz>(cursor);
icursor != icend; ++icursor) {