
Commit f1724e2f authored by Thomas Witkowski

Before merging SubDomainSolver and PetscSolver together.

parent 5671aaf5
@@ -87,7 +87,10 @@ namespace AMDiS {
     double wtime = MPI::Wtime();
 
     if (createMatrixData) {
-      petscSolver->setMeshDistributor(meshDistributor);
+      petscSolver->setMeshDistributor(meshDistributor,
+                                      meshDistributor->getMpiComm(),
+                                      PETSC_COMM_SELF);
+      petscSolver->setDofMapping(&(meshDistributor->getDofMap()));
       petscSolver->fillPetscMatrix(systemMatrix);
     }
...
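For orientation, the call site above now passes the communicators explicitly and sets the DOF mapping itself before assembling. Below is a minimal sketch of the new setup sequence, assuming the AMDiS headers and the surrounding objects (meshDistributor, petscSolver, systemMatrix) exist as in the hunk; the wrapper function name is hypothetical, and the type of systemMatrix is assumed to follow AMDiS' Matrix<DOFMatrix*>* convention.

#include "AMDiS.h"
#include "parallel/MeshDistributor.h"
#include "parallel/PetscSolver.h"

using namespace AMDiS;

// Hypothetical helper mirroring the updated call site above (sketch only).
void setupPetscSolver(MeshDistributor *meshDistributor,
                      PetscSolver *petscSolver,
                      Matrix<DOFMatrix*> *systemMatrix)
{
  // The solver no longer pulls its communicators from the mesh distributor:
  // the global communicator and the (here: per-rank) local communicator are
  // handed over explicitly.
  petscSolver->setMeshDistributor(meshDistributor,
                                  meshDistributor->getMpiComm(),
                                  PETSC_COMM_SELF);

  // The interior DOF mapping is likewise set explicitly; the optional second
  // argument (a coarse-space mapping) defaults to NULL.
  petscSolver->setDofMapping(&(meshDistributor->getDofMap()));

  // Assembly itself is unchanged.
  petscSolver->fillPetscMatrix(systemMatrix);
}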
@@ -10,17 +10,21 @@
 // See also license.opensource.txt in the distribution.
 
+#include "AMDiS.h"
 #include "parallel/PetscSolver.h"
 #include "parallel/StdMpi.h"
 #include "parallel/MpiHelper.h"
 
 namespace AMDiS {
 
   using namespace std;
 
   PetscSolver::PetscSolver()
     : meshDistributor(NULL),
-      dofMap(NULL),
+      subdomainLevel(0),
+      interiorMap(NULL),
+      coarseSpaceMap(NULL),
       mpiRank(-1),
       kspPrefix(""),
       removeRhsNullSpace(false)
@@ -34,42 +38,65 @@ namespace AMDiS {
   }
 
 
-  void PetscSolver::printSolutionInfo(AdaptInfo *adaptInfo,
-                                      bool iterationCounter,
-                                      bool residual)
+  void PetscSolver::setDofMapping(ParallelDofMapping *interiorDofs,
+                                  ParallelDofMapping *coarseDofs)
   {
-    FUNCNAME("PetscSolver::printSolutionInfo()");
+    interiorMap = interiorDofs;
+    coarseSpaceMap = coarseDofs;
 
-    if (iterationCounter) {
-      int iterations = 0;
-      KSPGetIterationNumber(solver, &iterations);
-      MSG("  Number of iterations: %d\n", iterations);
-      adaptInfo->setSolverIterations(iterations);
+    if (mpiCommLocal.Get_size() == 1) {
+      rStartInterior = 0;
+      nGlobalOverallInterior = interiorMap->getOverallDofs();
+    } else {
+      int groupRowsInterior = 0;
+      if (mpiCommLocal.Get_rank() == 0)
+        groupRowsInterior = interiorMap->getOverallDofs();
+
+      mpi::getDofNumbering(mpiCommGlobal, groupRowsInterior,
+                           rStartInterior, nGlobalOverallInterior);
+
+      int tmp = 0;
+      if (mpiCommLocal.Get_rank() == 0)
+        tmp = rStartInterior;
+
+      mpiCommLocal.Allreduce(&tmp, &rStartInterior, 1, MPI_INT, MPI_SUM);
     }
+  }
+
+
+  void PetscSolver::solve(Vec &rhs, Vec &sol)
+  {
+    FUNCNAME("PetscSolver::solve()");
 
-    if (residual) {
-      double norm = 0.0;
-      MatMult(petscMatrix, petscSolVec, petscTmpVec);
-      VecAXPY(petscTmpVec, -1.0, petscRhsVec);
-      VecNorm(petscTmpVec, NORM_2, &norm);
-      MSG("  Residual norm: %e\n", norm);
+    PetscErrorCode solverError = KSPSolve(kspInterior, rhs, sol);
+    if (solverError != 0) {
+      AMDiS::finalize();
+      exit(-1);
     }
   }
 
 
+  void PetscSolver::solveGlobal(Vec &rhs, Vec &sol)
+  {
+    FUNCNAME("PetscSolver::solveGlobal()");
+
+    ERROR_EXIT("Not implemented!\n");
+  }
+
+
   void PetscSolver::copyVec(Vec& originVec, Vec& destVec,
                             vector<int>& originIndex, vector<int>& destIndex)
   {
     FUNCNAME("PetscSolver::copyVec()");
 
     IS originIs, destIs;
-    ISCreateGeneral(mpiComm,
+    ISCreateGeneral(mpiCommGlobal,
                     originIndex.size(),
                     &(originIndex[0]),
                     PETSC_USE_POINTER,
                     &originIs);
 
-    ISCreateGeneral(mpiComm,
+    ISCreateGeneral(mpiCommGlobal,
                     destIndex.size(),
                     &(destIndex[0]),
                     PETSC_USE_POINTER,
...
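The interesting part of the new setDofMapping() is how each group of ranks (identified by mpiCommLocal) obtains its starting row in the global interior numbering: only the group root contributes the group size to the global numbering, and the resulting offset is then spread inside the group via an Allreduce to which the other ranks contribute zero. The following is a compact restatement of that logic in plain MPI, with hypothetical names and mpi::getDofNumbering() replaced by an explicit prefix sum; it is a sketch of the idea, not the AMDiS implementation.

#include <mpi.h>

// Sketch of the row-offset computation done in PetscSolver::setDofMapping(),
// using the raw MPI C API and hypothetical names.
void interiorNumbering(MPI_Comm commGlobal, MPI_Comm commLocal,
                       int nGroupDofs,               // interior DOFs of this rank's group
                       int &rStartInterior,          // first global row of the group
                       int &nGlobalOverallInterior)  // total interior rows over all groups
{
  int localSize = 0, localRank = 0;
  MPI_Comm_size(commLocal, &localSize);
  MPI_Comm_rank(commLocal, &localRank);

  if (localSize == 1) {
    // No second level: the interior map already carries the global numbering.
    rStartInterior = 0;
    nGlobalOverallInterior = nGroupDofs;
    return;
  }

  // Only the group root contributes its group's size to the global numbering.
  int contrib = (localRank == 0) ? nGroupDofs : 0;

  int inclusive = 0;
  MPI_Scan(&contrib, &inclusive, 1, MPI_INT, MPI_SUM, commGlobal);
  int groupStart = inclusive - contrib;  // exclusive prefix sum = row offset

  MPI_Allreduce(&contrib, &nGlobalOverallInterior, 1, MPI_INT, MPI_SUM,
                commGlobal);

  // Spread the offset inside the group: non-root ranks contribute 0, so the
  // sum equals the root's offset on every rank of the group.
  int tmp = (localRank == 0) ? groupStart : 0;
  MPI_Allreduce(&tmp, &rStartInterior, 1, MPI_INT, MPI_SUM, commLocal);
}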
@@ -50,15 +50,24 @@ namespace AMDiS {
 
     virtual ~PetscSolver() {}
 
-    void setMeshDistributor(MeshDistributor *m)
+    void setMeshDistributor(MeshDistributor *m,
+                            MPI::Intracomm mpiComm0,
+                            MPI::Intracomm mpiComm1)
     {
       meshDistributor = m;
-      dofMap = &(meshDistributor->getDofMap());
-      mpiRank = meshDistributor->getMpiRank();
-      mpiComm = meshDistributor->getMpiComm();
-      mpiSelfComm = PETSC_COMM_SELF;
+      mpiCommGlobal = mpiComm0;
+      mpiCommLocal = mpiComm1;
+      mpiRank = mpiCommGlobal.Get_rank();
     }
 
+    void setLevel(int l)
+    {
+      subdomainLevel = l;
+    }
+
+    void setDofMapping(ParallelDofMapping *interiorDofs,
+                       ParallelDofMapping *coarseDofs = NULL);
+
     /** \brief
      * Create a PETSc matrix. The given DOF matrices are used to create the nnz
      * structure of the PETSc matrix and the values are transfered to it.
@@ -77,6 +86,10 @@ namespace AMDiS {
     /// Use PETSc to solve the linear system of equations
     virtual void solvePetscMatrix(SystemVector &vec, AdaptInfo *adaptInfo) = 0;
 
+    virtual void solve(Vec &rhs, Vec &sol);
+
+    virtual void solveGlobal(Vec &rhs, Vec &sol);
+
     /// Destroys all matrix data structures.
     virtual void destroyMatrixData() = 0;
 
@@ -90,12 +103,12 @@ namespace AMDiS {
     KSP getSolver()
     {
-      return solver;
+      return kspInterior;
     }
 
     PC getPc()
     {
-      return pc;
+      return pcInterior;
     }
 
     void setKspPrefix(std::string s)
@@ -108,11 +121,63 @@ namespace AMDiS {
       removeRhsNullSpace = b;
     }
 
-  protected:
-    void printSolutionInfo(AdaptInfo* adaptInfo,
-                           bool iterationCounter = true,
-                           bool residual = true);
+    inline bool isCoarseSpace(const FiniteElemSpace *feSpace,
+                              DegreeOfFreedom dof)
+    {
+      FUNCNAME("SubDomainSolver::isCoarseSpace()");
+      TEST_EXIT_DBG(coarseSpaceMap)
+        ("Subdomain solver does not contain a coarse space!\n");
+
+      return (*coarseSpaceMap)[feSpace].isSet(dof);
+    }
+
+    inline Vec& getRhsCoarseSpace()
+    {
+      FUNCNAME("SubDomainSolver::getRhsCoarseSpace()");
+      TEST_EXIT_DBG(coarseSpaceMap)
+        ("Subdomain solver does not contain a coarse space!\n");
+
+      return rhsCoarseSpace;
+    }
+
+    inline Vec& getRhsInterior()
+    {
+      return rhsInterior;
+    }
+
+    inline Mat& getMatIntInt()
+    {
+      return matIntInt;
+    }
+
+    inline Mat& getMatCoarseCoarse()
+    {
+      FUNCNAME("SubDomainSolver::getMatCoarseCoarse()");
+      TEST_EXIT_DBG(coarseSpaceMap)
+        ("Subdomain solver does not contain a coarse space!\n");
+
+      return matCoarseCoarse;
+    }
+
+    inline Mat& getMatIntCoarse()
+    {
+      FUNCNAME("SubDomainSolver::getMatIntCoarse()");
+      TEST_EXIT_DBG(coarseSpaceMap)
+        ("Subdomain solver does not contain a coarse space!\n");
+
+      return matIntCoarse;
+    }
+
+    inline Mat& getMatCoarseInt()
+    {
+      FUNCNAME("SubDomainSolver::getMatCoarseInt()");
+      TEST_EXIT_DBG(coarseSpaceMap)
+        ("Subdomain solver does not contain a coarse space!\n");
+
+      return matCoarseInt;
+    }
+
+  protected:
     /** \brief
      * Copies between to PETSc vectors by using different index sets for the
      * origin and the destination vectors.
@@ -140,26 +205,36 @@ namespace AMDiS {
   protected:
     MeshDistributor *meshDistributor;
 
-    ParallelDofMapping *dofMap;
+    int subdomainLevel;
+
+    int rStartInterior;
+
+    int nGlobalOverallInterior;
+
+    ParallelDofMapping *interiorMap;
+
+    ParallelDofMapping* coarseSpaceMap;
 
     int mpiRank;
 
-    MPI::Intracomm mpiComm;
+    MPI::Intracomm mpiCommGlobal;
 
-    MPI::Intracomm mpiSelfComm;
+    MPI::Intracomm mpiCommLocal;
 
     /// Petsc's matrix structure.
-    Mat petscMatrix;
+    Mat matIntInt, matCoarseCoarse, matIntCoarse, matCoarseInt;
 
     /// PETSc's vector structures for the rhs vector, the solution vector and a
    /// temporary vector for calculating the final residuum.
-    Vec petscRhsVec, petscSolVec, petscTmpVec;
+    Vec rhsInterior;
+
+    Vec rhsCoarseSpace;
 
     /// PETSc solver object
-    KSP solver;
+    KSP kspInterior;
 
     /// PETSc preconditioner object
-    PC pc;
+    PC pcInterior;
 
     /// KSP database prefix
     string kspPrefix;
...
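The renamed members suggest the block structure that the coarse-space support introduces: matIntInt, matIntCoarse, matCoarseInt and matCoarseCoarse appear to play the roles of the four blocks of a system split over interior and coarse-space DOFs, with rhsInterior and rhsCoarseSpace as the corresponding right-hand side parts. Read that way (an interpretation suggested by the naming, not something stated in the commit), the assembled system is

\[
\begin{pmatrix} A_{II} & A_{IC} \\ A_{CI} & A_{CC} \end{pmatrix}
\begin{pmatrix} u_I \\ u_C \end{pmatrix}
=
\begin{pmatrix} f_I \\ f_C \end{pmatrix},
\qquad
\begin{aligned}
A_{II} &\leftrightarrow \texttt{matIntInt}, & A_{IC} &\leftrightarrow \texttt{matIntCoarse},\\
A_{CI} &\leftrightarrow \texttt{matCoarseInt}, & A_{CC} &\leftrightarrow \texttt{matCoarseCoarse},\\
f_I &\leftrightarrow \texttt{rhsInterior}, & f_C &\leftrightarrow \texttt{rhsCoarseSpace}.
\end{aligned}
\]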
@@ -233,14 +233,15 @@ namespace AMDiS {
     MeshLevelData& levelData = meshDistributor->getMeshLevelData();
 
     if (subDomainSolver == NULL) {
+      subDomainSolver = new SubDomainSolver();
+
       if (meshLevel == 0) {
-        subDomainSolver =
-          new SubDomainSolver(meshDistributor, mpiComm, mpiSelfComm);
+        subDomainSolver->setMeshDistributor(meshDistributor,
+                                            mpiCommGlobal, mpiCommLocal);
       } else {
-        subDomainSolver =
-          new SubDomainSolver(meshDistributor,
-                              levelData.getMpiComm(meshLevel - 1),
-                              levelData.getMpiComm(meshLevel));
+        subDomainSolver->setMeshDistributor(meshDistributor,
+                                            levelData.getMpiComm(meshLevel - 1),
+                                            levelData.getMpiComm(meshLevel));
         subDomainSolver->setLevel(meshLevel);
       }
     }
@@ -350,7 +351,7 @@ namespace AMDiS {
     if (levelData.getMpiComm(1).Get_rank() == 0)
       groupRowsInterior = localDofMap.getOverallDofs();
 
-    mpi::getDofNumbering(mpiComm, groupRowsInterior,
+    mpi::getDofNumbering(mpiCommGlobal, groupRowsInterior,
                          rStartInterior, nGlobalOverallInterior);
 
     int tmp = 0;
@@ -383,7 +384,7 @@ namespace AMDiS {
     }
 
     // If multi level test, inform sub domain solver about coarse space.
-    subDomainSolver->setDofMapping(&primalDofMap, &localDofMap);
+    subDomainSolver->setDofMapping(&localDofMap, &primalDofMap);
   }
@@ -463,7 +464,7 @@ namespace AMDiS {
     map<int, std::set<DegreeOfFreedom> > sdRankDofs;
 
     if (meshLevel > 0) {
-      StdMpi<vector<int> > stdMpi(mpiComm);
+      StdMpi<vector<int> > stdMpi(mpiCommGlobal);
 
       for (DofComm::Iterator it(meshDistributor->getDofComm().getRecvDofs(),
                                 meshLevel, feSpace);
@@ -645,7 +646,7 @@ namespace AMDiS {
 
     // === Create distributed matrix for Lagrange constraints. ===
 
-    MatCreateMPIAIJ(mpiComm,
+    MatCreateMPIAIJ(mpiCommGlobal,
                     lagrangeMap.getRankDofs(), localDofMap.getRankDofs(),
                     lagrangeMap.getOverallDofs(), nGlobalOverallInterior,
                     2, PETSC_NULL, 2, PETSC_NULL,
@@ -705,16 +706,16 @@ namespace AMDiS {
     schurPrimalData.subSolver = subDomainSolver;
 
-    VecCreateMPI(mpiComm,
+    VecCreateMPI(mpiCommGlobal,
                  localDofMap.getRankDofs(),
                  nGlobalOverallInterior,
                  &(schurPrimalData.tmp_vec_b));
-    VecCreateMPI(mpiComm,
+    VecCreateMPI(mpiCommGlobal,
                  primalDofMap.getRankDofs(),
                  primalDofMap.getOverallDofs(),
                  &(schurPrimalData.tmp_vec_primal));
 
-    MatCreateShell(mpiComm,
+    MatCreateShell(mpiCommGlobal,
                    primalDofMap.getRankDofs(),
                    primalDofMap.getRankDofs(),
                    primalDofMap.getOverallDofs(),
@@ -724,7 +725,7 @@ namespace AMDiS {
     MatShellSetOperation(mat_schur_primal, MATOP_MULT,
                          (void(*)(void))petscMultMatSchurPrimal);
 
-    KSPCreate(mpiComm, &ksp_schur_primal);
+    KSPCreate(mpiCommGlobal, &ksp_schur_primal);
     KSPSetOperators(ksp_schur_primal, mat_schur_primal, mat_schur_primal, SAME_NONZERO_PATTERN);
     KSPSetOptionsPrefix(ksp_schur_primal, "schur_primal_");
     KSPSetType(ksp_schur_primal, KSPGMRES);
@@ -742,7 +743,7 @@ namespace AMDiS {
     int nRowsRankB = localDofMap.getRankDofs();
 
     Mat matBPi;
-    MatCreateMPIAIJ(mpiComm,
+    MatCreateMPIAIJ(mpiCommGlobal,
                     nRowsRankB, nRowsRankPrimal,
                     nGlobalOverallInterior, nRowsOverallPrimal,
                     30, PETSC_NULL, 30, PETSC_NULL, &matBPi);
@@ -810,7 +811,7 @@ namespace AMDiS {
     MatGetInfo(mat_schur_primal, MAT_GLOBAL_SUM, &minfo);
     MSG("Schur primal matrix nnz = %f\n", minfo.nz_used);
 
-    KSPCreate(mpiComm, &ksp_schur_primal);
+    KSPCreate(mpiCommGlobal, &ksp_schur_primal);
     KSPSetOperators(ksp_schur_primal, mat_schur_primal, mat_schur_primal,
                     SAME_NONZERO_PATTERN);
     KSPSetOptionsPrefix(ksp_schur_primal, "schur_primal_");
@@ -853,20 +854,20 @@ namespace AMDiS {
     fetiData.subSolver = subDomainSolver;
     fetiData.ksp_schur_primal = &ksp_schur_primal;
 
-    VecCreateMPI(mpiComm,
+    VecCreateMPI(mpiCommGlobal,
                  localDofMap.getRankDofs(),
                  nGlobalOverallInterior,
                  &(fetiData.tmp_vec_b));
-    VecCreateMPI(mpiComm,
+    VecCreateMPI(mpiCommGlobal,
                  lagrangeMap.getRankDofs(),
                  lagrangeMap.getOverallDofs(),
                  &(fetiData.tmp_vec_lagrange));
-    VecCreateMPI(mpiComm,
+    VecCreateMPI(mpiCommGlobal,
                  primalDofMap.getRankDofs(),
                  primalDofMap.getOverallDofs(),
                  &(fetiData.tmp_vec_primal));
 
-    MatCreateShell(mpiComm,
+    MatCreateShell(mpiCommGlobal,
                    lagrangeMap.getRankDofs(),
                    lagrangeMap.getRankDofs(),
                    lagrangeMap.getOverallDofs(),
@@ -875,7 +876,7 @@ namespace AMDiS {
     MatShellSetOperation(mat_feti, MATOP_MULT, (void(*)(void))petscMultMatFeti);
 
-    KSPCreate(mpiComm, &ksp_feti);
+    KSPCreate(mpiCommGlobal, &ksp_feti);
     KSPSetOperators(ksp_feti, mat_feti, mat_feti, SAME_NONZERO_PATTERN);
     KSPSetOptionsPrefix(ksp_feti, "feti_");
     KSPSetType(ksp_feti, KSPGMRES);
@@ -913,7 +914,7 @@ namespace AMDiS {
     fetiDirichletPreconData.mat_duals_interior = &mat_duals_interior;
     fetiDirichletPreconData.ksp_interior = &ksp_interior;
 
-    VecCreateMPI(mpiComm,
+    VecCreateMPI(mpiCommGlobal,
                  localDofMap.getRankDofs(),
                  nGlobalOverallInterior,
                  &(fetiDirichletPreconData.tmp_vec_b));
@@ -958,7 +959,7 @@ namespace AMDiS {
       }
     }
 
-    VecCreateMPI(mpiComm,
+    VecCreateMPI(mpiCommGlobal,
                  localDofMap.getRankDofs(),
                  localDofMap.getOverallDofs(),
                  &(fetiLumpedPreconData.tmp_vec_b));
@@ -1343,18 +1344,18 @@ namespace AMDiS {
     // Some temporary vectors.
     Vec tmp_b0, tmp_b1, tmp_lagrange0, tmp_primal0, tmp_primal1;
-    VecCreateMPI(mpiComm,
+    VecCreateMPI(mpiCommGlobal,
                  localDofMap.getRankDofs(),
                  nGlobalOverallInterior,
                  &tmp_b0);
-    VecCreateMPI(mpiComm,
+    VecCreateMPI(mpiCommGlobal,
                  localDofMap.getRankDofs(),
                  nGlobalOverallInterior,
                  &tmp_b1);
-    VecCreateMPI(mpiComm,
+    VecCreateMPI(mpiCommGlobal,
                  primalDofMap.getRankDofs(),
                  primalDofMap.getOverallDofs(), &tmp_primal0);
-    VecCreateMPI(mpiComm,
+    VecCreateMPI(mpiCommGlobal,
                  primalDofMap.getRankDofs(),
                  primalDofMap.getOverallDofs(), &tmp_primal1);
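Most of the hunks above are a mechanical rename of mpiComm to mpiCommGlobal, but the interior-sized temporaries also switch their global size from localDofMap.getOverallDofs() to nGlobalOverallInterior, so the layout stays correct when several ranks share one subdomain group. The recurring pattern can be read as the following small helper; the function itself is hypothetical, only the creation call and the sizing are taken from the diff.

#include <petscvec.h>

// Sketch of the recurring vector-creation pattern: interior work vectors are
// created on the global communicator, sized locally by this rank's share of
// the local DOF map and globally by the new overall interior row count.
Vec createInteriorTmpVec(MPI_Comm mpiCommGlobal,
                         int nRankRows,                // localDofMap.getRankDofs()
                         int nGlobalOverallInterior)   // rows over all subdomain groups
{
  Vec v;
  VecCreateMPI(mpiCommGlobal, nRankRows, nGlobalOverallInterior, &v);
  return v;
}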