
Commit 8f20263c authored by Thomas Witkowski

Replaces the subdomain solver in FETI-DP with a general PetscSolver.

parent f1724e2f
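In short: the FETI-DP code drops its dedicated SubDomainSolver member in favour of a pointer to the PetscSolver base class, instantiated as a PetscSolverGlobalMatrix, and that solver gains a coarse-space assembly path. A minimal sketch of the idea, using only names that appear in the diff below (everything around them is illustrative, not part of the commit):

// The FETI-DP class now stores the generic base type and creates the
// concrete global-matrix solver behind it.
PetscSolver *subDomainSolver = new PetscSolverGlobalMatrix();

// With a coarse-space map attached, assembly produces the 2x2 block system
//
//   [ matIntInt     matIntCoarse    ] [ u_interior ]   [ rhsInterior    ]
//   [ matCoarseInt  matCoarseCoarse ] [ u_coarse   ] = [ rhsCoarseSpace ]
//
// instead of one monolithic PETSc matrix; u_interior and u_coarse are just
// labels for the interior and coarse-space (primal) unknowns.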
@@ -16,6 +16,7 @@
#include "parallel/StdMpi.h"
#include "parallel/MpiHelper.h"
#include "parallel/SubDomainSolver.h"
#include "parallel/PetscSolverGlobalMatrix.h"
#include "io/VtkWriter.h"
namespace AMDiS {
@@ -233,7 +234,7 @@ namespace AMDiS {
MeshLevelData& levelData = meshDistributor->getMeshLevelData();
if (subDomainSolver == NULL) {
-subDomainSolver = new SubDomainSolver();
+subDomainSolver = new PetscSolverGlobalMatrix();
if (meshLevel == 0) {
subDomainSolver->setMeshDistributor(meshDistributor,
@@ -259,7 +259,7 @@ namespace AMDiS {
bool multiLevelTest;
-SubDomainSolver *subDomainSolver;
+PetscSolver *subDomainSolver;
int meshLevel;
@@ -24,7 +24,7 @@
#define AMDIS_PETSC_SOLVER_FETI_STRUCTS_H
#include <map>
#include "parallel/SubDomainSolver.h"
#include "parallel/PetscSolver.h"
namespace AMDiS {
@@ -43,7 +43,7 @@ namespace AMDiS {
/// Temporary vector on the primal variables.
Vec tmp_vec_primal;
-SubDomainSolver* subSolver;
+PetscSolver* subSolver;
};
@@ -65,7 +65,7 @@ namespace AMDiS {
/// Temporary vector on the Lagrange variables.
Vec tmp_vec_lagrange;
-SubDomainSolver* subSolver;
+PetscSolver* subSolver;
/// Pointer to the solver of the Schur complement on the primal variables.
KSP *ksp_schur_primal;
@@ -21,6 +21,11 @@ namespace AMDiS {
{
FUNCNAME("PetscSolverGlobalMatrix::fillPetscMatrix()");
if (coarseSpaceMap != NULL) {
fillPetscMatrixWithCoarseSpace(mat);
return;
}
TEST_EXIT_DBG(meshDistributor)("No mesh distributor object defined!\n");
TEST_EXIT_DBG(interiorMap)("No parallel mapping object defined!\n");
TEST_EXIT_DBG(mat)("No DOF matrix defined!\n");
@@ -124,10 +129,210 @@ namespace AMDiS {
}
void PetscSolverGlobalMatrix::fillPetscMatrixWithCoarseSpace(Matrix<DOFMatrix*> *mat)
{
FUNCNAME("PetscSolverGlobalMatrix::fillPetscMatrixWithCoarseSpace()");
vector<const FiniteElemSpace*> feSpaces = getFeSpaces(mat);
int nRowsRankInterior = interiorMap->getRankDofs();
int nRowsOverallInterior = interiorMap->getOverallDofs();
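// Create the interior-interior block: on subdomain level 0 a sequential AIJ
// matrix, on higher levels a distributed AIJ matrix over the local
// communicator; 60 nonzeros per row are preallocated.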
if (subdomainLevel == 0) {
nGlobalOverallInterior = nRowsOverallInterior;
MatCreateSeqAIJ(mpiCommLocal, nRowsRankInterior, nRowsRankInterior,
60, PETSC_NULL, &matIntInt);
} else {
MatCreateMPIAIJ(mpiCommLocal,
nRowsRankInterior, nRowsRankInterior,
nRowsOverallInterior, nRowsOverallInterior,
60, PETSC_NULL, 60, PETSC_NULL, &matIntInt);
}
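// With a coarse space present, additionally create the coarse-coarse block
// and the two coupling blocks (coarse-interior and interior-coarse) on the
// global communicator.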
if (coarseSpaceMap) {
int nRowsRankCoarse = coarseSpaceMap->getRankDofs();
int nRowsOverallCoarse = coarseSpaceMap->getOverallDofs();
MatCreateMPIAIJ(mpiCommGlobal,
nRowsRankCoarse, nRowsRankCoarse,
nRowsOverallCoarse, nRowsOverallCoarse,
60, PETSC_NULL, 60, PETSC_NULL, &matCoarseCoarse);
MatCreateMPIAIJ(mpiCommGlobal,
nRowsRankCoarse, nRowsRankInterior,
nRowsOverallCoarse, nGlobalOverallInterior,
60, PETSC_NULL, 60, PETSC_NULL, &matCoarseInt);
MatCreateMPIAIJ(mpiCommGlobal,
nRowsRankInterior, nRowsRankCoarse,
nGlobalOverallInterior, nRowsOverallCoarse,
60, PETSC_NULL, 60, PETSC_NULL, &matIntCoarse);
}
// === Prepare traversal of the sequentially created matrices. ===
using mtl::tag::row; using mtl::tag::nz; using mtl::begin; using mtl::end;
namespace traits = mtl::traits;
typedef DOFMatrix::base_matrix_type Matrix;
typedef traits::range_generator<row, Matrix>::type cursor_type;
typedef traits::range_generator<nz, cursor_type>::type icursor_type;
vector<int> cols, colsOther;
vector<double> values, valuesOther;
cols.reserve(300);
colsOther.reserve(300);
values.reserve(300);
valuesOther.reserve(300);
// === Traverse all sequentially created matrices and add the values to ===
// === the global PETSc matrices. ===
int nComponents = mat->getSize();
for (int i = 0; i < nComponents; i++) {
for (int j = 0; j < nComponents; j++) {
if (!(*mat)[i][j])
continue;
traits::col<Matrix>::type col((*mat)[i][j]->getBaseMatrix());
traits::const_value<Matrix>::type value((*mat)[i][j]->getBaseMatrix());
// Traverse all rows.
for (cursor_type cursor = begin<row>((*mat)[i][j]->getBaseMatrix()),
cend = end<row>((*mat)[i][j]->getBaseMatrix()); cursor != cend; ++cursor) {
bool rowPrimal = isCoarseSpace(feSpaces[i], *cursor);
cols.clear();
colsOther.clear();
values.clear();
valuesOther.clear();
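// For the current row, cols/values collect the entries of the diagonal block
// (interior-interior or coarse-coarse), colsOther/valuesOther those of the
// corresponding coupling block.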
// Traverse all columns.
for (icursor_type icursor = begin<nz>(cursor), icend = end<nz>(cursor);
icursor != icend; ++icursor) {
bool colPrimal = isCoarseSpace(feSpaces[j], col(*icursor));
if (colPrimal) {
if (rowPrimal) {
cols.push_back(col(*icursor));
values.push_back(value(*icursor));
} else {
colsOther.push_back(col(*icursor));
valuesOther.push_back(value(*icursor));
}
} else {
if (rowPrimal) {
colsOther.push_back(col(*icursor));
valuesOther.push_back(value(*icursor));
} else {
cols.push_back(col(*icursor));
values.push_back(value(*icursor));
}
}
} // for each nnz in row
// === Set matrix values. ===
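// Primal rows are written to matCoarseCoarse and matCoarseInt, interior rows
// to matIntInt and matIntCoarse; column indices are first translated into the
// corresponding global (or, for the local interior block, local) matrix indices.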
if (rowPrimal) {
int rowIndex = coarseSpaceMap->getMatIndex(i, *cursor);
for (unsigned int k = 0; k < cols.size(); k++)
cols[k] = coarseSpaceMap->getMatIndex(j, cols[k]);
MatSetValues(matCoarseCoarse, 1, &rowIndex, cols.size(),
&(cols[0]), &(values[0]), ADD_VALUES);
if (colsOther.size()) {
if (subdomainLevel == 0) {
for (unsigned int k = 0; k < colsOther.size(); k++)
colsOther[k] = interiorMap->getMatIndex(j, colsOther[k]);
} else {
for (unsigned int k = 0; k < colsOther.size(); k++)
colsOther[k] =
interiorMap->getMatIndex(j, colsOther[k]) + rStartInterior;
}
MatSetValues(matCoarseInt, 1, &rowIndex, colsOther.size(),
&(colsOther[0]), &(valuesOther[0]), ADD_VALUES);
}
} else {
int localRowIndex =
(subdomainLevel == 0 ? interiorMap->getLocalMatIndex(i, *cursor) :
interiorMap->getMatIndex(i, *cursor));
for (unsigned int k = 0; k < cols.size(); k++) {
if (subdomainLevel == 0)
cols[k] = interiorMap->getLocalMatIndex(j, cols[k]);
else
cols[k] = interiorMap->getMatIndex(j, cols[k]);
}
MatSetValues(matIntInt, 1, &localRowIndex, cols.size(),
&(cols[0]), &(values[0]), ADD_VALUES);
if (colsOther.size()) {
int globalRowIndex = interiorMap->getMatIndex(i, *cursor);
if (subdomainLevel != 0)
globalRowIndex += rStartInterior;
for (unsigned int k = 0; k < colsOther.size(); k++)
colsOther[k] = coarseSpaceMap->getMatIndex(j, colsOther[k]);
MatSetValues(matIntCoarse, 1, &globalRowIndex, colsOther.size(),
&(colsOther[0]), &(valuesOther[0]), ADD_VALUES);
}
}
}
}
}
// === Start global assembly procedure. ===
MatAssemblyBegin(matIntInt, MAT_FINAL_ASSEMBLY);
MatAssemblyEnd(matIntInt, MAT_FINAL_ASSEMBLY);
if (coarseSpaceMap) {
MatAssemblyBegin(matCoarseCoarse, MAT_FINAL_ASSEMBLY);
MatAssemblyEnd(matCoarseCoarse, MAT_FINAL_ASSEMBLY);
MatAssemblyBegin(matIntCoarse, MAT_FINAL_ASSEMBLY);
MatAssemblyEnd(matIntCoarse, MAT_FINAL_ASSEMBLY);
MatAssemblyBegin(matCoarseInt, MAT_FINAL_ASSEMBLY);
MatAssemblyEnd(matCoarseInt, MAT_FINAL_ASSEMBLY);
}
// === Create solver for the non-primal (thus local) variables. ===
KSPCreate(mpiCommLocal, &kspInterior);
KSPSetOperators(kspInterior, matIntInt, matIntInt, SAME_NONZERO_PATTERN);
KSPSetOptionsPrefix(kspInterior, "interior_");
KSPSetType(kspInterior, KSPPREONLY);
PC pcInterior;
KSPGetPC(kspInterior, &pcInterior);
PCSetType(pcInterior, PCLU);
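// The interior problem is solved exactly by a direct LU factorization:
// UMFPACK in the sequential level-0 case, MUMPS in the distributed case.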
if (subdomainLevel == 0)
PCFactorSetMatSolverPackage(pcInterior, MATSOLVERUMFPACK);
else
PCFactorSetMatSolverPackage(pcInterior, MATSOLVERMUMPS);
KSPSetFromOptions(kspInterior);
}
void PetscSolverGlobalMatrix::fillPetscRhs(SystemVector *vec)
{
FUNCNAME("PetscSolverGlobalMatrix::fillPetscRhs()");
if (coarseSpaceMap) {
fillPetscRhsWithCoarseSpace(vec);
return;
}
TEST_EXIT_DBG(vec)("No DOF vector defined!\n");
TEST_EXIT_DBG(interiorMap)("No parallel DOF map defined!\n");
@@ -159,6 +364,47 @@ namespace AMDiS {
}
void PetscSolverGlobalMatrix::fillPetscRhsWithCoarseSpace(SystemVector *vec)
{
FUNCNAME("SubDomainSolver::fillPetscRhs()");
VecCreateMPI(mpiCommGlobal,
interiorMap->getRankDofs(),
nGlobalOverallInterior,
&rhsInterior);
if (coarseSpaceMap)
VecCreateMPI(mpiCommGlobal,
coarseSpaceMap->getRankDofs(),
coarseSpaceMap->getOverallDofs(),
&rhsCoarseSpace);
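// Distribute every DOF value of the rhs either to the coarse-space vector or
// to the interior vector, addressed by its global matrix index.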
for (int i = 0; i < vec->getSize(); i++) {
const FiniteElemSpace *feSpace = vec->getDOFVector(i)->getFeSpace();
DOFVector<double>::Iterator dofIt(vec->getDOFVector(i), USED_DOFS);
for (dofIt.reset(); !dofIt.end(); ++dofIt) {
int index = dofIt.getDOFIndex();
if (isCoarseSpace(feSpace, index)) {
index = coarseSpaceMap->getMatIndex(i, index);
VecSetValue(rhsCoarseSpace, index, *dofIt, ADD_VALUES);
} else {
index = interiorMap->getMatIndex(i, index) + rStartInterior;
VecSetValue(rhsInterior, index, *dofIt, ADD_VALUES);
}
}
}
VecAssemblyBegin(rhsInterior);
VecAssemblyEnd(rhsInterior);
if (coarseSpaceMap) {
VecAssemblyBegin(rhsCoarseSpace);
VecAssemblyEnd(rhsCoarseSpace);
}
}
void PetscSolverGlobalMatrix::solvePetscMatrix(SystemVector &vec,
AdaptInfo *adaptInfo)
{
@@ -202,6 +448,44 @@ namespace AMDiS {
}
void PetscSolverGlobalMatrix::solveGlobal(Vec &rhs, Vec &sol)
{
FUNCNAME("PetscSolverGlobalMatrix::solveGlobal()");
Vec tmp;
if (mpiCommLocal.Get_size() == 1)
VecCreateSeq(mpiCommLocal, interiorMap->getRankDofs(), &tmp);
else
VecCreateMPI(mpiCommLocal,
interiorMap->getRankDofs(),
interiorMap->getOverallDofs(),
&tmp);
PetscScalar *tmpValues, *rhsValues;
VecGetArray(tmp, &tmpValues);
VecGetArray(rhs, &rhsValues);
for (int i = 0; i < interiorMap->getRankDofs(); i++)
tmpValues[i] = rhsValues[i];
VecRestoreArray(rhs, &rhsValues);
VecRestoreArray(tmp, &tmpValues);
KSPSolve(kspInterior, tmp, tmp);
VecGetArray(tmp, &tmpValues);
VecGetArray(sol, &rhsValues);
for (int i = 0; i < interiorMap->getRankDofs(); i++)
rhsValues[i] = tmpValues[i];
VecRestoreArray(sol, &rhsValues);
VecRestoreArray(tmp, &tmpValues);
VecDestroy(&tmp);
}
void PetscSolverGlobalMatrix::destroyMatrixData()
{
FUNCNAME("PetscSolverGlobalMatrix::destroyMatrixData()");
@@ -49,10 +49,16 @@ namespace AMDiS {
void fillPetscMatrix(Matrix<DOFMatrix*> *mat);
void fillPetscMatrixWithCoarseSpace(Matrix<DOFMatrix*> *mat);
void fillPetscRhs(SystemVector *vec);
void fillPetscRhsWithCoarseSpace(SystemVector *vec);
void solvePetscMatrix(SystemVector &vec, AdaptInfo *adaptInfo);
void solveGlobal(Vec &rhs, Vec &sol);
void destroyMatrixData();
void destroyVectorData();
...
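The header hunk above adds the coarse-space entry points to the solver's public interface. A rough usage sketch, assuming a fully set-up solver (mesh distributor and parallel DOF maps attached elsewhere); mat, rhs, sol and adaptInfo are placeholders for the usual AMDiS objects:

PetscSolverGlobalMatrix solver;
// ... mesh distributor and DOF maps are assumed to be attached here ...
solver.fillPetscMatrix(mat);             // dispatches to fillPetscMatrixWithCoarseSpace() if a coarse space is set
solver.fillPetscRhs(rhs);                // likewise dispatches to fillPetscRhsWithCoarseSpace()
solver.solvePetscMatrix(sol, adaptInfo);
solver.destroyVectorData();
solver.destroyMatrixData();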