Commit 11446cdf authored by Thomas Witkowski

Allow a separate coarse space DOF mapping per solution component in PetscSolver.

parent 5d9744da
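This commit replaces PetscSolver's single coarseSpaceMap pointer with a map from component index to DOF mapping, so every solution component can be given its own coarse space. Below is a minimal, self-contained sketch of the new assignment logic; the ParallelDofMapping struct here is a hypothetical stand-in reduced to the one query the logic needs.

#include <cstdio>
#include <map>

// Hypothetical stand-in for AMDiS's ParallelDofMapping.
struct ParallelDofMapping {
  int nComponents;
  int getNumberOfComponents() const { return nComponents; }
};

// Mirrors PetscSolver::setCoarseSpaceDofMapping(): component == -1 resets
// the map and assigns the same coarse space to every component; any other
// value overrides a single component.
void setCoarseSpaceDofMapping(std::map<int, ParallelDofMapping*> &coarseSpaceMap,
                              ParallelDofMapping *coarseDofs, int component = -1)
{
  if (component == -1) {
    coarseSpaceMap.clear();
    for (int i = 0; i < coarseDofs->getNumberOfComponents(); i++)
      coarseSpaceMap[i] = coarseDofs;
  } else {
    coarseSpaceMap[component] = coarseDofs;
  }
}

int main()
{
  std::map<int, ParallelDofMapping*> coarseSpaceMap;
  ParallelDofMapping shared = {3}, pressureOnly = {3};

  setCoarseSpaceDofMapping(coarseSpaceMap, &shared);          // all components
  setCoarseSpaceDofMapping(coarseSpaceMap, &pressureOnly, 2); // override one

  std::printf("component 2 has its own coarse space: %s\n",
              coarseSpaceMap[2] == &pressureOnly ? "yes" : "no");
  return 0;
}

Calling the setter once with the default component and then again for a single component reproduces the intended pattern of a shared coarse space with per-component overrides.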
@@ -346,7 +346,6 @@ namespace AMDiS {
init(mld, feSpaces, feSpaces, isNonLocal);
}
void setMpiComm(MPI::Intracomm &m, int l);
/// Clear all data.
@@ -389,6 +388,12 @@ namespace AMDiS {
return data.find(feSpace)->second;
}
/// Returns the number of solution components the mapping is defined on.
inline int getNumberOfComponents() const
{
return static_cast<int>(feSpaces.size());
}
/// Returns \ref nRankDofs, i.e., the number of DOFs owned by the rank.
inline int getRankDofs()
{
......
@@ -42,11 +42,22 @@ namespace AMDiS {
}
void PetscSolver::setDofMapping(ParallelDofMapping *interiorDofs,
ParallelDofMapping *coarseDofs)
void PetscSolver::setCoarseSpaceDofMapping(ParallelDofMapping *coarseDofs,
int component = -1)
{
interiorMap = interiorDofs;
coarseSpaceMap = coarseDofs;
FUNCNAME("PetscSolver::setCoarseSpaceDofMapping()");
TEST_EXIT_DBG(coarseDofs)("No coarse space DOF mapping given!\n");
if (component == -1) {
coarseSpaceMap.clear();
int nComponents = coarseDofs->getNumberOfComponents();
for (int i = 0; i < nComponents; i++)
coarseSpaceMap[i] = coarseDofs;
} else {
coarseSpaceMap[component] = coarseDofs;
}
}
......
@@ -65,8 +65,14 @@ namespace AMDiS {
subdomainLevel = l;
}
void setDofMapping(ParallelDofMapping *interiorDofs,
ParallelDofMapping *coarseDofs = NULL);
/// Set parallel DOF mapping for the interior DOFs.
void setDofMapping(ParallelDofMapping *interiorDofs)
{
interiorMap = interiorDofs;
}
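/// Set the parallel DOF mapping of the coarse space DOFs, either for all
/// components at once (component == -1) or for one specific component.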
void setCoarseSpaceDofMapping(ParallelDofMapping *coarseDofs,
int component = -1);
/** \brief
* Create a PETSc matrix. The given DOF matrices are used to create the nnz
@@ -140,21 +146,26 @@ namespace AMDiS {
constNullspaceComponent.push_back(component);
}
inline bool isCoarseSpace(const FiniteElemSpace *feSpace,
inline bool isCoarseSpace(int component,
const FiniteElemSpace *feSpace,
DegreeOfFreedom dof)
{
FUNCNAME("PetscSolver::isCoarseSpace()");
if (coarseSpaceMap == NULL)
if (coarseSpaceMap.empty())
return false;
return (*coarseSpaceMap)[feSpace].isSet(dof);
TEST_EXIT_DBG(coarseSpaceMap.count(component))
("Component %d has no coarse space defined!\n", component);
return (*(coarseSpaceMap[component]))[feSpace].isSet(dof);
}
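A note on the count() guard above: std::map::operator[] default-constructs an entry for every missing key, so the debug test must run before the lookup, not after. A tiny self-contained demonstration:

#include <cassert>
#include <map>

int main()
{
  std::map<int, int*> m;

  assert(m.count(7) == 0);
  int *p = m[7];   // operator[] silently inserts m[7] == NULL
  assert(p == 0 && m.count(7) == 1);
  return 0;
}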
inline Vec& getRhsCoarseSpace()
{
FUNCNAME("PetscSolver::getRhsCoarseSpace()");
TEST_EXIT_DBG(coarseSpaceMap)
TEST_EXIT_DBG(coarseSpaceMap.size())
("Subdomain solver does not contain a coarse space!\n");
return rhsCoarseSpace;
@@ -173,7 +184,8 @@ namespace AMDiS {
inline Mat& getMatCoarseCoarse()
{
FUNCNAME("PetscSolver::getMatCoarseCoarse()");
TEST_EXIT_DBG(coarseSpaceMap)
TEST_EXIT_DBG(coarseSpaceMap.size())
("Subdomain solver does not contain a coarse space!\n");
return matCoarseCoarse;
@@ -182,7 +194,8 @@ namespace AMDiS {
inline Mat& getMatIntCoarse()
{
FUNCNAME("PetscSolver::getMatIntCoarse()");
TEST_EXIT_DBG(coarseSpaceMap)
TEST_EXIT_DBG(coarseSpaceMap.size())
("Subdomain solver does not contain a coarse space!\n");
return matIntCoarse;
@@ -191,7 +204,8 @@ namespace AMDiS {
inline Mat& getMatCoarseInt()
{
FUNCNAME("PetscSolver::getMatCoarseInt()");
TEST_EXIT_DBG(coarseSpaceMap)
TEST_EXIT_DBG(coarseSpaceMap.size())
("Subdomain solver does not contain a coarse space!\n");
return matCoarseInt;
@@ -235,7 +249,9 @@ namespace AMDiS {
ParallelDofMapping *interiorMap;
ParallelDofMapping* coarseSpaceMap;
/// Parallel DOF mapping of the (optional) coarse space. Allows defining
/// different coarse spaces for different components.
map<int, ParallelDofMapping*> coarseSpaceMap;
int mpiRank;
@@ -243,8 +259,11 @@ namespace AMDiS {
MPI::Intracomm mpiCommLocal;
/// PETSc's matrix structures.
Mat matIntInt, matCoarseCoarse, matIntCoarse, matCoarseInt;
//Mat matIntInt, matCoarseCoarse, matIntCoarse, matCoarseInt;
/// PETSc's vector structures for the rhs vector, the solution vector and a
/// temporary vector for calculating the final residual.
......
@@ -21,7 +21,7 @@ namespace AMDiS {
{
FUNCNAME("PetscSolverGlobalMatrix::fillPetscMatrix()");
if (coarseSpaceMap != NULL) {
if (coarseSpaceMap.size()) {
updateSubdomainData();
fillPetscMatrixWithCoarseSpace(mat);
return;
@@ -152,7 +152,7 @@ namespace AMDiS {
meshDistributor->getElementObjectDb(),
localMatrix);
if (coarseSpaceMap) {
if (coarseSpaceMap.size()) {
nnzCoarse.create(mat, mpiCommGlobal, *(coarseSpaceMap[0]), NULL, meshDistributor->getElementObjectDb());
nnzCoarseInt.create(mat, mpiCommGlobal, *(coarseSpaceMap[0]), *interiorMap, NULL, meshDistributor->getElementObjectDb());
nnzIntCoarse.create(mat, mpiCommGlobal, *interiorMap, *(coarseSpaceMap[0]), NULL, meshDistributor->getElementObjectDb());
@@ -172,7 +172,7 @@ namespace AMDiS {
}
if (coarseSpaceMap) {
if (coarseSpaceMap.size()) {
int nRowsRankCoarse = coarseSpaceMap[0]->getRankDofs();
int nRowsOverallCoarse = coarseSpaceMap[0]->getOverallDofs();
@@ -221,6 +221,9 @@ namespace AMDiS {
if (!(*mat)[i][j])
continue;
ParallelDofMapping *rowCoarseSpace = coarseSpaceMap[i];
ParallelDofMapping *colCoarseSpace = coarseSpaceMap[j];
traits::col<Matrix>::type col((*mat)[i][j]->getBaseMatrix());
traits::const_value<Matrix>::type value((*mat)[i][j]->getBaseMatrix());
@@ -228,7 +231,7 @@ namespace AMDiS {
for (cursor_type cursor = begin<row>((*mat)[i][j]->getBaseMatrix()),
cend = end<row>((*mat)[i][j]->getBaseMatrix()); cursor != cend; ++cursor) {
bool rowPrimal = isCoarseSpace(feSpaces[i], *cursor);
bool isRowCoarse = isCoarseSpace(i, feSpaces[i], *cursor);
cols.clear();
colsOther.clear();
@@ -239,10 +242,10 @@ namespace AMDiS {
for (icursor_type icursor = begin<nz>(cursor), icend = end<nz>(cursor);
icursor != icend; ++icursor) {
bool colPrimal = isCoarseSpace(feSpaces[j], col(*icursor));
bool isColCoarse = isCoarseSpace(j, feSpaces[j], col(*icursor));
if (colPrimal) {
if (rowPrimal) {
if (isColCoarse) {
if (isRowCoarse) {
cols.push_back(col(*icursor));
values.push_back(value(*icursor));
} else {
@@ -250,7 +253,7 @@ namespace AMDiS {
valuesOther.push_back(value(*icursor));
}
} else {
if (rowPrimal) {
if (isRowCoarse) {
colsOther.push_back(col(*icursor));
valuesOther.push_back(value(*icursor));
} else {
@@ -263,10 +266,10 @@ namespace AMDiS {
// === Set matrix values. ===
if (rowPrimal) {
int rowIndex = coarseSpaceMap->getMatIndex(i, *cursor);
if (isRowCoarse) {
int rowIndex = rowCoarseSpace->getMatIndex(i, *cursor);
for (unsigned int k = 0; k < cols.size(); k++)
cols[k] = coarseSpaceMap->getMatIndex(j, cols[k]);
cols[k] = colCoarseSpace->getMatIndex(j, cols[k]);
MatSetValues(matCoarseCoarse, 1, &rowIndex, cols.size(),
&(cols[0]), &(values[0]), ADD_VALUES);
@@ -306,7 +309,7 @@ namespace AMDiS {
globalRowIndex += rStartInterior;
for (unsigned int k = 0; k < colsOther.size(); k++)
colsOther[k] = coarseSpaceMap->getMatIndex(j, colsOther[k]);
colsOther[k] = colCoarseSpace->getMatIndex(j, colsOther[k]);
MatSetValues(matIntCoarse, 1, &globalRowIndex, colsOther.size(),
&(colsOther[0]), &(valuesOther[0]), ADD_VALUES);
@@ -323,7 +326,7 @@ namespace AMDiS {
MatAssemblyEnd(matIntInt, MAT_FINAL_ASSEMBLY);
if (coarseSpaceMap) {
if (coarseSpaceMap.size()) {
MatAssemblyBegin(matCoarseCoarse, MAT_FINAL_ASSEMBLY);
MatAssemblyEnd(matCoarseCoarse, MAT_FINAL_ASSEMBLY);
@@ -364,7 +367,7 @@ namespace AMDiS {
nGlobalOverallInterior,
&rhsInterior);
if (coarseSpaceMap)
if (coarseSpaceMap.size())
VecCreateMPI(mpiCommGlobal,
coarseSpaceMap[0]->getRankDofs(),
coarseSpaceMap[0]->getOverallDofs(),
@@ -375,7 +378,7 @@ namespace AMDiS {
// === Transfer values from DOF vector to the PETSc vector. ===
if (coarseSpaceMap) {
if (coarseSpaceMap.size()) {
for (int i = 0; i < vec->getSize(); i++)
setDofVector(rhsInterior, rhsCoarseSpace, vec->getDOFVector(i), i);
} else {
@@ -386,7 +389,7 @@ namespace AMDiS {
VecAssemblyBegin(rhsInterior);
VecAssemblyEnd(rhsInterior);
if (coarseSpaceMap) {
if (coarseSpaceMap.size()) {
VecAssemblyBegin(rhsCoarseSpace);
VecAssemblyEnd(rhsCoarseSpace);
}
@@ -461,7 +464,7 @@ namespace AMDiS {
// === Remove null space, if requested. ===
if (removeRhsNullspace) {
TEST_EXIT_DBG(coarseSpaceMap == NULL)("Not supported!\n");
TEST_EXIT_DBG(coarseSpaceMap.empty())("Not supported!\n");
MatNullSpaceRemove(matNullspace, rhsInterior, PETSC_NULL);
}
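For context, the guarded call above uses the older three-argument MatNullSpaceRemove(). A minimal standalone sketch of this PETSc idiom, projecting the constant null space out of a right-hand side; the vector size and values are made up, and current PETSc releases take only (nullspace, vec):

#include <petscmat.h>

int main(int argc, char **argv)
{
  PetscInitialize(&argc, &argv, PETSC_NULL, PETSC_NULL);

  Vec rhs;
  VecCreateMPI(PETSC_COMM_WORLD, PETSC_DECIDE, 8, &rhs);
  VecSet(rhs, 1.0);  // a constant vector, entirely inside the null space

  MatNullSpace nullsp;
  MatNullSpaceCreate(PETSC_COMM_WORLD, PETSC_TRUE, 0, PETSC_NULL, &nullsp);
  MatNullSpaceRemove(nullsp, rhs, PETSC_NULL);

  PetscReal norm;
  VecNorm(rhs, NORM_2, &norm);  // ~0 after removing the constant part
  PetscPrintf(PETSC_COMM_WORLD, "norm after removal: %g\n", (double) norm);

  MatNullSpaceDestroy(&nullsp);
  VecDestroy(&rhs);
  PetscFinalize();
  return 0;
}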
@@ -561,7 +564,7 @@ namespace AMDiS {
MatDestroy(&matIntInt);
KSPDestroy(&kspInterior);
if (coarseSpaceMap) {
if (coarseSpaceMap.size()) {
MatDestroy(&matCoarseCoarse);
MatDestroy(&matCoarseInt);
MatDestroy(&matIntCoarse);
@@ -580,7 +583,7 @@ namespace AMDiS {
VecDestroy(&rhsInterior);
if (coarseSpaceMap)
if (coarseSpaceMap.size())
VecDestroy(&rhsCoarseSpace);
}
@@ -603,7 +606,7 @@ namespace AMDiS {
nnzInterior.clear();
if (coarseSpaceMap) {
if (coarseSpaceMap.size()) {
nnzCoarse.clear();
nnzCoarseInt.clear();
nnzIntCoarse.clear();
@@ -847,6 +850,10 @@ namespace AMDiS {
const FiniteElemSpace *feSpace = vec->getFeSpace();
PeriodicMap &perMap = meshDistributor->getPeriodicMap();
ParallelDofMapping *rowCoarseSpace = NULL;
if (coarseSpaceMap.size())
rowCoarseSpace = coarseSpaceMap[nRowVec];
// Traverse all used DOFs in the dof vector.
DOFVector<double>::Iterator dofIt(vec, USED_DOFS);
@@ -855,10 +862,10 @@ namespace AMDiS {
if (rankOnly && !(*interiorMap)[feSpace].isRankDof(dofIt.getDOFIndex()))
continue;
if (isCoarseSpace(feSpace, dofIt.getDOFIndex())) {
if (isCoarseSpace(nRowVec, feSpace, dofIt.getDOFIndex())) {
TEST_EXIT_DBG(vecCoarse != PETSC_NULL)("Should not happen!\n");
int index = coarseSpaceMap->getMatIndex(nRowVec, dofIt.getDOFIndex());
int index = rowCoarseSpace->getMatIndex(nRowVec, dofIt.getDOFIndex());
VecSetValue(vecCoarse, index, *dofIt, ADD_VALUES);
} else {
// Calculate global row index of the DOF.
@@ -898,87 +905,13 @@ namespace AMDiS {
void PetscSolverGlobalMatrix::removeDirichletBcDofs(Matrix<DOFMatrix*> *mat)
{
FUNCNAME("PetscSolverGlobalMatrix::removeDirichletBcDofs()");
#if 0
vector<int> dofsInterior, dofsCoarse;
int nComponents = mat->getNumRows();
for (int i = 0; i < nComponents; i++) {
for (int j = 0; j < nComponents; j++) {
if ((*mat)[i][j]) {
const FiniteElemSpace *feSpace = (*mat)[i][j]->getRowFeSpace();
std::set<DegreeOfFreedom> &dirichletDofs = *((*mat)[i][j]->getApplyDBCs());
MSG("DIRICHLET DOFS: %d %d -> %d\n", i, j, dirichletDofs.size());
for (std::set<DegreeOfFreedom>::iterator it = dirichletDofs.begin();
it != dirichletDofs.end(); ++it) {
if (isCoarseSpace(feSpace, *it)) {
if ((*coarseSpaceMap)[feSpace].isRankDof(*it)) {
int globalDof = (*coarseSpaceMap)[feSpace][*it].global;
dofsCoarse.push_back(coarseSpaceMap->getMatIndex(i, globalDof));
}
} else {
if ((*interiorMap)[feSpace].isRankDof(*it)) {
int globalDof = (*interiorMap)[feSpace][*it].global;
dofsInterior.push_back(interiorMap->getMatIndex(i, globalDof));
}
}
}
} else {
MSG("NO MAT DIAG in %d\n", i);
}
}
}
MatZeroRows(matIntInt, dofsInterior.size(), &(dofsInterior[0]), 1.0,
PETSC_NULL, PETSC_NULL);
if (coarseSpaceMap != NULL)
MatZeroRows(matCoarseCoarse, dofsCoarse.size(), &(dofsCoarse[0]), 1.0,
PETSC_NULL, PETSC_NULL);
#endif
FUNCNAME("PetscSolverGlobalMatrix::removeDirichletBcDofs()");
}
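The disabled block above eliminates Dirichlet rows with MatZeroRows(). A small standalone sketch of that idiom, assuming a PETSc release that provides MatCreateAIJ() and the six-argument MatZeroRows(); the matrix contents and row indices are made up:

#include <petscmat.h>
#include <vector>

int main(int argc, char **argv)
{
  PetscInitialize(&argc, &argv, PETSC_NULL, PETSC_NULL);

  // Assemble a small 4x4 test matrix.
  Mat A;
  MatCreateAIJ(PETSC_COMM_WORLD, PETSC_DECIDE, PETSC_DECIDE, 4, 4,
               4, PETSC_NULL, 4, PETSC_NULL, &A);
  for (PetscInt i = 0; i < 4; i++)
    for (PetscInt j = 0; j < 4; j++)
      MatSetValue(A, i, j, 1.0 + i + j, INSERT_VALUES);
  MatAssemblyBegin(A, MAT_FINAL_ASSEMBLY);
  MatAssemblyEnd(A, MAT_FINAL_ASSEMBLY);

  // Treat rows 0 and 3 as Dirichlet rows: zero them and put 1.0 on the
  // diagonal; the trailing PETSC_NULLs skip the solution/rhs fixup.
  std::vector<PetscInt> rows;
  rows.push_back(0);
  rows.push_back(3);
  MatZeroRows(A, (PetscInt) rows.size(), &rows[0], 1.0,
              PETSC_NULL, PETSC_NULL);

  MatDestroy(&A);
  PetscFinalize();
  return 0;
}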
void PetscSolverGlobalMatrix::removeDirichletBcDofs(SystemVector *vec)
{
FUNCNAME("PetscSolverGlobalMatrix::removeDirichletBcDofs()");
int nComponents = vec->getSize();
for (int i = 0; i < nComponents; i++) {
const FiniteElemSpace *feSpace = vec->getDOFVector(i)->getFeSpace();
map<DegreeOfFreedom, double> &dirichletValues =
vec->getDOFVector(i)->getDirichletValues();
for (map<DegreeOfFreedom, double>::iterator it = dirichletValues.begin();
it != dirichletValues.end(); ++it) {
if (isCoarseSpace(i, feSpace, it->first)) {
if ((*(coarseSpaceMap[i]))[feSpace].isRankDof(it->first)) {
int globalDof = (*(coarseSpaceMap[i]))[feSpace][it->first].global;
VecSetValue(rhsCoarseSpace, coarseSpaceMap[i]->getMatIndex(i, globalDof),
it->second, INSERT_VALUES);
}
} else {
if ((*interiorMap)[feSpace].isRankDof(it->first)) {
int globalDof = (*interiorMap)[feSpace][it->first].global;
VecSetValue(rhsInterior, interiorMap->getMatIndex(i, globalDof),
it->second, INSERT_VALUES);
}
}
}
}
VecAssemblyBegin(rhsInterior);
VecAssemblyEnd(rhsInterior);
if (coarseSpaceMap.size()) {
VecAssemblyBegin(rhsCoarseSpace);
VecAssemblyEnd(rhsCoarseSpace);
}
}
}