Commit 430cb57b authored by Thomas Witkowski's avatar Thomas Witkowski

Add more support for mixed finite element FETI-DP. Does not work yet.

parent 3f9bb6b2
@@ -34,6 +34,13 @@ namespace AMDiS {
class GlobalDofMap
{
public:
/// This constructor exists only so that a std::map of this class can be
/// created and operator[] can be used for read access. It must never be
/// called directly.
GlobalDofMap()
{
ERROR_EXIT("Should not be called!\n");
}
GlobalDofMap(MPI::Intracomm* m)
: mpiComm(m),
nRankDofs(0),
@@ -103,7 +110,13 @@ namespace AMDiS {
class FeSpaceData
{
public:
FeSpaceData() {}
FeSpaceData()
: mpiComm(NULL),
feSpaces(0),
nRankDofs(-1),
nOverallDofs(-1),
rStartDofs(-1)
{}
void setMpiComm(MPI::Intracomm *m)
{
@@ -129,17 +142,26 @@ namespace AMDiS {
data.insert(make_pair(feSpace, T(mpiComm)));
}
int getRankDofs(vector<const FiniteElemSpace*> &feSpaces)
int getRankDofs(vector<const FiniteElemSpace*> &fe)
{
FUNCNAME("FeSpaceData::getRankDofs()");
int result = 0;
for (unsigned int i = 0; i < feSpaces.size(); i++) {
TEST_EXIT_DBG(data.count(feSpaces[i]))("Should not happen!\n");
result += data.find(feSpaces[i])->second.nRankDofs;
for (unsigned int i = 0; i < fe.size(); i++) {
TEST_EXIT_DBG(data.count(fe[i]))("Cannot find FE space: %p\n", fe[i]);
result += data[fe[i]].nRankDofs;
}
return result;
}
inline int getRankDofs()
{
TEST_EXIT_DBG(nRankDofs >= 0)("nRankDofs is not set; call update() first!\n");
return nRankDofs;
}
int getOverallDofs(vector<const FiniteElemSpace*> &feSpaces)
{
int result = 0;
@@ -151,6 +173,13 @@ namespace AMDiS {
return result;
}
inline int getOverallDofs()
{
TEST_EXIT_DBG(nOverallDofs >= 0)("nOverallDofs is not set; call update() first!\n");
return nOverallDofs;
}
int getStartDofs(vector<const FiniteElemSpace*> &feSpaces)
{
int result = 0;
@@ -162,10 +191,74 @@ namespace AMDiS {
return result;
}
int getStartDofs()
{
TEST_EXIT_DBG(rStartDofs >= 0)("rStartDofs is not set; call update() first!\n");
return rStartDofs;
}
void setFeSpaces(vector<const FiniteElemSpace*> &fe)
{
feSpaces = fe;
for (unsigned int i = 0; i < feSpaces.size(); i++)
addFeSpace(feSpaces[i]);
}
void update()
{
nRankDofs = getRankDofs(feSpaces);
nOverallDofs = getOverallDofs(feSpaces);
rStartDofs = getStartDofs(feSpaces);
}
inline int mapLocal(int index, int ithFeSpace)
{
int result = 0;
for (int i = 0; i < ithFeSpace; i++)
result += data[feSpaces[i]].nRankDofs;
result += data[feSpaces[ithFeSpace]][index];
return result;
}
inline int mapLocal(int index, const FiniteElemSpace *feSpace)
{
for (unsigned int i = 0; i < feSpaces.size(); i++)
if (feSpaces[i] == feSpace)
return mapLocal(index, i);
return -1;
}
inline int mapGlobal(int index, int ithFeSpace)
{
int result = rStartDofs;
for (int i = 0; i < ithFeSpace; i++)
result += data[feSpaces[i]].nRankDofs;
result += data[feSpaces[ithFeSpace]][index];
return result;
}
inline int mapGlobal(int index, const FiniteElemSpace *feSpace)
{
for (unsigned int i = 0; i < feSpaces.size(); i++)
if (feSpaces[i] == feSpace)
return mapGlobal(index, i);
return -1;
}
private:
MPI::Intracomm* mpiComm;
map<const FiniteElemSpace*, T> data;
vector<const FiniteElemSpace*> feSpaces;
int nRankDofs, nOverallDofs, rStartDofs;
};
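The class requires a fixed call order: the communicator must be set before FE spaces are registered, and update() must run after the per-FE-space maps are filled, since the parameterless getters only return the cached sums. A minimal usage sketch, assuming FeSpaceData is instantiated with GlobalDofMap as T and that feVel and fePress are hypothetical FiniteElemSpace pointers:

// Sketch only; feVel, fePress and the mpiComm instance are assumptions.
vector<const FiniteElemSpace*> spaces;
spaces.push_back(feVel);
spaces.push_back(fePress);

FeSpaceData<GlobalDofMap> dofMap;
dofMap.setMpiComm(&mpiComm);   // communicator first: addFeSpace needs it
dofMap.setFeSpaces(spaces);    // creates one GlobalDofMap per FE space
// ... insert rank DOFs into dofMap[feVel] and dofMap[fePress] ...
dofMap.update();               // caches nRankDofs, nOverallDofs, rStartDofs

int nLocal = dofMap.getRankDofs();   // valid only after update()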
}
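mapLocal above concatenates the per-FE-space blocks (all rank DOFs of FE space 0, then FE space 1, and so on), after translating index through the per-FE-space DofMapping; mapGlobal adds the rank offset rStartDofs on top. A self-contained sketch of the same offset arithmetic, with made-up sizes and an index assumed to be already translated:

#include <cassert>
#include <vector>

// nRankDofs[i] = number of rank-owned DOFs of FE space i.
int mapLocal(const std::vector<int>& nRankDofs, int ithFeSpace, int index)
{
  int result = 0;
  for (int i = 0; i < ithFeSpace; i++)
    result += nRankDofs[i];    // skip the blocks of preceding FE spaces
  return result + index;
}

int main()
{
  std::vector<int> nRankDofs;
  nRankDofs.push_back(100);    // e.g. velocity DOFs on this rank
  nRankDofs.push_back(30);     // e.g. pressure DOFs on this rank
  int rStartDofs = 400;        // example global offset of this rank

  assert(mapLocal(nRankDofs, 0, 7) == 7);     // inside the first block
  assert(mapLocal(nRankDofs, 1, 7) == 107);   // behind 100 velocity DOFs
  assert(rStartDofs + mapLocal(nRankDofs, 1, 7) == 507);  // mapGlobal analogue
  return 0;
}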
@@ -248,7 +248,6 @@ namespace AMDiS {
// === Calculate the number of primals that are owned by the rank and ===
// === create local indices of the primals starting at zero. ===
primalDofMap.addFeSpace(feSpace);
for (DofIndexSet::iterator it = primals.begin(); it != primals.end(); ++it)
if (meshDistributor->getIsRankDof(feSpace, *it))
primalDofMap[feSpace].insertRankDof(*it);
@@ -372,8 +371,6 @@ namespace AMDiS {
// === Create global index of the dual nodes on each rank. ===
dualDofMap.addFeSpace(feSpace);
DofContainer allBoundaryDofs;
meshDistributor->getAllBoundaryDofs(feSpace, allBoundaryDofs);
@@ -397,8 +394,6 @@ namespace AMDiS {
// === Reserve for each dual node, on the rank that owns this node, the ===
// === appropriate number of Lagrange constraints. ===
lagrangeMap.addFeSpace(feSpace);
int nRankLagrange = 0;
DofMapping& dualMap = dualDofMap[feSpace].getMap();
for (DofMapping::iterator it = dualMap.begin(); it != dualMap.end(); ++it) {
@@ -463,7 +458,6 @@ namespace AMDiS {
{
FUNCNAME("PetscSolverFeti::createIndexB()");
localDofMap.addFeSpace(feSpace);
DOFAdmin* admin = feSpace->getAdmin();
// === To ensure that all interior nodes on each rank are listed first in ===
@@ -511,9 +505,9 @@ namespace AMDiS {
MatCreateMPIAIJ(PETSC_COMM_WORLD,
lagrangeMap.getRankDofs(feSpaces),
localDofMap.getRankDofs(feSpaces),
localDofMap.getRankDofs(),
lagrangeMap.getOverallDofs(feSpaces),
localDofMap.getOverallDofs(feSpaces),
localDofMap.getOverallDofs(),
2, PETSC_NULL, 2, PETSC_NULL,
&mat_lagrange);
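mat_lagrange is rectangular: its rows follow the Lagrange-multiplier distribution and its columns the interior/dual (B) distribution, and the preallocation of two nonzeros per row matches the +1/-1 pair each constraint places on the two subdomain copies of a dual DOF. A sizing sketch with made-up per-rank counts, using the same MatCreateMPIAIJ call as above:

// Hypothetical sizes; the solver takes them from lagrangeMap/localDofMap.
PetscInt nRankLag = 50,  nOverallLag = 200;    // Lagrange rows
PetscInt nRankB   = 130, nOverallB   = 520;    // B (interior + dual) columns

Mat B;
MatCreateMPIAIJ(PETSC_COMM_WORLD,
                nRankLag, nRankB,              // local row/column counts
                nOverallLag, nOverallB,        // global row/column counts
                2, PETSC_NULL, 2, PETSC_NULL,  // at most 2 nonzeros per row
                &B);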
@@ -560,7 +554,7 @@ namespace AMDiS {
}
void PetscSolverFeti::createSchurPrimalKsp()
void PetscSolverFeti::createSchurPrimalKsp(vector<const FiniteElemSpace*> &feSpaces)
{
FUNCNAME("PetscSolverFeti::createSchurPrimal()");
@@ -575,7 +569,7 @@ namespace AMDiS {
schurPrimalData.fetiSolver = this;
VecCreateMPI(PETSC_COMM_WORLD,
localDofMap[feSpace].nRankDofs * nComponents, localDofMap[feSpace].nOverallDofs * nComponents,
localDofMap.getRankDofs(), localDofMap.getOverallDofs(),
&(schurPrimalData.tmp_vec_b));
VecCreateMPI(PETSC_COMM_WORLD,
primalDofMap[feSpace].nRankDofs * nComponents, primalDofMap[feSpace].nOverallDofs * nComponents,
@@ -602,8 +596,8 @@ namespace AMDiS {
int nRowsRankPrimal = primalDofMap[feSpace].nRankDofs * nComponents;
int nRowsOverallPrimal = primalDofMap[feSpace].nOverallDofs * nComponents;
int nRowsRankB = localDofMap[feSpace].nRankDofs * nComponents;
int nRowsOverallB = localDofMap[feSpace].nOverallDofs * nComponents;
int nRowsRankB = localDofMap.getRankDofs();
int nRowsOverallB = localDofMap.getOverallDofs();
Mat matBPi;
MatCreateMPIAIJ(PETSC_COMM_WORLD,
@@ -618,8 +612,8 @@ namespace AMDiS {
map<int, vector<pair<int, double> > > mat_b_primal_cols;
for (int i = 0; i < localDofMap[feSpace].nRankDofs * nComponents; i++) {
PetscInt row = localDofMap[feSpace].rStartDofs * nComponents + i;
for (int i = 0; i < nRowsRankB; i++) {
PetscInt row = localDofMap.getStartDofs() + i;
MatGetRow(mat_b_primal, row, &nCols, &cols, &values);
for (int j = 0; j < nCols; j++)
@@ -637,7 +631,7 @@ namespace AMDiS {
for (map<int, vector<pair<int, double> > >::iterator it = mat_b_primal_cols.begin();
it != mat_b_primal_cols.end(); ++it) {
Vec tmpVec;
VecCreateSeq(PETSC_COMM_SELF, localDofMap[feSpace].nRankDofs * nComponents, &tmpVec);
VecCreateSeq(PETSC_COMM_SELF, nRowsRankB, &tmpVec);
for (unsigned int i = 0; i < it->second.size(); i++)
VecSetValue(tmpVec,
@@ -650,9 +644,9 @@ namespace AMDiS {
PetscScalar *tmpValues;
VecGetArray(tmpVec, &tmpValues);
for (int i = 0; i < localDofMap[feSpace].nRankDofs * nComponents; i++)
for (int i = 0; i < nRowsRankB; i++)
MatSetValue(matBPi,
localDofMap[feSpace].rStartDofs * nComponents + i,
localDofMap.getStartDofs() + i,
it->first,
tmpValues[i],
ADD_VALUES);
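This loop materializes K_BB^{-1} K_BPi column by column: MatGetRow buckets the entries of mat_b_primal per column index, each column is gathered into a sequential vector (and, in the elided lines, presumably solved with the local K_BB solver) before being scattered into matBPi. A minimal sketch of the MatGetRow idiom (A and row are placeholders; every MatGetRow must be paired with MatRestoreRow):

PetscInt nCols;
const PetscInt *cols;
const PetscScalar *vals;

MatGetRow(A, row, &nCols, &cols, &vals);    // row must be locally owned
for (PetscInt j = 0; j < nCols; j++) {
  // bucket vals[j] under column cols[j], as mat_b_primal_cols does above
}
MatRestoreRow(A, row, &nCols, &cols, &vals);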
@@ -706,7 +700,7 @@ namespace AMDiS {
}
void PetscSolverFeti::createFetiKsp()
void PetscSolverFeti::createFetiKsp(vector<const FiniteElemSpace*> &feSpaces)
{
FUNCNAME("PetscSolverFeti::createFetiKsp()");
@@ -721,14 +715,15 @@ namespace AMDiS {
fetiData.ksp_schur_primal = &ksp_schur_primal;
VecCreateMPI(PETSC_COMM_WORLD,
localDofMap[feSpace].nRankDofs * nComponents, localDofMap[feSpace].nOverallDofs * nComponents,
localDofMap.getRankDofs(), localDofMap.getOverallDofs(),
&(fetiData.tmp_vec_b));
VecCreateMPI(PETSC_COMM_WORLD,
lagrangeMap[feSpace].nRankDofs * nComponents,
lagrangeMap[feSpace].nOverallDofs * nComponents,
&(fetiData.tmp_vec_lagrange));
VecCreateMPI(PETSC_COMM_WORLD,
primalDofMap[feSpace].nRankDofs * nComponents, primalDofMap[feSpace].nOverallDofs * nComponents,
primalDofMap[feSpace].nRankDofs * nComponents,
primalDofMap[feSpace].nOverallDofs * nComponents,
&(fetiData.tmp_vec_primal));
MatCreateShell(PETSC_COMM_WORLD,
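The operators used by ksp_schur_primal and ksp_feti are matrix-free: the MatCreateShell call above (truncated by the diff) attaches fetiData as user context to a shell matrix whose action is supplied by a callback. A generic sketch of that PETSc pattern, with a placeholder mult routine and placeholder sizes:

// Matrix-free operator y = A x; the action lives in a user callback.
PetscErrorCode applyOperator(Mat A, Vec x, Vec y)
{
  void *ctx;
  MatShellGetContext(A, &ctx);   // recover the context attached below
  // ... apply the operator to x using ctx and write the result into y ...
  return 0;
}

// Inside the setup routine (nRankRows/nOverallRows are placeholders):
Mat A;
MatCreateShell(PETSC_COMM_WORLD, nRankRows, nRankRows,
               nOverallRows, nOverallRows, &fetiData, &A);
MatShellSetOperation(A, MATOP_MULT, (void(*)(void)) applyOperator);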
@@ -769,11 +764,14 @@ namespace AMDiS {
fetiDirichletPreconData.ksp_interior = &ksp_interior;
VecCreateMPI(PETSC_COMM_WORLD,
localDofMap[feSpace].nRankDofs * nComponents, localDofMap[feSpace].nOverallDofs * nComponents,
localDofMap.getRankDofs(), localDofMap.getOverallDofs(),
&(fetiDirichletPreconData.tmp_vec_b));
MatGetVecs(mat_duals_duals, PETSC_NULL, &(fetiDirichletPreconData.tmp_vec_duals0));
MatGetVecs(mat_duals_duals, PETSC_NULL, &(fetiDirichletPreconData.tmp_vec_duals1));
MatGetVecs(mat_interior_interior, PETSC_NULL, &(fetiDirichletPreconData.tmp_vec_interior));
MatGetVecs(mat_duals_duals, PETSC_NULL,
&(fetiDirichletPreconData.tmp_vec_duals0));
MatGetVecs(mat_duals_duals, PETSC_NULL,
&(fetiDirichletPreconData.tmp_vec_duals1));
MatGetVecs(mat_interior_interior, PETSC_NULL,
&(fetiDirichletPreconData.tmp_vec_interior));
KSPGetPC(ksp_feti, &precon_feti);
PCSetType(precon_feti, PCSHELL);
@@ -787,10 +785,13 @@ namespace AMDiS {
fetiLumpedPreconData.mat_duals_duals = &mat_duals_duals;
VecCreateMPI(PETSC_COMM_WORLD,
localDofMap[feSpace].nRankDofs * nComponents, localDofMap[feSpace].nOverallDofs * nComponents,
localDofMap.getRankDofs(),
localDofMap.getOverallDofs(),
&(fetiLumpedPreconData.tmp_vec_b));
MatGetVecs(mat_duals_duals, PETSC_NULL, &(fetiLumpedPreconData.tmp_vec_duals0));
MatGetVecs(mat_duals_duals, PETSC_NULL, &(fetiLumpedPreconData.tmp_vec_duals1));
MatGetVecs(mat_duals_duals, PETSC_NULL,
&(fetiLumpedPreconData.tmp_vec_duals0));
MatGetVecs(mat_duals_duals, PETSC_NULL,
&(fetiLumpedPreconData.tmp_vec_duals1));
KSPGetPC(ksp_feti, &precon_feti);
PCSetType(precon_feti, PCSHELL);
@@ -926,7 +927,13 @@ namespace AMDiS {
for (DofMapping::iterator it = localDofMap[feSpace].getMap().begin();
it != localDofMap[feSpace].getMap().end(); ++it) {
#if 1
int petscIndex = localDofMap.mapLocal(it->first, i);
#else
int petscIndex = it->second * nComponents + i;
#endif
dofVec[it->first] = localSolB[petscIndex];
}
@@ -959,18 +966,28 @@ namespace AMDiS {
dualDofMap.setMpiComm(mpiComm);
lagrangeMap.setMpiComm(mpiComm);
localDofMap.setMpiComm(mpiComm);
primalDofMap.setFeSpaces(feSpaces);
dualDofMap.setFeSpaces(feSpaces);
lagrangeMap.setFeSpaces(feSpaces);
localDofMap.setFeSpaces(feSpaces);
updateDofData();
primalDofMap.update();
dualDofMap.update();
lagrangeMap.update();
localDofMap.update();
// === Create matrices for the FETI-DP method. ===
int nRowsRankB = localDofMap.getRankDofs(feSpaces);
int nRowsOverallB = localDofMap.getOverallDofs(feSpaces);
int nRowsRankB = localDofMap.getRankDofs();
int nRowsOverallB = localDofMap.getOverallDofs();
int nRowsRankPrimal = primalDofMap.getRankDofs(feSpaces);
int nRowsOverallPrimal = primalDofMap.getOverallDofs(feSpaces);
int nRowsDual = dualDofMap.getRankDofs(feSpaces);
int nRowsInterior = nRowsRankB - nRowsDual;
MatCreateSeqAIJ(PETSC_COMM_SELF, nRowsRankB, nRowsRankB, 30, PETSC_NULL,
&mat_b_b);
@@ -1071,36 +1088,24 @@ namespace AMDiS {
bool colPrimal = primalDofMap[feSpace].isSet(col(*icursor));
if (colPrimal) {
// Column is a primal variable.
int colIndex =
primalDofMap[feSpace][col(*icursor)] * nComponents + j;
if (rowPrimal) {
cols.push_back(colIndex);
cols.push_back(col(*icursor));
values.push_back(value(*icursor));
} else {
colsOther.push_back(colIndex);
colsOther.push_back(col(*icursor));
valuesOther.push_back(value(*icursor));
}
} else {
// Column is not a primal variable.
int colIndex =
(localDofMap[feSpace][col(*icursor)] +
localDofMap[feSpace].rStartDofs) * nComponents + j;
if (rowPrimal) {
colsOther.push_back(colIndex);
colsOther.push_back(col(*icursor));
valuesOther.push_back(value(*icursor));
} else {
cols.push_back(colIndex);
cols.push_back(col(*icursor));
values.push_back(value(*icursor));
}
}
// === For preconditioner ===
if (!rowPrimal && !colPrimal) {
@@ -1109,7 +1114,11 @@ namespace AMDiS {
if (rowIndex < nLocalInterior) {
if (colIndex < nLocalInterior) {
#if 1
int colIndex2 = localDofMap.mapLocal(col(*icursor), j);
#else
int colIndex2 = localDofMap[feSpace][col(*icursor)] * nComponents + j;
#endif
colsLocal.push_back(colIndex2);
valuesLocal.push_back(value(*icursor));
@@ -1122,7 +1131,11 @@ namespace AMDiS {
}
} else {
if (colIndex < nLocalInterior) {
#if 1
int colIndex2 = localDofMap.mapLocal(col(*icursor), j);
#else
int colIndex2 = localDofMap[feSpace][col(*icursor)] * nComponents + j;
#endif
colsLocalOther.push_back(colIndex2);
valuesLocalOther.push_back(value(*icursor));
@@ -1139,31 +1152,67 @@ namespace AMDiS {
} // for each nnz in row
// === Set matrix values. ===
if (rowPrimal) {
int rowIndex =
primalDofMap[feSpace][*cursor] * nComponents + i;
for (unsigned int k = 0; k < cols.size(); k++)
cols[k] = primalDofMap[feSpace][cols[k]] * nComponents + j;
MatSetValues(mat_primal_primal, 1, &rowIndex, cols.size(),
&(cols[0]), &(values[0]), ADD_VALUES);
if (colsOther.size())
if (colsOther.size()) {
for (unsigned int k = 0; k < colsOther.size(); k++)
#if 1
colsOther[k] = localDofMap.mapGlobal(colsOther[k], j);
#else
colsOther[k] = (localDofMap[feSpace][colsOther[k]] +
localDofMap[feSpace].rStartDofs) * nComponents + j;
#endif
MatSetValues(mat_primal_b, 1, &rowIndex, colsOther.size(),
&(colsOther[0]), &(valuesOther[0]), ADD_VALUES);
}
} else {
#if 1
int localRowIndex = localDofMap.mapLocal(*cursor, i);
#else
int localRowIndex = localDofMap[feSpace][*cursor] * nComponents + i;
#endif
for (unsigned int k = 0; k < cols.size(); k++)
cols[k] -= localDofMap[feSpace].rStartDofs * nComponents;
#if 1
cols[k] = localDofMap.mapLocal(cols[k], j);
#else
cols[k] = localDofMap[feSpace][cols[k]] * nComponents + j;
#endif
MatSetValues(mat_b_b, 1, &localRowIndex, cols.size(),
&(cols[0]), &(values[0]), ADD_VALUES);
if (colsOther.size()) {
#if 1
int globalRowIndex = localDofMap.mapGlobal(*cursor, i);
#else
int globalRowIndex =
(localDofMap[feSpace][*cursor] + localDofMap[feSpace].rStartDofs) * nComponents + i;
#endif
for (unsigned int k = 0; k < colsOther.size(); k++)
colsOther[k] =
primalDofMap[feSpace][colsOther[k]] * nComponents + j;
MatSetValues(mat_b_primal, 1, &globalRowIndex, colsOther.size(),
&(colsOther[0]), &(valuesOther[0]), ADD_VALUES);
}
}
// === For preconditioner ===
// === Set matrix values for preconditioner ===
if (!rowPrimal) {
switch (fetiPreconditioner) {
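The assembly in this hunk routes every discretization-matrix entry into one of four blocks by the primal/non-primal status of its row and column, translating indices with mapLocal (local B indices) or mapGlobal (global B indices) on the way. A condensed sketch of just the routing decision (names mirror the code above; the matrices are assumed to exist and index translation is omitted):

// Hypothetical helper showing only the block routing.
void routeEntry(bool rowPrimal, bool colPrimal,
                Mat mat_primal_primal, Mat mat_primal_b,
                Mat mat_b_primal, Mat mat_b_b,
                PetscInt r, PetscInt c, PetscScalar v)
{
  if (rowPrimal && colPrimal)
    MatSetValue(mat_primal_primal, r, c, v, ADD_VALUES);
  else if (rowPrimal)
    MatSetValue(mat_primal_b, r, c, v, ADD_VALUES);    // primal row, B column
  else if (colPrimal)
    MatSetValue(mat_b_primal, r, c, v, ADD_VALUES);    // B row, primal column
  else
    MatSetValue(mat_b_b, r, c, v, ADD_VALUES);         // purely local block
}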
@@ -1265,12 +1314,12 @@ namespace AMDiS {
// === Create PETSc solver for the Schur complement on primal variables. ===
createSchurPrimalKsp();
createSchurPrimalKsp(feSpaces);
// === Create PETSc solver for the FETI-DP operator. ===
createFetiKsp();
createFetiKsp(feSpaces);
}
@@ -1278,12 +1327,12 @@ namespace AMDiS {
{
FUNCNAME("PetscSolverFeti::fillPetscRhs()");
vector<const FiniteElemSpace*> feSpaces = getFeSpaces(vec);
const FiniteElemSpace *feSpace = meshDistributor->getFeSpace(0);
int nComponents = vec->getSize();
VecCreateMPI(PETSC_COMM_WORLD,
localDofMap[feSpace].nRankDofs * nComponents,
localDofMap[feSpace].nOverallDofs * nComponents, &f_b);
localDofMap.getRankDofs(), localDofMap.getOverallDofs(), &f_b);
VecCreateMPI(PETSC_COMM_WORLD,
primalDofMap[feSpace].nRankDofs * nComponents,
primalDofMap[feSpace].nOverallDofs * nComponents, &f_primal);
@@ -1297,7 +1346,12 @@ namespace AMDiS {
double value = *dofIt;
VecSetValues(f_primal, 1, &index, &value, ADD_VALUES);
} else {
#if 1
index = localDofMap.mapGlobal(index, i);
#else
index = (localDofMap[feSpace][index] + localDofMap[feSpace].rStartDofs) * nComponents + i;
#endif
VecSetValue(f_b, index, *dofIt, INSERT_VALUES);
}
}
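Right-hand-side entries follow the same split: contributions to a shared primal node are accumulated with ADD_VALUES, while B DOFs are rank-owned and can be inserted directly at their mapGlobal position. A condensed sketch of the branch above (isPrimal and primalIndex are hypothetical stand-ins for the primalDofMap lookups):

// Sketch: route one rhs value v of DOF 'dof', component i.
if (isPrimal(dof)) {
  PetscInt idx = primalIndex(dof, i);          // global primal index
  VecSetValue(f_primal, idx, v, ADD_VALUES);   // shared: contributions add up
} else {
  PetscInt idx = localDofMap.mapGlobal(dof, i);
  VecSetValue(f_b, idx, v, INSERT_VALUES);     // rank-owned: plain insert
}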
@@ -1315,17 +1369,14 @@ namespace AMDiS {
{
FUNCNAME("PetscSolverFeti::solveLocalProblem()");
const FiniteElemSpace *feSpace = meshDistributor->getFeSpace(0);
Vec tmp;
VecCreateSeq(PETSC_COMM_SELF, localDofMap[feSpace].nRankDofs * nComponents, &tmp);
PetscScalar *tmpValues;
VecGetArray(tmp, &tmpValues);
VecCreateSeq(PETSC_COMM_SELF, localDofMap.getRankDofs(), &tmp);
PetscScalar *rhsValues;
PetscScalar *tmpValues, *rhsValues;
VecGetArray(tmp, &tmpValues);
VecGetArray(rhs, &rhsValues);
for (int i = 0; i < localDofMap[feSpace].nRankDofs * nComponents; i++)
for (int i = 0; i < localDofMap.getRankDofs(); i++)
tmpValues[i] = rhsValues[i];
VecRestoreArray(rhs, &rhsValues);
@@ -1334,13 +1385,12 @@ namespace AMDiS {
KSPSolve(ksp_b, tmp, tmp);
VecGetArray(tmp, &tmpValues);
PetscScalar *solValues;
VecGetArray(sol, &solValues);
VecGetArray(sol, &rhsValues);
for (int i = 0; i < localDofMap[feSpace].nRankDofs * nComponents; i++)
solValues[i] = tmpValues[i];
for (int i = 0; i < localDofMap.getRankDofs(); i++)
rhsValues[i] = tmpValues[i];
VecRestoreArray(sol, &solValues);
VecRestoreArray(sol, &rhsValues);
VecRestoreArray(tmp, &tmpValues);
VecDestroy(&tmp);
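solveLocalProblem moves data between the distributed rhs/sol vectors and the sequential work vector through raw arrays; each VecGetArray must be matched by VecRestoreArray on the same vector before it is used elsewhere. A minimal sketch of the copy idiom (vFrom, vTo and the common local size n are placeholders):

PetscScalar *src, *dst;
VecGetArray(vFrom, &src);
VecGetArray(vTo, &dst);
for (PetscInt i = 0; i < n; i++)
  dst[i] = src[i];               // purely local copy, no communication
VecRestoreArray(vFrom, &src);
VecRestoreArray(vTo, &dst);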
@@ -1351,11 +1401,11 @@ namespace AMDiS {
{
FUNCNAME("PetscSolverFeti::solveFetiMatrix()");
#if 0
const FiniteElemSpace *feSpace = meshDistributor->getFeSpace(0);
int nOverallNest =
(localDofMap[feSpace].nOverallDofs +
primalDofMap[feSpace].nOverallDofs +
int nOverallNest = localDofMap.getOverallDofs() +
(primalDofMap[feSpace].nOverallDofs +
lagrangeMap[feSpace].nOverallDofs) * nComponents;
Mat mat_lagrange_transpose;
@@ -1546,6 +1596,8 @@ namespace AMDiS {
recoverSolution(u_b, u_primal, vec);
KSPDestroy(&ksp);
#endif
}
@@ -94,13 +94,13 @@ namespace AMDiS {
/// Creates PETSc KSP solver object for solving the Schur complement
/// system on the primal variables, \ref ksp_schur_primal
void createSchurPrimalKsp();
void createSchurPrimalKsp(vector<const FiniteElemSpace*> &feSpaces);
/// Destroys PETSc KSP solver object \ref ksp_schur_primal