Commit 430cb57b authored by Thomas Witkowski's avatar Thomas Witkowski

Add more support for mixed finite element FETI-DP. Does not work yet.

parent 3f9bb6b2
......@@ -34,6 +34,13 @@ namespace AMDiS {
class GlobalDofMap
{
public:
/// This constructor exists only so that a std::map holding GlobalDofMap
/// values can be created and read via operator[] (which requires a
/// default constructor). It must never actually be invoked at runtime,
/// hence the hard error.
GlobalDofMap()
{
ERROR_EXIT("Should not be called!\n");
}
GlobalDofMap(MPI::Intracomm* m)
: mpiComm(m),
nRankDofs(0),
......@@ -103,7 +110,13 @@ namespace AMDiS {
class FeSpaceData
{
public:
FeSpaceData() {}
/// Default constructor: no communicator yet (NULL) and all cached DOF
/// counters set to the sentinel -1, i.e. "not yet computed". The
/// parameterless getters assert on these sentinels until update() runs.
FeSpaceData()
: mpiComm(NULL),
feSpaces(0),
nRankDofs(-1),
nOverallDofs(-1),
rStartDofs(-1)
{}
void setMpiComm(MPI::Intracomm *m)
{
......@@ -129,17 +142,26 @@ namespace AMDiS {
data.insert(make_pair(feSpace, T(mpiComm)));
}
int getRankDofs(vector<const FiniteElemSpace*> &feSpaces)
int getRankDofs(vector<const FiniteElemSpace*> &fe)
{
FUNCNAME("FeSpaceData::getRankDofs()");
int result = 0;
for (unsigned int i = 0; i < feSpaces.size(); i++) {
TEST_EXIT_DBG(data.count(feSpaces[i]))("Should not happen!\n");
result += data.find(feSpaces[i])->second.nRankDofs;
for (unsigned int i = 0; i < fe.size(); i++) {
TEST_EXIT_DBG(data.count(fe[i]))("Cannot find FE space: %p\n", fe[i]);
result += data[fe[i]].nRankDofs;
}
return result;
}
/// Returns the cached number of rank-owned DOFs summed over all
/// registered FE spaces. Valid only after update() has been called
/// (guarded by the -1 sentinel set in the default constructor).
inline int getRankDofs()
{
TEST_EXIT_DBG(nRankDofs >= 0)("Should not happen!\n");
return nRankDofs;
}
int getOverallDofs(vector<const FiniteElemSpace*> &feSpaces)
{
int result = 0;
......@@ -151,6 +173,13 @@ namespace AMDiS {
return result;
}
/// Returns the cached overall (global) number of DOFs summed over all
/// registered FE spaces. Valid only after update() has been called
/// (guarded by the -1 sentinel set in the default constructor).
inline int getOverallDofs()
{
TEST_EXIT_DBG(nOverallDofs >= 0)("Should not happen!\n");
return nOverallDofs;
}
int getStartDofs(vector<const FiniteElemSpace*> &feSpaces)
{
int result = 0;
......@@ -162,10 +191,74 @@ namespace AMDiS {
return result;
}
/// Returns the cached global index of the first DOF owned by this rank,
/// summed over all registered FE spaces. Valid only after update() has
/// been called (guarded by the -1 sentinel set in the constructor).
/// Marked inline for consistency with getRankDofs()/getOverallDofs().
inline int getStartDofs()
{
  TEST_EXIT_DBG(rStartDofs >= 0)("Should not happen!\n");
  return rStartDofs;
}
/// Registers the given set of FE spaces: stores the vector and makes
/// sure a per-FE-space data entry exists for each of them.
void setFeSpaces(vector<const FiniteElemSpace*> &fe)
{
  feSpaces = fe;
  for (unsigned int k = 0; k < fe.size(); k++)
    addFeSpace(fe[k]);
}
/// Recomputes the cached DOF counters (rank-owned, overall and rank
/// start offset) from the currently registered FE spaces. Must be
/// called after setFeSpaces() before the parameterless getter variants
/// may be used.
void update()
{
nRankDofs = getRankDofs(feSpaces);
nOverallDofs = getOverallDofs(feSpaces);
rStartDofs = getStartDofs(feSpaces);
}
/// Maps a DOF index local to the ithFeSpace-th registered FE space to
/// an index in the concatenated local (rank) numbering: the rank DOF
/// counts of all preceding FE spaces are used as offset.
inline int mapLocal(int index, int ithFeSpace)
{
  int offset = 0;
  int k = 0;
  while (k < ithFeSpace) {
    offset += data[feSpaces[k]].nRankDofs;
    k++;
  }
  return offset + data[feSpaces[ithFeSpace]][index];
}
inline int mapLocal(int index, const FiniteElemSpace *feSpace)
{
for (unsigned int i = 0; i < feSpaces.size(); i++)
if (feSpaces[i] == feSpace)
return mapLocal(index, feSpace, i);
return -1;
}
/// Maps a DOF index local to the ithFeSpace-th registered FE space to
/// its global index: the rank's global start offset plus the rank DOF
/// counts of all preceding FE spaces form the offset.
inline int mapGlobal(int index, int ithFeSpace)
{
  int globalIndex = rStartDofs;
  int k = 0;
  while (k < ithFeSpace) {
    globalIndex += data[feSpaces[k]].nRankDofs;
    k++;
  }
  return globalIndex + data[feSpaces[ithFeSpace]][index];
}
inline int mapGlobal(int index, const FiniteElemSpace *feSpace)
{
for (unsigned int i = 0; i < feSpaces.size(); i++)
if (feSpaces[i] == feSpace)
return mapGlobal(index, feSpace, i);
return -1;
}
private:
MPI::Intracomm* mpiComm;
map<const FiniteElemSpace*, T> data;
vector<const FiniteElemSpace*> feSpaces;
int nRankDofs, nOverallDofs, rStartDofs;
};
}
......
This diff is collapsed.
......@@ -94,13 +94,13 @@ namespace AMDiS {
/// Creates PETSc KSP solver object for solving the Schur complement
/// system on the primal variables, \ref ksp_schur_primal
void createSchurPrimalKsp();
void createSchurPrimalKsp(vector<const FiniteElemSpace*> &feSpaces);
/// Destroys PETSc KSP solver object \ref ksp_schur_primal
void destroySchurPrimalKsp();
/// Creates PETSc KSP solver object for the FETI-DP operator, \ref ksp_feti
void createFetiKsp();
void createFetiKsp(vector<const FiniteElemSpace*> &feSpaces);
/// Destroys FETI-DP operator, \ref ksp_feti
void destroyFetiKsp();
......
......@@ -33,27 +33,54 @@ namespace AMDiS {
MSG("Fill petsc matrix 1 needed %.5f seconds\n", MPI::Wtime() - wtime);
#endif
nestMat.resize(nComponents * nComponents);
if (nBlocks == -1) {
nBlocks = nComponents;
for (int i = 0; i < nBlocks; i++)
componentInBlock[i] = i;
}
vector<int> compNthInBlock(nComponents, 0);
vector<int> blockSize(nBlocks, 0);
for (int i = 0; i < nComponents; i++) {
compNthInBlock[i] = blockSize[componentInBlock[i]];
blockSize[componentInBlock[i]]++;
}
nestMat.resize(nBlocks * nBlocks);
// === Transfer values from DOF matrices to the PETSc matrix. ===
for (int i = 0; i < nBlocks; i++)
for (int j = 0; j < nBlocks; j++)
MatCreateMPIAIJ(PETSC_COMM_WORLD,
nRankRows * blockSize[i], nRankRows * blockSize[j],
nOverallRows * blockSize[i], nOverallRows * blockSize[j],
30 * blockSize[i], PETSC_NULL,
30 * blockSize[j], PETSC_NULL,
&(nestMat[i * nBlocks + j]));
for (int i = 0; i < nComponents; i++)
for (int j = 0; j < nComponents; j++)
if ((*mat)[i][j]) {
MatCreateMPIAIJ(PETSC_COMM_WORLD,
nRankRows, nRankRows,
nOverallRows, nOverallRows,
30, PETSC_NULL, 30, PETSC_NULL,
&(nestMat[i * nComponents + j]));
setDofMatrix(nestMat[i * nComponents + j], (*mat)[i][j]);
MatAssemblyBegin(nestMat[i * nComponents + j], MAT_FINAL_ASSEMBLY);
MatAssemblyEnd(nestMat[i * nComponents + j], MAT_FINAL_ASSEMBLY);
} else {
nestMat[i * nComponents + j] = PETSC_NULL;
int idx = componentInBlock[i] * nBlocks + componentInBlock[j];
setDofMatrix(nestMat[idx], (*mat)[i][j],
compNthInBlock[i], compNthInBlock[j]);
}
for (int i = 0; i < nBlocks; i++) {
for (int j = 0; j < nBlocks; j++) {
int idx = i * nBlocks + j;
if (nestMat[idx]) {
MatAssemblyBegin(nestMat[idx], MAT_FINAL_ASSEMBLY);
MatAssemblyEnd(nestMat[idx], MAT_FINAL_ASSEMBLY);
}
}
}
MatCreateNest(PETSC_COMM_WORLD,
nComponents, PETSC_NULL, nComponents, PETSC_NULL,
nBlocks, PETSC_NULL, nBlocks, PETSC_NULL,
&(nestMat[0]), &petscMatrix);
#if (DEBUG != 0)
......@@ -161,7 +188,10 @@ namespace AMDiS {
}
void PetscSolverGlobalBlockMatrix::setDofMatrix(Mat& petscMat, DOFMatrix* mat)
void PetscSolverGlobalBlockMatrix::setDofMatrix(Mat& petscMat,
DOFMatrix* mat,
int dispRowBlock,
int dispColBlock)
{
FUNCNAME("PetscSolverGlobalBlockMatrix::setDofMatrix()");
......@@ -180,6 +210,9 @@ namespace AMDiS {
typedef traits::range_generator<row, Matrix>::type cursor_type;
typedef traits::range_generator<nz, cursor_type>::type icursor_type;
int dispRowIndex = meshDistributor->getNumberRankDofs(feSpace) * dispRowBlock;
int dispColIndex = meshDistributor->getNumberRankDofs(feSpace) * dispColBlock;
vector<int> cols;
vector<double> values;
cols.reserve(300);
......@@ -192,7 +225,8 @@ namespace AMDiS {
cend = end<row>(mat->getBaseMatrix()); cursor != cend; ++cursor) {
// Global index of the current row DOF.
int rowIndex = meshDistributor->mapDofToGlobal(feSpace, *cursor);
int rowIndex =
meshDistributor->mapDofToGlobal(feSpace, *cursor) + dispRowIndex;
cols.clear();
values.clear();
......@@ -200,7 +234,8 @@ namespace AMDiS {
for (icursor_type icursor = begin<nz>(cursor), icend = end<nz>(cursor);
icursor != icend; ++icursor) {
// Global index of the current column index.
int colIndex = meshDistributor->mapDofToGlobal(feSpace, col(*icursor));
int colIndex =
meshDistributor->mapDofToGlobal(feSpace, col(*icursor)) + dispColIndex;
// Ignore all zero entries, expect it is a diagonal entry.
if (value(*icursor) == 0.0 && rowIndex != colIndex)
......
......@@ -36,20 +36,22 @@ namespace AMDiS {
public:
PetscSolverGlobalBlockMatrix()
: PetscSolver(),
nComponents(0)
nComponents(0),
nBlocks(-1)
{}
void fillPetscMatrix(Matrix<DOFMatrix*> *mat);
void fillPetscRhs(SystemVector *vec);
void solvePetscMatrix(SystemVector &vec, AdaptInfo *adaptInfo);
virtual void solvePetscMatrix(SystemVector &vec, AdaptInfo *adaptInfo);
void destroyMatrixData();
protected:
/// Takes a DOF matrix and sends the values to the global PETSc matrix.
void setDofMatrix(Mat& petscMat, DOFMatrix* mat);
void setDofMatrix(Mat& petscMat, DOFMatrix* mat,
int dispRowBlock, int dispColBlock);
/// Takes a DOF vector and sends its values to a given PETSc vector.
void setDofVector(Vec& petscVec, DOFVector<double>* vec);
......@@ -65,7 +67,14 @@ namespace AMDiS {
vector<Vec> nestVec;
/// Number of components (= number of unknowns in the PDE)
int nComponents;
/// Number of blocks for the solver, must be 1 <= nBlocks <= nComponents
int nBlocks;
/// Maps to each component number the block number the component is in.
map<int, int> componentInBlock;
};
......
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment