Commit 5a7ff74f authored by Thomas Witkowski

Removed parallel DOF mapping access in MeshDistributor.

parent 90357cd5
@@ -145,70 +145,16 @@ namespace AMDiS {
return feSpaces;
}
/// Returns the number of DOFs in rank's domain for a given FE space.
inline int getNumberRankDofs(const FiniteElemSpace *feSpace)
{
return dofMap[feSpace].nRankDofs;
}
/// Returns the number of DOFs in rank's domain for a set of FE spaces.
inline int getNumberRankDofs(vector<const FiniteElemSpace*>& feSpaces)
{
FUNCNAME("MeshDistributor::getNumberRankDofs()");
int result = 0;
for (unsigned int i = 0; i < feSpaces.size(); i++)
result += dofMap[feSpaces[i]].nRankDofs;
return result;
}
/// Returns the first global DOF index of an FE space, owned by rank.
inline int getStartDofs(const FiniteElemSpace *feSpace)
{
return dofMap[feSpace].rStartDofs;
}
/// Returns the first global DOF index for a set of FE spaces, owned by rank.
inline int getStartDofs(vector<const FiniteElemSpace*>& feSpaces)
{
FUNCNAME("MeshDistributor::getStartDofs()");
int result = 0;
for (unsigned int i = 0; i < feSpaces.size(); i++)
result += dofMap[feSpaces[i]].rStartDofs;
return result;
}
/// Returns the global number of DOFs for a given FE space.
inline int getNumberOverallDofs(const FiniteElemSpace *feSpace)
{
return dofMap[feSpace].nOverallDofs;
}
/// Returns the global number of DOFs for a set of FE spaces.
inline int getNumberOverallDofs(vector<const FiniteElemSpace*>& feSpaces)
{
FUNCNAME("MeshDistributor::getNumberOverallDofs()");
int result = 0;
for (unsigned int i = 0; i < feSpaces.size(); i++)
result += dofMap[feSpaces[i]].nOverallDofs;
return result;
}
inline map<DegreeOfFreedom, MultiIndex>& getMapDofToGlobal(const FiniteElemSpace *feSpace)
/// Returns the DOF mapping object, \ref dofMap.
inline ParallelDofMapping& getDofMap()
{
return dofMap[feSpace].getMap();
return dofMap;
}
/// Maps a local DOF to its global index.
inline DegreeOfFreedom mapDofToGlobal(const FiniteElemSpace *feSpace,
DegreeOfFreedom dof)
/// Returns the periodic mapping handler, \ref periodicMap.
inline PeriodicMap& getPeriodicMap()
{
return dofMap[feSpace][dof].global;
return periodicMap;
}
/// Returns for a global index the DOF index in rank's subdomain. As there
@@ -218,25 +164,6 @@ namespace AMDiS {
DegreeOfFreedom mapGlobalToLocal(const FiniteElemSpace *feSpace,
DegreeOfFreedom dof);
/// Maps a DOF to its local index in rank's subdomain.
inline DegreeOfFreedom mapLocalToDof(const FiniteElemSpace *feSpace,
DegreeOfFreedom dof)
{
return dofMap[feSpace][dof].local;
}
/// Returns the DOF mapping object, \ref dofMap.
inline ParallelDofMapping& getDofMap()
{
return dofMap;
}
/// Returns the periodic mapping handler, \ref periodicMap.
inline PeriodicMap& getPeriodicMap()
{
return periodicMap;
}
DofComm& getSendDofs()
{
return sendDofs;
@@ -252,13 +179,6 @@ namespace AMDiS {
return periodicDofs;
}
/// Returns true if the given DOF is owned by the rank. If false, the DOF
/// is in rank's partition but owned by some other rank.
inline bool getIsRankDof(const FiniteElemSpace *feSpace, DegreeOfFreedom dof)
{
return dofMap[feSpace].isRankDof(dof);
}
inline long getLastMeshChangeIndex()
{
return lastMeshChangeIndex;
......
@@ -775,7 +775,7 @@ namespace AMDiS {
for (it.reset(); !it.end(); ++it) {
file << it.getDOFIndex() << " "
<< pdb.dofMap[feSpace][it.getDOFIndex()].global << " "
<< pdb.getIsRankDof(feSpace, it.getDOFIndex());
<< pdb.dofMap[feSpace].isRankDof(it.getDOFIndex());
for (int i = 0; i < pdb.mesh->getDim(); i++)
file << " " << (*it)[i];
file << "\n";
......
@@ -336,7 +336,7 @@ namespace AMDiS {
// === Create local indices of the primals, starting at zero. ===
for (DofIndexSet::iterator it = primals.begin(); it != primals.end(); ++it)
if (meshDistributor->getIsRankDof(feSpace, *it))
if (meshDistributor->getDofMap()[feSpace].isRankDof(*it))
primalDofMap[feSpace].insertRankDof(*it);
else
primalDofMap[feSpace].insert(*it);
@@ -422,7 +422,8 @@ namespace AMDiS {
int nRankLagrange = 0;
DofMap& dualMap = dualDofMap[feSpace].getMap();
for (DofMap::iterator it = dualMap.begin(); it != dualMap.end(); ++it) {
if (meshDistributor->getIsRankDof(feSpace, it->first)) {
if (meshDistributor->getDofMap()[feSpace].isRankDof(it->first)) {
lagrangeMap[feSpace].insertRankDof(it->first, nRankLagrange);
int degree = boundaryDofRanks[feSpace][it->first].size();
nRankLagrange += (degree * (degree - 1)) / 2;
......
@@ -21,13 +21,14 @@ namespace AMDiS {
FUNCNAME("PetscSolverGlobalBlockMatrix::fillPetscMatrix()");
TEST_EXIT_DBG(meshDistributor)("No mesh distributor object defined!\n");
TEST_EXIT_DBG(dofMap)("No parallel mapping object defined!\n");
TEST_EXIT_DBG(mat)("No DOF matrix defined!\n");
double wtime = MPI::Wtime();
const FiniteElemSpace *feSpace = meshDistributor->getFeSpace(0);
nComponents = mat->getNumRows();
int nRankRows = meshDistributor->getNumberRankDofs(feSpace);
int nOverallRows = meshDistributor->getNumberOverallDofs(feSpace);
int nRankRows = (*dofMap)[feSpace].nRankDofs;
int nOverallRows = (*dofMap)[feSpace].nOverallDofs;
#if (DEBUG != 0)
MSG("Fill petsc matrix 1 needed %.5f seconds\n", MPI::Wtime() - wtime);
@@ -107,8 +108,8 @@ namespace AMDiS {
nComponents = vec->getSize();
const FiniteElemSpace *feSpace = meshDistributor->getFeSpace(0);
int nRankRows = meshDistributor->getNumberRankDofs(feSpace);
int nOverallRows = meshDistributor->getNumberOverallDofs(feSpace);
int nRankRows = (*dofMap)[feSpace].nRankDofs;
int nOverallRows = (*dofMap)[feSpace].nOverallDofs;
nestVec.resize(nComponents);
@@ -150,12 +151,14 @@ namespace AMDiS {
Vec tmp;
VecNestGetSubVec(petscSolVec, i, &tmp);
int nRankDofs = meshDistributor->getNumberRankDofs(feSpace);
int nRankDofs = (*dofMap)[feSpace].nRankDofs;
PetscScalar *vecPointer;
VecGetArray(tmp, &vecPointer);
for (int j = 0; j < nRankDofs; j++)
dofvec[meshDistributor->mapLocalToDof(feSpace, j)] = vecPointer[j];
DofMap& d = (*dofMap)[feSpace].getMap();
for (DofMap::iterator it = d.begin(); it != d.end(); ++it)
if (it->second.local != -1)
dofvec[it->first] = vecPointer[it->second.local];
VecRestoreArray(tmp, &vecPointer);
}
@@ -214,8 +217,8 @@ namespace AMDiS {
typedef traits::range_generator<row, Matrix>::type cursor_type;
typedef traits::range_generator<nz, cursor_type>::type icursor_type;
int dispRowIndex = meshDistributor->getNumberRankDofs(feSpace) * dispRowBlock;
int dispColIndex = meshDistributor->getNumberRankDofs(feSpace) * dispColBlock;
int dispRowIndex = (*dofMap)[feSpace].nRankDofs * dispRowBlock;
int dispColIndex = (*dofMap)[feSpace].nRankDofs * dispColBlock;
vector<int> cols;
vector<double> values;
@@ -229,8 +232,7 @@ namespace AMDiS {
cend = end<row>(mat->getBaseMatrix()); cursor != cend; ++cursor) {
// Global index of the current row DOF.
int rowIndex =
meshDistributor->mapDofToGlobal(feSpace, *cursor) + dispRowIndex;
int rowIndex = (*dofMap)[feSpace][*cursor].global + dispRowIndex;
cols.clear();
values.clear();
@@ -238,8 +240,7 @@ namespace AMDiS {
for (icursor_type icursor = begin<nz>(cursor), icend = end<nz>(cursor);
icursor != icend; ++icursor) {
// Global index of the current column DOF.
int colIndex =
meshDistributor->mapDofToGlobal(feSpace, col(*icursor)) + dispColIndex;
int colIndex = (*dofMap)[feSpace][col(*icursor)].global + dispColIndex;
// Ignore all zero entries, except if it is a diagonal entry.
if (value(*icursor) == 0.0 && rowIndex != colIndex)
@@ -266,7 +267,7 @@ namespace AMDiS {
// Traverse all used DOFs in the dof vector.
DOFVector<double>::Iterator dofIt(vec, USED_DOFS);
for (dofIt.reset(); !dofIt.end(); ++dofIt) {
int index = meshDistributor->mapDofToGlobal(feSpace, dofIt.getDOFIndex());
int index = (*dofMap)[feSpace][dofIt.getDOFIndex()].global;
double value = *dofIt;
VecSetValues(petscVec, 1, &index, &value, ADD_VALUES);
......
@@ -24,11 +24,13 @@ namespace AMDiS {
TEST_EXIT_DBG(meshDistributor)("No mesh distributor object defined!\n");
TEST_EXIT_DBG(dofMap)("No parallel mapping object defined!\n");
TEST_EXIT_DBG(mat)("No DOF matrix defined!\n");
double wtime = MPI::Wtime();
vector<const FiniteElemSpace*> feSpaces = getFeSpaces(mat);
int nRankRows = meshDistributor->getNumberRankDofs(feSpaces);
int nOverallRows = meshDistributor->getNumberOverallDofs(feSpaces);
dofMap->update(feSpaces);
int nRankRows = dofMap->getRankDofs();
int nOverallRows = dofMap->getOverallDofs();
// === Create PETSc vector (solution and a temporary vector). ===
@@ -86,9 +88,8 @@ namespace AMDiS {
#if (DEBUG != 0)
int a, b;
MatGetOwnershipRange(petscMatrix, &a, &b);
TEST_EXIT(a == meshDistributor->getStartDofs(feSpaces))
("Wrong matrix ownership range!\n");
TEST_EXIT(b == meshDistributor->getStartDofs(feSpaces) + nRankRows)
TEST_EXIT(a == dofMap->getStartDofs())("Wrong matrix ownership range!\n");
TEST_EXIT(b == dofMap->getStartDofs() + nRankRows)
("Wrong matrix ownership range!\n");
#endif
@@ -131,11 +132,11 @@ namespace AMDiS {
FUNCNAME("PetscSolverGlobalMatrix::fillPetscRhs()");
TEST_EXIT_DBG(vec)("No DOF vector defined!\n");
TEST_EXIT_DBG(meshDistributor)("No mesh distributor defined!\n");
TEST_EXIT_DBG(dofMap)("No parallel DOF map defined!\n");
vector<const FiniteElemSpace*> feSpaces = getFeSpaces(vec);
int nRankRows = meshDistributor->getNumberRankDofs(feSpaces);
int nOverallRows = meshDistributor->getNumberOverallDofs(feSpaces);
int nRankRows = dofMap->getRankDofs();
int nOverallRows = dofMap->getOverallDofs();
VecCreateMPI(mpiComm, nRankRows, nOverallRows, &petscRhsVec);
@@ -194,11 +195,10 @@ namespace AMDiS {
int c = 0;
for (int i = 0; i < nComponents; i++) {
DOFVector<double> &dv = *(vec.getDOFVector(i));
const FiniteElemSpace *feSpace = dv.getFeSpace();
int nRankDofs = meshDistributor->getNumberRankDofs(feSpace);
for (int j = 0; j < nRankDofs; j++)
dv[meshDistributor->mapLocalToDof(feSpace, j)] = vecPointer[c++];
DofMap& d = (*dofMap)[dv.getFeSpace()].getMap();
for (DofMap::iterator it = d.begin(); it != d.end(); ++it)
if (it->second.local != -1)
dv[it->first] = vecPointer[c++];
}
VecRestoreArray(petscSolVec, &vecPointer);
@@ -270,8 +270,8 @@ namespace AMDiS {
const FiniteElemSpace *colFe = mat->getColFeSpace();
// Global index of the current row DOF.
int globalRowDof =
meshDistributor->mapDofToGlobal(rowFe, *cursor);
int globalRowDof = (*dofMap)[rowFe][*cursor].global;
// Test if the current row DOF is a periodic DOF.
bool periodicRow = perMap.isPeriodic(rowFe, globalRowDof);
@@ -288,8 +288,7 @@ namespace AMDiS {
icursor != icend; ++icursor) {
// Global index of the current column DOF.
int globalColDof =
meshDistributor->mapDofToGlobal(colFe, col(*icursor));
int globalColDof = (*dofMap)[colFe][col(*icursor)].global;
// Test if the current column DOF is a periodic DOF.
bool periodicCol = perMap.isPeriodic(colFe, globalColDof);
// Get PETSc's mat col index.
@@ -364,14 +363,12 @@ namespace AMDiS {
for (icursor_type icursor = begin<nz>(cursor), icend = end<nz>(cursor);
icursor != icend; ++icursor) {
// Global index of the current column DOF.
int globalColDof =
meshDistributor->mapDofToGlobal(colFe, col(*icursor));
int globalColDof = (*dofMap)[colFe][col(*icursor)].global;
// Ignore all zero entries, except if it is a diagonal entry.
if (value(*icursor) == 0.0 && globalRowDof != globalColDof)
continue;
// === Add all periodic associations of both the row and the column ===
// === indices to the set perAsc. ===
@@ -470,12 +467,12 @@ namespace AMDiS {
// Traverse all used DOFs in the dof vector.
DOFVector<double>::Iterator dofIt(vec, USED_DOFS);
for (dofIt.reset(); !dofIt.end(); ++dofIt) {
if (rankOnly && !meshDistributor->getIsRankDof(feSpace, dofIt.getDOFIndex()))
if (rankOnly && !(*dofMap)[feSpace].isRankDof(dofIt.getDOFIndex()))
continue;
// Calculate global row index of the DOF.
DegreeOfFreedom globalRowDof =
meshDistributor->mapDofToGlobal(feSpace, dofIt.getDOFIndex());
(*dofMap)[feSpace][dofIt.getDOFIndex()].global;
// Get PETSc's mat index of the row DOF.
int index = dofToMatIndex.get(nRowVec, globalRowDof);
@@ -507,8 +504,9 @@ namespace AMDiS {
TEST_EXIT_DBG(!o_nnz)("There is something wrong!\n");
vector<const FiniteElemSpace*> feSpaces = getFeSpaces(mat);
int nRankRows = meshDistributor->getNumberRankDofs(feSpaces);
int rankStartIndex = meshDistributor->getStartDofs(feSpaces);
int nRankRows = dofMap->getRankDofs();
int rankStartIndex = dofMap->getStartDofs();
d_nnz = new int[nRankRows];
o_nnz = new int[nRankRows];
for (int i = 0; i < nRankRows; i++) {
@@ -576,15 +574,12 @@ namespace AMDiS {
for (cursor_type cursor = begin<row>(bmat),
cend = end<row>(bmat); cursor != cend; ++cursor) {
int globalRowDof =
meshDistributor->mapDofToGlobal(feSpaces[i], *cursor);
int globalRowDof = (*dofMap)[feSpaces[i]][*cursor].global;
// The corresponding global matrix row index of the current row DOF.
int petscRowIdx = dofToMatIndex.get(i, globalRowDof);
if (meshDistributor->getIsRankDof(feSpaces[i], *cursor)) {
if ((*dofMap)[feSpaces[i]].isRankDof(*cursor)) {
// === The current row DOF is a rank DOF, so create the ===
// === corresponding nnz values directly on rank's nnz data. ===
@@ -593,8 +588,8 @@ namespace AMDiS {
TEST_EXIT_DBG(localPetscRowIdx >= 0 && localPetscRowIdx < nRankRows)
("Should not happen! \n Debug info: localRowIdx = %d globalRowIndx = %d petscRowIdx = %d localPetscRowIdx = %d rStart = %d nCompontens = %d nRankRows = %d\n",
*cursor,
meshDistributor->mapDofToGlobal(feSpaces[i], *cursor),
*cursor,
(*dofMap)[feSpaces[i]][*cursor].global,
petscRowIdx,
localPetscRowIdx,
rankStartIndex,
@@ -605,8 +600,7 @@ namespace AMDiS {
// Traverse all non zero entries in this row.
for (icursor_type icursor = begin<nz>(cursor),
icend = end<nz>(cursor); icursor != icend; ++icursor) {
int globalColDof =
meshDistributor->mapDofToGlobal(feSpaces[j], col(*icursor));
int globalColDof = (*dofMap)[feSpaces[j]][col(*icursor)].global;
int petscColIdx = dofToMatIndex.get(j, globalColDof);
if (value(*icursor) != 0.0 || petscRowIdx == petscColIdx) {
@@ -633,8 +627,8 @@ namespace AMDiS {
for (icursor_type icursor = begin<nz>(cursor),
icend = end<nz>(cursor); icursor != icend; ++icursor) {
if (value(*icursor) != 0.0) {
int globalColDof =
meshDistributor->mapDofToGlobal(feSpaces[j], col(*icursor));
int globalColDof =
(*dofMap)[feSpaces[j]][col(*icursor)].global;
int petscColIdx = dofToMatIndex.get(j, globalColDof);
sendMatrixEntry[sendToRank].
@@ -649,7 +643,7 @@ namespace AMDiS {
// === Send and recv the nnz row structure to/from other ranks. ===
StdMpi<MatrixNnzEntry> stdMpi(meshDistributor->getMpiComm(), true);
StdMpi<MatrixNnzEntry> stdMpi(mpiComm, true);
stdMpi.send(sendMatrixEntry);
for (std::set<int>::iterator it = recvFromRank.begin();
it != recvFromRank.end(); ++it)
@@ -697,8 +691,8 @@ namespace AMDiS {
{
FUNCNAME("PetscSolverGlobalMatrix::createGlobalDofMapping()");
int offset = meshDistributor->getStartDofs(feSpaces);
Mesh *mesh = meshDistributor->getMesh();
int offset = dofMap->getStartDofs();
Mesh *mesh = feSpaces[0]->getMesh();
dofToMatIndex.clear();
@@ -709,28 +703,25 @@ namespace AMDiS {
mesh->getAllDofs(feSpaces[i], rankDofSet);
for (std::set<const DegreeOfFreedom*>::iterator it = rankDofSet.begin();
it != rankDofSet.end(); ++it)
if (meshDistributor->getIsRankDof(feSpaces[i], **it)) {
int globalIndex =
meshDistributor->mapDofToGlobal(feSpaces[i], **it);
if ((*dofMap)[feSpaces[i]].isRankDof(**it)) {
int globalIndex = (*dofMap)[feSpaces[i]][**it].global;
int globalMatIndex =
globalIndex - meshDistributor->getStartDofs(feSpaces[i]) + offset;
int globalMatIndex =
globalIndex - (*dofMap)[feSpaces[i]].rStartDofs + offset;
dofToMatIndex.add(i, globalIndex, globalMatIndex);
}
// === Communicate interior boundary DOFs between domains. ===
StdMpi<vector<int> > stdMpi(meshDistributor->getMpiComm());
StdMpi<vector<int> > stdMpi(mpiComm);
for (DofComm::Iterator it(meshDistributor->getSendDofs(), feSpaces[i]);
!it.end(); it.nextRank()) {
vector<DegreeOfFreedom> sendGlobalDofs;
for (; !it.endDofIter(); it.nextDof()) {
int globalIndex =
meshDistributor->mapDofToGlobal(feSpaces[i], it.getDofIndex());
int globalIndex = (*dofMap)[feSpaces[i]][it.getDofIndex()].global;
int globalMatIndex = dofToMatIndex.get(i, globalIndex);
sendGlobalDofs.push_back(globalMatIndex);
}
@@ -747,8 +738,7 @@ namespace AMDiS {
for (DofComm::Iterator it(meshDistributor->getRecvDofs(), feSpaces[i]);
!it.end(); it.nextRank())
for (; !it.endDofIter(); it.nextDof()) {
int globalIndex =
meshDistributor->mapDofToGlobal(feSpaces[i], it.getDofIndex());
int globalIndex = (*dofMap)[feSpaces[i]][it.getDofIndex()].global;
int globalMatIndex =
stdMpi.getRecvData(it.getRank())[it.getDofCounter()];
@@ -765,8 +755,7 @@ namespace AMDiS {
vector<DegreeOfFreedom> sendGlobalDofs;
for (; !it.endDofIter(); it.nextDof()) {
int ind0 =
meshDistributor->mapDofToGlobal(feSpaces[i], it.getDofIndex());
int ind0 = (*dofMap)[feSpaces[i]][it.getDofIndex()].global;
int ind1 = dofToMatIndex.get(i, ind0);
sendGlobalDofs.push_back(ind0);
@@ -789,8 +778,7 @@ namespace AMDiS {
}
// === Update offset. ===
offset += meshDistributor->getNumberRankDofs(feSpaces[i]);
offset += (*dofMap)[feSpaces[i]].nRankDofs;
}
}
}
@@ -23,10 +23,9 @@ namespace AMDiS {
FUNCNAME("PetscSolverSchur::updateDofData()");
TEST_EXIT_DBG(meshDistributor)("No mesh distributor object defined!\n");
TEST_EXIT_DBG(dofMap)("No parallel DOF map defined!\n");
MPI::Intracomm& mpiComm = meshDistributor->getMpiComm();
const FiniteElemSpace *feSpace = meshDistributor->getFeSpace(0);
typedef map<int, DofContainer> RankToDofContainer;
typedef map<DegreeOfFreedom, bool> DofIndexToBool;
@@ -36,7 +35,7 @@ namespace AMDiS {
!it.end(); it.nextRank())
for (; !it.endDofIter(); it.nextDof()) {
boundaryLocalDofs.insert(it.getDofIndex());
boundaryDofs.insert(meshDistributor->mapDofToGlobal(feSpace, it.getDofIndex()));
boundaryDofs.insert((*dofMap)[feSpace][it.getDofIndex()].global);
}
@@ -73,14 +72,14 @@ namespace AMDiS {
int counter = rStartEdgeDofs;
for (DofContainerSet::iterator it = edgeDofs.begin();
it != edgeDofs.end(); ++it)
mapGlobalBoundaryDof[meshDistributor->mapDofToGlobal(feSpace, **it)] =
mapGlobalBoundaryDof[(*dofMap)[feSpace][**it].global] =
counter++;
}
{
int counter = nOverallEdgeDofs + rStartVertexDofs;
for (DofContainerSet::iterator it = vertexDofs.begin();
it != vertexDofs.end(); ++it)
mapGlobalBoundaryDof[meshDistributor->mapDofToGlobal(feSpace, **it)] =
mapGlobalBoundaryDof[(*dofMap)[feSpace][**it].global] =
counter++;
}
#else
@@ -136,8 +135,7 @@ namespace AMDiS {
stdMpi.getSendData(it.getRank()).reserve(it.getDofs().size());
for (; !it.endDofIter(); it.nextDof()) {
int globalSendDof =
meshDistributor->mapDofToGlobal(feSpace, it.getDofIndex());
int globalSendDof = (*dofMap)[feSpace][it.getDofIndex()].global;
TEST_EXIT_DBG(mapGlobalBoundaryDof.count(globalSendDof))
("No mapping for boundary DOF %d!\n", globalSendDof);
@@ -157,8 +155,7 @@ namespace AMDiS {
for (DofComm::Iterator it(meshDistributor->getRecvDofs(), feSpace);
!it.end(); it.nextRank())
for (; !it.endDofIter(); it.nextDof()) {
int globalRecvDof =
meshDistributor->mapDofToGlobal(feSpace, it.getDofIndex());
int globalRecvDof = (*dofMap)[feSpace][it.getDofIndex()].global;
mapGlobalBoundaryDof[globalRecvDof] =
stdMpi.getRecvData(it.getRank())[it.getDofCounter()];
boundaryDofs.insert(globalRecvDof);
@@ -247,8 +244,8 @@ namespace AMDiS {
MatAssemblyEnd(petscMatrix, MAT_FINAL_ASSEMBLY);
int nRankRows = meshDistributor->getNumberRankDofs(feSpace) * nComponents;
int nOverallRows = meshDistributor->getNumberOverallDofs(feSpace) * nComponents;
int nRankRows = (*dofMap)[feSpace].nRankDofs * nComponents;
int nOverallRows = (*dofMap)[feSpace].nOverallDofs * nComponents;
VecCreateMPI(mpiComm, nRankRows, nOverallRows, &petscSolVec);
VecCreateMPI(mpiComm, nRankRows, nOverallRows, &petscTmpVec);
@@ -261,8 +258,8 @@ namespace AMDiS {
const FiniteElemSpace *feSpace = meshDistributor->getFeSpace(0);
int nComponents = vec->getSize();
int nRankRows = meshDistributor->getNumberRankDofs(feSpace) * nComponents;
int nOverallRows = meshDistributor->getNumberOverallDofs(feSpace) * nComponents;
int nRankRows = (*dofMap)[feSpace].nRankDofs * nComponents;
int nOverallRows = (*dofMap)[feSpace].nOverallDofs * nComponents;
VecCreateMPI(mpiComm, nRankRows, nOverallRows, &petscRhsVec);
@@ -305,8 +302,7 @@ namespace AMDiS {
for (int i = 0; i < nComponents; i++) {
DOFVector<double>::Iterator dofIt(vec.getDOFVector(i), USED_DOFS);
for (dofIt.reset(); !dofIt.end(); ++dofIt) {
DegreeOfFreedom globalRowDof =
meshDistributor->mapDofToGlobal(feSpace, dofIt.getDOFIndex());
DegreeOfFreedom globalRowDof = (*dofMap)[feSpace][dofIt.getDOFIndex()].global;
if (boundaryDofs.count(globalRowDof)) {
int index =
(mapGlobalBoundaryDof[globalRowDof] - rStartBoundaryDofs + nInteriorDofs) * (i + 1);
@@ -377,7 +373,7 @@ namespace AMDiS {
cend = end<row>(mat->getBaseMatrix()); cursor != cend; ++cursor) {
// Global index of the current row DOF.
int globalRowDof = meshDistributor->mapDofToGlobal(feSpace, *cursor);
int globalRowDof = (*dofMap)[feSpace][*cursor].global;
colsBoundary.clear();
colsInterior.clear();
@@ -386,7 +382,7 @@ namespace AMDiS {
for (icursor_type icursor = begin<nz>(cursor), icend = end<nz>(cursor);
icursor != icend; ++icursor) {
int globalColDof = meshDistributor->mapDofToGlobal(feSpace, col(*icursor));
int globalColDof = (*dofMap)[feSpace][col(*icursor)].global;
if (boundaryDofs.count(globalColDof)) {
TEST_EXIT_DBG(mapGlobalBoundaryDof.count(globalColDof))
@@ -445,13 +441,12 @@ namespace AMDiS {
DOFVector<double>::Iterator dofIt(vec, USED_DOFS);
for (dofIt.reset(); !dofIt.end(); ++dofIt) {
if (rankOnly && !meshDistributor->getIsRankDof(feSpace, dofIt.getDOFIndex()))
if (rankOnly && !(*dofMap)[feSpace].isRankDof(dofIt.getDOFIndex()))
continue;
// Calculate global row index of the DOF.
DegreeOfFreedom globalRowDof =
meshDistributor->mapDofToGlobal(feSpace, dofIt.getDOFIndex());
(*dofMap)[feSpace][dofIt.getDOFIndex()].global;
double value = *dofIt;
if (boundaryDofs.count(globalRowDof)) {
......
@@ -21,6 +21,7 @@ BOOST_AUTO_TEST_CASE(amdis_mpi_feti)
MeshDistributor *meshDist = MeshDistributor::globalMeshDistributor;
meshDist->initParallelization();
ParallelDofMapping &dofMap = meshDist->getDofMap();
Mesh* mesh = ellipt.getMesh();
BOOST_REQUIRE(mesh->getNumberOfLeaves() == 8);
@@ -47,9 +48,9 @@ BOOST_AUTO_TEST_CASE(amdis_mpi_feti)
vector<double> testData;
testData.push_back(feti.getNumberOfRankPrimals());
testData.push_back(feti.getNumberOfRankDuals());