Commit ac80508b authored by Thomas Witkowski

Code revision of parallel DOF mapping finished.

parent 5a7ff74f
@@ -252,7 +252,8 @@ namespace AMDiS {
     rStartDofs = computeStartDofs();

     // And finally, compute the matrix indices.
-    computeMatIndex();
+    if (needMatIndex)
+      computeMatIndex(needMatIndexFromGlobal);
   }
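Note: update() now computes matrix indices only on demand. A minimal sketch of the intended call sequence; the driver code is hypothetical, the API is the one introduced in this commit:

```cpp
ParallelDofMapping dofMap;               // assume init(...) was called before
// Opt in before update(); the second argument selects global DOF keying.
dofMap.setComputeMatIndex(true, true);
dofMap.update(feSpaces);                 // ends with the guarded call:
                                         //   if (needMatIndex)
                                         //     computeMatIndex(needMatIndexFromGlobal);
```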
@@ -271,10 +272,10 @@ namespace AMDiS {
   }

-  void ParallelDofMapping::computeMatIndex()
+  void ParallelDofMapping::computeMatIndex(bool globalIndex)
   {
     FUNCNAME("ParallelDofMapping::computeMatIndex()");

     dofToMatIndex.clear();

     // The offset is always added to the local matrix index. The offset for the
@@ -293,7 +294,10 @@ namespace AMDiS {
       for (DofMap::iterator it = dofMap.begin(); it != dofMap.end(); ++it) {
         if (data[feSpaces[i]].isRankDof(it->first)) {
           int globalMatIndex = it->second.local + offset;
-          dofToMatIndex.add(i, it->first, globalMatIndex);
+          if (globalIndex)
+            dofToMatIndex.add(i, it->second.global, globalMatIndex);
+          else
+            dofToMatIndex.add(i, it->first, globalMatIndex);
         }
       }
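Note: the new branch only changes which key the map is filled with. A small illustration of the DofToMatIndex interface as used in this hunk (component and index values are made up):

```cpp
DofToMatIndex dofToMatIndex;
// Key by local DOF index (globalIndex == false) ...
dofToMatIndex.add(/*component*/ 0, /*localDof*/ 7, /*matIndex*/ 107);
// ... or by global DOF index (globalIndex == true). Queries must then use
// the same kind of index the map was filled with:
dofToMatIndex.add(0, /*globalDof*/ 4207, 107);
int matIndex = dofToMatIndex.get(0, 4207);   // -> 107
```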
@@ -318,7 +322,10 @@ namespace AMDiS {
         for (; !it.endDofIter(); it.nextDof())
           if (dofMap.count(it.getDofIndex()))
-            sendGlobalDofs.push_back(dofToMatIndex.get(i, it.getDofIndex()));
+            if (globalIndex)
+              sendGlobalDofs.push_back(dofToMatIndex.get(i, dofMap[it.getDofIndex()].global));
+            else
+              sendGlobalDofs.push_back(dofToMatIndex.get(i, it.getDofIndex()));

         stdMpi.send(it.getRank(), sendGlobalDofs);
       }
@@ -336,7 +343,10 @@ namespace AMDiS {
         for (; !it.endDofIter(); it.nextDof()) {
           if (dofMap.count(it.getDofIndex())) {
             DegreeOfFreedom d = stdMpi.getRecvData(it.getRank())[counter++];
-            dofToMatIndex.add(i, it.getDofIndex(), d);
+            if (globalIndex)
+              dofToMatIndex.add(i, dofMap[it.getDofIndex()].global, d);
+            else
+              dofToMatIndex.add(i, it.getDofIndex(), d);
           }
         }
       }
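Note: the surrounding send/receive loops follow the StdMpi pattern used throughout this file. Reduced to its core (rank, buffer, and the reference return type of getRecvData are placeholders/assumptions):

```cpp
StdMpi<vector<int> > stdMpi(mpiComm);
stdMpi.send(rank, sendGlobalDofs);   // owners send matrix indices of boundary DOFs
stdMpi.recv(rank);                   // non-owners post the matching receives
stdMpi.startCommunication();         // exchange everything in one sweep
// Received indices arrive in the same DOF order the sender used:
vector<int>& recvData = stdMpi.getRecvData(rank);
```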
...
@@ -260,6 +260,8 @@ namespace AMDiS {
       sendDofs(NULL),
       recvDofs(NULL),
       hasNonLocalDofs(false),
+      needMatIndex(false),
+      needMatIndexFromGlobal(false),
       feSpaces(0),
       nRankDofs(-1),
       nLocalDofs(-1),
@@ -374,6 +376,15 @@ namespace AMDiS {
       ERROR_EXIT("MUST BE IMPLEMENTED!\n");
     }

+    void setComputeMatIndex(bool b, bool global = false)
+    {
+      needMatIndex = b;
+      needMatIndexFromGlobal = global;
+    }
+
+    /// Compute local and global matrix indices.
+    void computeMatIndex(bool globalIndex);
+
   protected:
     /// Insert a new FE space DOF mapping for a given FE space.
     void addFeSpace(const FiniteElemSpace* feSpace);
@@ -390,9 +401,6 @@ namespace AMDiS {
     /// Compute \ref rStartDofs.
     int computeStartDofs();

-    /// Compute local and global matrix indices.
-    void computeMatIndex();
-
   private:
     /// MPI communicator object;
     MPI::Intracomm mpiComm;
@@ -405,6 +413,16 @@ namespace AMDiS {
     /// are also owned by this rank.
     bool hasNonLocalDofs;

+    /// If true, matrix indices for the stored DOFs are computed, see
+    /// \ref computeMatIndex.
+    bool needMatIndex;
+
+    /// If matrix indices are to be computed, this variable defines whether
+    /// the mapping from DOF indices to matrix row indices is keyed by local
+    /// or by global DOF indices. If true, the mapping is specified and used
+    /// on global DOF indices, otherwise on local ones.
+    bool needMatIndexFromGlobal;
+
     /// Maps from FE space pointers to DOF mappings.
     map<const FiniteElemSpace*, FeSpaceDofMap> data;
...
@@ -244,15 +244,25 @@ namespace AMDiS {
     primalDofMap.init(mpiComm, feSpaces, meshDistributor->getFeSpaces(),
                       true, true);
+    primalDofMap.setComputeMatIndex(true);
+
     dualDofMap.init(mpiComm, feSpaces, meshDistributor->getFeSpaces(),
                     false, false);
+    dualDofMap.setComputeMatIndex(true);
+
     lagrangeMap.init(mpiComm, feSpaces, meshDistributor->getFeSpaces(),
                      true, true);
+    lagrangeMap.setComputeMatIndex(true);
+
     localDofMap.init(mpiComm, feSpaces, meshDistributor->getFeSpaces(),
                      false, false);
-    if (fetiPreconditioner == FETI_DIRICHLET)
+    localDofMap.setComputeMatIndex(true);
+
+    if (fetiPreconditioner == FETI_DIRICHLET) {
       interiorDofMap.init(mpiComm, feSpaces, meshDistributor->getFeSpaces(),
                           false, false);
+      interiorDofMap.setComputeMatIndex(true);
+    }
   }
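Note: all FETI sub-mappings opt in with the one-argument form, i.e. global = false, so their matrix indices stay keyed by local DOF indices; the global-matrix solver in the next file requests global keying instead. Side by side:

```cpp
// Local keying (FETI sub-mappings, this file):
primalDofMap.setComputeMatIndex(true);
// Global keying (PetscSolverGlobalMatrix, next file):
dofMap->setComputeMatIndex(true, true);
```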
...
@@ -26,37 +26,20 @@ namespace AMDiS {
     TEST_EXIT_DBG(mat)("No DOF matrix defined!\n");

     double wtime = MPI::Wtime();

-    vector<const FiniteElemSpace*> feSpaces = getFeSpaces(mat);
-    dofMap->update(feSpaces);
-
-    int nRankRows = dofMap->getRankDofs();
-    int nOverallRows = dofMap->getOverallDofs();
-
-    // === Create PETSc vector (solution and a temporary vector). ===
-
-    VecCreateMPI(mpiComm, nRankRows, nOverallRows, &petscSolVec);
-    VecCreateMPI(mpiComm, nRankRows, nOverallRows, &petscTmpVec);
-
-    int testddd = 1;
-    Parameters::get("block size", testddd);
-    if (testddd > 1) {
-      VecSetBlockSize(petscSolVec, testddd);
-      VecSetBlockSize(petscTmpVec, testddd);
-    }
-
-    // === Check if mesh was changed and, in this case, recompute matrix ===
-    // === nnz structure and matrix indices.                             ===
-
     int recvAllValues = 0;
     int sendValue =
       static_cast<int>(meshDistributor->getLastMeshChangeIndex() != lastMeshNnz);
     mpiComm.Allreduce(&sendValue, &recvAllValues, 1, MPI_INT, MPI_SUM);

-    recvAllValues = 1;
-
     if (!d_nnz || recvAllValues != 0 || alwaysCreateNnzStructure) {
+      vector<const FiniteElemSpace*> feSpaces = getFeSpaces(mat);
-      // Global DOF mapping must only be recomputed, if the NNZ structure has
-      // changed (thus, also the mesh has changed).
-      createGlobalDofMapping(feSpaces);
+      dofMap->setComputeMatIndex(true, true);
+      dofMap->update(feSpaces);

       if (d_nnz) {
         delete [] d_nnz;
@@ -70,6 +53,22 @@ namespace AMDiS {
     }

+    // === Create PETSc vector (solution and a temporary vector). ===
+
+    int nRankRows = dofMap->getRankDofs();
+    int nOverallRows = dofMap->getOverallDofs();
+
+    VecCreateMPI(mpiComm, nRankRows, nOverallRows, &petscSolVec);
+    VecCreateMPI(mpiComm, nRankRows, nOverallRows, &petscTmpVec);
+
+    int testddd = 1;
+    Parameters::get("block size", testddd);
+    if (testddd > 1) {
+      VecSetBlockSize(petscSolVec, testddd);
+      VecSetBlockSize(petscTmpVec, testddd);
+    }
+
     // === Create PETSc matrix with the computed nnz data structure. ===

     MatCreateMPIAIJ(mpiComm, nRankRows, nRankRows,
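Note: the PETSc vector creation was moved behind the (possibly index-recomputing) dofMap->update() call, so the row counts are taken from the refreshed mapping. The resulting order of operations, condensed from the two hunks above:

```cpp
// 1. Refresh the DOF mapping (with global matrix index keying) whenever
//    the nnz structure has to be rebuilt:
dofMap->setComputeMatIndex(true, true);
dofMap->update(feSpaces);

// 2. Only then size the PETSc objects from the mapping:
int nRankRows = dofMap->getRankDofs();
int nOverallRows = dofMap->getOverallDofs();
VecCreateMPI(mpiComm, nRankRows, nOverallRows, &petscSolVec);
VecCreateMPI(mpiComm, nRankRows, nOverallRows, &petscTmpVec);
```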
@@ -134,7 +133,6 @@ namespace AMDiS {
     TEST_EXIT_DBG(vec)("No DOF vector defined!\n");
     TEST_EXIT_DBG(dofMap)("No parallel DOF map defined!\n");

-    vector<const FiniteElemSpace*> feSpaces = getFeSpaces(vec);
     int nRankRows = dofMap->getRankDofs();
     int nOverallRows = dofMap->getOverallDofs();
@@ -279,7 +277,7 @@ namespace AMDiS {
       // === Row DOF index is not periodic. ===

       // Get PETSc's mat row index.
-      int rowIndex = dofToMatIndex.get(nRowMat, globalRowDof);
+      int rowIndex = dofMap->getMatIndex(nRowMat, globalRowDof);

       cols.clear();
       values.clear();
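Note: getMatIndex() itself is not part of this diff; judging from the call sites it is presumably a thin forwarder to the mapping's DofToMatIndex member. An assumption, sketched:

```cpp
// Hypothetical forwarder on ParallelDofMapping; not shown in this commit.
int ParallelDofMapping::getMatIndex(int ithComponent, DegreeOfFreedom d)
{
  return dofToMatIndex.get(ithComponent, d);
}
```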
@@ -292,7 +290,7 @@ namespace AMDiS {
        // Test if the current col dof is a periodic dof.
        bool periodicCol = perMap.isPeriodic(colFe, globalColDof);

        // Get PETSc's mat col index.
-       int colIndex = dofToMatIndex.get(nColMat, globalColDof);
+       int colIndex = dofMap->getMatIndex(nColMat, globalColDof);

        // Ignore all zero entries, except if it is a diagonal entry.
        if (value(*icursor) == 0.0 && rowIndex != colIndex)
@@ -341,7 +339,7 @@ namespace AMDiS {
          }

          for (unsigned int i = 0; i < newCols.size(); i++) {
-           cols.push_back(dofToMatIndex.get(nColMat, newCols[i]));
+           cols.push_back(dofMap->getMatIndex(nColMat, newCols[i]));
            values.push_back(scaledValue);
          }
        }
@@ -428,8 +426,8 @@ namespace AMDiS {
          // === Translate the matrix entries to PETSc's matrix.

          for (unsigned int i = 0; i < entry.size(); i++) {
-           int rowIdx = dofToMatIndex.get(nRowMat, entry[i].first);
-           int colIdx = dofToMatIndex.get(nColMat, entry[i].second);
+           int rowIdx = dofMap->getMatIndex(nRowMat, entry[i].first);
+           int colIdx = dofMap->getMatIndex(nColMat, entry[i].second);

            colsMap[rowIdx].push_back(colIdx);
            valsMap[rowIdx].push_back(scaledValue);
@@ -474,7 +472,7 @@ namespace AMDiS {
       DegreeOfFreedom globalRowDof =
        (*dofMap)[feSpace][dofIt.getDOFIndex()].global;
       // Get PETSc's mat index of the row DOF.
-      int index = dofToMatIndex.get(nRowVec, globalRowDof);
+      int index = dofMap->getMatIndex(nRowVec, globalRowDof);

      if (perMap.isPeriodic(feSpace, globalRowDof)) {
        std::set<int>& perAsc = perMap.getAssociations(feSpace, globalRowDof);
@@ -484,7 +482,7 @@ namespace AMDiS {
        for (std::set<int>::iterator perIt = perAsc.begin();
             perIt != perAsc.end(); ++perIt) {
          int mappedDof = perMap.map(feSpace, *perIt, globalRowDof);
-         int mappedIndex = dofToMatIndex.get(nRowVec, mappedDof);
+         int mappedIndex = dofMap->getMatIndex(nRowVec, mappedDof);
          VecSetValues(petscVec, 1, &mappedIndex, &value, ADD_VALUES);
        }
      } else {
@@ -577,7 +575,7 @@ namespace AMDiS {
        int globalRowDof = (*dofMap)[feSpaces[i]][*cursor].global;

        // The corresponding global matrix row index of the current row DOF.
-       int petscRowIdx = dofToMatIndex.get(i, globalRowDof);
+       int petscRowIdx = dofMap->getMatIndex(i, globalRowDof);

        if ((*dofMap)[feSpaces[i]].isRankDof(*cursor)) {
          // === The current row DOF is a rank DOF, so create the ===
@@ -601,7 +599,7 @@ namespace AMDiS {
          for (icursor_type icursor = begin<nz>(cursor),
                 icend = end<nz>(cursor); icursor != icend; ++icursor) {
            int globalColDof = (*dofMap)[feSpaces[j]][col(*icursor)].global;
-           int petscColIdx = dofToMatIndex.get(j, globalColDof);
+           int petscColIdx = dofMap->getMatIndex(j, globalColDof);

            if (value(*icursor) != 0.0 || petscRowIdx == petscColIdx) {
              // The row DOF is a rank DOF, if also the column is a rank DOF,
@@ -629,7 +627,7 @@ namespace AMDiS {
              if (value(*icursor) != 0.0) {
                int globalColDof =
                  (*dofMap)[feSpaces[j]][col(*icursor)].global;
-               int petscColIdx = dofToMatIndex.get(j, globalColDof);
+               int petscColIdx = dofMap->getMatIndex(j, globalColDof);

                sendMatrixEntry[sendToRank].
                  push_back(make_pair(petscRowIdx, petscColIdx));
@@ -686,99 +684,4 @@ namespace AMDiS {
       d_nnz[i] = std::min(d_nnz[i], nRankRows);
   }

-  void PetscSolverGlobalMatrix::createGlobalDofMapping(vector<const FiniteElemSpace*> &feSpaces)
-  {
-    FUNCNAME("PetscSolverGlobalMatrix::createGlobalDofMapping()");
-
-    int offset = dofMap->getStartDofs();
-    Mesh *mesh = feSpaces[0]->getMesh();
-
-    dofToMatIndex.clear();
-
-    for (unsigned int i = 0; i < feSpaces.size(); i++) {
-      // === Create indices for all DOFs in rank' domain. ===
-
-      std::set<const DegreeOfFreedom*> rankDofSet;
-      mesh->getAllDofs(feSpaces[i], rankDofSet);
-      for (std::set<const DegreeOfFreedom*>::iterator it = rankDofSet.begin();
-           it != rankDofSet.end(); ++it)
-        if ((*dofMap)[feSpaces[i]].isRankDof(**it)) {
-          int globalIndex = (*dofMap)[feSpaces[i]][**it].global;
-          int globalMatIndex =
-            globalIndex - (*dofMap)[feSpaces[i]].rStartDofs + offset;
-          dofToMatIndex.add(i, globalIndex, globalMatIndex);
-        }
-
-      // === Communicate interior boundary DOFs between domains. ===
-
-      StdMpi<vector<int> > stdMpi(mpiComm);
-
-      for (DofComm::Iterator it(meshDistributor->getSendDofs(), feSpaces[i]);
-           !it.end(); it.nextRank()) {
-        vector<DegreeOfFreedom> sendGlobalDofs;
-
-        for (; !it.endDofIter(); it.nextDof()) {
-          int globalIndex = (*dofMap)[feSpaces[i]][it.getDofIndex()].global;
-          int globalMatIndex = dofToMatIndex.get(i, globalIndex);
-          sendGlobalDofs.push_back(globalMatIndex);
-        }
-
-        stdMpi.send(it.getRank(), sendGlobalDofs);
-      }
-
-      for (DofComm::Iterator it(meshDistributor->getRecvDofs(), feSpaces[i]);
-           !it.end(); it.nextRank())
-        stdMpi.recv(it.getRank());
-
-      stdMpi.startCommunication();
-
-      for (DofComm::Iterator it(meshDistributor->getRecvDofs(), feSpaces[i]);
-           !it.end(); it.nextRank())
-        for (; !it.endDofIter(); it.nextDof()) {
-          int globalIndex = (*dofMap)[feSpaces[i]][it.getDofIndex()].global;
-          int globalMatIndex =
-            stdMpi.getRecvData(it.getRank())[it.getDofCounter()];
-
-          dofToMatIndex.add(i, globalIndex, globalMatIndex);
-        }
-
-      // === Communicate DOFs on periodic boundaries. ===
-
-      stdMpi.clear();
-
-      for (DofComm::Iterator it(meshDistributor->getPeriodicDofs(), feSpaces[i]);
-           !it.end(); it.nextRank()) {
-        vector<DegreeOfFreedom> sendGlobalDofs;
-
-        for (; !it.endDofIter(); it.nextDof()) {
-          int ind0 = (*dofMap)[feSpaces[i]][it.getDofIndex()].global;
-          int ind1 = dofToMatIndex.get(i, ind0);
-          sendGlobalDofs.push_back(ind0);
-          sendGlobalDofs.push_back(ind1);
-        }
-
-        stdMpi.send(it.getRank(), sendGlobalDofs);
-        stdMpi.recv(it.getRank());
-      }
-
-      stdMpi.startCommunication();
-
-      for (DofComm::Iterator it(meshDistributor->getPeriodicDofs(), feSpaces[i]);
-           !it.end(); it.nextRank())
-        for (; !it.endDofIter(); it.nextDof()) {
-          int ind0 = stdMpi.getRecvData(it.getRank())[it.getDofCounter() * 2];
-          int ind1 = stdMpi.getRecvData(it.getRank())[it.getDofCounter() * 2 + 1];
-
-          dofToMatIndex.add(i, ind0, ind1);
-        }
-
-      // === Update offset. ===
-
-      offset += (*dofMap)[feSpaces[i]].nRankDofs;
-    }
-  }
 }
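Note: the removed createGlobalDofMapping() is subsumed by ParallelDofMapping::computeMatIndex(true), which covers the same visible steps (contiguous indices for rank-owned DOFs, StdMpi exchange over interior boundaries). The call-site replacement, roughly:

```cpp
// Before this commit:
createGlobalDofMapping(feSpaces);
// After this commit:
dofMap->setComputeMatIndex(true, true);
dofMap->update(feSpaces);   // triggers computeMatIndex(true) internally
```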
@@ -68,8 +68,6 @@ namespace AMDiS {
     void setDofVector(Vec& petscVec, DOFVector<double>* vec,
                       int nRowVec, bool rankOnly = false);

-    void createGlobalDofMapping(vector<const FiniteElemSpace*> &feSpaces);
-
   protected:
     /// Arrays defining the non zero pattern of Petsc's matrix.
     int *d_nnz, *o_nnz;
@@ -91,10 +89,6 @@ namespace AMDiS {
     /// operators using DOFVectors from old timestep containing many zeros due to
     /// some phase fields.
     bool alwaysCreateNnzStructure;

-    /// Mapping from global DOF indices to global matrix indices under
-    /// consideration of possibly multiple components.
-    DofToMatIndex dofToMatIndex;
   };
...