Commit 8a2a712f authored by Thomas Witkowski's avatar Thomas Witkowski
Browse files

Make assembling of periodic BC in parallel faster.

parent 7dd8b190
......@@ -1854,7 +1854,7 @@ namespace AMDiS {
StdMpi<vector<int> > stdMpi(mpiComm, false);
// === Each rank traverse its periodic boundaries and sends the dof indices ===
// === Each rank traverse its periodic boundaries and sends the DOF indices ===
// === to the rank "on the other side" of the periodic boundary. ===
RankToDofContainer rankPeriodicDofs;
......@@ -1863,8 +1863,6 @@ namespace AMDiS {
for (RankToBoundMap::iterator it = periodicBoundary.boundary.begin();
it != periodicBoundary.boundary.end(); ++it) {
// MSG("------------- WITH RANK %d ------------------\n", it->first);
if (it->first == mpiRank) {
TEST_EXIT_DBG(it->second.size() % 2 == 0)("Should not happen!\n");
......@@ -1876,10 +1874,6 @@ namespace AMDiS {
bound.neighObj.el->getVertexDofs(feSpace, bound.neighObj, dofs1);
bound.neighObj.el->getNonVertexDofs(feSpace, bound.neighObj, dofs1);
// MSG("BOUND-I %d-%d-%d WITH %d-%d-%d\n",
// bound.rankObj.elIndex, bound.rankObj.subObj, bound.rankObj.ithObj,
// bound.neighObj.elIndex, bound.neighObj.subObj, bound.neighObj.ithObj);
TEST_EXIT_DBG(dofs0.size() == dofs1.size())("Should not happen!\n");
BoundaryType type = bound.type;
......@@ -1891,8 +1885,6 @@ namespace AMDiS {
if (periodicDofAssociations[globalDof0].count(type) == 0) {
periodicDof[type][globalDof0] = globalDof1;
periodicDofAssociations[globalDof0].insert(type);
// MSG("SET(A TYPE %d) DOF %d -> %d\n", type, globalDof0, globalDof1);
}
}
}
......@@ -1904,22 +1896,11 @@ namespace AMDiS {
boundIt != it->second.end(); ++boundIt) {
int nDofs = dofs.size();
// MSG("BOUND-R (T = %d) %d-%d-%d WITH %d-%d-%d\n",
// boundIt->type,
// boundIt->rankObj.elIndex, boundIt->rankObj.subObj, boundIt->rankObj.ithObj,
// boundIt->neighObj.elIndex, boundIt->neighObj.subObj, boundIt->neighObj.ithObj);
boundIt->rankObj.el->getVertexDofs(feSpace, boundIt->rankObj, dofs);
boundIt->rankObj.el->getNonVertexDofs(feSpace, boundIt->rankObj, dofs);
for (unsigned int i = 0; i < (dofs.size() - nDofs); i++) {
// WorldVector<double> c;
// mesh->getDofIndexCoords(*(dofs[nDofs + i]), feSpace, c);
// MSG(" SEND i = %d DOF = %d (%f %f %f)\n", nDofs + i, mapLocalGlobalDofs[*(dofs[nDofs + i])], c[0], c[1], c[2]);
for (unsigned int i = 0; i < (dofs.size() - nDofs); i++)
rankToDofType[it->first].push_back(boundIt->type);
}
}
// Send the global indices to the rank on the other side.
......@@ -1944,9 +1925,6 @@ namespace AMDiS {
for (RankToBoundMap::iterator it = periodicBoundary.boundary.begin();
it != periodicBoundary.boundary.end(); ++it) {
// MSG("------------- WITH RANK %d ------------------\n", it->first);
DofContainer& dofs = rankPeriodicDofs[it->first];
vector<int>& types = rankToDofType[it->first];
......@@ -1958,25 +1936,68 @@ namespace AMDiS {
int mapGlobalDofIndex = stdMpi.getRecvData(it->first)[i];
BoundaryType type = types[i];
// WorldVector<double> c;
// mesh->getDofIndexCoords(*(dofs[i]), feSpace, c);
// MSG(" RECV i = %d DOF = %d (%f %f %f)\n", i, *(dofs[i]), c[0], c[1], c[2]);
// Check if this global dof with the corresponding boundary type was
// not added before by another periodic boundary from other rank.
if (periodicDofAssociations[globalDofIndex].count(type) == 0) {
// MSG("SET(B-%d TYPE %d) DOF %d -> %d\n", i, type, globalDofIndex, mapGlobalDofIndex);
periodicDof[type][globalDofIndex] = mapGlobalDofIndex;
periodicDofAssociations[globalDofIndex].insert(type);
} else {
// MSG("ASSOC ALREADY SET FOR %d TYPE %d\n", i, type);
}
}
}
StdMpi<PeriodicDofMap> stdMpi2(mpiComm);
for (RankToBoundMap::iterator it = periodicBoundary.boundary.begin();
it != periodicBoundary.boundary.end(); ++it) {
if (it->first == mpiRank)
continue;
PeriodicDofMap perObjMap;
for (vector<AtomicBoundary>::iterator boundIt = it->second.begin();
boundIt != it->second.end(); ++boundIt) {
if (boundIt->type >= -3)
continue;
DofContainer dofs;
boundIt->rankObj.el->getVertexDofs(feSpace, boundIt->rankObj, dofs);
boundIt->rankObj.el->getNonVertexDofs(feSpace, boundIt->rankObj, dofs);
for (unsigned int i = 0; i < dofs.size(); i++) {
DegreeOfFreedom globalDof = mapLocalGlobalDofs[*dofs[i]];
for (std::set<BoundaryType>::iterator perAscIt = periodicDofAssociations[globalDof].begin();
perAscIt != periodicDofAssociations[globalDof].end(); ++perAscIt)
if (*perAscIt >= -3) {
TEST_EXIT_DBG(periodicDof[*perAscIt].count(globalDof) == 1)
("Should not happen!\n");
perObjMap[*perAscIt][globalDof] = periodicDof[*perAscIt][globalDof];
}
}
}
stdMpi2.send(it->first, perObjMap);
stdMpi2.recv(it->first);
}
stdMpi2.startCommunication<int>(MPI_INT);
for (std::map<int, PeriodicDofMap>::iterator it = stdMpi2.getRecvData().begin();
it != stdMpi2.getRecvData().end(); ++it)
for (PeriodicDofMap::iterator perIt = it->second.begin();
perIt != it->second.end(); ++perIt)
for (DofMapping::iterator dofIt = perIt->second.begin();
dofIt != perIt->second.end(); ++dofIt) {
TEST_EXIT_DBG(periodicDof[perIt->first].count(dofIt->second) == 0 ||
periodicDof[perIt->first][dofIt->second] == dofIt->first)
("Should not happen!\n");
periodicDof[perIt->first][dofIt->second] = dofIt->first;
}
#if (DEBUG != 0)
ParallelDebug::testPeriodicBoundary(*this);
#endif
......
......@@ -56,52 +56,6 @@ namespace AMDiS {
TEST_EXIT(mat)("No DOFMatrix!\n");
typedef map<DegreeOfFreedom, DegreeOfFreedom> DofMapping;
typedef map<BoundaryType, DofMapping> PeriodicDofMap;
StdMpi<PeriodicDofMap> stdMpi(meshDistributor->getMpiComm());
if (meshDistributor->getMpiRank() == 0) {
for (int i = 1; i < meshDistributor->getMpiSize(); i++)
stdMpi.recv(i);
} else {
stdMpi.send(0, meshDistributor->getPeriodicMapping());
}
stdMpi.startCommunication<int>(MPI_INT);
StdMpi<PeriodicDofMap> stdMpi2(meshDistributor->getMpiComm());
PeriodicDofMap overallDofMap;
if (meshDistributor->getMpiRank() == 0) {
overallDofMap = meshDistributor->getPeriodicMapping();
for (int i = 1; i < meshDistributor->getMpiSize(); i++) {
PeriodicDofMap &rankDofMap = stdMpi.getRecvData(i);
for (PeriodicDofMap::iterator it = rankDofMap.begin(); it != rankDofMap.end(); ++it) {
for (DofMapping::iterator dofIt = it->second.begin(); dofIt != it->second.end(); ++dofIt) {
if (overallDofMap[it->first].count(dofIt->first) == 1) {
TEST_EXIT_DBG(overallDofMap[it->first][dofIt->first] == dofIt->second)
("Should not happen!\n");
} else {
overallDofMap[it->first][dofIt->first] = dofIt->second;
}
}
}
}
for (int i = 1; i < meshDistributor->getMpiSize(); i++)
stdMpi2.send(i, overallDofMap);
} else {
stdMpi2.recv(0);
}
stdMpi2.startCommunication<int>(MPI_INT);
if (meshDistributor->getMpiRank() > 0)
overallDofMap = stdMpi2.getRecvData(0);
using mtl::tag::row; using mtl::tag::nz; using mtl::begin; using mtl::end;
namespace traits= mtl::traits;
typedef DOFMatrix::base_matrix_type Matrix;
......@@ -134,6 +88,9 @@ namespace AMDiS {
// Calculate petsc row index.
int rowIndex = globalRowDof * dispMult + dispAddRow;
cols.clear();
values.clear();
for (icursor_type icursor = begin<nz>(cursor), icend = end<nz>(cursor);
icursor != icend; ++icursor) {
......@@ -144,8 +101,8 @@ namespace AMDiS {
if (!periodicCol) {
// Calculate the exact position of the column index in the petsc matrix.
int colIndex = globalColDof * dispMult + dispAddCol;
MatSetValue(petscMatrix, rowIndex, colIndex, value(*icursor), ADD_VALUES);
cols.push_back(globalColDof * dispMult + dispAddCol);
values.push_back(value(*icursor));
} else {
std::set<int>& perColAsc = meshDistributor->getPerDofAssociations(globalColDof);
std::set<int> perAsc;
......@@ -160,18 +117,21 @@ namespace AMDiS {
for (std::set<int>::iterator it = perAsc.begin(); it != perAsc.end(); ++it) {
int nCols = static_cast<int>(newCols.size());
for (int i = 0; i < nCols; i++) {
TEST_EXIT(overallDofMap[*it].count(newCols[i]) > 0)("Should not happen: %d %d\n", *it, newCols[i]);
newCols.push_back(overallDofMap[*it][newCols[i]]);
TEST_EXIT(meshDistributor->isPeriodicDof(*it, newCols[i]))
("Should not happen: %d %d\n", *it, newCols[i]);
newCols.push_back(meshDistributor->getPeriodicMapping(*it, newCols[i]));
}
}
for (int i = 0; i < newCols.size(); i++) {
int colIndex = newCols[i] * dispMult + dispAddCol;
MatSetValue(petscMatrix, rowIndex, colIndex, scaledValue, ADD_VALUES);
cols.push_back(newCols[i] * dispMult + dispAddCol);
values.push_back(scaledValue);
}
}
}
MatSetValues(petscMatrix, 1, &rowIndex, cols.size(),
&(cols[0]), &(values[0]), ADD_VALUES);
} else {
for (icursor_type icursor = begin<nz>(cursor), icend = end<nz>(cursor);
icursor != icend; ++icursor) {
......@@ -198,16 +158,17 @@ namespace AMDiS {
int nEntry = static_cast<int>(entry.size());
for (int i = 0; i < nEntry; i++) {
int perRowDof = 0;
if (overallDofMap[*it].count(entry[i].first) > 0)
perRowDof = overallDofMap[*it][entry[i].first];
if (meshDistributor->getPeriodicMapping()[*it].count(entry[i].first))
perRowDof = meshDistributor->getPeriodicMapping(*it, entry[i].first);
else
perRowDof = entry[i].first;
int perColDof;
if (overallDofMap[*it].count(entry[i].second) > 0)
perColDof = overallDofMap[*it][entry[i].second];
if (meshDistributor->getPeriodicMapping()[*it].count(entry[i].second))
perColDof = meshDistributor->getPeriodicMapping(*it, entry[i].second);
else
perColDof = entry[i].second;
entry.push_back(std::make_pair(perRowDof, perColDof));
}
......
Markdown is supported
0% or .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment