Commit a2911e8d authored by Thomas Witkowski's avatar Thomas Witkowski

Several small changes to simplify parallel code.

parent 29fd364c
...@@ -372,9 +372,6 @@ namespace AMDiS { ...@@ -372,9 +372,6 @@ namespace AMDiS {
while (elInfo) { while (elInfo) {
// Get element value. // Get element value.
double val = vec[elInfo->getElement()->getIndex()]; double val = vec[elInfo->getElement()->getIndex()];
if (vc == 1101) {
MSG("FOUND EL %d\n", elInfo->getElement()->getIndex());
}
// Write value for each vertex of each element. // Write value for each vertex of each element.
for (int i = 0; i <= dim; i++) for (int i = 0; i <= dim; i++)
......
...@@ -197,7 +197,8 @@ namespace AMDiS { ...@@ -197,7 +197,8 @@ namespace AMDiS {
DOFVector<double>::Iterator dofIt(vec, USED_DOFS); DOFVector<double>::Iterator dofIt(vec, USED_DOFS);
for (dofIt.reset(); !dofIt.end(); ++dofIt) { for (dofIt.reset(); !dofIt.end(); ++dofIt) {
// Calculate global row index of the dof. // Calculate global row index of the dof.
DegreeOfFreedom globalRow = meshDistributor->mapLocalToGlobal(dofIt.getDOFIndex()); DegreeOfFreedom globalRow =
meshDistributor->mapLocalToGlobal(dofIt.getDOFIndex());
// Calculate petsc index of the row dof. // Calculate petsc index of the row dof.
int index = globalRow * dispMult + dispAdd; int index = globalRow * dispMult + dispAdd;
...@@ -384,6 +385,8 @@ namespace AMDiS { ...@@ -384,6 +385,8 @@ namespace AMDiS {
{ {
FUNCNAME("GlobalMatrixSolver::fillPetscMatrix()"); FUNCNAME("GlobalMatrixSolver::fillPetscMatrix()");
MSG("START FILL PETSC MATRIX!\n");
clock_t first = clock(); clock_t first = clock();
int nRankRows = meshDistributor->getNumberRankDofs() * nComponents; int nRankRows = meshDistributor->getNumberRankDofs() * nComponents;
int nOverallRows = meshDistributor->getNumberOverallDofs() * nComponents; int nOverallRows = meshDistributor->getNumberOverallDofs() * nComponents;
...@@ -402,7 +405,11 @@ namespace AMDiS { ...@@ -402,7 +405,11 @@ namespace AMDiS {
VecSetSizes(petscTmpVec, nRankRows, nOverallRows); VecSetSizes(petscTmpVec, nRankRows, nOverallRows);
VecSetType(petscTmpVec, VECMPI); VecSetType(petscTmpVec, VECMPI);
if (!d_nnz || meshDistributor->getLastMeshChangeIndex() != lastMeshNnz) { int recvAllValues = 0;
int sendValue = static_cast<int>(meshDistributor->getLastMeshChangeIndex() != lastMeshNnz);
meshDistributor->getMpiComm().Allreduce(&sendValue, &recvAllValues, 1, MPI_INT, MPI_SUM);
if (!d_nnz || recvAllValues != 0) {
if (d_nnz) { if (d_nnz) {
delete [] d_nnz; delete [] d_nnz;
d_nnz = NULL; d_nnz = NULL;
...@@ -430,6 +437,7 @@ namespace AMDiS { ...@@ -430,6 +437,7 @@ namespace AMDiS {
("Wrong matrix ownership range!\n"); ("Wrong matrix ownership range!\n");
#endif #endif
// === Transfer values from DOF matrices to the PETSc matrix. === // === Transfer values from DOF matrices to the PETSc matrix. ===
for (int i = 0; i < nComponents; i++) for (int i = 0; i < nComponents; i++)
...@@ -442,6 +450,7 @@ namespace AMDiS { ...@@ -442,6 +450,7 @@ namespace AMDiS {
MatAssemblyBegin(petscMatrix, MAT_FINAL_ASSEMBLY); MatAssemblyBegin(petscMatrix, MAT_FINAL_ASSEMBLY);
MatAssemblyEnd(petscMatrix, MAT_FINAL_ASSEMBLY); MatAssemblyEnd(petscMatrix, MAT_FINAL_ASSEMBLY);
// === Transfer values from DOF vector to the PETSc vector. === // === Transfer values from DOF vector to the PETSc vector. ===
for (int i = 0; i < nComponents; i++) for (int i = 0; i < nComponents; i++)
...@@ -496,9 +505,9 @@ namespace AMDiS { ...@@ -496,9 +505,9 @@ namespace AMDiS {
int nRankDofs = meshDistributor->getNumberRankDofs(); int nRankDofs = meshDistributor->getNumberRankDofs();
for (int i = 0; i < nComponents; i++) { for (int i = 0; i < nComponents; i++) {
DOFVector<double> *dofvec = vec.getDOFVector(i); DOFVector<double> &dofvec = *(vec.getDOFVector(i));
for (int j = 0; j < nRankDofs; j++) for (int j = 0; j < nRankDofs; j++)
(*dofvec)[meshDistributor->mapLocalToDofIndex(j)] = dofvec[meshDistributor->mapLocalToDofIndex(j)] =
vecPointer[j * nComponents + i]; vecPointer[j * nComponents + i];
} }
......
...@@ -1582,11 +1582,11 @@ namespace AMDiS { ...@@ -1582,11 +1582,11 @@ namespace AMDiS {
elInfo = stack.traverseNext(elInfo); elInfo = stack.traverseNext(elInfo);
} }
DofContainer rankAllDofs; DofContainer rankDofs;
for (DofSet::iterator dofIt = rankDofSet.begin(); dofIt != rankDofSet.end(); ++dofIt) for (DofSet::iterator it = rankDofSet.begin(); it != rankDofSet.end(); ++it)
rankAllDofs.push_back(*dofIt); rankDofs.push_back(*it);
sort(rankAllDofs.begin(), rankAllDofs.end(), cmpDofsByValue); sort(rankDofs.begin(), rankDofs.end(), cmpDofsByValue);
DofContainer rankDofs = rankAllDofs; int nRankAllDofs = rankDofs.size();
// === Traverse on interior boundaries and move all not ranked owned DOFs from === // === Traverse on interior boundaries and move all not ranked owned DOFs from ===
...@@ -1627,20 +1627,8 @@ namespace AMDiS { ...@@ -1627,20 +1627,8 @@ namespace AMDiS {
it->rankObj.el->getVertexDofs(feSpace, it->rankObj, dofs); it->rankObj.el->getVertexDofs(feSpace, it->rankObj, dofs);
it->rankObj.el->getNonVertexDofs(feSpace, it->rankObj, dofs); it->rankObj.el->getNonVertexDofs(feSpace, it->rankObj, dofs);
for (int i = 0; i < static_cast<int>(dofs.size()); i++) { for (int i = 0; i < static_cast<int>(dofs.size()); i++)
// FOR DEBUGGING sendDofs[it.getRank()].push_back(dofs[i]);
/*
WorldVector<double> cs;
mesh->getDofIndexCoords(dofs[i], feSpace, cs);
MSG("SEND EL %d DOF %d TO %d\n", it->rankObj.elIndex, *(dofs[i]), it.getRank());
if (cs.getSize() == 2)
MSG("COORDS-s2: %f %f\n", cs[0], cs[1]);
else
MSG("COORDS-s2: %f %f %f\n", cs[0], cs[1], cs[2]);
*/
sendDofs[it.getRank()].push_back(dofs[i]);
}
} }
...@@ -1655,17 +1643,6 @@ namespace AMDiS { ...@@ -1655,17 +1643,6 @@ namespace AMDiS {
if (eraseIt != rankDofs.end()) if (eraseIt != rankDofs.end())
rankDofs.erase(eraseIt); rankDofs.erase(eraseIt);
// FOR DEBUGGING
/*
WorldVector<double> cs;
mesh->getDofIndexCoords(dofs[i], feSpace, cs);
MSG("RECV EL %d DOF %d FROM %d\n", it->rankObj.elIndex, *(dofs[i]), it.getRank());
if (cs.getSize() == 2)
MSG("COORDS-r2: %f %f\n", cs[0], cs[1]);
else
MSG("COORDS-r2: %f %f %f\n", cs[0], cs[1], cs[2]);
*/
recvDofs[it.getRank()].push_back(dofs[i]); recvDofs[it.getRank()].push_back(dofs[i]);
} }
} }
...@@ -1684,40 +1661,24 @@ namespace AMDiS { ...@@ -1684,40 +1661,24 @@ namespace AMDiS {
nOverallDofs = 0; nOverallDofs = 0;
mpiComm.Allreduce(&nRankDofs, &nOverallDofs, 1, MPI_INT, MPI_SUM); mpiComm.Allreduce(&nRankDofs, &nOverallDofs, 1, MPI_INT, MPI_SUM);
// First, we set all dofs in rank's partition to be owned by the rank. Later,
// Do not change the indices now, but create a new indexing and store it here. // the dofs in ranks partition that are owned by other rank are set to false.
DofIndexMap rankDofsNewLocalIndex;
isRankDof.clear(); isRankDof.clear();
int i = 0; for (int i = 0; i < nRankAllDofs; i++)
for (DofContainer::iterator dofIt = rankAllDofs.begin();
dofIt != rankAllDofs.end(); ++dofIt) {
rankDofsNewLocalIndex[*dofIt] = i;
// First, we set all dofs in rank's partition to be owned by the rank. Later,
// the dofs in ranks partition that are owned by other rank are set to false.
isRankDof[i] = true; isRankDof[i] = true;
i++;
}
// Stores for all rank owned dofs a new global index. // Stores for all rank owned dofs a new global index.
DofIndexMap rankDofsNewGlobalIndex; DofIndexMap rankDofsNewGlobalIndex;
// Stores for all rank owned dofs a continues local index. for (int i = 0; i < nRankDofs; i++)
DofIndexMap rankOwnedDofsNewLocalIndex; rankDofsNewGlobalIndex[rankDofs[i]] = i + rstart;
i = 0;
for (DofContainer::iterator dofIt = rankDofs.begin();
dofIt != rankDofs.end(); ++dofIt) {
rankDofsNewGlobalIndex[*dofIt] = i + rstart;
rankOwnedDofsNewLocalIndex[*dofIt] = i;
i++;
}
// === Send new DOF indices. === // === Send new DOF indices. ===
#if (DEBUG != 0) #if (DEBUG != 0)
ParallelDomainDbg::testDofContainerCommunication(*this, sendDofs, recvDofs); ParallelDomainDbg::testDofContainerCommunication(*this, sendDofs, recvDofs);
#endif #endif
int i = 0;
StdMpi<std::vector<DegreeOfFreedom> > stdMpi(mpiComm, false); StdMpi<std::vector<DegreeOfFreedom> > stdMpi(mpiComm, false);
for (RankToDofContainer::iterator sendIt = sendDofs.begin(); for (RankToDofContainer::iterator sendIt = sendDofs.begin();
sendIt != sendDofs.end(); ++sendIt, i++) { sendIt != sendDofs.end(); ++sendIt, i++) {
...@@ -1738,15 +1699,23 @@ namespace AMDiS { ...@@ -1738,15 +1699,23 @@ namespace AMDiS {
for (DofContainer::iterator dofIt = recvIt->second.begin(); for (DofContainer::iterator dofIt = recvIt->second.begin();
dofIt != recvIt->second.end(); ++dofIt) { dofIt != recvIt->second.end(); ++dofIt) {
rankDofsNewGlobalIndex[*dofIt] = stdMpi.getRecvData(recvIt->first)[j++]; rankDofsNewGlobalIndex[*dofIt] = stdMpi.getRecvData(recvIt->first)[j++];
isRankDof[rankDofsNewLocalIndex[*dofIt]] = false; isRankDof[**dofIt] = false;
} }
} }
// === Create now the local to global index and local to dof index mappings. === // === Create now the local to global index and local to dof index mappings. ===
createLocalMappings(rankDofsNewLocalIndex, rankOwnedDofsNewLocalIndex, mapLocalGlobalDofs.clear();
rankDofsNewGlobalIndex); mapLocalDofIndex.clear();
for (DofIndexMap::iterator dofIt = rankDofsNewGlobalIndex.begin();
dofIt != rankDofsNewGlobalIndex.end(); ++dofIt)
mapLocalGlobalDofs[*(dofIt->first)] = dofIt->second;
for (int i = 0; i < nRankDofs; i++)
mapLocalDofIndex[i] = *(rankDofs[i]);
// === Update dof admins due to new number of dofs. === // === Update dof admins due to new number of dofs. ===
...@@ -1787,11 +1756,6 @@ namespace AMDiS { ...@@ -1787,11 +1756,6 @@ namespace AMDiS {
vertexDof[*it], vertexDof[*it],
isRankDof[**it]); isRankDof[**it]);
} }
MSG("\n");
for (DofMapping::iterator it = mapLocalDofIndex.begin();
it != mapLocalDofIndex.end(); ++it) {
MSG("mapLocalDofIndex[%d] = %d\n", it->first, it->second);
}
#endif #endif
#endif #endif
} }
...@@ -1827,6 +1791,7 @@ namespace AMDiS { ...@@ -1827,6 +1791,7 @@ namespace AMDiS {
mapLocalGlobalDofs.clear(); mapLocalGlobalDofs.clear();
mapLocalDofIndex.clear(); mapLocalDofIndex.clear();
// Iterate over all DOFs in ranks partition. // Iterate over all DOFs in ranks partition.
for (DofIndexMap::iterator dofIt = rankDofsNewLocalIndex.begin(); for (DofIndexMap::iterator dofIt = rankDofsNewLocalIndex.begin();
dofIt != rankDofsNewLocalIndex.end(); ++dofIt) { dofIt != rankDofsNewLocalIndex.end(); ++dofIt) {
...@@ -1837,12 +1802,13 @@ namespace AMDiS { ...@@ -1837,12 +1802,13 @@ namespace AMDiS {
mapLocalGlobalDofs[newLocalIndex] = newGlobalIndex; mapLocalGlobalDofs[newLocalIndex] = newGlobalIndex;
} }
for (DofIndexMap::iterator dofIt = rankOwnedDofsNewLocalIndex.begin(); for (DofIndexMap::iterator dofIt = rankOwnedDofsNewLocalIndex.begin();
dofIt != rankOwnedDofsNewLocalIndex.end(); ++dofIt) dofIt != rankOwnedDofsNewLocalIndex.end(); ++dofIt)
mapLocalDofIndex[dofIt->second] = *(dofIt->first); mapLocalDofIndex[dofIt->second] = *(dofIt->first);
} }
void MeshDistributor::createDofMemberInfo(DofToPartitions& partitionDofs, void MeshDistributor::createDofMemberInfo(DofToPartitions& partitionDofs,
DofContainer& rankOwnedDofs, DofContainer& rankOwnedDofs,
DofContainer& rankAllDofs, DofContainer& rankAllDofs,
......
Markdown is supported
0% or .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment