//
// Software License for AMDiS
//
// Copyright (c) 2010 Dresden University of Technology
// All rights reserved.
// Authors: Simon Vey, Thomas Witkowski et al.
//
// This file is part of AMDiS
//
// See also license.opensource.txt in the distribution.


#include "parallel/ParallelDofMapping.h"
#include "parallel/MeshLevelData.h"
#include "parallel/StdMpi.h"

namespace AMDiS {

  using namespace std;

  void DofToMatIndex::getReverse(int rowIndex, int &component, int &dofIndex)
  {
    FUNCNAME("DofToMatIndex::getReverse()");

    for (map<int, boost::container::flat_map<DegreeOfFreedom, int> >::iterator
           it0 = data.begin(); it0 != data.end(); ++it0)
      for (boost::container::flat_map<DegreeOfFreedom, int>::iterator
             it1 = it0->second.begin(); it1 != it0->second.end(); ++it1)
        if (it1->second == rowIndex) {
          component = it0->first;
          dofIndex = it1->first;
          return;
        }

    // No matching entry found.
    component = -1;
    dofIndex = -1;
  }


  ComponentDofMap::ComponentDofMap(MeshLevelData* ld)
    : levelData(ld),
      dofComm(NULL),
      feSpace(NULL),
      needGlobalMapping(false),
      isNonLocal(false)
  {
    clear();
  }


  void ComponentDofMap::clear()
  {
    dofMap.clear();
    nonRankDofs.clear();

    nRankDofs = 0;
    nLocalDofs = 0;
    nOverallDofs = 0;
    rStartDofs = 0;
  }


  void ComponentDofMap::update()
  {
    FUNCNAME("ComponentDofMap::update()");

    // === Compute local indices for all rank owned DOFs. ===

    for (DofMap::iterator it = dofMap.begin(); it != dofMap.end(); ++it)
      if (it->second.local == -1 && nonRankDofs.count(it->first) == 0)
        it->second.local = nRankDofs++;

    // === Compute number of local and global DOFs in the mapping. ===

    nOverallDofs = 0;
    rStartDofs = 0;
    mpi::getDofNumbering(mpiComm, nRankDofs, rStartDofs, nOverallDofs);

    // === If required, compute also the global indices. ===

    if (needGlobalMapping) {
      computeGlobalMapping();

      if (isNonLocal)
        computeNonLocalIndices();
    }
  }


  void ComponentDofMap::computeGlobalMapping()
  {
    FUNCNAME("ComponentDofMap::computeGlobalMapping()");

    for (DofMap::iterator it = dofMap.begin(); it != dofMap.end(); ++it)
      it->second.global = it->second.local + rStartDofs;
  }


  void ComponentDofMap::computeNonLocalIndices()
  {
    FUNCNAME("ComponentDofMap::computeNonLocalIndices()");

    TEST_EXIT_DBG(dofComm)("No DOF communicator defined!\n");

    typedef map<int, map<const FiniteElemSpace*, DofContainer> >::iterator it_type;

    // === Send all global indices of DOFs that are owned by the rank to all ===
    // === other ranks that also include this DOF.                           ===

    StdMpi<vector<DegreeOfFreedom> > stdMpi(mpiComm);

    for (DofComm::Iterator it(dofComm->getSendDofs(), 0, feSpace);
         !it.end(); it.nextRank()) {
      int rank = it.getRank();
      if (meshLevel > 0)
        rank = levelData->mapRank(rank, 0, meshLevel);

      for (; !it.endDofIter(); it.nextDof())
        if (dofMap.count(it.getDofIndex()) &&
            !nonRankDofs.count(it.getDofIndex()))
          stdMpi.getSendData(rank).
            push_back(dofMap[it.getDofIndex()].global);
    }

    stdMpi.updateSendDataSize();

    // === Check from which ranks this rank must receive some data. ===

    for (DofComm::Iterator it(dofComm->getRecvDofs(), 0, feSpace);
         !it.end(); it.nextRank()) {
      bool recvFromRank = false;
      for (; !it.endDofIter(); it.nextDof()) {
        if (nonRankDofs.count(it.getDofIndex())) {
          recvFromRank = true;
          break;
        }
      }

      if (recvFromRank) {
        int rank = it.getRank();
        if (meshLevel > 0)
          rank = levelData->mapRank(rank, 0, meshLevel);

        stdMpi.recv(rank);
      }
    }

    // === Start communication to exchange global indices. ===

    stdMpi.startCommunication();

    // === And set the global indices for all DOFs that are not owned by rank. ===
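    // The owning ranks packed the global indices in DOF-iteration order, so a
    // running counter per rank suffices to unpack the received data here.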
    for (DofComm::Iterator it(dofComm->getRecvDofs(), 0, feSpace);
         !it.end(); it.nextRank()) {
      int rank = it.getRank();
      if (meshLevel > 0)
        rank = levelData->mapRank(rank, 0, meshLevel);

      int i = 0;
      for (; !it.endDofIter(); it.nextDof())
        if (nonRankDofs.count(it.getDofIndex()))
          dofMap[it.getDofIndex()].global = stdMpi.getRecvData(rank)[i++];
    }
  }


  void ComponentDataEqFeSpace::init(vector<const FiniteElemSpace*> &f0,
                                    vector<const FiniteElemSpace*> &f1,
                                    bool isNonLocal,
                                    MeshLevelData &levelData)
  {
    feSpaces = f1;
    feSpacesUnique = f0;

    for (vector<const FiniteElemSpace*>::iterator it = feSpacesUnique.begin();
         it != feSpacesUnique.end(); ++it) {
      addFeSpace(*it, levelData);
      componentData[*it].setNeedGlobalMapping(isNonLocal);
      componentData[*it].setNonLocal(isNonLocal);
    }
  }


  void ComponentDataEqFeSpace::addFeSpace(const FiniteElemSpace* feSpace,
                                          MeshLevelData &levelData)
  {
    if (componentData.count(feSpace))
      componentData.find(feSpace)->second.clear();
    else
      componentData.insert(make_pair(feSpace, ComponentDofMap(&levelData)));

    componentData.find(feSpace)->second.setFeSpace(feSpace);
  }


  void ComponentDataDiffFeSpace::init(vector<const FiniteElemSpace*> &f0,
                                      vector<const FiniteElemSpace*> &f1,
                                      bool isNonLocal,
                                      MeshLevelData &levelData)
  {
    feSpaces = f1;
    feSpacesUnique = f0;

    for (unsigned int component = 0; component < feSpaces.size(); component++) {
      addComponent(component, feSpaces[component], levelData);
      componentData[component].setNeedGlobalMapping(isNonLocal);
      componentData[component].setNonLocal(isNonLocal);
    }
  }


  void ComponentDataDiffFeSpace::addComponent(unsigned int component,
                                              const FiniteElemSpace* feSpace,
                                              MeshLevelData &levelData)
  {
    if (componentData.count(component))
      componentData.find(component)->second.clear();
    else
      componentData.insert(make_pair(component, ComponentDofMap(&levelData)));

    componentData.find(component)->second.setFeSpace(feSpace);
  }


  void ParallelDofMapping::init(MeshLevelData &ldata,
                                vector<const FiniteElemSpace*> &fe,
                                vector<const FiniteElemSpace*> &uniqueFe,
                                bool b)
  {
    FUNCNAME("ParallelDofMapping::init()");

    levelData = &ldata;
    isNonLocal = b;

    data->init(fe, uniqueFe, isNonLocal, ldata);
  }


  void ParallelDofMapping::clear()
  {
    FUNCNAME("ParallelDofMapping::clear()");

    TEST_EXIT_DBG(levelData)("No mesh level data object defined!\n");

    for (ComponentIterator &it = data->getIteratorData(); !it.end(); it.next())
      it->clear();

    nRankDofs = -1;
    nLocalDofs = -1;
    nOverallDofs = -1;
    rStartDofs = -1;

    dofToMatIndex.clear();
  }


  void ParallelDofMapping::setMpiComm(MPI::Intracomm &m, int l)
  {
    mpiComm = m;
    meshLevel = l;

    for (ComponentIterator &it = data->getIteratorData(); !it.end(); it.next())
      it->setMpiComm(m, l);
  }


  void ParallelDofMapping::setDofComm(DofComm &dc)
  {
    FUNCNAME("ParallelDofMapping::setDofComm()");

    dofComm = &dc;

    // Add the DOF communicator also to all FE space DOF mappings.
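    // It must be set before update() is called, since computing non-local
    // DOF indices and matrix indices requires communication.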
    for (ComponentIterator &it = data->getIteratorData(); !it.end(); it.next())
      it->setDofComm(dc);
  }


  int ParallelDofMapping::computeRankDofs()
  {
    FUNCNAME("ParallelDofMapping::computeRankDofs()");

    int result = 0;
    for (ComponentIterator &it = data->getIteratorComponent(); !it.end(); it.next())
      result += it->nRankDofs;

    return result;
  }


  int ParallelDofMapping::computeLocalDofs()
  {
    FUNCNAME("ParallelDofMapping::computeLocalDofs()");

    int result = 0;
    for (ComponentIterator &it = data->getIteratorComponent(); !it.end(); it.next())
      result += it->nLocalDofs;

    return result;
  }


  int ParallelDofMapping::computeOverallDofs()
  {
    FUNCNAME("ParallelDofMapping::computeOverallDofs()");

    int result = 0;
    for (ComponentIterator &it = data->getIteratorComponent(); !it.end(); it.next())
      result += it->nOverallDofs;

    return result;
  }


  int ParallelDofMapping::computeStartDofs()
  {
    FUNCNAME("ParallelDofMapping::computeStartDofs()");

    int result = 0;
    for (ComponentIterator &it = data->getIteratorComponent(); !it.end(); it.next())
      result += it->rStartDofs;

    return result;
  }


  void ParallelDofMapping::update()
  {
    FUNCNAME("ParallelDofMapping::update()");

    // First, update all FE space DOF mappings.
    for (ComponentIterator &it = data->getIteratorData(); !it.end(); it.next())
      it->update();

    // Compute all numbers from these mappings.
    nRankDofs = computeRankDofs();
    nLocalDofs = computeLocalDofs();
    nOverallDofs = computeOverallDofs();
    rStartDofs = computeStartDofs();

    // And finally, compute the matrix indices.
    computeMatIndex(needMatIndexFromGlobal);
  }


  void ParallelDofMapping::update(vector<const FiniteElemSpace*>& fe)
  {
    FUNCNAME("ParallelDofMapping::update()");

    ERROR_EXIT("THIS STILL HAS TO BE THOUGHT THROUGH!\n");

    //     for (vector<const FiniteElemSpace*>::iterator it = fe.begin();
    //          it != fe.end(); ++it)
    //       if (find(feSpacesUnique.begin(), feSpacesUnique.end(), *it) ==
    //           feSpacesUnique.end())
    //         ERROR_EXIT("Wrong FE space!\n");

    //     feSpaces = fe;

    update();
  }


  void ParallelDofMapping::computeMatIndex(bool globalIndex)
  {
    FUNCNAME("ParallelDofMapping::computeMatIndex()");

    dofToMatIndex.clear();

    // The offset is always added to the local matrix index. The offset for the
    // DOFs in the first FE space is the smallest global index of a DOF that is
    // owned by the rank.
    int offset = rStartDofs;

    // === Create the matrix indices for all component FE spaces. ===

    vector<const FiniteElemSpace*> &feSpaces = data->getFeSpaces();

    for (unsigned int i = 0; i < feSpaces.size(); i++) {

      // Traverse all DOFs of the FE space and create for all rank owned DOFs
      // a matrix index.
      DofMap& dofMap = (*data)[i].getMap();
      for (DofMap::iterator it = dofMap.begin(); it != dofMap.end(); ++it) {
        if ((*data)[i].isRankDof(it->first)) {
          int globalMatIndex = it->second.local + offset;
          if (globalIndex)
            dofToMatIndex.add(i, it->second.global, globalMatIndex);
          else
            dofToMatIndex.add(i, it->first, globalMatIndex);
        }
      }

      // Increase the offset for the next FE space by the number of DOFs owned
      // by the rank in the current FE space.
      offset += (*data)[i].nRankDofs;

      // If there are no non-local DOFs, continue with the next FE space.
      if (!isNonLocal)
        continue;

      TEST_EXIT_DBG(dofComm != NULL)("No communicator given!\n");

      // === Communicate the matrix indices for all DOFs that are on some ===
      // === interior boundaries.                                         ===
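      // The owner of each boundary DOF sends the matrix index created above;
      // the non-owning ranks receive the indices in the same iteration order.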
      StdMpi<vector<DegreeOfFreedom> > stdMpi(mpiComm);

      for (DofComm::Iterator it(dofComm->getSendDofs(), 0, feSpaces[i]);
           !it.end(); it.nextRank()) {
        vector<DegreeOfFreedom> sendGlobalDofs;

        for (; !it.endDofIter(); it.nextDof())
          if (dofMap.count(it.getDofIndex())) {
            if (globalIndex)
              sendGlobalDofs.push_back(dofToMatIndex.get(i, dofMap[it.getDofIndex()].global));
            else
              sendGlobalDofs.push_back(dofToMatIndex.get(i, it.getDofIndex()));
          }

        int rank = it.getRank();
        if (meshLevel > 0)
          rank = levelData->mapRank(rank, 0, meshLevel);

        stdMpi.send(rank, sendGlobalDofs);
      }

      for (DofComm::Iterator it(dofComm->getRecvDofs(), 0, feSpaces[i]);
           !it.end(); it.nextRank()) {
        int rank = it.getRank();
        if (meshLevel > 0)
          rank = levelData->mapRank(rank, 0, meshLevel);

        stdMpi.recv(rank);
      }

      stdMpi.startCommunication();

      for (DofComm::Iterator it(dofComm->getRecvDofs(), 0, feSpaces[i]);
           !it.end(); it.nextRank()) {
        int rank = it.getRank();
        if (meshLevel > 0)
          rank = levelData->mapRank(rank, 0, meshLevel);

        int counter = 0;
        for (; !it.endDofIter(); it.nextDof()) {
          if (dofMap.count(it.getDofIndex())) {
            DegreeOfFreedom d = stdMpi.getRecvData(rank)[counter++];
            if (globalIndex)
              dofToMatIndex.add(i, dofMap[it.getDofIndex()].global, d);
            else
              dofToMatIndex.add(i, it.getDofIndex(), d);
          }
        }
      }

      // === Communicate DOFs on periodic boundaries. ===

      stdMpi.clear();

      for (DofComm::Iterator it(dofComm->getPeriodicDofs(), 0, feSpaces[i]);
           !it.end(); it.nextRank()) {
        vector<DegreeOfFreedom> sendGlobalDofs;

        for (; !it.endDofIter(); it.nextDof())
          if (dofMap.count(it.getDofIndex())) {
            if (globalIndex) {
              sendGlobalDofs.push_back(dofMap[it.getDofIndex()].global);
              sendGlobalDofs.push_back(dofToMatIndex.get(i, dofMap[it.getDofIndex()].global));
            } else {
              sendGlobalDofs.push_back(dofToMatIndex.get(i, it.getDofIndex()));
            }
          }

        int rank = it.getRank();
        if (meshLevel > 0)
          rank = levelData->mapRank(rank, 0, meshLevel);

        stdMpi.send(rank, sendGlobalDofs);
        stdMpi.recv(rank);
      }

      stdMpi.startCommunication();

      for (DofComm::Iterator it(dofComm->getPeriodicDofs(), 0, feSpaces[i]);
           !it.end(); it.nextRank()) {
        // The receives above were posted on the mapped rank numbers, so the
        // received data must be accessed with the mapped rank as well.
        int rank = it.getRank();
        if (meshLevel > 0)
          rank = levelData->mapRank(rank, 0, meshLevel);

        int counter = 0;

        for (; !it.endDofIter(); it.nextDof())
          if (dofMap.count(it.getDofIndex())) {
            if (globalIndex) {
              TEST_EXIT_DBG(counter + 2 <= static_cast<int>(stdMpi.getRecvData(rank).size()))
                ("Should not happen!\n");

              dofToMatIndex.add(i,
                                stdMpi.getRecvData(rank)[counter],
                                stdMpi.getRecvData(rank)[counter + 1]);
              counter += 2;
            } else {
              dofToMatIndex.add(i, it.getDofIndex(),
                                stdMpi.getRecvData(rank)[counter++]);
            }
          }
      }
    }
  }


  void ParallelDofMapping::createIndexSet(IS &is,
                                          int firstComponent,
                                          int nComponents)
  {
    FUNCNAME("ParallelDofMapping::createIndexSet()");

    // Find the first rank-owned DOF of the component; its matrix index marks
    // the begin of the contiguous index range owned by this rank.
    int firstRankDof = -1;
    ComponentDofMap &compMap = (*data)[firstComponent];
    DofMap &dofMap = compMap.getMap();
    for (DofMap::iterator it = dofMap.begin(); it != dofMap.end(); ++it) {
      if (compMap.isRankDof(it->first)) {
        if (needMatIndexFromGlobal)
          firstRankDof = it->second.global;
        else
          firstRankDof = it->first;

        break;
      }
    }

    TEST_EXIT_DBG(firstRankDof >= 0)("No rank DOF found!\n");

    int firstMatIndex = dofToMatIndex.get(firstComponent, firstRankDof);

    int nRankRows = 0;
    for (int i = firstComponent; i < firstComponent + nComponents; i++)
      nRankRows += (*data)[i].nRankDofs;

    ISCreateStride(mpiComm, nRankRows, firstMatIndex, 1, &is);
  }

}
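
// A minimal usage sketch (not part of the library): the objects levelData,
// feSpaces, uniqueFeSpaces, and dofComm are assumed to be provided by the
// surrounding mesh distributor, and the ParallelDofMapping constructor
// arguments are elided.
//
//   AMDiS::ParallelDofMapping dofMapping(/* ... */);
//   dofMapping.init(levelData, feSpaces, uniqueFeSpaces, true);
//   dofMapping.setMpiComm(MPI::COMM_WORLD, 0);
//   dofMapping.setDofComm(dofComm);
//   dofMapping.update();
//
// After update(), the mapping provides global matrix indices for every
// (component, DOF) pair, and createIndexSet() can build a PETSc index set
// over the rank-owned rows of selected components.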