From ae62aab52587fd45a718630e6913afeea2c7a491 Mon Sep 17 00:00:00 2001 From: Thomas Witkowski <thomas.witkowski@gmx.de> Date: Wed, 26 Jan 2011 09:16:17 +0000 Subject: [PATCH] Several small improvements for mesh partitioning. --- AMDiS/src/parallel/ElementObjectData.h | 7 +- AMDiS/src/parallel/MeshDistributor.cc | 14 +-- AMDiS/src/parallel/MeshPartitioner.cc | 22 +++-- AMDiS/src/parallel/MeshPartitioner.h | 14 ++- AMDiS/src/parallel/ParMetisPartitioner.cc | 11 ++- AMDiS/src/parallel/ParMetisPartitioner.h | 5 +- AMDiS/src/parallel/ZoltanPartitioner.cc | 109 ++++++++++++++++++++-- AMDiS/src/parallel/ZoltanPartitioner.h | 34 ++++++- 8 files changed, 178 insertions(+), 38 deletions(-) diff --git a/AMDiS/src/parallel/ElementObjectData.h b/AMDiS/src/parallel/ElementObjectData.h index 2bf63bd2..8cfaddad 100644 --- a/AMDiS/src/parallel/ElementObjectData.h +++ b/AMDiS/src/parallel/ElementObjectData.h @@ -410,15 +410,18 @@ namespace AMDiS { return periodicFaces; } - bool getEdgeReverseMode(ElementObjectData &obj0, ElementObjectData &obj1) + inline bool getEdgeReverseMode(ElementObjectData &obj0, ElementObjectData &obj1) { + if (mesh->getDim() == 2) + return true; + TEST_EXIT_DBG(edgeReverseMode.count(make_pair(obj0, obj1))) ("Should not happen!\n"); return edgeReverseMode[make_pair(obj0, obj1)]; } - bool getFaceReverseMode(ElementObjectData &obj0, ElementObjectData &obj1) + inline bool getFaceReverseMode(ElementObjectData &obj0, ElementObjectData &obj1) { TEST_EXIT_DBG(faceReverseMode.count(make_pair(obj0, obj1))) ("Should not happen!\n"); diff --git a/AMDiS/src/parallel/MeshDistributor.cc b/AMDiS/src/parallel/MeshDistributor.cc index f0278593..f31725ef 100644 --- a/AMDiS/src/parallel/MeshDistributor.cc +++ b/AMDiS/src/parallel/MeshDistributor.cc @@ -165,12 +165,12 @@ namespace AMDiS { setInitialElementWeights(); // and now partition the mesh - partitioner->getPartitionMap(oldPartitionMap); + oldPartitionMap = partitioner->getPartitionMap(); bool partitioningSucceed = partitioner->partition(elemWeights, INITIAL); TEST_EXIT(partitioningSucceed)("Initial partitioning does not work!\n"); - partitioner->getPartitionMap(partitionMap); + partitionMap = partitioner->getPartitionMap(); #if (DEBUG != 0) @@ -958,10 +958,10 @@ namespace AMDiS { // === Run ParMETiS to calculate a new mesh partitioning. === - partitioner->useLocalGlobalDofMap(&mapLocalGlobalDofs); + partitioner->setLocalGlobalDofMap(&mapLocalGlobalDofs); bool partitioningSucceed = partitioner->partition(elemWeights, ADAPTIVE_REPART); if (!partitioningSucceed) { - MSG("ParMETIS created empty partition!\n"); + MSG("Mesh partitioner created empty partition!\n"); return; } oldPartitionMap = partitionMap; @@ -1005,8 +1005,6 @@ namespace AMDiS { for (std::set<MacroElement*>::iterator it = newMacroEl.begin(); it != newMacroEl.end(); ++it) { - MSG("NEW MACRO EL: %d\n", (*it)->getIndex()); - MacroElement *mel = *it; // First, reset all neighbour relations. The correct neighbours will be set later. 
@@ -1126,8 +1124,6 @@ namespace AMDiS { ("Could not find macro element %d\n", *elIt); deleteMacroElements.insert(elIndexMap[*elIt]); - - MSG("REMOVE MACRO EL: %d\n", *elIt); } } @@ -1143,7 +1139,7 @@ namespace AMDiS { meshManipulation.deleteDoubleDofs(newMacroEl, elObjects); mesh->dofCompress(); - partitioner->getPartitionMap(partitionMap); + partitionMap = partitioner->getPartitionMap(); updateInteriorBoundaryInfo(); diff --git a/AMDiS/src/parallel/MeshPartitioner.cc b/AMDiS/src/parallel/MeshPartitioner.cc index 77115afa..5a469b1a 100644 --- a/AMDiS/src/parallel/MeshPartitioner.cc +++ b/AMDiS/src/parallel/MeshPartitioner.cc @@ -31,17 +31,23 @@ namespace AMDiS { elementInRank.clear(); TraverseStack stack; - ElInfo *elInfo = stack.traverseFirst(mesh, 0, Mesh::CALL_EL_LEVEL); + ElInfo *elInfo = + stack.traverseFirst(mesh, 0, + Mesh::CALL_EL_LEVEL | Mesh::FILL_NEIGH | Mesh::FILL_BOUND); while (elInfo) { - Element *element = elInfo->getElement(); + int elIndex = elInfo->getElement()->getIndex(); + int elInRank = std::min(elIndex / elPerRank, mpiSize - 1); - if ((element->getIndex() >= mpiRank * elPerRank && - element->getIndex() < (mpiRank + 1) * elPerRank) || - (element->getIndex() >= mpiSize * elPerRank && - mpiRank == mpiSize - 1)) - elementInRank[element->getIndex()] = true; + if (elInRank == mpiRank) + elementInRank[elIndex] = true; else - elementInRank[element->getIndex()] = false; + elementInRank[elIndex] = false; + + partitionMap[elIndex] = elInRank; + + for (int i = 0; i < mesh->getGeo(NEIGH); i++) + if (elInfo->getNeighbour(i) && elInfo->getBoundary(i) == INTERIOR) + elNeighbours[elIndex].push_back(elInfo->getNeighbour(i)->getIndex()); elInfo = stack.traverseNext(elInfo); } diff --git a/AMDiS/src/parallel/MeshPartitioner.h b/AMDiS/src/parallel/MeshPartitioner.h index 1b410280..400df50d 100644 --- a/AMDiS/src/parallel/MeshPartitioner.h +++ b/AMDiS/src/parallel/MeshPartitioner.h @@ -58,9 +58,6 @@ namespace AMDiS { virtual bool partition(map<int, double> &elemWeights, PartitionMode mode = INITIAL) = 0; - /// Creates a map which stores for each element the rank that owns this element. - virtual void getPartitionMap(map<int, int> &partitionMap) = 0; - /// Write partitioner state to disk. void serialize(ostream &out); @@ -72,7 +69,7 @@ namespace AMDiS { mesh = m; } - void useLocalGlobalDofMap(map<DegreeOfFreedom, DegreeOfFreedom> *m) + void setLocalGlobalDofMap(map<DegreeOfFreedom, DegreeOfFreedom> *m) { mapLocalGlobal = m; } @@ -87,6 +84,11 @@ namespace AMDiS { return elementInRank; } + map<int, int>& getPartitionMap() + { + return partitionMap; + } + map<int, vector<int> >& getRecvElements() { return recvElements; @@ -104,9 +106,13 @@ namespace AMDiS { map<DegreeOfFreedom, DegreeOfFreedom> *mapLocalGlobal; + map<int, vector<int> > elNeighbours; + /// Maps to each macro element index if it is in rank's partition or not. map<int, bool> elementInRank; + map<int, int> partitionMap; + map<int, vector<int> > recvElements, sendElements; }; } diff --git a/AMDiS/src/parallel/ParMetisPartitioner.cc b/AMDiS/src/parallel/ParMetisPartitioner.cc index 80a6ef4b..4b65eeef 100644 --- a/AMDiS/src/parallel/ParMetisPartitioner.cc +++ b/AMDiS/src/parallel/ParMetisPartitioner.cc @@ -421,13 +421,18 @@ namespace AMDiS { // === Distribute new partition data. 
=== - return distributePartitioning(&(part[0])); + bool b = distributePartitioning(&(part[0])); + + if (b) + createPartitionMap(); + + return b; } - void ParMetisPartitioner::getPartitionMap(map<int, int> &partitionMap) + void ParMetisPartitioner::createPartitionMap() { - FUNCNAME("ParMetisPartitioner::getPartitionMap()"); + FUNCNAME("ParMetisPartitioner::createPartitionMap()"); partitionMap.clear(); diff --git a/AMDiS/src/parallel/ParMetisPartitioner.h b/AMDiS/src/parallel/ParMetisPartitioner.h index 092b3daa..36db8e25 100644 --- a/AMDiS/src/parallel/ParMetisPartitioner.h +++ b/AMDiS/src/parallel/ParMetisPartitioner.h @@ -179,9 +179,6 @@ namespace AMDiS { bool partition(map<int, double> &elemWeights, PartitionMode mode = INITIAL); - /// \ref MeshPartitioner::getPartitionMap - void getPartitionMap(map<int, int> &partitionMap); - void setItr(float value) { itr = value; @@ -192,6 +189,8 @@ namespace AMDiS { // case, i.e., because there are empty partitions, the function returns false. bool distributePartitioning(int *part); + void createPartitionMap(); + protected: ParMetisMesh *parMetisMesh; diff --git a/AMDiS/src/parallel/ZoltanPartitioner.cc b/AMDiS/src/parallel/ZoltanPartitioner.cc index 11708dda..7ebf6253 100644 --- a/AMDiS/src/parallel/ZoltanPartitioner.cc +++ b/AMDiS/src/parallel/ZoltanPartitioner.cc @@ -9,6 +9,7 @@ // // See also license.opensource.txt in the distribution. + #include <boost/lexical_cast.hpp> #include "parallel/ZoltanPartitioner.h" #include "Traverse.h" @@ -18,20 +19,26 @@ namespace AMDiS { ZoltanPartitioner::ZoltanPartitioner(MPI::Intracomm *comm) : MeshPartitioner(comm), - zoltan(*comm) + zoltan(*comm), + elWeights(NULL) { zoltan.Set_Num_Obj_Fn(ZoltanFunctions::getNumObj, this); zoltan.Set_Obj_List_Fn(ZoltanFunctions::getObjectList, this); zoltan.Set_Num_Geom_Fn(ZoltanFunctions::getNumGeom, this); zoltan.Set_Geom_Multi_Fn(ZoltanFunctions::getGeomMulti, this); + zoltan.Set_Num_Edges_Multi_Fn(ZoltanFunctions::getNumEdgeMulti, this); + zoltan.Set_Edge_List_Multi_Fn(ZoltanFunctions::getEdgeListMulti, this); } - bool ZoltanPartitioner::partition(map<int, double> &elemWeights, + bool ZoltanPartitioner::partition(map<int, double> &weights, PartitionMode mode) { FUNCNAME("ZoltanPartitioner::partition()"); + if (mode != INITIAL) + elWeights = &weights; + using boost::lexical_cast; int changes; @@ -48,8 +55,16 @@ namespace AMDiS { int *export_to_part; zoltan.Set_Param("LB_METHOD", "RCB"); - zoltan.Set_Param("NUM_GLOBAL_PARTS", - lexical_cast<string>(mpiComm->Get_size()).c_str()); + // zoltan.Set_Param("LB_METHOD", "GRAPH"); + // zoltan.Set_Param("LB_APPROACH", "PARTITION"); + zoltan.Set_Param("OBJ_WEIGHT_DIM", "1"); + +#if (DEBUG != 0) + zoltan.Set_Param("DEBUG_LEVEL", "1"); +#else + zoltan.Set_Param("DEBUG_LEVEL", "0"); +#endif + int err = zoltan.LB_Partition(changes, nGid, nLid, nImportEls, import_global_ids, @@ -83,12 +98,15 @@ namespace AMDiS { sendElements[export_procs[i]].push_back(sendElIndex); } } + + createPartitionMap(); } zoltan.LB_Free_Part(&import_global_ids, &import_local_ids, &import_procs, &import_to_part); zoltan.LB_Free_Part(&export_global_ids, &export_local_ids, - &export_procs, &export_to_part); + &export_procs, &export_to_part); + elWeights = NULL; if (err != ZOLTAN_OK) return false; @@ -97,7 +115,7 @@ namespace AMDiS { } - void ZoltanPartitioner::getPartitionMap(map<int, int> &partitionMap) + void ZoltanPartitioner::createPartitionMap() { FUNCNAME("ZoltanPartitioner::getPartitionMap()"); @@ -139,6 +157,7 @@ namespace AMDiS { MPI_INT); // fill 
partitionMap + partitionMap.clear(); for (int i = 0; i < mpiSize; i++) for (int j = 0; j < nPartitionElements[i]; j++) partitionMap[partitionElements[elDist[i] + j]] = i; @@ -179,6 +198,20 @@ namespace AMDiS { if (it->second) { globalId[localCounter] = it->first; localId[localCounter] = localCounter; + + if (wgt_dim > 0) { + TEST_EXIT_DBG(wgt_dim == 1)("Should not happen!\n"); + + if (zoltan->elWeights) { + TEST_EXIT_DBG(zoltan->elWeights->count(it->first)) + ("No element weight for element index %d\n", it->first); + + obj_wgts[localCounter] = static_cast<float>((*zoltan->elWeights)[it->first]); + } else { + obj_wgts[localCounter] = 1.0; + } + } + localCounter++; } } @@ -238,5 +271,69 @@ namespace AMDiS { *ierr = ZOLTAN_OK; } + + + void ZoltanFunctions::getNumEdgeMulti(void *data, + int num_gid_entries, + int num_lid_entries, + int num_obj, + ZOLTAN_ID_PTR global_ids, + ZOLTAN_ID_PTR local_ids, + int *num_edges, + int *ierr) + { + FUNCNAME("ZoltanFunctions::getNumEdgeMulti()"); + + ZoltanPartitioner* zoltan = (ZoltanPartitioner*)data; + + for (int i = 0; i < num_obj; i++) { + TEST_EXIT_DBG(zoltan->elNeighbours.count(global_ids[i]))("Should not happen!\n"); + + num_edges[i] = zoltan->elNeighbours[global_ids[i]].size(); + } + + *ierr = ZOLTAN_OK; + } + + void ZoltanFunctions::getEdgeListMulti(void *data, + int num_gid_entries, + int num_lid_entries, + int num_obj, + ZOLTAN_ID_PTR global_ids, + ZOLTAN_ID_PTR local_ids, + int *num_edges, + ZOLTAN_ID_PTR nbor_global_id, + int *nbor_procs, + int wgt_dim, + float *ewgts, + int *ierr) + { + FUNCNAME("ZoltanFunctions::getEdgeListMulti()"); + + ZoltanPartitioner* zoltan = (ZoltanPartitioner*)data; + + int c = 0; + for (int i = 0; i < num_obj; i++) { + TEST_EXIT_DBG(static_cast<int>(zoltan->elNeighbours[global_ids[i]].size()) == + num_edges[i]) + ("Wrong sizes for global el index %d: %d %d\n", + global_ids[i], + zoltan->elNeighbours[global_ids[i]].size(), + num_edges[i]); + + for (int j = 0; j < num_edges[i]; j++) { + int neighIndex = zoltan->elNeighbours[global_ids[i]][j]; + nbor_global_id[c] = neighIndex; + nbor_procs[c] = zoltan->partitionMap[neighIndex]; + c++; + } + } + + + TEST_EXIT_DBG(wgt_dim == 0)("Edge weights are not handled yet!\n"); + + *ierr = ZOLTAN_OK; + } + } diff --git a/AMDiS/src/parallel/ZoltanPartitioner.h b/AMDiS/src/parallel/ZoltanPartitioner.h index 1558a172..41a6319f 100644 --- a/AMDiS/src/parallel/ZoltanPartitioner.h +++ b/AMDiS/src/parallel/ZoltanPartitioner.h @@ -40,13 +40,19 @@ namespace AMDiS { ~ZoltanPartitioner() {} /// \ref MeshPartitioner::partition - bool partition(map<int, double> &elemWeights, PartitionMode mode = INITIAL); + bool partition(map<int, double> &weights, PartitionMode mode = INITIAL); - /// \ref MeshPartitioner::getPartitionMap - void getPartitionMap(map<int, int> &partitionMap); + protected: + void createPartitionMap(); protected: + /// Data structure used by the Zoltan library. Zoltan zoltan; + + /// Pointer to a map that weights all macro elements in rank's partition. 
+ map<int, double> *elWeights; + + friend class ZoltanFunctions; }; @@ -75,6 +81,28 @@ namespace AMDiS { int num_dim, double *geom_vec, int *ierr); + + static void getNumEdgeMulti(void *data, + int num_gid_entries, + int num_lid_entries, + int num_obj, + ZOLTAN_ID_PTR global_ids, + ZOLTAN_ID_PTR local_ids, + int *num_edges, + int *ierr); + + static void getEdgeListMulti(void *data, + int num_gid_entries, + int num_lid_entries, + int num_obj, + ZOLTAN_ID_PTR global_ids, + ZOLTAN_ID_PTR local_ids, + int *num_edges, + ZOLTAN_ID_PTR nbor_global_id, + int *nbor_procs, + int wgt_dim, + float *ewgts, + int *ierr); }; } -- GitLab
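
Note on the interface change in this patch (not part of the commit itself): before, callers passed a map by reference to the pure-virtual getPartitionMap(map<int,int>&); afterwards the partitioner owns partitionMap, fills it via createPartitionMap() inside partition(), and exposes it through a reference-returning accessor, so MeshDistributor now writes "partitionMap = partitioner->getPartitionMap();". The fragment below is a minimal, self-contained sketch of that calling pattern using a hypothetical StubPartitioner class; it is not AMDiS code, only an illustration of the call sites changed in MeshDistributor.cc.

    #include <iostream>
    #include <map>

    // Hypothetical stand-in for MeshPartitioner after this patch: the partitioner
    // owns partitionMap and fills it in createPartitionMap(), which partition()
    // calls once the element distribution has succeeded.
    class StubPartitioner {
    public:
        bool partition() {
            // ... compute the element -> rank assignment (hard-coded here) ...
            createPartitionMap();
            return true;
        }

        // New-style accessor: returns a reference to the internal map, so callers
        // write  partitionMap = partitioner->getPartitionMap();
        std::map<int, int>& getPartitionMap() { return partitionMap; }

    private:
        void createPartitionMap() {
            partitionMap[0] = 0;   // macro element 0 owned by rank 0
            partitionMap[1] = 1;   // macro element 1 owned by rank 1
        }

        std::map<int, int> partitionMap;
    };

    int main() {
        StubPartitioner partitioner;
        std::map<int, int> partitionMap;

        if (partitioner.partition())
            partitionMap = partitioner.getPartitionMap();   // copy out, as in MeshDistributor.cc

        for (const auto& p : partitionMap)
            std::cout << "element " << p.first << " -> rank " << p.second << "\n";
        return 0;
    }

With this layout only createPartitionMap() remains backend-specific (ParMetisPartitioner and ZoltanPartitioner each implement it), while the accessor lives once in the MeshPartitioner base class instead of being a virtual that every backend had to reimplement.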