From 45e59970d6a3d66678ce8461ae184d146d5538dd Mon Sep 17 00:00:00 2001
From: Thomas Witkowski <thomas.witkowski@gmx.de>
Date: Mon, 16 Apr 2012 19:50:43 +0000
Subject: [PATCH] Added MPI grouping to support multi level FETI-DP.

---
 AMDiS/src/parallel/InteriorBoundary.h |  8 ++++++++
 AMDiS/src/parallel/MeshLevelData.cc   |  6 ++++++
 AMDiS/src/parallel/MeshLevelData.h    | 28 +++++++++++++++++++++++++++
 AMDiS/src/parallel/PetscSolverFeti.cc |  2 +-
 4 files changed, 43 insertions(+), 1 deletion(-)

diff --git a/AMDiS/src/parallel/InteriorBoundary.h b/AMDiS/src/parallel/InteriorBoundary.h
index fa105823..ff081f56 100644
--- a/AMDiS/src/parallel/InteriorBoundary.h
+++ b/AMDiS/src/parallel/InteriorBoundary.h
@@ -212,6 +212,14 @@ namespace AMDiS {
       if (mapIt == bound.boundary.end())
         return;
 
+      if (level > 0) {
+        TEST_EXIT_DBG(levelData)("No mesh level data object defined!\n");
+        TEST_EXIT_DBG(level == 1)("Only 2-level method supported!\n");
+
+        int rankInLevel = levelData->mapRank(mapIt->first, level - 1, level);
+        MSG("rankInLevel %d\n", rankInLevel);
+      }
+
       while (mapIt->second.size() == 0) {
         ++mapIt;
         if (mapIt == bound.boundary.end())
diff --git a/AMDiS/src/parallel/MeshLevelData.cc b/AMDiS/src/parallel/MeshLevelData.cc
index 889b0543..e0170a9d 100644
--- a/AMDiS/src/parallel/MeshLevelData.cc
+++ b/AMDiS/src/parallel/MeshLevelData.cc
@@ -30,6 +30,9 @@ namespace AMDiS {
 
     mpiComms.resize(1);
     mpiComms[0] = MPI::COMM_WORLD;
+
+    mpiGroups.resize(1);
+    mpiGroups[0] = mpiComms[0].Get_group();
   }
 
 
@@ -52,6 +55,9 @@ namespace AMDiS {
 
     mpiComms.resize(2);
     mpiComms[1] = mpiComms[0].Split(domainId, mpiComms[0].Get_rank());
+
+    mpiGroups.resize(2);
+    mpiGroups[1] = mpiComms[1].Get_group();
   }
 
 
diff --git a/AMDiS/src/parallel/MeshLevelData.h b/AMDiS/src/parallel/MeshLevelData.h
index b0a0cf3b..d697e5b5 100644
--- a/AMDiS/src/parallel/MeshLevelData.h
+++ b/AMDiS/src/parallel/MeshLevelData.h
@@ -72,6 +72,32 @@ namespace AMDiS {
       return nLevel;
     }
 
+    MPI::Intracomm& getMpiComm(int level)
+    {
+      TEST_EXIT_DBG(level < nLevel)("Should not happen!\n");
+
+      return mpiComms[level];
+    }
+
+    MPI::Group& getMpiGroup(int level)
+    {
+      TEST_EXIT_DBG(level < nLevel)("Should not happen!\n");
+
+      return mpiGroups[level];
+    }
+
+    int mapRank(int fromRank, int fromLevel, int toLevel)
+    {
+      int toRank = -1;
+
+      MPI::Group::Translate_ranks(mpiGroups[fromLevel], 1, &fromRank,
+                                  mpiGroups[toLevel], &toRank);
+      if (toRank == MPI::UNDEFINED)
+        toRank = -1;
+
+      return toRank;
+    }
+
   protected:
     int nLevel;
 
@@ -80,6 +106,8 @@ namespace AMDiS {
     vector<std::set<int> > levelNeighbours;
 
     vector<MPI::Intracomm> mpiComms;
+
+    vector<MPI::Group> mpiGroups;
   };
 
 }
diff --git a/AMDiS/src/parallel/PetscSolverFeti.cc b/AMDiS/src/parallel/PetscSolverFeti.cc
index 65bcc105..4d2058fb 100644
--- a/AMDiS/src/parallel/PetscSolverFeti.cc
+++ b/AMDiS/src/parallel/PetscSolverFeti.cc
@@ -314,7 +314,7 @@ namespace AMDiS {
     DofIndexSet primals;
     DofContainerSet& vertices =
-      meshDistributor->getBoundaryDofInfo(feSpace).geoDofs[VERTEX];
+      meshDistributor->getBoundaryDofInfo(feSpace, meshLevel).geoDofs[VERTEX];
     TEST_EXIT_DBG(vertices.size())("No primal vertices on this rank!\n");
 
     for (DofContainerSet::iterator it = vertices.begin();
         it != vertices.end(); ++it)
--
GitLab
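
Note (not part of the patch): the new MeshLevelData::mapRank translates a rank between the group of MPI::COMM_WORLD (level 0) and the group of the split sub-communicator (level 1) via MPI::Group::Translate_ranks. The standalone sketch below illustrates that same pattern with the MPI-2 C++ bindings the patch uses; the pairwise split (domainId = rank / 2) and the file name are assumptions chosen only for illustration, not how AMDiS actually assigns subdomains.

// translate_ranks_sketch.cc -- illustrative only, not AMDiS code.
// Split COMM_WORLD into sub-communicators, keep the corresponding MPI
// groups, and translate a world rank into its rank inside the sub-group,
// mirroring the pattern added to MeshLevelData in this patch.
#include <mpi.h>
#include <cstdio>

int main(int argc, char** argv)
{
  MPI::Init(argc, argv);

  int worldRank = MPI::COMM_WORLD.Get_rank();

  // Assumption for this sketch: two consecutive world ranks form one domain.
  int domainId = worldRank / 2;
  MPI::Intracomm domainComm = MPI::COMM_WORLD.Split(domainId, worldRank);

  MPI::Group worldGroup  = MPI::COMM_WORLD.Get_group();
  MPI::Group domainGroup = domainComm.Get_group();

  // Same call as in MeshLevelData::mapRank: translate the world rank into
  // the domain group; MPI::UNDEFINED means the rank is not a member there.
  int rankInDomain = -1;
  MPI::Group::Translate_ranks(worldGroup, 1, &worldRank,
                              domainGroup, &rankInDomain);
  if (rankInDomain == MPI::UNDEFINED)
    rankInDomain = -1;

  std::printf("world rank %d -> domain %d, rank in domain %d\n",
              worldRank, domainId, rankInDomain);

  MPI::Finalize();
  return 0;
}

Run with e.g. "mpirun -np 4 ./translate_ranks_sketch"; each process reports its world rank and its rank within its two-process domain communicator, which is exactly the mapping InteriorBoundary queries via levelData->mapRank for the 2-level FETI-DP setup.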