diff --git a/AMDiS/src/parallel/ElementObjectDatabase.cc b/AMDiS/src/parallel/ElementObjectDatabase.cc
index a2c912355df84520158bd23c51af5b8fcd11db62..66a627049f8e8d7d7f6b62c4753707fe7049ddc2 100644
--- a/AMDiS/src/parallel/ElementObjectDatabase.cc
+++ b/AMDiS/src/parallel/ElementObjectDatabase.cc
@@ -601,7 +601,7 @@ namespace AMDiS {
     for (vector<ElementObjectData>::iterator it = objData->begin();
          it != objData->end(); ++it) {
       int elRank = (*macroElementRankMap)[it->elIndex];
-      if (allRanks || levelData->getLevelRanks(level).count(elRank))
+      // if (allRanks || levelData->getLevelRanks(level).count(elRank))
        owner = std::max(owner, elRank);
     }
diff --git a/AMDiS/src/parallel/InteriorBoundary.cc b/AMDiS/src/parallel/InteriorBoundary.cc
index 2e3125a27ae69cb59ac79c14ed2edad929bc243d..7f1352dbad55d9fefcf0d1f832b1d4f6d47d2f91 100644
--- a/AMDiS/src/parallel/InteriorBoundary.cc
+++ b/AMDiS/src/parallel/InteriorBoundary.cc
@@ -35,8 +35,8 @@ namespace AMDiS {
     Mesh *mesh = elObjDb.getMesh();
     TEST_EXIT_DBG(mesh)("Should not happen!\n");
 
-    int mpiRank = levelData.getMpiComm(0);
-    MPI::Intracomm mpiComm = levelData.getMpiComm(level);
+    MPI::Intracomm mpiComm = MPI::COMM_WORLD; //levelData.getMpiComm(level);
+    int mpiRank = mpiComm.Get_rank();
     std::set<int> levelRanks = levelData.getLevelRanks(level);
 
     // === Create interior boundary data structure. ===
@@ -47,10 +47,14 @@ namespace AMDiS {
     while (elObjDb.iterate(geoIndex)) {
       map<int, ElementObjectData>& objData = elObjDb.getIterateData();
 
+      MSG("TEST BOUNDARY: %d, %d %d\n", mpiRank, objData.count(mpiRank), objData.size());
+
+
       // Test, if this is a boundary object of this rank.
       if (!(objData.count(mpiRank) && objData.size() > 1))
        continue;
 
+      MSG("CREATE BOUNDARY!\n");
 #if 0
       // Test, if the boundary object defines an interior boundary within the
       // ranks of the MPI group. If not, go to next element.
diff --git a/AMDiS/src/parallel/MeshDistributor.cc b/AMDiS/src/parallel/MeshDistributor.cc
index fff980a58a38cf0efbccd2dff3452fc7ee7f63a3..ebe85db859dfd26c2400bdabd8b7487688d5c5b2 100644
--- a/AMDiS/src/parallel/MeshDistributor.cc
+++ b/AMDiS/src/parallel/MeshDistributor.cc
@@ -1545,6 +1545,8 @@ namespace AMDiS {
     if (!createBoundaryDofFlag.isSet(BOUNDARY_SUBOBJ_SORTED))
       return;
 
+    MSG("START CREATE!\n");
+
     int nLevels = levelData.getLevelNumber();
     boundaryDofInfo.resize(nLevels);
 
@@ -1553,6 +1555,8 @@ namespace AMDiS {
 
     for (int level = 0; level < nLevels; level++) {
 
+      MSG("ONE RUN!\n");
+
       // === Clear data. ===
       for (int geo = FACE; geo >= VERTEX; geo--)
        boundaryDofInfo[level][feSpace].geoDofs[static_cast<GeoIndex>(geo)].clear();
diff --git a/AMDiS/src/parallel/ParallelDebug.cc b/AMDiS/src/parallel/ParallelDebug.cc
index 7d01d74dc99aa05ddc2e0fb8bc33c296bcb2671a..c309e5c727c2d7e2d01c023f870102dfc5b9b51b 100644
--- a/AMDiS/src/parallel/ParallelDebug.cc
+++ b/AMDiS/src/parallel/ParallelDebug.cc
@@ -726,6 +726,8 @@ namespace AMDiS {
     if (tmp <= 0 && force == false)
       return;
 
+    MSG("Interior boundary info:\n");
+
     for (InteriorBoundary::iterator it(pdb.intBoundary.own, level);
          !it.end(); ++it) {
       MSG("Rank owned boundary with rank %d: \n", it.getRank());
diff --git a/AMDiS/src/parallel/ParallelDofMapping.cc b/AMDiS/src/parallel/ParallelDofMapping.cc
index 4212489d868f9127d1dc5009821a7c235486bbda..2673338a09580829ca90ab46c855cd73b0c24a87 100644
--- a/AMDiS/src/parallel/ParallelDofMapping.cc
+++ b/AMDiS/src/parallel/ParallelDofMapping.cc
@@ -26,6 +26,11 @@ namespace AMDiS {
     nonRankDofs.clear();
     nonRankDofs.resize(nLevel);
 
+    nRankDofs.clear();
+    nLocalDofs.clear();
+    nOverallDofs.clear();
+    rStartDofs.clear();
+
     nRankDofs.resize(nLevel, 0);
     nLocalDofs.resize(nLevel, 0);
     nOverallDofs.resize(nLevel, 0);
diff --git a/AMDiS/src/parallel/PetscSolverFeti.cc b/AMDiS/src/parallel/PetscSolverFeti.cc
index b7163f260e11139302effd260e063664772767fa..b9013ed23ca6539a1332141ac5b788b1ce8c67f4 100644
--- a/AMDiS/src/parallel/PetscSolverFeti.cc
+++ b/AMDiS/src/parallel/PetscSolverFeti.cc
@@ -353,6 +353,8 @@ namespace AMDiS {
     DofContainerSet& vertices =
       meshDistributor->getBoundaryDofInfo(feSpace, meshLevel).geoDofs[VERTEX];
 
+    MSG("SIZE IF VERTEX: %d\n", vertices.size());
+
     DofIndexSet primals;
     for (DofContainerSet::iterator it = vertices.begin();
          it != vertices.end(); ++it)
diff --git a/test/mpi/src/test0002.cc b/test/mpi/src/test0002.cc
index e373ace6812bdeb56f9fc6b3e2c51ac1718a572f..c245dc7aa42f2cf9f4f73fa7b668527966e96b57 100644
--- a/test/mpi/src/test0002.cc
+++ b/test/mpi/src/test0002.cc
@@ -35,6 +35,8 @@ BOOST_AUTO_TEST_CASE(amdis_mpi_feti)
   feti.initialize(feSpaces);
   feti.createFetiData();
 
+  ParallelDebug::printBoundaryInfo(*meshDist, 0, true);
+
   BOOST_REQUIRE(feti.getNumberOfPrimals() == 21);
 
   BOOST_REQUIRE(feti.getNumberOfDuals() == 48);
 
@@ -52,12 +54,19 @@ BOOST_AUTO_TEST_CASE(amdis_mpi_feti)
   testData.push_back(dofMap[feSpace].nOverallDofs[0]);
 
   BOOST_REQUIRE(data.size() - 1 == testData.size());
-  BOOST_REQUIRE(equal(data.begin() + 1, data.end(), testData.begin()));
+  for (int i = 0; i < 5; i++) {
+    MSG("TEST FOR %d: %f %f\n", i, data[i + 1], testData[i]);
+    if (data[i + 1] != testData[i]) {
+      MSG("Wrong data in column %d: %f %f\n", i, data[i + 1], testData[i]);
+      BOOST_ERROR("");
+    }
+  }
 
   ellipt.getRefinementManager()->globalRefine(mesh, 6);
   meshDist->checkMeshChange();
   feti.createFetiData();
+  ParallelDebug::printBoundaryInfo(*meshDist, 0, true);
 
   sheet.read("data/data0002b");
 
@@ -70,7 +79,15 @@ BOOST_AUTO_TEST_CASE(amdis_mpi_feti)
   testData.push_back(dofMap[feSpace].nOverallDofs[0]);
 
   BOOST_REQUIRE(data.size() - 1 == testData.size());
-  BOOST_REQUIRE(equal(data.begin() + 1, data.end(), testData.begin()));
+  for (int i = 0; i < 5; i++) {
+    MSG("TEST FOR %d: %f %f\n", i, data[i + 1], testData[i]);
+    if (data[i + 1] != testData[i]) {
+      MSG("Wrong data in column %d: %f %f\n", i, data[i + 1], testData[i]);
+      BOOST_ERROR("");
+    }
+  }
+
+  // BOOST_REQUIRE(equal(data.begin() + 1, data.end(), testData.begin()));
 }