From 6fd4d8523c9f747a1d881400a4684df677d257c0 Mon Sep 17 00:00:00 2001
From: Thomas Witkowski <thomas.witkowski@gmx.de>
Date: Thu, 3 May 2012 07:56:08 +0000
Subject: [PATCH] Save juropa data before shutdown.

---
 AMDiS/src/parallel/ElementObjectDatabase.cc |  2 +-
 AMDiS/src/parallel/InteriorBoundary.cc      |  8 ++++++--
 AMDiS/src/parallel/MeshDistributor.cc       |  4 ++++
 AMDiS/src/parallel/ParallelDebug.cc         |  2 ++
 AMDiS/src/parallel/ParallelDofMapping.cc    |  5 +++++
 AMDiS/src/parallel/PetscSolverFeti.cc       |  2 ++
 test/mpi/src/test0002.cc                    | 21 +++++++++++++++++++--
 7 files changed, 39 insertions(+), 5 deletions(-)

diff --git a/AMDiS/src/parallel/ElementObjectDatabase.cc b/AMDiS/src/parallel/ElementObjectDatabase.cc
index a2c91235..66a62704 100644
--- a/AMDiS/src/parallel/ElementObjectDatabase.cc
+++ b/AMDiS/src/parallel/ElementObjectDatabase.cc
@@ -601,7 +601,7 @@ namespace AMDiS {
     for (vector<ElementObjectData>::iterator it = objData->begin();
          it != objData->end(); ++it) {
       int elRank = (*macroElementRankMap)[it->elIndex];
-      if (allRanks || levelData->getLevelRanks(level).count(elRank))
+      // if (allRanks || levelData->getLevelRanks(level).count(elRank))
        owner = std::max(owner, elRank);
     }

diff --git a/AMDiS/src/parallel/InteriorBoundary.cc b/AMDiS/src/parallel/InteriorBoundary.cc
index 2e3125a2..7f1352db 100644
--- a/AMDiS/src/parallel/InteriorBoundary.cc
+++ b/AMDiS/src/parallel/InteriorBoundary.cc
@@ -35,8 +35,8 @@ namespace AMDiS {
     Mesh *mesh = elObjDb.getMesh();
     TEST_EXIT_DBG(mesh)("Should not happen!\n");

-    int mpiRank = levelData.getMpiComm(0);
-    MPI::Intracomm mpiComm = levelData.getMpiComm(level);
+    MPI::Intracomm mpiComm = MPI::COMM_WORLD; //levelData.getMpiComm(level);
+    int mpiRank = mpiComm.Get_rank();
     std::set<int> levelRanks = levelData.getLevelRanks(level);

     // === Create interior boundary data structure. ===
@@ -47,10 +47,14 @@
     while (elObjDb.iterate(geoIndex)) {
       map<int, ElementObjectData>& objData = elObjDb.getIterateData();

+      MSG("TEST BOUNDARY: %d, %d %d\n", mpiRank, objData.count(mpiRank), objData.size());
+
+
       // Test, if this is a boundary object of this rank.
       if (!(objData.count(mpiRank) && objData.size() > 1))
        continue;

+      MSG("CREATE BOUNDARY!\n");
 #if 0
       // Test, if the boundary object defines an interior boundary within the
       // ranks of the MPI group. If not, go to next element.
diff --git a/AMDiS/src/parallel/MeshDistributor.cc b/AMDiS/src/parallel/MeshDistributor.cc
index fff980a5..ebe85db8 100644
--- a/AMDiS/src/parallel/MeshDistributor.cc
+++ b/AMDiS/src/parallel/MeshDistributor.cc
@@ -1545,6 +1545,8 @@ namespace AMDiS {
     if (!createBoundaryDofFlag.isSet(BOUNDARY_SUBOBJ_SORTED))
       return;

+    MSG("START CREATE!\n");
+
     int nLevels = levelData.getLevelNumber();
     boundaryDofInfo.resize(nLevels);

@@ -1553,6 +1555,8 @@

     for (int level = 0; level < nLevels; level++) {

+      MSG("ONE RUN!\n");
+
       // === Clear data. ===
       for (int geo = FACE; geo >= VERTEX; geo--)
        boundaryDofInfo[level][feSpace].geoDofs[static_cast<GeoIndex>(geo)].clear();
diff --git a/AMDiS/src/parallel/ParallelDebug.cc b/AMDiS/src/parallel/ParallelDebug.cc
index 7d01d74d..c309e5c7 100644
--- a/AMDiS/src/parallel/ParallelDebug.cc
+++ b/AMDiS/src/parallel/ParallelDebug.cc
@@ -726,6 +726,8 @@ namespace AMDiS {
     if (tmp <= 0 && force == false)
       return;

+    MSG("Interior boundary info:\n");
+
     for (InteriorBoundary::iterator it(pdb.intBoundary.own, level);
          !it.end(); ++it) {
       MSG("Rank owned boundary with rank %d: \n", it.getRank());
diff --git a/AMDiS/src/parallel/ParallelDofMapping.cc b/AMDiS/src/parallel/ParallelDofMapping.cc
index 4212489d..2673338a 100644
--- a/AMDiS/src/parallel/ParallelDofMapping.cc
+++ b/AMDiS/src/parallel/ParallelDofMapping.cc
@@ -26,6 +26,11 @@ namespace AMDiS {
     nonRankDofs.clear();
     nonRankDofs.resize(nLevel);

+    nRankDofs.clear();
+    nLocalDofs.clear();
+    nOverallDofs.clear();
+    rStartDofs.clear();
+
     nRankDofs.resize(nLevel, 0);
     nLocalDofs.resize(nLevel, 0);
     nOverallDofs.resize(nLevel, 0);
diff --git a/AMDiS/src/parallel/PetscSolverFeti.cc b/AMDiS/src/parallel/PetscSolverFeti.cc
index b7163f26..b9013ed2 100644
--- a/AMDiS/src/parallel/PetscSolverFeti.cc
+++ b/AMDiS/src/parallel/PetscSolverFeti.cc
@@ -353,6 +353,8 @@ namespace AMDiS {
     DofContainerSet& vertices =
       meshDistributor->getBoundaryDofInfo(feSpace, meshLevel).geoDofs[VERTEX];

+    MSG("SIZE OF VERTEX: %d\n", vertices.size());
+
     DofIndexSet primals;
     for (DofContainerSet::iterator it = vertices.begin();
          it != vertices.end(); ++it)
diff --git a/test/mpi/src/test0002.cc b/test/mpi/src/test0002.cc
index e373ace6..c245dc7a 100644
--- a/test/mpi/src/test0002.cc
+++ b/test/mpi/src/test0002.cc
@@ -35,6 +35,8 @@ BOOST_AUTO_TEST_CASE(amdis_mpi_feti)
   feti.initialize(feSpaces);
   feti.createFetiData();

+  ParallelDebug::printBoundaryInfo(*meshDist, 0, true);
+
   BOOST_REQUIRE(feti.getNumberOfPrimals() == 21);
   BOOST_REQUIRE(feti.getNumberOfDuals() == 48);

@@ -52,12 +54,19 @@ BOOST_AUTO_TEST_CASE(amdis_mpi_feti)
   testData.push_back(dofMap[feSpace].nOverallDofs[0]);

   BOOST_REQUIRE(data.size() - 1 == testData.size());
-  BOOST_REQUIRE(equal(data.begin() + 1, data.end(), testData.begin()));
+  for (int i = 0; i < 5; i++) {
+    MSG("TEST FOR %d: %f %f\n", i, data[i + 1], testData[i]);
+    if (data[i + 1] != testData[i]) {
+      MSG("Wrong data in column %d: %f %f\n", i, data[i + 1], testData[i]);
+      BOOST_ERROR("");
+    }
+  }

   ellipt.getRefinementManager()->globalRefine(mesh, 6);
   meshDist->checkMeshChange();
   feti.createFetiData();
+  ParallelDebug::printBoundaryInfo(*meshDist, 0, true);

   sheet.read("data/data0002b");

@@ -70,7 +79,15 @@ BOOST_AUTO_TEST_CASE(amdis_mpi_feti)
   testData.push_back(dofMap[feSpace].nOverallDofs[0]);

   BOOST_REQUIRE(data.size() - 1 == testData.size());
-  BOOST_REQUIRE(equal(data.begin() + 1, data.end(), testData.begin()));
+  for (int i = 0; i < 5; i++) {
+    MSG("TEST FOR %d: %f %f\n", i, data[i + 1], testData[i]);
+    if (data[i + 1] != testData[i]) {
+      MSG("Wrong data in column %d: %f %f\n", i, data[i + 1], testData[i]);
+      BOOST_ERROR("");
+    }
+  }
+
+  // BOOST_REQUIRE(equal(data.begin() + 1, data.end(), testData.begin()));
 }
--
GitLab
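
Editor's note (not part of the commit above): in test/mpi/src/test0002.cc the patch replaces the single aggregate assertion BOOST_REQUIRE(equal(data.begin() + 1, data.end(), testData.begin())) with an element-wise loop, so a failing run logs which column differs and the two values involved instead of only reporting that the comparison failed. The standalone C++ sketch below illustrates that comparison-with-diagnostics pattern outside of AMDiS and Boost.Test; the vectors, the values, and the compareColumns helper are illustrative assumptions and do not exist in the repository.

#include <cstdio>
#include <vector>

// Hypothetical helper mirroring the loop added in test0002.cc: compare the
// computed values (data, whose payload starts at index 1, as with
// data.begin() + 1 in the test) against the reference values (testData)
// column by column, log every comparison, and report how many columns differ.
static int compareColumns(const std::vector<double>& data,
                          const std::vector<double>& testData)
{
  int nErrors = 0;
  for (std::size_t i = 0; i + 1 < data.size() && i < testData.size(); ++i) {
    std::printf("TEST FOR %zu: %f %f\n", i, data[i + 1], testData[i]);
    if (data[i + 1] != testData[i]) {   // exact comparison, as in the patch
      std::printf("Wrong data in column %zu: %f %f\n", i, data[i + 1], testData[i]);
      ++nErrors;
    }
  }
  return nErrors;
}

int main()
{
  // Purely illustrative values; data[0] stands in for the leading entry that
  // the test skips. Column 3 is deliberately wrong to show the diagnostics.
  std::vector<double> data     = {0.0, 1.0, 2.0, 3.0, 4.5, 5.0};
  std::vector<double> testData = {1.0, 2.0, 3.0, 4.0, 5.0};

  return compareColumns(data, testData) == 0 ? 0 : 1;
}

The sketch derives its bounds from the vector sizes rather than the fixed count of 5 used in the test, but the reporting structure is the same: every column is printed, and a mismatch produces an explicit "Wrong data in column" message before the overall failure is signalled.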