diff --git a/AMDiS/src/parallel/MatrixNnzStructure.cc b/AMDiS/src/parallel/MatrixNnzStructure.cc
index 382267c84edb6cf92f395e0ededda45cf642aa03..b47294c5a74fea6304dee5b8b1622be64022e49d 100644
--- a/AMDiS/src/parallel/MatrixNnzStructure.cc
+++ b/AMDiS/src/parallel/MatrixNnzStructure.cc
@@ -77,6 +77,8 @@ namespace AMDiS {
       if ((*mat)[i][j])
 	feSpace = (*mat)[i][j]->getRowFeSpace();
 
+    TEST_EXIT_DBG(feSpace)("No FE space found!\n");
+
     for (DofComm::Iterator it(rowDofMap.getDofComm().getRecvDofs(), feSpace);
 	 !it.end(); it.nextRank()) {
       sendMatrixEntry[it.getRank()].resize(0);
diff --git a/AMDiS/src/parallel/MeshDistributor.cc b/AMDiS/src/parallel/MeshDistributor.cc
index 8dfafe07e4370229aaa27084df09e330dc1e163c..1c7727a4b48dea5a309997e7594ed7b1febabf16 100644
--- a/AMDiS/src/parallel/MeshDistributor.cc
+++ b/AMDiS/src/parallel/MeshDistributor.cc
@@ -1721,8 +1721,13 @@ namespace AMDiS {
     ParallelDebug::writeDebugFile(feSpaces[feSpaces.size() - 1], dofMap,
 				  debugOutputDir + "mpi-dbg", "dat");
     debug::testSortedDofs(mesh, elMap);
-    ParallelDebug::testCommonDofs(*this, true);
-    ParallelDebug::testGlobalIndexByCoords(*this);
+
+    int test = 0;
+    Parameters::get("parallel->remove periodic boundary", test);
+    if (test == 0) {
+      ParallelDebug::testCommonDofs(*this, true);
+      ParallelDebug::testGlobalIndexByCoords(*this);
+    }
 #else
     for (unsigned int i = 0; i < feSpaces.size(); i++)
       MSG("FE space %d: nRankDofs = %d nOverallDofs = %d\n",
diff --git a/AMDiS/src/parallel/ParallelDofMapping.h b/AMDiS/src/parallel/ParallelDofMapping.h
index 7da23a1a31c5e2fedee5248d498417d9af96a2eb..c5b6681495f7921ad08f169879800478379602e6 100644
--- a/AMDiS/src/parallel/ParallelDofMapping.h
+++ b/AMDiS/src/parallel/ParallelDofMapping.h
@@ -300,6 +300,7 @@ namespace AMDiS {
   public:
     ParallelDofMapping()
       : levelData(NULL),
+	dofComm(NULL),
 	hasNonLocalDofs(false),
 	needMatIndex(false),
 	needMatIndexFromGlobal(false),
@@ -346,7 +347,9 @@ namespace AMDiS {
     /// Returns the DOF communicator.
     DofComm& getDofComm()
     {
-      TEST_EXIT_DBG(dofComm);
+      FUNCNAME("ParallelDofMapping::getDofComm()");
+
+      TEST_EXIT_DBG(dofComm)("No DOF communicator object defined!\n");
       return *dofComm;
     }
 
diff --git a/AMDiS/src/parallel/PetscSolverFeti.cc b/AMDiS/src/parallel/PetscSolverFeti.cc
index b10946663cb195c8e2a8e0f4ab9f923fdb33fa09..c0a42f67acec77e8564c876b973a2d6c0b3cc7cd 100644
--- a/AMDiS/src/parallel/PetscSolverFeti.cc
+++ b/AMDiS/src/parallel/PetscSolverFeti.cc
@@ -346,7 +346,9 @@ namespace AMDiS {
     if (fetiPreconditioner != FETI_NONE)
       interiorDofMap.setMpiComm(levelData.getMpiComm(meshLevel), meshLevel);
 
-    if (meshLevel > 0)
+    if (meshLevel == 0)
+      localDofMap.setDofComm(meshDistributor->getDofComm());
+    else
       localDofMap.setDofComm(meshDistributor->getDofCommSd());
 
     for (unsigned int i = 0; i < meshDistributor->getFeSpaces().size(); i++) {
diff --git a/AMDiS/src/parallel/PetscSolverGlobalMatrix.cc b/AMDiS/src/parallel/PetscSolverGlobalMatrix.cc
index aa4a7847cbd6f602ba443cd7dccdd2068588db3f..91681988d6da70e003ef26f612d6b83be5bb43ef 100644
--- a/AMDiS/src/parallel/PetscSolverGlobalMatrix.cc
+++ b/AMDiS/src/parallel/PetscSolverGlobalMatrix.cc
@@ -123,6 +123,8 @@ namespace AMDiS {
   void PetscSolverGlobalMatrix::fillPetscMatrixWithCoarseSpace(Matrix<DOFMatrix*> *mat)
   {
     FUNCNAME("PetscSolverGlobalMatrix::fillPetscMatrixWithCoarseSpace()");
+
+    TEST_EXIT_DBG(interiorMap)("Should not happen!\n");
 
     vector<const FiniteElemSpace*> feSpaces = getFeSpaces(mat);