Commit 90c1f562 authored by Thomas Witkowski

Fixed a small bug related to NNZ computations in the FETI-DP context.

parent bcdca133
...
@@ -77,6 +77,8 @@ namespace AMDiS {
       if ((*mat)[i][j])
         feSpace = (*mat)[i][j]->getRowFeSpace();
+    TEST_EXIT_DBG(feSpace)("No FE space found!\n");
+
     for (DofComm::Iterator it(rowDofMap.getDofComm().getRecvDofs(), feSpace);
          !it.end(); it.nextRank()) {
       sendMatrixEntry[it.getRank()].resize(0);
...
...
@@ -1721,8 +1721,13 @@ namespace AMDiS {
     ParallelDebug::writeDebugFile(feSpaces[feSpaces.size() - 1], dofMap,
                                   debugOutputDir + "mpi-dbg", "dat");
     debug::testSortedDofs(mesh, elMap);
-    ParallelDebug::testCommonDofs(*this, true);
-    ParallelDebug::testGlobalIndexByCoords(*this);
+
+    int test = 0;
+    Parameters::get("parallel->remove periodic boundary", test);
+    if (test == 0) {
+      ParallelDebug::testCommonDofs(*this, true);
+      ParallelDebug::testGlobalIndexByCoords(*this);
+    }
 #else
     for (unsigned int i = 0; i < feSpaces.size(); i++)
       MSG("FE space %d: nRankDofs = %d nOverallDofs = %d\n",
...
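The hunk above gates two parallel consistency checks behind the init-file parameter "parallel->remove periodic boundary". A minimal sketch of that gating pattern outside of AMDiS, where readParameter() is a hypothetical stand-in for Parameters::get() and the two test functions are stubs for the ParallelDebug checks, might look like this:

// Minimal sketch of the gating pattern above (not AMDiS code).
#include <iostream>
#include <map>
#include <string>

static std::map<std::string, int> initFile = {
  { "parallel->remove periodic boundary", 0 }   // assumed default
};

// Leaves 'value' unchanged if the key is not present, as an
// init-file lookup would.
void readParameter(const std::string& key, int& value)
{
  std::map<std::string, int>::const_iterator it = initFile.find(key);
  if (it != initFile.end())
    value = it->second;
}

void testCommonDofs()          { std::cout << "testCommonDofs\n"; }
void testGlobalIndexByCoords() { std::cout << "testGlobalIndexByCoords\n"; }

int main()
{
  int test = 0;
  readParameter("parallel->remove periodic boundary", test);

  // Run the consistency checks only if periodic boundaries are kept.
  if (test == 0) {
    testCommonDofs();
    testGlobalIndexByCoords();
  }
  return 0;
}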
...
@@ -300,6 +300,7 @@ namespace AMDiS {
   public:
     ParallelDofMapping()
       : levelData(NULL),
+        dofComm(NULL),
         hasNonLocalDofs(false),
         needMatIndex(false),
         needMatIndexFromGlobal(false),
...
@@ -346,7 +347,9 @@ namespace AMDiS {
     /// Returns the DOF communicator.
     DofComm& getDofComm()
     {
-      TEST_EXIT_DBG(dofComm);
+      FUNCNAME("ParallelDofMapping::getDofComm()");
+
+      TEST_EXIT_DBG(dofComm)("No DOF communicator object defined!\n");
       return *dofComm;
     }
...
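Together with the dofComm(NULL) initializer added in the previous hunk, getDofComm() now fails with a clear message in debug builds when no DOF communicator has been set. A minimal sketch of this guarded-accessor pattern outside of AMDiS, with assert() in place of TEST_EXIT_DBG and hypothetical stub types, could read:

// Minimal sketch of the guarded-accessor pattern above (not AMDiS code):
// the pointer member starts as NULL and the accessor aborts with a
// message instead of dereferencing an unset pointer.
#include <cassert>
#include <cstddef>

struct DofCommStub {};

class MappingStub
{
public:
  MappingStub() : dofComm(NULL) {}

  void setDofComm(DofCommStub& dc) { dofComm = &dc; }

  DofCommStub& getDofComm()
  {
    // Mirrors: TEST_EXIT_DBG(dofComm)("No DOF communicator object defined!\n");
    assert(dofComm && "No DOF communicator object defined!");
    return *dofComm;
  }

private:
  DofCommStub* dofComm;
};

int main()
{
  DofCommStub dc;
  MappingStub mapping;
  mapping.setDofComm(dc);    // without this call, getDofComm() asserts
  DofCommStub& ref = mapping.getDofComm();
  (void) ref;
  return 0;
}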
...
@@ -346,7 +346,9 @@ namespace AMDiS {
     if (fetiPreconditioner != FETI_NONE)
       interiorDofMap.setMpiComm(levelData.getMpiComm(meshLevel), meshLevel);
 
-    if (meshLevel > 0)
+    if (meshLevel == 0)
+      localDofMap.setDofComm(meshDistributor->getDofComm());
+    else
       localDofMap.setDofComm(meshDistributor->getDofCommSd());
 
     for (unsigned int i = 0; i < meshDistributor->getFeSpaces().size(); i++) {
...
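The hunk above changes which DOF communicator the local DOF mapping is attached to: the old code only set a communicator for meshLevel > 0, while the new code uses the standard communicator on level 0 and the subdomain communicator (getDofCommSd) on higher levels. A standalone sketch of that selection logic, with stub names replacing the AMDiS classes, might look like this:

// Standalone sketch of the corrected selection logic above (not AMDiS
// code); all names are hypothetical stand-ins.
#include <iostream>

struct DofCommStub { const char* name; };

static DofCommStub standardComm  = { "getDofComm()" };
static DofCommStub subdomainComm = { "getDofCommSd()" };

// Returns the communicator the local DOF mapping should be attached to.
DofCommStub& selectDofComm(int meshLevel)
{
  if (meshLevel == 0)
    return standardComm;
  else
    return subdomainComm;
}

int main()
{
  std::cout << "meshLevel 0 -> " << selectDofComm(0).name << "\n";
  std::cout << "meshLevel 1 -> " << selectDofComm(1).name << "\n";
  return 0;
}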
...
@@ -123,6 +123,8 @@ namespace AMDiS {
   void PetscSolverGlobalMatrix::fillPetscMatrixWithCoarseSpace(Matrix<DOFMatrix*> *mat)
   {
     FUNCNAME("PetscSolverGlobalMatrix::fillPetscMatrixWithCoarseSpace()");
+
+    TEST_EXIT_DBG(interiorMap)("Should not happen!\n");
 
     vector<const FiniteElemSpace*> feSpaces = getFeSpaces(mat);
...