diff --git a/AMDiS/src/parallel/ParallelDofMapping.h b/AMDiS/src/parallel/ParallelDofMapping.h
index 02be39fa9855a222972124966956ca2b950a2439..e8ff50e6f0adc883b00c4245a001ccdebb2438f3 100644
--- a/AMDiS/src/parallel/ParallelDofMapping.h
+++ b/AMDiS/src/parallel/ParallelDofMapping.h
@@ -120,7 +120,7 @@ namespace AMDiS {
     /// global index must not be set.
     MultiIndex& operator[](DegreeOfFreedom d)
     {
-      TEST_EXIT_DBG(dofMap.count(d))("Should not happen!\n");
+      TEST_EXIT_DBG(dofMap.count(d))("DOF %d is not in map!\n", d);
 
       return dofMap[d];
     }
diff --git a/AMDiS/src/parallel/PetscProblemStat.cc b/AMDiS/src/parallel/PetscProblemStat.cc
index 72569145d4fc7cab201946b88022164504377f13..251bd0f73b4c3164f9f4a635db05d669ddec9e4e 100644
--- a/AMDiS/src/parallel/PetscProblemStat.cc
+++ b/AMDiS/src/parallel/PetscProblemStat.cc
@@ -86,6 +86,16 @@ namespace AMDiS {
 
     double wtime = MPI::Wtime();
 
+#if 0
+    double vm, rss;
+    processMemUsage(vm, rss);
+    MSG("STAGE 1\n");
+    MSG("My memory usage is VM = %.1f MB RSS = %.1f MB\n", vm, rss);
+    mpi::globalAdd(vm);
+    mpi::globalAdd(rss);
+    MSG("Overall memory usage is VM = %.1f MB RSS = %.1f MB\n", vm, rss);
+#endif
+
     if (createMatrixData) {
       petscSolver->setMeshDistributor(meshDistributor,
                                       meshDistributor->getMpiComm(),
@@ -96,6 +106,15 @@ namespace AMDiS {
 
     petscSolver->fillPetscRhs(rhs);
 
+#if 0
+    processMemUsage(vm, rss);
+    MSG("STAGE 2\n");
+    MSG("My memory usage is VM = %.1f MB RSS = %.1f MB\n", vm, rss);
+    mpi::globalAdd(vm);
+    mpi::globalAdd(rss);
+    MSG("Overall memory usage is VM = %.1f MB RSS = %.1f MB\n", vm, rss);
+#endif
+
     petscSolver->solvePetscMatrix(*solution, adaptInfo);
 
     petscSolver->destroyVectorData();
@@ -103,6 +122,15 @@ namespace AMDiS {
     if (!storeMatrixData)
       petscSolver->destroyMatrixData();
 
+#if 0
+    processMemUsage(vm, rss);
+    MSG("STAGE 3\n");
+    MSG("My memory usage is VM = %.1f MB RSS = %.1f MB\n", vm, rss);
+    mpi::globalAdd(vm);
+    mpi::globalAdd(rss);
+    MSG("Overall memory usage is VM = %.1f MB RSS = %.1f MB\n", vm, rss);
+#endif
+
     INFO(info, 8)("solution of discrete system needed %.5f seconds\n",
                   MPI::Wtime() - wtime);
   }
diff --git a/AMDiS/src/parallel/PetscSolverGlobalMatrix.cc b/AMDiS/src/parallel/PetscSolverGlobalMatrix.cc
index 21a1984205d2b37559fb06f54bdd10de2e4060bb..78146dc3aac96ad5fc38fabd951584c1a1825a81 100644
--- a/AMDiS/src/parallel/PetscSolverGlobalMatrix.cc
+++ b/AMDiS/src/parallel/PetscSolverGlobalMatrix.cc
@@ -450,9 +450,16 @@ namespace AMDiS {
     MatDestroy(&matIntInt);
     KSPDestroy(&kspInterior);
 
-    if (petscSolVec != PETSC_NULL)
+    if (coarseSpaceMap) {
+      MatDestroy(&matCoarseCoarse);
+      MatDestroy(&matCoarseInt);
+      MatDestroy(&matIntCoarse);
+    }
+
+    if (petscSolVec != PETSC_NULL) {
       VecDestroy(&petscSolVec);
-    petscSolVec = PETSC_NULL;
+      petscSolVec = PETSC_NULL;
+    }
   }
@@ -461,6 +468,9 @@ namespace AMDiS {
     FUNCNAME("PetscSolverGlobalMatrix::destroyVectorData()");
 
     VecDestroy(&rhsInterior);
+
+    if (coarseSpaceMap)
+      VecDestroy(&rhsCoarseSpace);
   }
@@ -702,22 +712,26 @@ namespace AMDiS {
       if (rankOnly && !(*interiorMap)[feSpace].isRankDof(dofIt.getDOFIndex()))
        continue;
 
-      // Calculate global row index of the DOF.
-      DegreeOfFreedom globalRowDof =
-        (*interiorMap)[feSpace][dofIt.getDOFIndex()].global;
-      // Get PETSc's mat index of the row DOF.
-      int index = 0;
-      if (interiorMap->isMatIndexFromGlobal())
-        index =
-          interiorMap->getMatIndex(nRowVec, globalRowDof) + rStartInterior;
-      else
-        index =
-          interiorMap->getMatIndex(nRowVec, dofIt.getDOFIndex()) + rStartInterior;
-
-      if (perMap.isPeriodic(feSpace, globalRowDof)) {
-        if (coarseSpaceMap && isCoarseSpace(feSpace, dofIt.getDOFIndex())) {
-          ERROR_EXIT("Periodic coarse space not yet supported!\n");
-        } else {
+      if (coarseSpaceMap && isCoarseSpace(feSpace, dofIt.getDOFIndex())) {
+        TEST_EXIT_DBG(vecCoarse != PETSC_NULL)("Should not happen!\n");
+
+        int index = coarseSpaceMap->getMatIndex(nRowVec, dofIt.getDOFIndex());
+        VecSetValue(vecCoarse, index, *dofIt, ADD_VALUES);
+      } else {
+        // Calculate global row index of the DOF.
+        DegreeOfFreedom globalRowDof =
+          (*interiorMap)[feSpace][dofIt.getDOFIndex()].global;
+
+        // Get PETSc's mat index of the row DOF.
+        int index = 0;
+        if (interiorMap->isMatIndexFromGlobal())
+          index =
+            interiorMap->getMatIndex(nRowVec, globalRowDof) + rStartInterior;
+        else
+          index =
+            interiorMap->getMatIndex(nRowVec, dofIt.getDOFIndex()) + rStartInterior;
+
+        if (perMap.isPeriodic(feSpace, globalRowDof)) {
           std::set<int>& perAsc = perMap.getAssociations(feSpace, globalRowDof);
           double value = *dofIt / (perAsc.size() + 1.0);
           VecSetValue(vecInterior, index, value, ADD_VALUES);
@@ -727,17 +741,10 @@ namespace AMDiS {
            int mappedDof = perMap.map(feSpace, *perIt, globalRowDof);
            int mappedIndex = interiorMap->getMatIndex(nRowVec, mappedDof);
            VecSetValue(vecInterior, mappedIndex, value, ADD_VALUES);
-          }
-        }
-      } else {
-        // The DOF index is not periodic.
-
-        if (coarseSpaceMap && isCoarseSpace(feSpace, dofIt.getDOFIndex())) {
-          TEST_EXIT_DBG(vecCoarse != PETSC_NULL)("Should not happen!\n");
-
-          index = coarseSpaceMap->getMatIndex(nRowVec, dofIt.getDOFIndex());
-          VecSetValue(vecCoarse, index, *dofIt, ADD_VALUES);
-        } else {
+          }
+        } else {
+          // The DOF index is not periodic.
+
           VecSetValue(vecInterior, index, *dofIt, ADD_VALUES);
         }
       }
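The `#if 0` blocks added to `PetscProblemStat.cc` rely on two helpers, `processMemUsage(vm, rss)` and `mpi::globalAdd(...)`. For reference, here is a minimal sketch of what such helpers can look like, assuming a Linux `/proc` filesystem and summation over `MPI_COMM_WORLD`; the names mirror the calls in the diff, but this is not necessarily the AMDiS implementation:

```cpp
#include <fstream>
#include <mpi.h>
#include <unistd.h>

// Report this process's virtual memory size and resident set size in MB.
// Parses /proc/self/statm, whose first two fields are the total program
// size and the resident set size, both counted in pages (Linux only).
void processMemUsage(double& vm, double& rss)
{
  vm = 0.0;
  rss = 0.0;

  std::ifstream statm("/proc/self/statm");
  unsigned long sizePages = 0, residentPages = 0;
  if (!(statm >> sizePages >> residentPages))
    return;

  double pageMb = sysconf(_SC_PAGE_SIZE) / (1024.0 * 1024.0);
  vm = sizePages * pageMb;
  rss = residentPages * pageMb;
}

// In-place sum of a value over all ranks, matching how mpi::globalAdd is
// used above: after the call every rank holds the sum of all local values.
void globalAdd(double& value)
{
  double sum = 0.0;
  MPI_Allreduce(&value, &sum, 1, MPI_DOUBLE, MPI_SUM, MPI_COMM_WORLD);
  value = sum;
}
```

Flipping `#if 0` to `#if 1` then prints per-rank and accumulated memory figures before assembly, after `fillPetscRhs()`, and after the solve, which helps locate where the PETSc data structures peak in memory. Note that the stage 2 and 3 blocks reuse the `vm`/`rss` variables declared inside the stage 1 block, so they compile only when stage 1 is enabled as well.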