From ef9215bc963bd52792b94ec298bba694ecfd97d1 Mon Sep 17 00:00:00 2001
From: Thomas Witkowski <thomas.witkowski@gmx.de>
Date: Wed, 28 Mar 2012 18:43:52 +0000
Subject: [PATCH] Fixed some small issues for FETI-DP.

---
 AMDiS/src/parallel/ParallelDofMapping.cc | 19 ++++++
 AMDiS/src/parallel/ParallelDofMapping.h  |  4 ++
 AMDiS/src/parallel/PetscProblemStat.cc   | 28 --------
 AMDiS/src/parallel/PetscSolverFeti.cc    | 86 +++++++++++++++---------
 4 files changed, 79 insertions(+), 58 deletions(-)

diff --git a/AMDiS/src/parallel/ParallelDofMapping.cc b/AMDiS/src/parallel/ParallelDofMapping.cc
index 60d7041e..9b30b20c 100644
--- a/AMDiS/src/parallel/ParallelDofMapping.cc
+++ b/AMDiS/src/parallel/ParallelDofMapping.cc
@@ -20,7 +20,9 @@ namespace AMDiS {
   void FeSpaceDofMap::clear()
   {
     dofMap.clear();
+    nonRankDofs.clear();
     nRankDofs = 0;
+    nLocalDofs = 0;
     nOverallDofs = 0;
     rStartDofs = 0;
   }
@@ -137,6 +139,23 @@ namespace AMDiS {
   }
 
 
+  void ParallelDofMapping::clear()
+  {
+    FUNCNAME("ParallelDofMapping::clear()");
+
+    for (std::set<const FiniteElemSpace*>::iterator it = feSpacesUnique.begin();
+         it != feSpacesUnique.end(); ++it)
+      data[*it].clear();
+
+    nRankDofs = -1;
+    nLocalDofs = -1;
+    nOverallDofs = -1;
+    rStartDofs = -1;
+
+    dofToMatIndex.clear();
+  }
+
+
   void ParallelDofMapping::setDofComm(DofComm &pSend, DofComm &pRecv)
   {
     FUNCNAME("ParallelDofMapping::setDofComm()");
diff --git a/AMDiS/src/parallel/ParallelDofMapping.h b/AMDiS/src/parallel/ParallelDofMapping.h
index 0d58e252..9f7fa3fe 100644
--- a/AMDiS/src/parallel/ParallelDofMapping.h
+++ b/AMDiS/src/parallel/ParallelDofMapping.h
@@ -273,6 +273,7 @@ namespace AMDiS {
       hasNonLocalDofs(false),
       feSpaces(0),
       nRankDofs(-1),
+      nLocalDofs(-1),
       nOverallDofs(-1),
       rStartDofs(-1)
     {}
@@ -291,6 +292,9 @@ namespace AMDiS {
              vector<const FiniteElemSpace*> &fe,
              bool needGlobalMapping,
              bool bNonLocalDofs);
+
+    /// Clear all data.
+    void clear();
 
     /// Set the DOF communicator objects that are required to exchange information
     /// about DOFs that are on interior boundaries.
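
Note: the new ParallelDofMapping::clear() resets every per-FE-space map and
puts the global counters back to -1, the same sentinel value the constructor
uses, so a mapping that has been cleared but not yet rebuilt via update() is
distinguishable from a legitimately empty one. A minimal sketch of this
reset-and-rebuild pattern follows; it is a standalone illustration only, and
the class and member names below are invented, not AMDiS API:

    #include <cassert>
    #include <map>

    class DofMapSketch
    {
    public:
      DofMapSketch() : nRankDofs(-1) {}

      // Drop all DOFs and fall back to the sentinel, mirroring how
      // ParallelDofMapping::clear() resets its counters to -1.
      void clear()
      {
        dofMap.clear();
        nRankDofs = -1;
      }

      void insertRankDof(int localDof, int globalIndex)
      {
        dofMap[localDof] = globalIndex;
      }

      // Recompute the counters; must run after clear() and before any query.
      void update()
      {
        nRankDofs = static_cast<int>(dofMap.size());
      }

      int getRankDofs() const
      {
        assert(nRankDofs >= 0 && "clear() was not followed by update()");
        return nRankDofs;
      }

    private:
      std::map<int, int> dofMap; // local DOF -> global matrix index
      int nRankDofs;
    };
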
diff --git a/AMDiS/src/parallel/PetscProblemStat.cc b/AMDiS/src/parallel/PetscProblemStat.cc
index a2afb7d5..fffe72b1 100644
--- a/AMDiS/src/parallel/PetscProblemStat.cc
+++ b/AMDiS/src/parallel/PetscProblemStat.cc
@@ -86,16 +86,6 @@ namespace AMDiS {
 
     double wtime = MPI::Wtime();
 
-#if 0
-    double vm, rss;
-    processMemUsage(vm, rss);
-    MSG("STAGE 1\n");
-    MSG("My memory usage is VM = %.1f MB  RSS = %.1f MB\n", vm, rss);
-    mpi::globalAdd(vm);
-    mpi::globalAdd(rss);
-    MSG("Overall memory usage is VM = %.1f MB  RSS = %.1f MB\n", vm, rss);
-#endif
-
     if (createMatrixData) {
       petscSolver->setMeshDistributor(meshDistributor);
       petscSolver->fillPetscMatrix(systemMatrix);
@@ -103,29 +93,11 @@ namespace AMDiS {
 
     petscSolver->fillPetscRhs(rhs);
 
-#if 0
-    processMemUsage(vm, rss);
-    MSG("STAGE 2\n");
-    MSG("My memory usage is VM = %.1f MB  RSS = %.1f MB\n", vm, rss);
-    mpi::globalAdd(vm);
-    mpi::globalAdd(rss);
-    MSG("Overall memory usage is VM = %.1f MB  RSS = %.1f MB\n", vm, rss);
-#endif
-
     petscSolver->solvePetscMatrix(*solution, adaptInfo);
 
     if (!storeMatrixData)
       petscSolver->destroyMatrixData();
 
-#if 0
-    processMemUsage(vm, rss);
-    MSG("STAGE 3\n");
-    MSG("My memory usage is VM = %.1f MB  RSS = %.1f MB\n", vm, rss);
-    mpi::globalAdd(vm);
-    mpi::globalAdd(rss);
-    MSG("Overall memory usage is VM = %.1f MB  RSS = %.1f MB\n", vm, rss);
-#endif
-
     INFO(info, 8)("solution of discrete system needed %.5f seconds\n",
                   MPI::Wtime() - wtime);
   }
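
Note: the PetscSolverFeti.cc hunks below give every sub-solver its own PETSc
options prefix (solver_sp_ becomes schur_primal_, solver_feti_ becomes feti_,
and so on) and set an explicit default KSP type in code. Since
KSPSetFromOptions() runs after the defaults, each solver stays fully
configurable at run time under its own namespace. A minimal sketch of the
pattern follows; createPrefixedKsp is a hypothetical helper, and the
KSPSetOperators() signature with the MatStructure flag is the PETSc 3.x-era
API this patch uses:

    #include <petscksp.h>

    // Create a KSP whose options all live under a private prefix.
    // Error checking omitted for brevity.
    PetscErrorCode createPrefixedKsp(MPI_Comm comm, Mat A,
                                     const char *prefix, KSP *ksp)
    {
      KSPCreate(comm, ksp);
      KSPSetOperators(*ksp, A, A, SAME_NONZERO_PATTERN);
      KSPSetOptionsPrefix(*ksp, prefix); // e.g. "schur_primal_"
      KSPSetType(*ksp, KSPGMRES);        // default chosen in code ...
      KSPSetFromOptions(*ksp);           // ... overridable via -<prefix>ksp_*
      return 0;
    }

With the prefix "schur_primal_", for example, -schur_primal_ksp_type fgmres
or -schur_primal_ksp_monitor affects only the coarse-space solver and leaves
the outer FETI iteration untouched.
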
diff --git a/AMDiS/src/parallel/PetscSolverFeti.cc b/AMDiS/src/parallel/PetscSolverFeti.cc
index f2afc4f0..f4d4a4e0 100644
--- a/AMDiS/src/parallel/PetscSolverFeti.cc
+++ b/AMDiS/src/parallel/PetscSolverFeti.cc
@@ -218,7 +218,13 @@ namespace AMDiS {
     TEST_EXIT(meshDistributor->getFeSpace()->getBasisFcts()->getDegree() == 1)
       ("Works for linear basis functions only!\n");
-
+
+    primalDofMap.clear();
+    dualDofMap.clear();
+    lagrangeMap.clear();
+    localDofMap.clear();
+    interiorDofMap.clear();
+
     for (unsigned int i = 0; i < meshDistributor->getFeSpaces().size(); i++) {
       const FiniteElemSpace *feSpace = meshDistributor->getFeSpace(i);
       createPrimals(feSpace);
@@ -236,7 +242,7 @@ namespace AMDiS {
     dualDofMap.update();
     lagrangeMap.update();
     localDofMap.update();
-    if (fetiPreconditioner == FETI_DIRICHLET)
+    if (fetiPreconditioner != FETI_NONE)
       interiorDofMap.update();
 
     for (unsigned int i = 0; i < meshDistributor->getFeSpaces().size(); i++) {
@@ -396,7 +402,7 @@ namespace AMDiS {
           dualDofMap[feSpace].isSet(i) == false) {
         localDofMap[feSpace].insertRankDof(i, nLocalInterior);
 
-        if (fetiPreconditioner == FETI_DIRICHLET)
+        if (fetiPreconditioner != FETI_NONE)
           interiorDofMap[feSpace].insertRankDof(i, nLocalInterior);
 
         nLocalInterior++;
@@ -495,7 +501,8 @@ namespace AMDiS {
       KSPCreate(PETSC_COMM_WORLD, &ksp_schur_primal);
       KSPSetOperators(ksp_schur_primal, mat_schur_primal, mat_schur_primal,
                       SAME_NONZERO_PATTERN);
-      KSPSetOptionsPrefix(ksp_schur_primal, "solver_sp_");
+      KSPSetOptionsPrefix(ksp_schur_primal, "schur_primal_");
+      KSPSetType(ksp_schur_primal, KSPGMRES);
       KSPSetFromOptions(ksp_schur_primal);
     } else {
       MSG("Create direct schur primal solver!\n");
@@ -582,7 +589,12 @@ namespace AMDiS {
       KSPCreate(PETSC_COMM_WORLD, &ksp_schur_primal);
       KSPSetOperators(ksp_schur_primal, mat_primal_primal, mat_primal_primal,
                       SAME_NONZERO_PATTERN);
-      KSPSetOptionsPrefix(ksp_schur_primal, "solver_sp_");
+      KSPSetOptionsPrefix(ksp_schur_primal, "schur_primal_");
+      KSPSetType(ksp_schur_primal, KSPPREONLY);
+      PC pc_schur_primal;
+      KSPGetPC(ksp_schur_primal, &pc_schur_primal);
+      PCSetType(pc_schur_primal, PCLU);
+      PCFactorSetMatSolverPackage(pc_schur_primal, MATSOLVERMUMPS);
       KSPSetFromOptions(ksp_schur_primal);
 
       MSG("Creating Schur primal matrix needed %.5f seconds.\n",
@@ -643,7 +655,9 @@ namespace AMDiS {
 
     KSPCreate(PETSC_COMM_WORLD, &ksp_feti);
     KSPSetOperators(ksp_feti, mat_feti, mat_feti, SAME_NONZERO_PATTERN);
-    KSPSetOptionsPrefix(ksp_feti, "solver_feti_");
+    KSPSetOptionsPrefix(ksp_feti, "feti_");
+    KSPSetType(ksp_feti, KSPGMRES);
+    KSPSetTolerances(ksp_feti, 0, 1e-8, 1e+3, 1000);
     KSPSetFromOptions(ksp_feti);
 
@@ -659,7 +673,12 @@ namespace AMDiS {
       KSPCreate(PETSC_COMM_SELF, &ksp_interior);
       KSPSetOperators(ksp_interior, mat_interior_interior, mat_interior_interior,
                       SAME_NONZERO_PATTERN);
-      KSPSetOptionsPrefix(ksp_interior, "solver_interior_");
+      KSPSetOptionsPrefix(ksp_interior, "precon_interior_");
+      KSPSetType(ksp_interior, KSPPREONLY);
+      PC pc_interior;
+      KSPGetPC(ksp_interior, &pc_interior);
+      PCSetType(pc_interior, PCLU);
+      PCFactorSetMatSolverPackage(pc_interior, MATSOLVERUMFPACK);
       KSPSetFromOptions(ksp_interior);
 
       fetiDirichletPreconData.mat_lagrange_scaled = &mat_lagrange_scaled;
@@ -820,7 +839,7 @@ namespace AMDiS {
           localIsIndex.push_back(cnt++);
         }
       }
-      
+
       TEST_EXIT_DBG(cnt == primalDofMap.getLocalDofs())("Should not happen!\n");
     }
 
@@ -1027,28 +1046,30 @@ namespace AMDiS {
 
             // === For preconditioner ===
 
-            if (!rowPrimal && !colPrimal) {
-              if (!isDual(feSpaces[i], *cursor)) {
-                if (!isDual(feSpaces[j], col(*icursor))) {
-                  int colIndex = interiorDofMap.getLocalMatIndex(j, col(*icursor));
-                  colsLocal.push_back(colIndex);
-                  valuesLocal.push_back(value(*icursor));
+            if (fetiPreconditioner != FETI_NONE) {
+              if (!rowPrimal && !colPrimal) {
+                if (!isDual(feSpaces[i], *cursor)) {
+                  if (!isDual(feSpaces[j], col(*icursor))) {
+                    int colIndex = interiorDofMap.getLocalMatIndex(j, col(*icursor));
+                    colsLocal.push_back(colIndex);
+                    valuesLocal.push_back(value(*icursor));
+                  } else {
+                    int colIndex = dualDofMap.getLocalMatIndex(j, col(*icursor));
+                    colsLocalOther.push_back(colIndex);
+                    valuesLocalOther.push_back(value(*icursor));
+                  }
                 } else {
-                  int colIndex = dualDofMap.getLocalMatIndex(j, col(*icursor));
-                  colsLocalOther.push_back(colIndex);
-                  valuesLocalOther.push_back(value(*icursor));
-                }
-              } else {
-                if (!isDual(feSpaces[j], col(*icursor))) {
-                  int colIndex = interiorDofMap.getLocalMatIndex(j, col(*icursor));
-                  colsLocalOther.push_back(colIndex);
-                  valuesLocalOther.push_back(value(*icursor));
-                } else {
-                  int colIndex = dualDofMap.getLocalMatIndex(j, col(*icursor));
-                  colsLocal.push_back(colIndex);
-                  valuesLocal.push_back(value(*icursor));
-                }
-              }
+                  if (!isDual(feSpaces[j], col(*icursor))) {
+                    int colIndex = interiorDofMap.getLocalMatIndex(j, col(*icursor));
+                    colsLocalOther.push_back(colIndex);
+                    valuesLocalOther.push_back(value(*icursor));
+                  } else {
+                    int colIndex = dualDofMap.getLocalMatIndex(j, col(*icursor));
+                    colsLocal.push_back(colIndex);
+                    valuesLocal.push_back(value(*icursor));
+                  }
+                }
+              }
+            }
           }
 
@@ -1171,7 +1192,12 @@ namespace AMDiS {
 
     KSPCreate(PETSC_COMM_SELF, &ksp_b);
     KSPSetOperators(ksp_b, mat_b_b, mat_b_b, SAME_NONZERO_PATTERN);
-    KSPSetOptionsPrefix(ksp_b, "solver_b_");
+    KSPSetOptionsPrefix(ksp_b, "interior_");
+    KSPSetType(ksp_b, KSPPREONLY);
+    PC pc_b;
+    KSPGetPC(ksp_b, &pc_b);
+    PCSetType(pc_b, PCLU);
+    PCFactorSetMatSolverPackage(pc_b, MATSOLVERUMFPACK);
     KSPSetFromOptions(ksp_b);
 
-- 
GitLab
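
Two idioms recur in the PetscSolverFeti.cc hunks above. First, the argument
order of KSPSetTolerances() is (rtol, abstol, dtol, maxits), so
KSPSetTolerances(ksp_feti, 0, 1e-8, 1e+3, 1000) switches the relative
criterion off and stops the FETI iteration at an absolute residual norm of
1e-8, after at most 1000 iterations. Second, the local solvers combine
KSPPREONLY with PCLU and an external factorization package, which turns the
single preconditioner application into a sparse direct solve. A condensed
sketch of that idiom follows; createLocalDirectSolver is a hypothetical
helper, and PCFactorSetMatSolverPackage() is the name from this PETSc
generation (later releases renamed it to PCFactorSetMatSolverType()):

    #include <petscksp.h>

    // Sequential direct solve on the rank-local matrix, as done for
    // ksp_b and ksp_interior above. Error checking omitted for brevity.
    PetscErrorCode createLocalDirectSolver(Mat Alocal, KSP *ksp)
    {
      PC pc;
      KSPCreate(PETSC_COMM_SELF, ksp);  // one independent solve per rank
      KSPSetOperators(*ksp, Alocal, Alocal, SAME_NONZERO_PATTERN);
      KSPSetType(*ksp, KSPPREONLY);     // apply the PC once, no Krylov loop
      KSPGetPC(*ksp, &pc);
      PCSetType(pc, PCLU);              // LU factorization as the "preconditioner"
      PCFactorSetMatSolverPackage(pc, MATSOLVERUMFPACK);
      KSPSetFromOptions(*ksp);
      return 0;
    }
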