Commit ef9215bc authored by Thomas Witkowski

Fixed some small issues for FETI-DP.

parent 36780508
@@ -20,7 +20,9 @@ namespace AMDiS {
void FeSpaceDofMap::clear()
{
dofMap.clear();
+nonRankDofs.clear();
nRankDofs = 0;
+nLocalDofs = 0;
nOverallDofs = 0;
rStartDofs = 0;
}
@@ -137,6 +139,23 @@ namespace AMDiS {
}
+void ParallelDofMapping::clear()
+{
+  FUNCNAME("ParallelDofMapping::clear()");
+
+  for (std::set<const FiniteElemSpace*>::iterator it = feSpacesUnique.begin();
+       it != feSpacesUnique.end(); ++it)
+    data[*it].clear();
+
+  nRankDofs = -1;
+  nLocalDofs = -1;
+  nOverallDofs = -1;
+  rStartDofs = -1;
+
+  dofToMatIndex.clear();
+}
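The new ParallelDofMapping::clear() resets each per-FE-space map and the global counters so the same mapping object can be refilled after the mesh or partitioning changes. A minimal usage sketch, where mapping stands for any ParallelDofMapping member (such as primalDofMap further below) and the re-registration step is application specific:

// Sketch only: rebuild a mapping from scratch after an adaptive step.
mapping.clear();    // reset per-space maps, counters, and dofToMatIndex
// ... insert the rank's DOFs again, e.g. via insertRankDof() ...
mapping.update();   // recompute sizes and global indices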
void ParallelDofMapping::setDofComm(DofComm &pSend, DofComm &pRecv)
{
FUNCNAME("ParallelDofMapping::setDofComm()");
@@ -273,6 +273,7 @@ namespace AMDiS {
hasNonLocalDofs(false),
feSpaces(0),
nRankDofs(-1),
+nLocalDofs(-1),
nOverallDofs(-1),
rStartDofs(-1)
{}
@@ -291,6 +292,9 @@ namespace AMDiS {
vector<const FiniteElemSpace*> &fe,
bool needGlobalMapping,
bool bNonLocalDofs);
+/// Clear all data.
+void clear();
/// Set the DOF communicator objects that are required to exchange information
/// about DOFs that are on interior boundaries.
@@ -86,16 +86,6 @@ namespace AMDiS {
double wtime = MPI::Wtime();
-#if 0
-double vm, rss;
-processMemUsage(vm, rss);
-MSG("STAGE 1\n");
-MSG("My memory usage is VM = %.1f MB RSS = %.1f MB\n", vm, rss);
-mpi::globalAdd(vm);
-mpi::globalAdd(rss);
-MSG("Overall memory usage is VM = %.1f MB RSS = %.1f MB\n", vm, rss);
-#endif
if (createMatrixData) {
petscSolver->setMeshDistributor(meshDistributor);
petscSolver->fillPetscMatrix(systemMatrix);
@@ -103,29 +93,11 @@ namespace AMDiS {
petscSolver->fillPetscRhs(rhs);
-#if 0
-processMemUsage(vm, rss);
-MSG("STAGE 2\n");
-MSG("My memory usage is VM = %.1f MB RSS = %.1f MB\n", vm, rss);
-mpi::globalAdd(vm);
-mpi::globalAdd(rss);
-MSG("Overall memory usage is VM = %.1f MB RSS = %.1f MB\n", vm, rss);
-#endif
petscSolver->solvePetscMatrix(*solution, adaptInfo);
if (!storeMatrixData)
petscSolver->destroyMatrixData();
-#if 0
-processMemUsage(vm, rss);
-MSG("STAGE 3\n");
-MSG("My memory usage is VM = %.1f MB RSS = %.1f MB\n", vm, rss);
-mpi::globalAdd(vm);
-mpi::globalAdd(rss);
-MSG("Overall memory usage is VM = %.1f MB RSS = %.1f MB\n", vm, rss);
-#endif
INFO(info, 8)("solution of discrete system needed %.5f seconds\n",
MPI::Wtime() - wtime);
}
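With the #if 0 memory-usage probes removed, the solve routine reduces to a create/fill/solve/destroy cycle. A condensed sketch of that control flow; the semantics of the two flags (reusing matrix data across adaptation iterations) are inferred from their names:

// Condensed sketch of the cycle above; the flag semantics are assumptions.
if (createMatrixData) {
  petscSolver->setMeshDistributor(meshDistributor);
  petscSolver->fillPetscMatrix(systemMatrix);   // assemble only when needed
}
petscSolver->fillPetscRhs(rhs);                 // the rhs is refilled every call
petscSolver->solvePetscMatrix(*solution, adaptInfo);
if (!storeMatrixData)
  petscSolver->destroyMatrixData();             // release PETSc objects again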
@@ -218,7 +218,13 @@ namespace AMDiS {
TEST_EXIT(meshDistributor->getFeSpace()->getBasisFcts()->getDegree() == 1)
("Works for linear basis functions only!\n");
+primalDofMap.clear();
+dualDofMap.clear();
+lagrangeMap.clear();
+localDofMap.clear();
+interiorDofMap.clear();
for (unsigned int i = 0; i < meshDistributor->getFeSpaces().size(); i++) {
const FiniteElemSpace *feSpace = meshDistributor->getFeSpace(i);
createPrimals(feSpace);
@@ -236,7 +242,7 @@ namespace AMDiS {
dualDofMap.update();
lagrangeMap.update();
localDofMap.update();
-if (fetiPreconditioner == FETI_DIRICHLET)
+if (fetiPreconditioner != FETI_NONE)
interiorDofMap.update();
for (unsigned int i = 0; i < meshDistributor->getFeSpaces().size(); i++) {
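Both occurrences of the old test fetiPreconditioner == FETI_DIRICHLET become != FETI_NONE, so the interior DOF map is maintained for every preconditioner variant rather than only for the Dirichlet one. A hypothetical sketch of the guard; only FETI_NONE and FETI_DIRICHLET are confirmed by this diff, the lumped value is an assumption:

// Hypothetical sketch; FETI_LUMPED is assumed, not shown in this commit.
enum FetiPreconditioner { FETI_NONE, FETI_DIRICHLET, FETI_LUMPED };

if (fetiPreconditioner != FETI_NONE)   // any preconditioner needs this map
  interiorDofMap.update();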
@@ -396,7 +402,7 @@ namespace AMDiS {
dualDofMap[feSpace].isSet(i) == false) {
localDofMap[feSpace].insertRankDof(i, nLocalInterior);
-if (fetiPreconditioner == FETI_DIRICHLET)
+if (fetiPreconditioner != FETI_NONE)
interiorDofMap[feSpace].insertRankDof(i, nLocalInterior);
nLocalInterior++;
@@ -495,7 +501,8 @@ namespace AMDiS {
KSPCreate(PETSC_COMM_WORLD, &ksp_schur_primal);
KSPSetOperators(ksp_schur_primal, mat_schur_primal, mat_schur_primal, SAME_NONZERO_PATTERN);
KSPSetOptionsPrefix(ksp_schur_primal, "solver_sp_");
KSPSetOptionsPrefix(ksp_schur_primal, "schur_primal_");
KSPSetType(ksp_schur_primal, KSPGMRES);
KSPSetFromOptions(ksp_schur_primal);
} else {
MSG("Create direct schur primal solver!\n");
@@ -582,7 +589,12 @@ namespace AMDiS {
KSPCreate(PETSC_COMM_WORLD, &ksp_schur_primal);
KSPSetOperators(ksp_schur_primal, mat_primal_primal,
mat_primal_primal, SAME_NONZERO_PATTERN);
KSPSetOptionsPrefix(ksp_schur_primal, "solver_sp_");
KSPSetOptionsPrefix(ksp_schur_primal, "schur_primal_");
KSPSetType(ksp_schur_primal, KSPPREONLY);
PC pc_schur_primal;
KSPGetPC(ksp_schur_primal, &pc_schur_primal);
PCSetType(pc_schur_primal, PCLU);
PCFactorSetMatSolverPackage(pc_schur_primal, MATSOLVERMUMPS);
KSPSetFromOptions(ksp_schur_primal);
MSG("Creating Schur primal matrix needed %.5f seconds.\n",
@@ -643,7 +655,9 @@ namespace AMDiS {
KSPCreate(PETSC_COMM_WORLD, &ksp_feti);
KSPSetOperators(ksp_feti, mat_feti, mat_feti, SAME_NONZERO_PATTERN);
KSPSetOptionsPrefix(ksp_feti, "solver_feti_");
KSPSetOptionsPrefix(ksp_feti, "feti_");
KSPSetType(ksp_feti, KSPGMRES);
KSPSetTolerances(ksp_feti, 0, 1e-8, 1e+3, 1000);
KSPSetFromOptions(ksp_feti);
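The outer FETI solver keeps an iterative setup: GMRES under the new "feti_" prefix. In PETSc the tolerance arguments are ordered (rtol, abstol, dtol, maxits), so the call above disables the relative criterion (rtol = 0) and stops on an absolute residual of 1e-8, divergence at 1e+3, or 1000 iterations:

// KSPSetTolerances signature for reference (PETSc 3.x):
//   KSPSetTolerances(KSP ksp, PetscReal rtol, PetscReal abstol,
//                    PetscReal dtol, PetscInt maxits);
KSPSetTolerances(ksp_feti, 0 /*rtol*/, 1e-8 /*abstol*/,
                 1e+3 /*dtol*/, 1000 /*maxits*/);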
@@ -659,7 +673,12 @@ namespace AMDiS {
KSPCreate(PETSC_COMM_SELF, &ksp_interior);
KSPSetOperators(ksp_interior, mat_interior_interior, mat_interior_interior,
SAME_NONZERO_PATTERN);
KSPSetOptionsPrefix(ksp_interior, "solver_interior_");
KSPSetOptionsPrefix(ksp_interior, "precon_interior_");
KSPSetType(ksp_interior, KSPPREONLY);
PC pc_interior;
KSPGetPC(ksp_interior, &pc_interior);
PCSetType(pc_interior, PCLU);
PCFactorSetMatSolverPackage(pc_interior, MATSOLVERUMFPACK);
KSPSetFromOptions(ksp_interior);
fetiDirichletPreconData.mat_lagrange_scaled = &mat_lagrange_scaled;
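ksp_interior is created on PETSC_COMM_SELF, so each rank factorizes only its own interior block; the sequential UMFPACK LU is the natural choice here, in contrast to the parallel MUMPS factorization of the Schur primal system. With KSPPREONLY and PCLU the factorization happens once, on the first solve, and every later application of the Dirichlet preconditioner is just a forward/backward substitution. A sketch with hypothetical local vectors:

// Sketch: the first KSPSolve triggers the UMFPACK factorization; later
// calls reuse the factors. rhs_interior/sol_interior are hypothetical.
Vec rhs_interior, sol_interior;
VecCreateSeq(PETSC_COMM_SELF, nLocalInterior, &rhs_interior);
VecDuplicate(rhs_interior, &sol_interior);
KSPSolve(ksp_interior, rhs_interior, sol_interior);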
@@ -820,7 +839,7 @@ namespace AMDiS {
localIsIndex.push_back(cnt++);
}
}
TEST_EXIT_DBG(cnt == primalDofMap.getLocalDofs())("Should not happen!\n");
}
@@ -1027,28 +1046,30 @@ namespace AMDiS {
// === For preconditioner ===
if (!rowPrimal && !colPrimal) {
if (!isDual(feSpaces[i], *cursor)) {
if (!isDual(feSpaces[j], col(*icursor))) {
int colIndex = interiorDofMap.getLocalMatIndex(j, col(*icursor));
colsLocal.push_back(colIndex);
valuesLocal.push_back(value(*icursor));
if (fetiPreconditioner != FETI_NONE) {
if (!rowPrimal && !colPrimal) {
if (!isDual(feSpaces[i], *cursor)) {
if (!isDual(feSpaces[j], col(*icursor))) {
int colIndex = interiorDofMap.getLocalMatIndex(j, col(*icursor));
colsLocal.push_back(colIndex);
valuesLocal.push_back(value(*icursor));
} else {
int colIndex = dualDofMap.getLocalMatIndex(j, col(*icursor));
colsLocalOther.push_back(colIndex);
valuesLocalOther.push_back(value(*icursor));
}
} else {
int colIndex = dualDofMap.getLocalMatIndex(j, col(*icursor));
colsLocalOther.push_back(colIndex);
valuesLocalOther.push_back(value(*icursor));
}
} else {
if (!isDual(feSpaces[j], col(*icursor))) {
int colIndex = interiorDofMap.getLocalMatIndex(j, col(*icursor));
colsLocalOther.push_back(colIndex);
valuesLocalOther.push_back(value(*icursor));
} else {
int colIndex = dualDofMap.getLocalMatIndex(j, col(*icursor));
colsLocal.push_back(colIndex);
valuesLocal.push_back(value(*icursor));
}
}
if (!isDual(feSpaces[j], col(*icursor))) {
int colIndex = interiorDofMap.getLocalMatIndex(j, col(*icursor));
colsLocalOther.push_back(colIndex);
valuesLocalOther.push_back(value(*icursor));
} else {
int colIndex = dualDofMap.getLocalMatIndex(j, col(*icursor));
colsLocal.push_back(colIndex);
valuesLocal.push_back(value(*icursor));
}
}
}
}
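The hunk above shows the old block followed by its replacement: the value collection for the preconditioner is now wrapped in if (fetiPreconditioner != FETI_NONE), which re-indents the whole block, while the routing itself is unchanged. Each non-primal entry lands in one of two local arrays depending on whether its row and column DOFs are interior or dual; schematically (block names follow common FETI-DP notation and are not code identifiers):

// Schematic summary of the routing, not a literal excerpt.
bool rowInterior = !isDual(feSpaces[i], *cursor);
bool colInterior = !isDual(feSpaces[j], col(*icursor));
if (rowInterior == colInterior) {
  // diagonal blocks K_II and K_DeltaDelta -> colsLocal / valuesLocal
} else {
  // coupling blocks K_IDelta and K_DeltaI -> colsLocalOther / valuesLocalOther
}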
@@ -1171,7 +1192,12 @@ namespace AMDiS {
KSPCreate(PETSC_COMM_SELF, &ksp_b);
KSPSetOperators(ksp_b, mat_b_b, mat_b_b, SAME_NONZERO_PATTERN);
KSPSetOptionsPrefix(ksp_b, "solver_b_");
KSPSetOptionsPrefix(ksp_b, "interior_");
KSPSetType(ksp_b, KSPPREONLY);
PC pc_b;
KSPGetPC(ksp_b, &pc_b);
PCSetType(pc_b, PCLU);
PCFactorSetMatSolverPackage(pc_b, MATSOLVERUMFPACK);
KSPSetFromOptions(ksp_b);
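All four KSP prefixes are renamed in this commit (solver_sp_ to schur_primal_, solver_feti_ to feti_, solver_interior_ to precon_interior_, solver_b_ to interior_), and each setup still ends in KSPSetFromOptions, so every sub-solver remains tunable through the PETSc options database. A hedged example of overriding the defaults programmatically; PetscOptionsSetValue is the two-argument form of the contemporary (pre-3.7) PETSc API, and the option values are illustrative only:

// Illustrative overrides, placed before the KSPs call KSPSetFromOptions().
PetscOptionsSetValue("-interior_pc_factor_mat_solver_package", "mumps");
PetscOptionsSetValue("-feti_ksp_monitor", "");
// The same settings could be passed on the command line, e.g.
//   -interior_pc_factor_mat_solver_package mumps -feti_ksp_monitor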