Commit 739cafa2 authored by Thomas Witkowski

uha, fixed THE STOKES BUG...

parent cbcf61f6
@@ -83,8 +83,7 @@ namespace AMDiS {
vecSol.resize(nCoarseMap + 1);
vecRhs.resize(nCoarseMap + 1);
// === Create map from component number to its coarse space map. ===
componentIthCoarseMap.resize(coarseSpaceMap.size());
......
@@ -183,6 +183,11 @@ namespace AMDiS {
/// Get the solution vector of some coarse space.
inline Vec& getVecSolCoarse(int coarseSpace = 0)
{
FUNCNAME("ParallelCoarseSpaceMatVec::getVecSolCoarse()");
TEST_EXIT_DBG(coarseSpace + 1 < vecSol.size())
("Wrong component %d, vecSol has only %d!\n", coarseSpace + 1, vecSol.size());
return vecSol[coarseSpace + 1];
}
......
@@ -273,14 +273,10 @@ namespace AMDiS {
DofIndexSet primals;
for (DofContainerSet::iterator it = vertices.begin();
it != vertices.end(); ++it) {
double e = 1e-8;
if (meshLevel == 0) {
WorldVector<double> c;
feSpace->getMesh()->getDofIndexCoords(*it, feSpace, c);
if (fabs(c[1]) > 1 && fabs(c[1]) < 19)
primals.insert(**it);
primals.insert(**it);
} else {
double e = 1e-8;
WorldVector<double> c;
feSpace->getMesh()->getDofIndexCoords(*it, feSpace, c);
@@ -685,7 +681,7 @@ namespace AMDiS {
VecAssemblyBegin(tmpVec);
VecAssemblyEnd(tmpVec);
subdomain->solve(tmpVec, tmpVec);
PetscScalar *tmpValues;
@@ -824,8 +820,8 @@ namespace AMDiS {
MatNullSpace matNullspace;
MatNullSpaceCreate(mpiCommGlobal, PETSC_FALSE, 1, &nullSpaceBasis, &matNullspace);
MatSetNullSpace(mat_feti, matNullspace);
KSPSetNullSpace(ksp_feti, matNullspace);
// MatSetNullSpace(mat_feti, matNullspace);
// KSPSetNullSpace(ksp_feti, matNullspace);
}
@@ -836,6 +832,7 @@ namespace AMDiS {
MatScale(mat_lagrange_scaled, 0.5);
}
switch (fetiPreconditioner) {
case FETI_DIRICHLET:
TEST_EXIT_DBG(meshLevel == 0)
@@ -1259,6 +1256,68 @@ namespace AMDiS {
VecRestoreArray(vec_sol_b, &localSolB);
VecRestoreArray(local_sol_primal, &localSolPrimal);
VecDestroy(&local_sol_primal);
}
void PetscSolverFeti::recoverInterfaceSolution(Vec& vecInterface, SystemVector &vec)
{
FUNCNAME("PetscSolverFeti::recoverInterfaceSolution()");
if (!enableStokesMode)
return;
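// === Collect the global matrix indices of the local interface DOFs (component 2). ===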
vector<PetscInt> globalIsIndex, localIsIndex;
globalIsIndex.reserve(interfaceDofMap.getLocalDofs());
localIsIndex.reserve(interfaceDofMap.getLocalDofs());
int cnt = 0;
DofMap& dofMap = interfaceDofMap[fullInterface].getMap();
for (DofMap::iterator it = dofMap.begin(); it != dofMap.end(); ++it) {
globalIsIndex.push_back(interfaceDofMap.getMatIndex(2, it->first));
localIsIndex.push_back(cnt++);
}
IS globalIs, localIs;
ISCreateGeneral(PETSC_COMM_SELF,
globalIsIndex.size(),
&(globalIsIndex[0]),
PETSC_USE_POINTER,
&globalIs);
ISCreateGeneral(PETSC_COMM_SELF,
localIsIndex.size(),
&(localIsIndex[0]),
PETSC_USE_POINTER,
&localIs);
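// === Create a local sequential vector and scatter the interface part of the solution into it. ===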
Vec local_sol_interface;
VecCreateSeq(PETSC_COMM_SELF, localIsIndex.size(), &local_sol_interface);
VecScatter interfaceScatter;
VecScatterCreate(vecInterface, globalIs, local_sol_interface, localIs, &interfaceScatter);
VecScatterBegin(interfaceScatter, vecInterface, local_sol_interface,
INSERT_VALUES, SCATTER_FORWARD);
VecScatterEnd(interfaceScatter, vecInterface, local_sol_interface,
INSERT_VALUES, SCATTER_FORWARD);
ISDestroy(&globalIs);
ISDestroy(&localIs);
VecScatterDestroy(&interfaceScatter);
PetscScalar *localSolInterface;
VecGetArray(local_sol_interface, &localSolInterface);
// === And copy from PETSc local vectors to the DOF vectors. ===
cnt = 0;
DOFVector<double>& dofVec = *(vec.getDOFVector(2));
for (DofMap::iterator it = interfaceDofMap[fullInterface].getMap().begin();
it != interfaceDofMap[fullInterface].getMap().end(); ++it)
dofVec[it->first] = localSolInterface[cnt++];
VecRestoreArray(local_sol_interface, &localSolInterface);
VecDestroy(&local_sol_interface);
}
@@ -1478,6 +1537,11 @@ namespace AMDiS {
createMatLagrange(feSpaces);
// === If required, run debug tests. ===
dbgMatrix();
// === Create PETSc solver for the Schur complement on primal variables. ===
createSchurPrimalKsp(feSpaces);
@@ -1486,11 +1550,6 @@ namespace AMDiS {
// === Create PETSc solver for the FETI-DP operator. ===
createFetiKsp(feSpaces);
// === If required, run debug tests. ===
dbgMatrix();
}
@@ -1526,14 +1585,13 @@ namespace AMDiS {
primalDofMap.createVec(tmp_primal0);
primalDofMap.createVec(tmp_primal1);
Vec tmp_lagrange;
Vec tmp_lagrange, tmp_interface;
MatGetVecs(mat_lagrange, PETSC_NULL, &tmp_lagrange);
// === Create RHS and solution vectors. ===
Vec vecRhs, vecSol;
Vec vecRhsLagrange, vecRhsInterface, vecSolLagrange, vecSolInterface;
Vec vecRhsLagrange, vecSolLagrange, vecRhsInterface, vecSolInterface;
if (!enableStokesMode) {
MatGetVecs(mat_lagrange, PETSC_NULL, &vecRhsLagrange);
MatGetVecs(mat_lagrange, PETSC_NULL, &vecSolLagrange);
@@ -1544,14 +1602,22 @@ namespace AMDiS {
MatGetVecs(mat_lagrange, PETSC_NULL, &vecRhsLagrange);
MatGetVecs(mat_lagrange, PETSC_NULL, &vecSolLagrange);
interfaceDofMap.createVec(tmp_interface);
interfaceDofMap.createVec(vecRhsInterface);
interfaceDofMap.createVec(vecSolInterface);
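// Nest the interface and Lagrange vectors into single rhs and solution vectors for the coupled system.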
Vec vecRhsArray[2] = {vecRhsInterface, vecRhsLagrange};
Vec vecRhsArray[2] = {vecRhsInterface, vecRhsLagrange};
VecCreateNest(mpiCommGlobal, 2, PETSC_NULL, vecRhsArray, &vecRhs);
Vec vecSolArray[2] = {vecSolInterface, vecSolLagrange};
Vec vecSolArray[2] = {vecSolInterface, vecSolLagrange};
VecCreateNest(mpiCommGlobal, 2, PETSC_NULL, vecSolArray, &vecSol);
VecAssemblyBegin(vecRhs);
VecAssemblyEnd(vecRhs);
VecAssemblyBegin(vecSol);
VecAssemblyEnd(vecSol);
}
@@ -1559,33 +1625,54 @@ namespace AMDiS {
double wtime = MPI::Wtime();
// d = L inv(K_BB) f_B - L inv(K_BB) K_BPi inv(S_PiPi) [f_Pi - K_PiB inv(K_BB) f_B]
// vecRhs = L * inv(K_BB) * f_B
subdomain->solveGlobal(subdomain->getVecRhsInterior(), tmp_b0);
MatMult(mat_lagrange, tmp_b0, vecRhsLagrange);
// tmp_primal0 = M_PiB * inv(K_BB) * f_B
MatMult(subdomain->getMatCoarseInterior(), tmp_b0, tmp_primal0);
if (!enableStokesMode) {
// d = L inv(K_BB) f_B - L inv(K_BB) K_BPi inv(S_PiPi) [f_Pi - K_PiB inv(K_BB) f_B]
// vecRhs = L * inv(K_BB) * f_B
subdomain->solveGlobal(subdomain->getVecRhsInterior(), tmp_b0);
MatMult(mat_lagrange, tmp_b0, vecRhsLagrange);
// tmp_primal0 = f_Pi - M_PiB * inv(K_BB) * f_B
VecAXPBY(tmp_primal0, 1.0, -1.0, subdomain->getVecRhsCoarse());
// tmp_primal0 = M_PiB * inv(K_BB) * f_B
MatMult(subdomain->getMatCoarseInterior(), tmp_b0, tmp_primal0);
// tmp_primal0 = f_Pi - M_PiB * inv(K_BB) * f_B
VecAXPBY(tmp_primal0, 1.0, -1.0, subdomain->getVecRhsCoarse());
double wtime2 = MPI::Wtime();
// tmp_primal0 = inv(S_PiPi) (f_Pi - M_PiB * inv(K_BB) * f_B)
KSPSolve(ksp_schur_primal, tmp_primal0, tmp_primal0);
if (printTimings) {
MSG("FETI-DP timing 09a: %.5f seconds (create rhs vector)\n",
MPI::Wtime() - wtime2);
double wtime2 = MPI::Wtime();
// tmp_primal0 = inv(S_PiPi) (f_Pi - M_PiB * inv(K_BB) * f_B)
KSPSolve(ksp_schur_primal, tmp_primal0, tmp_primal0);
if (printTimings) {
MSG("FETI-DP timing 09a: %.5f seconds (create rhs vector)\n",
MPI::Wtime() - wtime2);
}
MatMult(subdomain->getMatInteriorCoarse(), tmp_primal0, tmp_b0);
subdomain->solveGlobal(tmp_b0, tmp_b0);
MatMult(mat_lagrange, tmp_b0, tmp_lagrange);
VecAXPBY(vecRhsLagrange, -1.0, 1.0, tmp_lagrange);
} else {
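// Stokes mode: the reduced rhs gets both a Lagrange block and an interface block; the coarse-space reduction below is the same as in the branch above.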
subdomain->solveGlobal(subdomain->getVecRhsInterior(), tmp_b0);
MatMult(subdomain->getMatCoarseInterior(), tmp_b0, tmp_primal0);
VecAXPBY(tmp_primal0, 1.0, -1.0, subdomain->getVecRhsCoarse());
double wtime2 = MPI::Wtime();
// tmp_primal0 = inv(S_PiPi) (f_Pi - M_PiB * inv(K_BB) * f_B)
KSPSolve(ksp_schur_primal, tmp_primal0, tmp_primal0);
if (printTimings) {
MSG("FETI-DP timing 09a: %.5f seconds (create rhs vector)\n",
MPI::Wtime() - wtime2);
}
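// tmp_b1 = inv(K_BB) * K_BPi * inv(S_PiPi) * (f_Pi - M_PiB * inv(K_BB) * f_B)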
MatMult(subdomain->getMatInteriorCoarse(), tmp_primal0, tmp_b1);
subdomain->solveGlobal(tmp_b1, tmp_b1);
VecAXPY(tmp_b0, -1.0, tmp_b1);
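// vecRhsLagrange = L * tmp_b0, i.e. the Lagrange block of the reduced rhs d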
MatMult(mat_lagrange, tmp_b0, vecRhsLagrange);
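// Interface block of the reduced rhs: the interface coarse rows applied to tmp_b0 plus the coupling of the primal solution tmp_primal0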
MatMult(subdomain->getMatCoarseInterior(1), tmp_b0, vecRhsInterface);
MatMult(subdomain->getMatCoarse(1, 0), tmp_primal0, tmp_interface);
VecAXPY(vecRhsInterface, 1.0, tmp_interface);
}
//
MatMult(subdomain->getMatInteriorCoarse(), tmp_primal0, tmp_b0);
subdomain->solveGlobal(tmp_b0, tmp_b0);
MatMult(mat_lagrange, tmp_b0, tmp_lagrange);
//
VecAXPBY(vecRhsLagrange, -1.0, 1.0, tmp_lagrange);
if (printTimings) {
MPI::COMM_WORLD.Barrier();
MSG("FETI-DP timing 09: %.5f seconds (create rhs vector)\n",
@@ -1647,6 +1734,11 @@ namespace AMDiS {
// === And recover AMDiS solution vectors. ===
recoverSolution(tmp_b0, tmp_primal0, vec);
if (enableStokesMode)
recoverInterfaceSolution(vecSolInterface, vec);
// === Print timings and delete data. ===
if (printTimings) {
MPI::COMM_WORLD.Barrier();
@@ -1661,6 +1753,12 @@ namespace AMDiS {
VecDestroy(&tmp_lagrange);
VecDestroy(&tmp_primal0);
VecDestroy(&tmp_primal1);
if (enableStokesMode) {
VecDestroy(&tmp_interface);
VecDestroy(&vecRhsInterface);
VecDestroy(&vecSolInterface);
}
}
......
@@ -160,6 +160,9 @@ namespace AMDiS {
Vec &vec_sol_primal,
SystemVector &vec);
void recoverInterfaceSolution(Vec& vecInterface,
SystemVector &vec);
/** \brief
* Solves the FETI-DP system globally, thus without reducing it to the
* Lagrange multipliers. This should be used for debugging only to test
......
@@ -712,7 +712,6 @@ namespace AMDiS {
}
} else {
// The DOF index is not periodic.
VecSetValue(vecInterior, index, *dofIt, ADD_VALUES);
}
}
......