Commit 26dceb38 authored by Thomas Witkowski

Fixed our FETI-DP bug...

parent 7c6dd8a4
@@ -44,11 +44,7 @@ namespace AMDiS {
     TEST_EXIT(argc >= 2)("No init file!\n");
 
-    if (string(argv[0]) != "ddt")
-      Parameters::init(string(argv[1]));
-    else
-      Parameters::init(string(argv[1]));
+    Parameters::init(string(argv[1]));
 
     Parameters::readArgv(argc, argv);
   }
@@ -55,24 +55,17 @@ namespace AMDiS {
     MatShellGetContext(mat, &ctx);
     FetiData* data = static_cast<FetiData*>(ctx);
 
-    // tmp_vec_b0 = inv(K_BB) trans(L) x
-    MatMultTranspose(*(data->mat_lagrange), x, data->tmp_vec_b0);
-    data->fetiSolver->solveLocalProblem(data->tmp_vec_b0, data->tmp_vec_b0);
-
-    // tmp_vec_b1 = inv(K_BB) K_BPi inv(S_PiPi) K_PiB tmp_vec_b0
-    MatMult(*(data->mat_primal_b), data->tmp_vec_b0, data->tmp_vec_primal);
+    MatMultTranspose(*(data->mat_lagrange), x, data->tmp_vec_b);
+    data->fetiSolver->solveLocalProblem(data->tmp_vec_b, data->tmp_vec_b);
+    MatMult(*(data->mat_lagrange), data->tmp_vec_b, data->tmp_vec_lagrange);
+
+    double first = MPI::Wtime();
+    MatMult(*(data->mat_primal_b), data->tmp_vec_b, data->tmp_vec_primal);
     KSPSolve(*(data->ksp_schur_primal), data->tmp_vec_primal, data->tmp_vec_primal);
+    PetscSolverFeti::fetiStatistics.nSchurPrimalSolve++;
+    PetscSolverFeti::fetiStatistics.timeSchurPrimalSolve +=
+      MPI::Wtime() - first;
 
-    MatMult(*(data->mat_b_primal), data->tmp_vec_primal, data->tmp_vec_b1);
+    MatMult(*(data->mat_b_primal), data->tmp_vec_primal, data->tmp_vec_b);
+    data->fetiSolver->solveLocalProblem(data->tmp_vec_b, data->tmp_vec_b);
+    MatMult(*(data->mat_lagrange), data->tmp_vec_b, y);
 
-    // y = L (tmp_vec_b0 + tmp_vec_b1)
-    VecAXPBY(data->tmp_vec_b0, 1.0, 1.0, data->tmp_vec_b1);
-    MatMult(*(data->mat_lagrange), data->tmp_vec_b0, y);
+    VecAXPBY(y, 1.0, 1.0, data->tmp_vec_lagrange);
 
     PetscSolverFeti::fetiStatistics.nFetiApply++;
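For orientation: the comments removed in this hunk spell out what this shell matrix computes. With $L$ the Lagrange constraint matrix (`mat_lagrange`), the callback applies the standard FETI-DP operator to a Lagrange vector $x$,

$$
F x \;=\; \underbrace{L\,K_{BB}^{-1}L^{T}x}_{\texttt{tmp\_vec\_lagrange}} \;+\; L\,K_{BB}^{-1}K_{B\Pi}\,S_{\Pi\Pi}^{-1}\,K_{\Pi B}\,K_{BB}^{-1}L^{T}x.
$$

The rewritten code computes the first term once into `tmp_vec_lagrange`, reuses the single buffer `tmp_vec_b` for both local solves, and accumulates the two terms with `VecAXPBY`. The counters touched here also hint at the shape of the statistics record; a minimal sketch inferred from the three usages in this commit (the actual definition of `fetiStatistics` is not part of the diff):

```cpp
// Sketch only: fields inferred from the usages in this commit.
struct FetiStatisticsData
{
  /// Number of solves of the Schur complement on the primal variables.
  int nSchurPrimalSolve;

  /// Time accumulated around these solves via MPI::Wtime().
  double timeSchurPrimalSolve;

  /// Number of applications of the FETI-DP operator.
  int nFetiApply;
};
```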
@@ -766,10 +759,10 @@ namespace AMDiS {
     VecCreateMPI(PETSC_COMM_WORLD,
                  nRankB * nComponents, nOverallB * nComponents,
-                 &(fetiData.tmp_vec_b0));
-    VecCreateMPI(PETSC_COMM_WORLD,
-                 nRankB * nComponents, nOverallB * nComponents,
-                 &(fetiData.tmp_vec_b1));
+                 &(fetiData.tmp_vec_b));
+    VecCreateMPI(PETSC_COMM_WORLD,
+                 nRankLagrange * nComponents, nOverallLagrange * nComponents,
+                 &(fetiData.tmp_vec_lagrange));
     VecCreateMPI(PETSC_COMM_WORLD,
                  nRankPrimals * nComponents, nOverallPrimals * nComponents,
                  &(fetiData.tmp_vec_primal));
@@ -857,8 +850,8 @@ namespace AMDiS {
     fetiData.fetiSolver = NULL;
     fetiData.ksp_schur_primal = PETSC_NULL;
 
-    VecDestroy(&fetiData.tmp_vec_b0);
-    VecDestroy(&fetiData.tmp_vec_b1);
+    VecDestroy(&fetiData.tmp_vec_b);
+    VecDestroy(&fetiData.tmp_vec_lagrange);
     VecDestroy(&fetiData.tmp_vec_primal);
 
     MatDestroy(&mat_feti);
     KSPDestroy(&ksp_feti);
@@ -904,12 +897,6 @@ namespace AMDiS {
   {
     FUNCNAME("PetscSolverFeti::recoverSolution()");
 
-    PetscScalar *ttt;
-    VecGetArray(vec_sol_primal, &ttt);
-    for (int i = 0; i < nRankPrimals; i++)
-      MSG("PRIM VAL = %f\n", ttt[i * 3 + 1]);
-    VecRestoreArray(vec_sol_primal, &ttt);
-
     // === Get local part of the solution for B variables. ===
 
     PetscScalar *localSolB;
@@ -928,8 +915,6 @@ namespace AMDiS {
     for (DofMapping::iterator it = globalPrimalIndex.begin();
          it != globalPrimalIndex.end(); ++it) {
       for (int i = 0; i < nComponents; i++) {
-        MSG("COPY GLOBAL PRIM %d of COMP %d TO LOCAL TMP %d\n",
-            it->second, i, counter);
         globalIsIndex.push_back(it->second * nComponents + i);
         localIsIndex.push_back(counter++);
       }
@@ -966,10 +951,6 @@ namespace AMDiS {
     PetscScalar *localSolPrimal;
     VecGetArray(local_sol_primal, &localSolPrimal);
 
-    for (int i = 0; i < globalPrimalIndex.size(); i++)
-      MSG("TEST VAL %f\n", localSolPrimal[i * 3 + 1]);
-
     // === And copy from PETSc local vectors to the DOF vectors. ===
 
     for (int i = 0; i < nComponents; i++) {
@@ -984,17 +965,11 @@ namespace AMDiS {
       int counter = 0;
       for (DofMapping::iterator it = globalPrimalIndex.begin();
            it != globalPrimalIndex.end(); ++it) {
-        if (i == 1)
-          MSG("AND SET THE VAL OF DOF %d TO VAL %f\n",
-              it->first,
-              localSolPrimal[counter * nComponents + i]);
         dofVec[it->first] = localSolPrimal[counter * nComponents + i];
         counter++;
       }
     }
 
-    MSG("VAL BOUNDS: %f %f\n", vec.getDOFVector(1)->min(), vec.getDOFVector(1)->max());
-
     VecRestoreArray(vec_sol_b, &localSolB);
     VecRestoreArray(local_sol_primal, &localSolPrimal);
     VecDestroy(&local_sol_primal);
@@ -1409,6 +1384,204 @@ namespace AMDiS {
   }
 
+  void PetscSolverFeti::solveFetiMatrix(SystemVector &vec)
+  {
+    FUNCNAME("PetscSolverFeti::solveFetiMatrix()");
+
+    int nOverallNest =
+      (nOverallB + nOverallPrimals + nOverallLagrange) * nComponents;
+
+    Mat mat_lagrange_transpose;
+    MatTranspose(mat_lagrange, MAT_INITIAL_MATRIX, &mat_lagrange_transpose);
+
+    Mat A_global;
+    MatCreateMPIAIJ(PETSC_COMM_WORLD, PETSC_DECIDE, PETSC_DECIDE,
+                    nOverallNest, nOverallNest, 30, PETSC_NULL, 30, PETSC_NULL,
+                    &A_global);
+
+    Vec A_global_rhs, A_global_sol;
+    MatGetVecs(A_global, &A_global_rhs, &A_global_sol);
+
+    PetscInt nCols;
+    const PetscInt *cols;
+    const PetscScalar *vals;
+
+    // === mat_b_b ===
+
+    for (int i = 0; i < nRankB * nComponents; i++) {
+      int rowIndex = rStartB * nComponents + i;
+      MatGetRow(mat_b_b, rowIndex, &nCols, &cols, &vals);
+      MatSetValues(A_global, 1, &rowIndex, nCols, cols, vals, ADD_VALUES);
+      MatRestoreRow(mat_b_b, rowIndex, &nCols, &cols, &vals);
+    }
+
+    PetscScalar *g_f_b;
+    VecGetArray(f_b, &g_f_b);
+    for (int i = 0; i < nRankB * nComponents; i++)
+      VecSetValue(A_global_rhs, rStartB * nComponents + i, g_f_b[i], INSERT_VALUES);
+    VecRestoreArray(f_b, &g_f_b);
+
+    // === mat_primal_primal ===
+
+    for (int i = 0; i < nRankPrimals * nComponents; i++) {
+      int rowIndex = rStartPrimals * nComponents + i;
+      MatGetRow(mat_primal_primal, rowIndex, &nCols, &cols, &vals);
+
+      int rowIndexA = nOverallB * nComponents + rowIndex;
+      vector<int> colsA(nCols);
+      for (int j = 0; j < nCols; j++)
+        colsA[j] = nOverallB * nComponents + cols[j];
+
+      MatSetValues(A_global, 1, &rowIndexA, nCols, &(colsA[0]), vals, ADD_VALUES);
+      MatRestoreRow(mat_primal_primal, rowIndex, &nCols, &cols, &vals);
+    }
+
+    PetscScalar *g_f_primal;
+    VecGetArray(f_primal, &g_f_primal);
+    for (int i = 0; i < nRankPrimals * nComponents; i++)
+      VecSetValue(A_global_rhs,
+                  (nOverallB + rStartPrimals) * nComponents + i, g_f_primal[i],
+                  INSERT_VALUES);
+    VecRestoreArray(f_primal, &g_f_primal);
+
+    // === mat_b_primal ===
+
+    for (int i = 0; i < nRankB * nComponents; i++) {
+      int rowIndex = rStartB * nComponents + i;
+      MatGetRow(mat_b_primal, rowIndex, &nCols, &cols, &vals);
+
+      vector<int> colsA(nCols);
+      for (int j = 0; j < nCols; j++)
+        colsA[j] = nOverallB * nComponents + cols[j];
+
+      MatSetValues(A_global, 1, &rowIndex, nCols, &(colsA[0]), vals, ADD_VALUES);
+      MatRestoreRow(mat_b_primal, rowIndex, &nCols, &cols, &vals);
+    }
+
+    // === mat_primal_b ===
+
+    for (int i = 0; i < nRankPrimals * nComponents; i++) {
+      int rowIndex = rStartPrimals * nComponents + i;
+      MatGetRow(mat_primal_b, rowIndex, &nCols, &cols, &vals);
+
+      int rowIndexA = nOverallB * nComponents + rowIndex;
+
+      MatSetValues(A_global, 1, &rowIndexA, nCols, cols, vals, ADD_VALUES);
+      MatRestoreRow(mat_primal_b, rowIndex, &nCols, &cols, &vals);
+    }
+
+    // === mat_lagrange ===
+
+    for (int i = 0; i < nRankLagrange * nComponents; i++) {
+      int rowIndex = rStartLagrange * nComponents + i;
+      MatGetRow(mat_lagrange, rowIndex, &nCols, &cols, &vals);
+
+      int rowIndexA = (nOverallB + nOverallPrimals) * nComponents + rowIndex;
+
+      MatSetValues(A_global, 1, &rowIndexA, nCols, cols, vals, ADD_VALUES);
+      MatRestoreRow(mat_lagrange, rowIndex, &nCols, &cols, &vals);
+    }
+
+    // === mat_lagrange_transpose ===
+
+    for (int i = 0; i < nRankB * nComponents; i++) {
+      int rowIndex = rStartB * nComponents + i;
+      MatGetRow(mat_lagrange_transpose, rowIndex, &nCols, &cols, &vals);
+
+      vector<int> colsA(nCols);
+      for (int j = 0; j < nCols; j++)
+        colsA[j] = (nOverallB + nOverallPrimals) * nComponents + cols[j];
+
+      MatSetValues(A_global, 1, &rowIndex, nCols, &(colsA[0]), vals, ADD_VALUES);
+      MatRestoreRow(mat_lagrange_transpose, rowIndex, &nCols, &cols, &vals);
+    }
+
+    MatAssemblyBegin(A_global, MAT_FINAL_ASSEMBLY);
+    MatAssemblyEnd(A_global, MAT_FINAL_ASSEMBLY);
+
+    VecAssemblyBegin(A_global_rhs);
+    VecAssemblyEnd(A_global_rhs);
+
+    // === Create solver and solve the overall FETI system. ===
+
+    KSP ksp;
+    KSPCreate(PETSC_COMM_WORLD, &ksp);
+    KSPSetOperators(ksp, A_global, A_global, SAME_NONZERO_PATTERN);
+    KSPSetFromOptions(ksp);
+    KSPSolve(ksp, A_global_rhs, A_global_sol);
+
+    Vec u_b, u_primal;
+    VecDuplicate(f_b, &u_b);
+    VecDuplicate(f_primal, &u_primal);
+
+    vector<PetscInt> localBIndex, globalBIndex;
+    localBIndex.reserve(nRankB * nComponents);
+    globalBIndex.reserve(nRankB * nComponents);
+
+    for (int i = 0; i < nRankB * nComponents; i++) {
+      localBIndex.push_back(rStartB * nComponents + i);
+      globalBIndex.push_back(rStartB * nComponents + i);
+    }
+
+    IS localBIs, globalBIs;
+    ISCreateGeneral(PETSC_COMM_SELF,
+                    localBIndex.size(),
+                    &(localBIndex[0]),
+                    PETSC_USE_POINTER,
+                    &localBIs);
+    ISCreateGeneral(PETSC_COMM_SELF,
+                    globalBIndex.size(),
+                    &(globalBIndex[0]),
+                    PETSC_USE_POINTER,
+                    &globalBIs);
+
+    VecScatter vecscat;
+    VecScatterCreate(A_global_sol, globalBIs, u_b, localBIs, &vecscat);
+    VecScatterBegin(vecscat, A_global_sol, u_b, INSERT_VALUES, SCATTER_FORWARD);
+    VecScatterEnd(vecscat, A_global_sol, u_b, INSERT_VALUES, SCATTER_FORWARD);
+
+    ISDestroy(&localBIs);
+    ISDestroy(&globalBIs);
+    VecScatterDestroy(&vecscat);
+
+    localBIndex.resize(0);
+    globalBIndex.resize(0);
+    localBIndex.reserve(nRankPrimals * nComponents);
+    globalBIndex.reserve(nRankPrimals * nComponents);
+
+    for (int i = 0; i < nRankPrimals * nComponents; i++) {
+      localBIndex.push_back(rStartPrimals * nComponents + i);
+      globalBIndex.push_back(nOverallB * nComponents + rStartPrimals * nComponents + i);
+    }
+
+    ISCreateGeneral(PETSC_COMM_SELF,
+                    localBIndex.size(),
+                    &(localBIndex[0]),
+                    PETSC_USE_POINTER,
+                    &localBIs);
+    ISCreateGeneral(PETSC_COMM_SELF,
+                    globalBIndex.size(),
+                    &(globalBIndex[0]),
+                    PETSC_USE_POINTER,
+                    &globalBIs);
+
+    VecScatterCreate(A_global_sol, globalBIs, u_primal, localBIs, &vecscat);
+    VecScatterBegin(vecscat, A_global_sol, u_primal, INSERT_VALUES, SCATTER_FORWARD);
+    VecScatterEnd(vecscat, A_global_sol, u_primal, INSERT_VALUES, SCATTER_FORWARD);
+
+    ISDestroy(&localBIs);
+    ISDestroy(&globalBIs);
+    VecScatterDestroy(&vecscat);
+
+    recoverSolution(u_b, u_primal, vec);
+
+    KSPDestroy(&ksp);
+  }
+
+
   void PetscSolverFeti::solveReducedFetiMatrix(SystemVector &vec)
   {
     FUNCNAME("PetscSolverFeti::solveReducedFetiMatrix()");
@@ -1543,9 +1716,14 @@ namespace AMDiS {
     int debug = 0;
     Parameters::get("parallel->debug feti", debug);
 
-    solveReducedFetiMatrix(vec);
+    if (debug) {
+      solveFetiMatrix(vec);
+    } else {
+      // resetStatistics();
+      solveReducedFetiMatrix(vec);
+      // printStatistics();
+    }
 
     VtkWriter::writeFile(&vec, "test-before.vtu");
 
     MeshDistributor::globalMeshDistributor->synchVector(vec);
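Judging from the `Parameters::get` call above, the global debug solver is switched on through the init file; a minimal sketch of the relevant line (the surrounding init-file content is assumed, only the parameter name comes from the code):

```
parallel->debug feti: 1
```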
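The two scatter passes in `solveFetiMatrix()` above follow the same pattern: build matching local/global index sets, then pull one contiguous block of the monolithic solution into a smaller distributed vector. A self-contained sketch of that pattern, with illustrative names (`extractBlock`, `blockOffset`; the PETSc calls mirror the ones in the diff):

```cpp
#include <petscvec.h>
#include <vector>

// Copy the slice [blockOffset + localStart, blockOffset + localStart + nLocal)
// of a monolithic solution vector into this rank's part of a block vector.
void extractBlock(Vec globalSol, Vec blockVec,
                  PetscInt blockOffset, PetscInt localStart, PetscInt nLocal)
{
  std::vector<PetscInt> localIndex, globalIndex;
  localIndex.reserve(nLocal);
  globalIndex.reserve(nLocal);
  for (PetscInt i = 0; i < nLocal; i++) {
    localIndex.push_back(localStart + i);                // index in blockVec
    globalIndex.push_back(blockOffset + localStart + i); // index in globalSol
  }

  IS localIs, globalIs;
  ISCreateGeneral(PETSC_COMM_SELF, localIndex.size(), &(localIndex[0]),
                  PETSC_USE_POINTER, &localIs);
  ISCreateGeneral(PETSC_COMM_SELF, globalIndex.size(), &(globalIndex[0]),
                  PETSC_USE_POINTER, &globalIs);

  VecScatter vecscat;
  VecScatterCreate(globalSol, globalIs, blockVec, localIs, &vecscat);
  VecScatterBegin(vecscat, globalSol, blockVec, INSERT_VALUES, SCATTER_FORWARD);
  VecScatterEnd(vecscat, globalSol, blockVec, INSERT_VALUES, SCATTER_FORWARD);

  ISDestroy(&localIs);
  ISDestroy(&globalIs);
  VecScatterDestroy(&vecscat);
}
```

With this helper, the B block corresponds to `extractBlock(A_global_sol, u_b, 0, rStartB * nComponents, nRankB * nComponents)` and the primal block to `extractBlock(A_global_sol, u_primal, nOverallB * nComponents, rStartPrimals * nComponents, nRankPrimals * nComponents)`.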
@@ -69,11 +69,14 @@ namespace AMDiS {
     Mat *mat_lagrange;
 
     /// Temporal vectors on the B variables.
-    Vec tmp_vec_b0, tmp_vec_b1;
+    Vec tmp_vec_b;
 
     /// Temporal vector on the primal variables.
     Vec tmp_vec_primal;
 
+    /// Temporal vector on the lagrange variables.
+    Vec tmp_vec_lagrange;
+
     PetscSolverFeti *fetiSolver;
 
     /// Pointer to the solver of the schur complement on the primal variables.
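The `FetiData` struct above travels as the user context of a PETSc shell matrix; the `MatShellGetContext` call in the first hunk retrieves it inside the multiply callback. A minimal sketch of that wiring (the creation code is not part of this diff; the callback name `petscMultMatFeti` and the size arguments are assumptions):

```cpp
#include <petscmat.h>

// Multiply callback as in the first hunk of this commit (name assumed).
PetscErrorCode petscMultMatFeti(Mat mat, Vec x, Vec y);

// Wrap a FetiData context in a shell matrix whose MatMult applies the
// FETI-DP operator; nLocal/nGlobal are the local and global numbers of
// Lagrange multipliers times nComponents.
Mat createFetiShellMatrix(void *fetiData, PetscInt nLocal, PetscInt nGlobal)
{
  Mat mat_feti;
  MatCreateShell(PETSC_COMM_WORLD, nLocal, nLocal, nGlobal, nGlobal,
                 fetiData, &mat_feti);
  MatShellSetOperation(mat_feti, MATOP_MULT,
                       (void(*)(void)) petscMultMatFeti);
  return mat_feti;
}
```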
......@@ -239,6 +242,15 @@ namespace AMDiS {
Vec &vec_sol_primal,
SystemVector &vec);
/** \brief
* Solves the FETI-DP system globally, thus without reducing it to the
* Lagrange multipliers. This should be used for debugging only to test
* if the FETI-DP system is setup correctly.
*
* \param[out] vec Solution DOF vectors.
*/
void solveFetiMatrix(SystemVector &vec);
/** \brief
* Solves the FETI-DP system with reducing it first to the Lagrange
* multipliers. This is what one expects when using the FETI-DP methid :)
......
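For completeness: eliminating $u_{B}$ and $u_{\Pi}$ from the saddle point system shown earlier yields the reduced problem that `solveReducedFetiMatrix()` addresses, a system in the Lagrange multipliers alone (standard FETI-DP block elimination; the concrete right-hand side is not spelled out in this diff):

$$
\bigl( L K_{BB}^{-1} L^{T} + L K_{BB}^{-1} K_{B\Pi}\, S_{\Pi\Pi}^{-1}\, K_{\Pi B} K_{BB}^{-1} L^{T} \bigr)\, \lambda \;=\; d,
\qquad
S_{\Pi\Pi} = K_{\Pi\Pi} - K_{\Pi B} K_{BB}^{-1} K_{B\Pi},
$$

which is exactly the operator applied by the shell matrix in the first hunk.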