Commit 5671aaf5 authored by Thomas Witkowski

Fixed problem with memory usage in FETI-DP.

parent 915a3500
@@ -57,7 +57,7 @@ namespace AMDiS {
MatMult(data->subSolver->getMatCoarseInt(), data->tmp_vec_b, data->tmp_vec_primal);
KSPSolve(*(data->ksp_schur_primal), data->tmp_vec_primal, data->tmp_vec_primal);
MatMult(data->subSolver->getMatIntCoarse(), data->tmp_vec_primal, data->tmp_vec_b);
MatMult(data->subSolver->getMatIntCoarse(), data->tmp_vec_primal, data->tmp_vec_b);
data->subSolver->solveGlobal(data->tmp_vec_b, data->tmp_vec_b);
MatMult(*(data->mat_lagrange), data->tmp_vec_b, y);
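
Note: the hunk above is the matrix-free application of the FETI-DP Schur-primal operator, a chain of MatMult/KSPSolve/solveGlobal calls on scratch vectors. In PETSc such an operator is typically wired up as a shell matrix whose MATOP_MULT callback performs exactly this kind of chain. The following is only a hedged sketch of that pattern; the context struct and its member names are assumptions loosely modelled on the names in the diff, not the actual AMDiS types.

// Hedged sketch: a PETSc shell matrix whose multiplication routine chains
// MatMult/KSPSolve calls, in the spirit of the hunk above. Struct and
// member names are illustrative assumptions only.
#include <petscmat.h>
#include <petscksp.h>

struct SchurPrimalCtx {      // hypothetical bundle of operator pieces
  Mat coarseInt, intCoarse;  // coupling blocks A_{coarse,interior}, A_{interior,coarse}
  KSP kspInterior;           // solver for the interior block
  Vec tmpInterior;           // scratch vector in the interior space
};

static PetscErrorCode schurPrimalMult(Mat A, Vec x, Vec y)
{
  void *raw;
  MatShellGetContext(A, &raw);
  SchurPrimalCtx *ctx = static_cast<SchurPrimalCtx*>(raw);

  MatMult(ctx->intCoarse, x, ctx->tmpInterior);                    // apply A_{interior,coarse}
  KSPSolve(ctx->kspInterior, ctx->tmpInterior, ctx->tmpInterior);  // solve interior system in place
  MatMult(ctx->coarseInt, ctx->tmpInterior, y);                    // apply A_{coarse,interior}
  return 0;
}

// Creating the shell matrix (nLocal/nGlobal and comm are placeholders):
// Mat S;
// MatCreateShell(comm, nLocal, nLocal, nGlobal, nGlobal, &ctx, &S);
// MatShellSetOperation(S, MATOP_MULT, (void (*)(void)) schurPrimalMult);
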
@@ -1368,55 +1368,6 @@ namespace AMDiS {
subDomainSolver->solveGlobal(subDomainSolver->getRhsInterior(), tmp_b0);
MatMult(mat_lagrange, tmp_b0, vec_rhs);
#if 1
PetscViewer matview;
PetscViewerBinaryOpen(mpiComm, "mat_lag.dat", FILE_MODE_WRITE, &matview);
MatView(mat_lagrange, matview);
PetscViewerDestroy(&matview);
ParallelDebug::writeDebugFile(vec.getDOFVector(0)->getFeSpace(),
lagrangeMap,
"lag", "dat");
if (meshLevel == 1) {
MeshLevelData& levelData = meshDistributor->getMeshLevelData();
DofMap &m = localDofMap[vec.getDOFVector(0)->getFeSpace()].getMap();
int groupRowsInterior = 0;
if (levelData.getMpiComm(meshLevel).Get_rank() == 0)
groupRowsInterior = localDofMap[vec.getDOFVector(0)->getFeSpace()].nOverallDofs;
int rStart, nGlobal;
mpi::getDofNumbering(mpiComm, groupRowsInterior, rStart, nGlobal);
int tmp = 0;
if (levelData.getMpiComm(meshLevel).Get_rank() == 0)
tmp = rStart;
int a = 0;
levelData.getMpiComm(meshLevel).Allreduce(&tmp, &a, 1, MPI_INT, MPI_SUM);
for (DofMap::iterator it = m.begin(); it != m.end(); it++)
it->second.global += a;
} else {
int groupRowsInterior = localDofMap[vec.getDOFVector(0)->getFeSpace()].nRankDofs;
int rStart, nGlobal;
mpi::getDofNumbering(mpiComm, groupRowsInterior, rStart, nGlobal);
DofMap &m = localDofMap[vec.getDOFVector(0)->getFeSpace()].getMap();
for (DofMap::iterator it = m.begin(); it != m.end(); it++)
it->second.global = it->second.local + rStart;
}
ParallelDebug::writeDebugFile(vec.getDOFVector(0)->getFeSpace(),
localDofMap,
"interior", "dat");
// MPI::Finalize();
// exit(0);
#endif
// tmp_primal0 = M_PiB * inv(K_BB) * f_B
MatMult(subDomainSolver->getMatCoarseInt(), tmp_b0, tmp_primal0);
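
Note: the block removed by this hunk dumped mat_lagrange and per-rank DOF maps to disk through a binary PetscViewer purely for debugging. If such a dump is ever needed again, the matrix can be read back in a small standalone tool; the sketch below assumes the PETSc 3.x style MatLoad interface and reuses the file name from the removed code.

// Hedged sketch: reading back a matrix that was written with
// PetscViewerBinaryOpen(..., FILE_MODE_WRITE, ...) + MatView, as in the
// debug block removed above.
#include <petscmat.h>

int main(int argc, char **argv)
{
  PetscInitialize(&argc, &argv, NULL, NULL);

  PetscViewer viewer;
  PetscViewerBinaryOpen(PETSC_COMM_WORLD, "mat_lag.dat", FILE_MODE_READ, &viewer);

  Mat A;
  MatCreate(PETSC_COMM_WORLD, &A);
  MatLoad(A, viewer);                      // fill A from the binary dump
  PetscViewerDestroy(&viewer);

  MatView(A, PETSC_VIEWER_STDOUT_WORLD);   // quick inspection

  MatDestroy(&A);
  PetscFinalize();
  return 0;
}
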
@@ -1495,6 +1446,8 @@ namespace AMDiS {
destroySchurPrimalKsp();
destroyFetiKsp();
subDomainSolver->destroyMatrixData();
}
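
Note: this hunk touches the teardown path (destroySchurPrimalKsp, destroyFetiKsp, destroyMatrixData), which is where the memory-usage fix named in the commit message takes effect. The sketch below only illustrates the general PETSc cleanup pattern such destroy helpers follow; the struct and names are assumptions, not the AMDiS implementation.

// Hedged sketch: release every created PETSc object exactly once so that
// repeated solves do not accumulate memory. Names are illustrative.
#include <petscksp.h>

struct FetiScratch {          // hypothetical bundle of solver objects
  KSP kspSchurPrimal;
  Mat matSchurPrimal;
  Vec tmpPrimal, tmpInterior;
};

static void destroyFetiScratch(FetiScratch &s)
{
  KSPDestroy(&s.kspSchurPrimal);   // also drops the KSP's reference to its operator
  MatDestroy(&s.matSchurPrimal);
  VecDestroy(&s.tmpPrimal);
  VecDestroy(&s.tmpInterior);
}
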
@@ -41,14 +41,6 @@ namespace AMDiS {
tmp = rStartInterior;
mpiCommInterior.Allreduce(&tmp, &rStartInterior, 1, MPI_INT, MPI_SUM);
/*
MSG("COMM TEST: %d %d %d %d %d\n",
mpiCommInterior.Get_size(),
interiorMap->getRankDofs(),
interiorMap->getOverallDofs(),
nGlobalOverallInterior, rStartInterior);
*/
}
}
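
Note: the Allreduce in this hunk distributes the starting global index of each rank's block of interior DOFs. The same offset can be computed directly with an exclusive prefix sum; the sketch below uses the plain MPI C API (the surrounding code uses the C++ bindings) and illustrative variable names.

// Hedged sketch: computing the global start index of this rank's block of
// DOFs with MPI_Exscan, an alternative to the Allreduce trick used above.
#include <mpi.h>

int globalStartIndex(MPI_Comm comm, int nRankDofs)
{
  int rStart = 0;
  // Exclusive prefix sum: rank r receives the sum of nRankDofs over ranks < r.
  MPI_Exscan(&nRankDofs, &rStart, 1, MPI_INT, MPI_SUM, comm);

  int rank;
  MPI_Comm_rank(comm, &rank);
  if (rank == 0)
    rStart = 0;   // MPI_Exscan leaves the receive buffer undefined on rank 0
  return rStart;
}
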
@@ -268,52 +260,6 @@ namespace AMDiS {
MatView(matIntCoarse, matview);
PetscViewerDestroy(&matview);
}
// MPI::Finalize();
// exit(0);
#if 0
PetscViewer matview;
PetscViewerBinaryOpen(mpiCommCoarseSpace, "mat_primal.dat",
FILE_MODE_WRITE, &matview);
MatView(matCoarseCoarse, matview);
PetscViewerDestroy(&matview);
ParallelDebug::writeDebugFile(feSpaces[0],
*coarseSpaceMap,
"coarsespace", "dat");
#endif
#if 0
if (MPI::COMM_WORLD.Get_rank() == 4 ||
MPI::COMM_WORLD.Get_rank() == 5 ||
MPI::COMM_WORLD.Get_rank() == 6 ||
MPI::COMM_WORLD.Get_rank() == 7) {
PetscViewerBinaryOpen(mpiCommInterior, "mat_interior.dat",
FILE_MODE_WRITE, &matview);
MatView(matIntInt, matview);
PetscViewerDestroy(&matview);
ParallelDebug::writeDebugFile(feSpaces[0],
*interiorMap,
"interiorspace", "dat");
}
#endif
#if 0
if (MPI::COMM_WORLD.Get_rank() == 1) {
PetscViewerBinaryOpen(mpiCommInterior, "mat_interior.dat",
FILE_MODE_WRITE, &matview);
MatView(matIntInt, matview);
PetscViewerDestroy(&matview);
ParallelDebug::writeDebugFile(feSpaces[0],
*interiorMap,
"interiorspace", "dat");
}
#endif
}
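
Note: this hunk deletes several '#if 0' blocks that dumped coarse-space and interior matrices per rank. If a dump like this is wanted occasionally, it can be kept in the tree and gated at run time instead of by editing preprocessor guards; the sketch below uses an environment variable for portability, and the variable name AMDIS_DUMP_MATRICES is an assumption, not an existing flag.

// Hedged sketch: a matrix dump enabled at run time via an environment
// variable instead of an '#if 0' guard.
#include <cstdlib>
#include <petscmat.h>

static void maybeDumpMatrix(MPI_Comm comm, Mat A, const char *fileName)
{
  if (std::getenv("AMDIS_DUMP_MATRICES") == NULL)
    return;                                   // disabled by default

  PetscViewer viewer;
  PetscViewerBinaryOpen(comm, fileName, FILE_MODE_WRITE, &viewer);
  MatView(A, viewer);
  PetscViewerDestroy(&viewer);
}
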
@@ -351,58 +297,6 @@ namespace AMDiS {
VecAssemblyBegin(rhsInterior);
VecAssemblyEnd(rhsInterior);
#if 0
PetscViewer matview;
PetscViewerBinaryOpen(mpiCommCoarseSpace, "vec_interior.dat",
FILE_MODE_WRITE, &matview);
VecView(rhsInterior, matview);
PetscViewerDestroy(&matview);
#if 1
{
DofMap &m = (*interiorMap)[vec->getDOFVector(0)->getFeSpace()].getMap();
int groupRowsInterior = 0;
if (mpiCommInterior.Get_rank() == 0)
groupRowsInterior =
(*interiorMap)[vec->getDOFVector(0)->getFeSpace()].nOverallDofs;
int rStart, nGlobal;
mpi::getDofNumbering(mpiCommCoarseSpace, groupRowsInterior, rStart, nGlobal);
int tmp = 0;
if (mpiCommInterior.Get_rank() == 0)
tmp = rStart;
int a = 0;
mpiCommInterior.Allreduce(&tmp, &a, 1, MPI_INT, MPI_SUM);
for (DofMap::iterator it = m.begin(); it != m.end(); it++)
it->second.global += a;
}
#else
{
int groupRowsInterior = 0;
groupRowsInterior =
(*interiorMap)[vec->getDOFVector(0)->getFeSpace()].nOverallDofs;
int rStart, nGlobal;
mpi::getDofNumbering(mpiCommCoarseSpace, groupRowsInterior, rStart, nGlobal);
DofMap &m = (*interiorMap)[vec->getDOFVector(0)->getFeSpace()].getMap();
for (DofMap::iterator it = m.begin(); it != m.end(); it++)
it->second.global = it->second.local +
(*interiorMap)[vec->getDOFVector(0)->getFeSpace()].rStartDofs;
}
#endif
ParallelDebug::writeDebugFile(vec->getDOFVector(0)->getFeSpace(),
*interiorMap,
"interior", "dat");
exit(0);
#endif
}
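
Note: the last removed block dumped rhsInterior to a binary file right after assembly. A much cheaper sanity check after VecAssemblyBegin/End is to print the vector norm; the sketch below shows that pattern with an illustrative helper name.

// Hedged sketch: a cheap post-assembly sanity check (vector norm) that can
// stand in for a full binary dump like the one removed above.
#include <petscvec.h>

static void checkAssembledVec(Vec v, const char *label)
{
  VecAssemblyBegin(v);
  VecAssemblyEnd(v);

  PetscReal nrm;
  VecNorm(v, NORM_2, &nrm);
  PetscPrintf(PETSC_COMM_WORLD, "%s: ||v||_2 = %g\n", label, (double) nrm);
}
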