Commit 06dc55fb authored by Thomas Witkowski

Removed UMFPACK library from AMDiS.

parent 7841be47
@@ -332,7 +332,7 @@ namespace AMDiS {
}
void processMemUsage(double& vm_usage, double& resident_set)
void processMemUsage(double& vm_usage, double& resident_set, bool inMegaByte)
{
using std::ios_base;
using std::ifstream;
@@ -361,8 +361,13 @@ namespace AMDiS {
// in case x86-64 is configured to use 2MB pages
long page_size_kb = sysconf(_SC_PAGE_SIZE) / 1024;
vm_usage = vsize / 1024.0;
vm_usage = vsize / 1024.0;
resident_set = rss * page_size_kb;
if (inMegaByte) {
vm_usage /= 1024.0;
resident_set /= 1024.0;
}
}
......
@@ -109,7 +109,7 @@ namespace AMDiS {
void waitSec(int seconds);
void processMemUsage(double& vm_usage, double& resident_set);
void processMemUsage(double& vm_usage, double& resident_set, bool inMegaByte = true);
/// Content comparison of two pointers. Used e.g. for find_if
template<typename T>
......
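The two hunks above give `processMemUsage` an `inMegaByte` flag that defaults to `true`, so callers now receive both values already converted to megabytes instead of dividing the old kilobyte results themselves. A minimal usage sketch, assuming the declaration is available through the AMDiS `Global.h` header and that the surrounding code provides the usual `FUNCNAME`/`MSG` logging macros; the helper name `logMemoryUsage` is illustrative, not part of AMDiS:

```cpp
#include "Global.h"   // assumed location of the processMemUsage declaration

void logMemoryUsage()  // illustrative helper, not AMDiS API
{
  FUNCNAME("logMemoryUsage()");

  double vm = 0.0, rss = 0.0;

  // Default: both values are already converted to megabytes.
  AMDiS::processMemUsage(vm, rss);
  MSG("My memory usage is VM = %.1f MB RSS = %.1f MB\n", vm, rss);

  // Passing false restores the previous behaviour (values in kilobytes).
  AMDiS::processMemUsage(vm, rss, false);
}
```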
@@ -1213,18 +1213,19 @@ namespace AMDiS {
Parameters::get(name + "->check", check);
Parameters::get(name + "->preserve coarse dofs", preserveCoarseDOFs);
if (macroFilename.length()) {
// In parallel computations, check if a finer macro mesh is required.
TEST_EXIT(macroFilename.length())
("No mesh defined for parameter %s->macro file name !\n", name.c_str());
// In parallel computations, check if a finer macro mesh is required.
#ifdef HAVE_PARALLEL_DOMAIN_AMDIS
checkParallelMacroFile(macroFilename, periodicFilename, check);
checkParallelMacroFile(macroFilename, periodicFilename, check);
#endif
macroFileInfo =
MacroReader::readMacro(macroFilename, this, periodicFilename, check);
macroFileInfo =
MacroReader::readMacro(macroFilename, this, periodicFilename, check);
if (!valueFilename.length())
clearMacroFileInfo();
}
if (!valueFilename.length())
clearMacroFileInfo();
initialized = true;
}
@@ -1251,11 +1252,18 @@ namespace AMDiS {
localAdmin->setNumberOfDofs(admin[0]->getNumberOfDofs());
testMesh.addDOFAdmin(localAdmin);
MSG("START READ FILE %s\n", macroFilename);
MacroInfo *testMacroInfo =
MacroReader::readMacro(macroFilename, &testMesh, periodicFilename, check);
testMacroInfo->clear();
delete testMacroInfo;
MSG("TEST MESH HAS %d ELEMENTS\n", testMesh.getNumberOfMacros());
MPI::Finalize();
exit(0);
// === Check the mesh structure. ===
......
@@ -25,6 +25,8 @@ namespace AMDiS {
int nElementsPerBlock = (mesh->getDim() == 2 ? 2 : 6);
MSG("ELS_PER_BLOCK = %d\n", nElementsPerBlock);
TraverseStack stack;
ElInfo *elInfo = stack.traverseFirst(mesh, 0, Mesh::CALL_EL_LEVEL);
while (elInfo) {
@@ -33,6 +35,12 @@ namespace AMDiS {
int elInRank = elIndex / nElementsPerBlock;
TEST_EXIT_DBG(elInRank < mpiSize)("Should not happen!\n");
if (elInRank == mpiRank) {
MSG("EL %d IS MY!\n", elIndex);
} else {
MSG("EL %d IS IN RANK %d\n", elIndex, elInRank);
}
elementInRank[elIndex] = (elInRank == mpiRank);
partitionMap[elIndex] = elInRank;
......
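The partitioning hunk above assigns each macro element to a rank in contiguous blocks of `nElementsPerBlock` elements (2 in 2D, 6 in 3D) and fills `elementInRank` and `partitionMap` accordingly. A standalone sketch of that mapping; the function name `buildBlockPartition`, its argument list, and the plain `std::map` return type are assumptions made for illustration, not AMDiS interfaces:

```cpp
#include <cassert>
#include <map>

// Block-wise initial partition: element i belongs to rank i / nElementsPerBlock,
// so every rank owns one contiguous block of macro elements.
std::map<int, int> buildBlockPartition(int nMacroElements, int dim, int mpiSize)
{
  int nElementsPerBlock = (dim == 2 ? 2 : 6);
  std::map<int, int> partitionMap;

  for (int elIndex = 0; elIndex < nMacroElements; elIndex++) {
    int elInRank = elIndex / nElementsPerBlock;
    // Mirrors TEST_EXIT_DBG above: there must be at least as many ranks as blocks.
    assert(elInRank < mpiSize);
    partitionMap[elIndex] = elInRank;
  }

  return partitionMap;
}
```

Under this rule a rank's local elements are exactly those with `elIndex / nElementsPerBlock == mpiRank`, which is the condition stored in `elementInRank` above.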
@@ -70,16 +70,11 @@ namespace AMDiS {
double wtime = MPI::Wtime();
double vm, rss;
processMemUsage(vm, rss);
vm /= 1024.0;
rss /= 1024.0;
processMemUsage(vm, rss);
MSG("STAGE 1\n");
MSG("My memory usage is VM = %.1f MB RSS = %.1f MB\n", vm, rss);
MSG("My memory usage is VM = %.1f MB RSS = %.1f MB\n", vm, rss);
mpi::globalAdd(vm);
mpi::globalAdd(rss);
MSG("Overall memory usage is VM = %.1f MB RSS = %.1f MB\n", vm, rss);
petscSolver->setMeshDistributor(meshDistributor);
@@ -87,28 +82,20 @@ namespace AMDiS {
processMemUsage(vm, rss);
vm /= 1024.0;
rss /= 1024.0;
MSG("STAGE 2\n");
MSG("My memory usage is VM = %.1f MB RSS = %.1f MB\n", vm, rss);
MSG("My memory usage is VM = %.1f MB RSS = %.1f MB\n", vm, rss);
mpi::globalAdd(vm);
mpi::globalAdd(rss);
MSG("Overall memory usage is VM = %.1f MB RSS = %.1f MB\n", vm, rss);
petscSolver->solvePetscMatrix(*solution, adaptInfo);
processMemUsage(vm, rss);
vm /= 1024.0;
rss /= 1024.0;
MSG("STAGE 3\n");
MSG("My memory usage is VM = %.1f MB RSS = %.1f MB\n", vm, rss);
MSG("My memory usage is VM = %.1f MB RSS = %.1f MB\n", vm, rss);
mpi::globalAdd(vm);
mpi::globalAdd(rss);
MSG("Overall memory usage is VM = %.1f MB RSS = %.1f MB\n", vm, rss);
......
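Since the kilobyte-to-megabyte conversion now happens inside `processMemUsage`, each of the three logging stages above reduces to the same pattern: query the per-rank values, print them, then sum them over all ranks with `mpi::globalAdd`. A condensed sketch of that pattern, assuming it lives inside the AMDiS namespace like the solver code above; the helper name `printMemoryUsage` and its `stage` argument are illustrative only:

```cpp
void printMemoryUsage(int stage)  // illustrative helper, not AMDiS API
{
  FUNCNAME("printMemoryUsage()");

  double vm = 0.0, rss = 0.0;
  processMemUsage(vm, rss);   // per-rank values, already in MB

  MSG("STAGE %d\n", stage);
  MSG("My memory usage is VM = %.1f MB RSS = %.1f MB\n", vm, rss);

  // Accumulate the per-rank values over all ranks for the overall footprint.
  mpi::globalAdd(vm);
  mpi::globalAdd(rss);
  MSG("Overall memory usage is VM = %.1f MB RSS = %.1f MB\n", vm, rss);
}
```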
@@ -856,6 +856,10 @@ namespace AMDiS {
{
FUNCNAME("PetscSolverFeti::fillPetscMatrix()");
// double vm, mem;
// processMemUsage(vm, mem);
// MSG("MEM INFO 1 = %f\n", mem);
nComponents = vec->getSize();
// === Create all sets and indices. ===
@@ -874,45 +878,48 @@ namespace AMDiS {
MatCreateMPIAIJ(PETSC_COMM_WORLD,
nRowsRankB, nRowsRankB, nRowsOverallB, nRowsOverallB,
100, PETSC_NULL, 100, PETSC_NULL, &mat_b_b);
30, PETSC_NULL, 0, PETSC_NULL, &mat_b_b);
MatCreateMPIAIJ(PETSC_COMM_WORLD,
nRowsRankPrimal, nRowsRankPrimal,
nRowsOverallPrimal, nRowsOverallPrimal,
10, PETSC_NULL, 10, PETSC_NULL, &mat_primal_primal);
30, PETSC_NULL, 30, PETSC_NULL, &mat_primal_primal);
MatCreateMPIAIJ(PETSC_COMM_WORLD,
nRowsRankB, nRowsRankPrimal,
nRowsOverallB, nRowsOverallPrimal,
100, PETSC_NULL, 100, PETSC_NULL, &mat_b_primal);
30, PETSC_NULL, 30, PETSC_NULL, &mat_b_primal);
MatCreateMPIAIJ(PETSC_COMM_WORLD,
nRowsRankPrimal, nRowsRankB,
nRowsOverallPrimal, nRowsOverallB,
100, PETSC_NULL, 100, PETSC_NULL, &mat_primal_b);
30, PETSC_NULL, 30, PETSC_NULL, &mat_primal_b);
// === Create matrices for FETI-DP preconditioner. ===
if (fetiPreconditioner != FETI_NONE)
MatCreateSeqAIJ(PETSC_COMM_SELF,
nRowsDual, nRowsDual, 100, PETSC_NULL,
nRowsDual, nRowsDual, 30, PETSC_NULL,
&mat_duals_duals);
if (fetiPreconditioner == FETI_DIRICHLET) {
MatCreateSeqAIJ(PETSC_COMM_SELF,
nRowsInterior, nRowsInterior, 100, PETSC_NULL,
nRowsInterior, nRowsInterior, 30, PETSC_NULL,
&mat_interior_interior);
MatCreateSeqAIJ(PETSC_COMM_SELF,
nRowsInterior, nRowsDual, 100, PETSC_NULL,
nRowsInterior, nRowsDual, 30, PETSC_NULL,
&mat_interior_duals);
MatCreateSeqAIJ(PETSC_COMM_SELF,
nRowsDual, nRowsInterior, 100, PETSC_NULL,
nRowsDual, nRowsInterior, 30, PETSC_NULL,
&mat_duals_interior);
}
// processMemUsage(vm, mem);
// MSG("MEM INFO 2 = %f\n", mem);
// === Prepare traverse of sequentially created matrices. ===
@@ -1121,7 +1128,9 @@ namespace AMDiS {
}
}
}
// processMemUsage(vm, mem);
// MSG("MEM INFO 3 = %f\n", mem);
// === Start global assembly procedure. ===
@@ -1137,6 +1146,8 @@ namespace AMDiS {
MatAssemblyBegin(mat_primal_b, MAT_FINAL_ASSEMBLY);
MatAssemblyEnd(mat_primal_b, MAT_FINAL_ASSEMBLY);
// processMemUsage(vm, mem);
// MSG("MEM INFO 4 = %f\n", mem);
// === Start global assembly procedure for preconditioner matrices. ===
@@ -1157,6 +1168,9 @@ namespace AMDiS {
}
// processMemUsage(vm, mem);
// MSG("MEM INFO 5 = %f\n", mem);
// === Create and fill PETSc's right hand side vectors. ===
VecCreate(PETSC_COMM_WORLD, &f_b);
@@ -1196,20 +1210,31 @@ namespace AMDiS {
VecAssemblyBegin(f_primal);
VecAssemblyEnd(f_primal);
// processMemUsage(vm, mem);
// MSG("MEM INFO 6 = %f\n", mem);
// === Create and fill PETSc matrix for Lagrange constraints. ===
createMatLagrange();
// processMemUsage(vm, mem);
// MSG("MEM INFO 7 = %f\n", mem);
// === Create PETSc solver for the Schur complement on primal variables. ===
createSchurPrimalKsp();
// processMemUsage(vm, mem);
// MSG("MEM INFO 8 = %f\n", mem);
// === Create PETSc solver for the FETI-DP operator. ===
createFetiKsp();
// processMemUsage(vm, mem);
// MSG("MEM INFO 9 = %f\n", mem);
}
......
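The preallocation hunks above lower the per-row estimates passed to `MatCreateMPIAIJ` and `MatCreateSeqAIJ` for the FETI-DP matrices from 100 (or 10) nonzeros to 30, and for `mat_b_b` drop the off-diagonal preallocation to 0. In the `MatCreateMPIAIJ` calls, the two local sizes are followed by the two global sizes and then two (count, per-row array) pairs: preallocation for the diagonal block (columns owned by the same rank) and for the off-diagonal block (columns owned by other ranks). A commented sketch of the `mat_b_b` creation with the values introduced here; the wrapper function is purely illustrative:

```cpp
#include <petscmat.h>

// Illustrative wrapper around the mat_b_b creation shown above.
Mat createMatBB(PetscInt nRowsRankB, PetscInt nRowsOverallB)
{
  Mat mat_b_b;
  MatCreateMPIAIJ(PETSC_COMM_WORLD,
                  nRowsRankB, nRowsRankB,        // local rows / local columns
                  nRowsOverallB, nRowsOverallB,  // global rows / global columns
                  30, PETSC_NULL,                // d_nz: 30 nonzeros per row in the diagonal block
                  0, PETSC_NULL,                 // o_nz: nothing preallocated in the off-diagonal block
                  &mat_b_b);
  return mat_b_b;
}
```

If a row ends up needing more entries than preallocated, PETSc allocates additional memory during assembly; the matrix stays correct, but assembly becomes slower, so these estimates mainly trade peak memory against assembly time.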