Commit 41a2b884 authored by Thomas Witkowski

Fixed some parallel bugs.

parent 11446cdf
......@@ -329,9 +329,17 @@ namespace AMDiS {
{
FUNCNAME("debug::printInfoByDof()");
WorldVector<double> coords;
feSpace->getMesh()->getDofIndexCoords(dof, feSpace, coords);
Element *el = getDofIndexElement(feSpace, dof);
Element *parEl = getLevel0ParentElement(feSpace->getMesh(), el);
if (coords.getSize() == 2)
MSG("[DBG] DOF-INFO: dof = %d coords: %e %e\n", dof, coords[0], coords[1]);
else
MSG("[DBG] DOF-INFO: dof = %d coords: %e %e %e\n", dof, coords[0], coords[1], coords[2]);
MSG("[DBG] DOF-INFO: dof = %d elidx = %d pelidx = %d\n",
dof, el->getIndex(), parEl->getIndex());
......@@ -341,6 +349,7 @@ namespace AMDiS {
while (elInfo) {
if (elInfo->getElement()->getIndex() == parEl->getIndex()) {
MSG("[DBG] EL INFO TO %d: type = %d\n", parEl->getIndex(), elInfo->getType());
break;
}
elInfo = stack.traverseNext(elInfo);
......
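For context, the break added above short-circuits a full mesh traversal as soon as the level-0 parent element is found. Below is a minimal stand-alone sketch of that search pattern; Element and TraverseStack are reduced to trivial stand-ins for the AMDiS classes from Traverse.h, and the index/type values are made up for the demo.

#include <cstdio>
#include <vector>

struct Element { int index; int type; };

// Stand-in for AMDiS's TraverseStack: yields elements one at a time.
struct TraverseStack {
  const std::vector<Element> *elements;
  size_t pos;
  const Element *traverseFirst(const std::vector<Element> &els)
  { elements = &els; pos = 0; return traverseNext(); }
  const Element *traverseNext()
  { return pos < elements->size() ? &(*elements)[pos++] : nullptr; }
};

int main()
{
  std::vector<Element> mesh = { {0, 1}, {7, 2}, {3, 0} };
  int parentIndex = 7; // stand-in for parEl->getIndex()

  TraverseStack stack;
  for (const Element *el = stack.traverseFirst(mesh); el; el = stack.traverseNext()) {
    if (el->index == parentIndex) {
      std::printf("[DBG] EL INFO TO %d: type = %d\n", el->index, el->type);
      break; // the added break: stop as soon as the parent is found
    }
  }
  return 0;
}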
......@@ -477,8 +477,6 @@ namespace AMDiS {
}
#if 0
// === Do the same for the periodic boundaries. ===
if (periodic.size() > 0) {
......@@ -533,8 +531,6 @@ namespace AMDiS {
}
}
} // periodicBoundary.boundary.size() > 0
#endif
}
......
......@@ -54,6 +54,8 @@ namespace AMDiS {
const std::set<int>& perAsc,
vector<int>& mappedDofs)
{
FUNCNAME("PeriodicMap::mapDof()");
mappedDofs.clear();
mappedDofs.push_back(globalDofIndex);
......@@ -61,13 +63,9 @@ namespace AMDiS {
it != perAsc.end(); ++it) {
int nDofs = static_cast<int>(mappedDofs.size());
for (int i = 0; i < nDofs; i++) {
TEST_EXIT_DBG(isPeriodic(feSpace, *it, mappedDofs[i]))
("Wrong periodic DOF associations at boundary %d with DOF %d!\n",
*it, mappedDofs[i]);
mappedDofs.push_back(map(feSpace, *it, mappedDofs[i]));
}
for (int i = 0; i < nDofs; i++)
if (isPeriodic(feSpace, *it, mappedDofs[i]))
mappedDofs.push_back(map(feSpace, *it, mappedDofs[i]));
}
}
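The fix above replaces a hard TEST_EXIT_DBG on every mapped DOF with a guard, so DOFs that are not periodic at a given boundary association are skipped instead of aborting the run. A self-contained sketch of the corrected expansion loop follows; periodicAssoc, isPeriodic, and mapDof are hypothetical stand-ins for the PeriodicMap members, and the DOF numbers are invented for the demo.

#include <cstdio>
#include <map>
#include <set>
#include <vector>

// Hypothetical stand-in: per-boundary periodic association, DOF -> mapped DOF.
static std::map<int, std::map<int, int> > periodicAssoc;

static bool isPeriodic(int boundary, int dof)
{
  std::map<int, std::map<int, int> >::const_iterator it = periodicAssoc.find(boundary);
  return it != periodicAssoc.end() && it->second.count(dof) > 0;
}

static int mapDof(int boundary, int dof)
{
  return periodicAssoc[boundary][dof];
}

// Expand a single DOF to all DOFs reachable through the given periodic
// associations. Only DOFs that really are periodic at a boundary get
// mapped; non-periodic DOFs are silently skipped (the old code aborted).
static void mapDofExpansion(int globalDofIndex, const std::set<int> &perAsc,
                            std::vector<int> &mappedDofs)
{
  mappedDofs.clear();
  mappedDofs.push_back(globalDofIndex);

  for (std::set<int>::const_iterator it = perAsc.begin(); it != perAsc.end(); ++it) {
    int nDofs = static_cast<int>(mappedDofs.size());
    for (int i = 0; i < nDofs; i++)
      if (isPeriodic(*it, mappedDofs[i]))
        mappedDofs.push_back(mapDof(*it, mappedDofs[i]));
  }
}

int main()
{
  // DOF 0 maps to 10 at boundary 1; both 0 and 10 map further at boundary 2.
  periodicAssoc[1][0] = 10;
  periodicAssoc[2][0] = 20;
  periodicAssoc[2][10] = 30;

  std::set<int> perAsc;
  perAsc.insert(1);
  perAsc.insert(2);

  std::vector<int> dofs;
  mapDofExpansion(0, perAsc, dofs);
  for (size_t i = 0; i < dofs.size(); i++)
    std::printf("%d\n", dofs[i]); // prints 0 10 20 30
  return 0;
}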
......@@ -84,6 +82,7 @@ namespace AMDiS {
for (std::set<int>::iterator it = perAsc.begin();
it != perAsc.end(); ++it) {
int nDofs = static_cast<int>(mappedDofs.size());
for (int i = 0; i < nDofs; i++) {
int perRowDof = 0;
if (isPeriodic(rowFeSpace, *it, mappedDofs[i].first))
......
......@@ -14,7 +14,7 @@
#include "parallel/PetscSolver.h"
#include "parallel/StdMpi.h"
#include "parallel/MpiHelper.h"
#include "parallel/ParallelDofMapping.h"
namespace AMDiS {
......@@ -24,7 +24,6 @@ namespace AMDiS {
: meshDistributor(NULL),
subdomainLevel(0),
interiorMap(NULL),
coarseSpaceMap(NULL),
mpiRank(-1),
kspPrefix(""),
removeRhsNullspace(false),
......@@ -43,7 +42,7 @@ namespace AMDiS {
void PetscSolver::setCoarseSpaceDofMapping(ParallelDofMapping *coarseDofs,
int component = -1)
int component)
{
FUNCNAME("PetscSolver::setCoarseSpaceDofMapping()");
......
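Background on this hunk: C++ forbids repeating a default argument in an out-of-class member definition once the in-class declaration already provides it, so the "= -1" may only live in the header. A minimal illustration with hypothetical names:

// header (sketch): the default argument belongs in the declaration.
struct PetscSolverSketch {
  void setCoarseSpaceDofMapping(int *coarseDofs, int component = -1);
};

// source file (sketch): repeating "= -1" here would be ill-formed,
// which is exactly what the hunk above removes.
void PetscSolverSketch::setCoarseSpaceDofMapping(int *coarseDofs, int component)
{
  (void) coarseDofs;
  (void) component;
}

int main()
{
  PetscSolverSketch s;
  int dofs = 0;
  s.setCoarseSpaceDofMapping(&dofs); // component defaults to -1
  return 0;
}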
......@@ -251,7 +251,7 @@ namespace AMDiS {
/// Parallel DOF mapping of the (optional) coarse space. Allows defining
/// different coarse spaces for different components.
map<int, ParallelDofMapping*> coarseSpaceMap;
std::map<int, ParallelDofMapping*> coarseSpaceMap;
int mpiRank;
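The member is now a std::map keyed by component index rather than a single pointer, which is why later hunks access coarseSpaceMap[0] instead of dereferencing coarseSpaceMap. A hedged sketch of the resulting usage; ParallelDofMapping is a placeholder struct here (the real class lives in parallel/ParallelDofMapping.h) and the DOF count is invented.

#include <cstdio>
#include <map>

struct ParallelDofMapping { int rankDofs; }; // placeholder

int main()
{
  std::map<int, ParallelDofMapping*> coarseSpaceMap;

  ParallelDofMapping primal = { 42 };
  coarseSpaceMap[0] = &primal;           // coarse space for component 0
  coarseSpaceMap[1] = coarseSpaceMap[0]; // components may share a space

  // Note: operator[] default-inserts a NULL pointer for missing keys, so
  // presence checks should use find()/count() rather than operator[].
  if (coarseSpaceMap.count(0))
    std::printf("component 0: %d rank DOFs\n", coarseSpaceMap[0]->rankDofs);
  return 0;
}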
......@@ -259,11 +259,8 @@ namespace AMDiS {
MPI::Intracomm mpiCommLocal;
vector<vector<Mat> >
/// PETSc's matrix structures.
//Mat matIntInt, matCoarseCoarse, matIntCoarse, matCoarseInt;
Mat matIntInt, matCoarseCoarse, matIntCoarse, matCoarseInt;
/// PETSc's vector structures for the rhs vector, the solution vector and a
/// temporary vector for calculating the final residual.
......
......@@ -409,7 +409,8 @@ namespace AMDiS {
("Should not happen!\n");
}
subdomain->setDofMapping(&localDofMap, &primalDofMap);
subdomain->setDofMapping(&localDofMap);
subdomain->setCoarseSpaceDofMapping(&primalDofMap);
if (printTimings) {
MPI::COMM_WORLD.Barrier();
......
......@@ -153,9 +153,12 @@ namespace AMDiS {
localMatrix);
if (coarseSpaceMap.size()) {
MSG("NO COARSE SPACE NNZ!\n");
/*
nnzCoarse.create(mat, mpiCommGlobal, *coarseSpaceMap, NULL, meshDistributor->getElementObjectDb());
nnzCoarseInt.create(mat, mpiCommGlobal, *coarseSpaceMap, *interiorMap, NULL, meshDistributor->getElementObjectDb());
nnzIntCoarse.create(mat, mpiCommGlobal, *interiorMap, *coarseSpaceMap, NULL, meshDistributor->getElementObjectDb());
*/
}
}
......@@ -173,8 +176,8 @@ namespace AMDiS {
if (coarseSpaceMap.size()) {
int nRowsRankCoarse = coarseSpaceMap->getRankDofs();
int nRowsOverallCoarse = coarseSpaceMap->getOverallDofs();
int nRowsRankCoarse = coarseSpaceMap[0]->getRankDofs();
int nRowsOverallCoarse = coarseSpaceMap[0]->getOverallDofs();
MatCreateAIJ(mpiCommGlobal,
nRowsRankCoarse, nRowsRankCoarse,
......@@ -369,8 +372,8 @@ namespace AMDiS {
if (coarseSpaceMap.size())
VecCreateMPI(mpiCommGlobal,
coarseSpaceMap->getRankDofs(),
coarseSpaceMap->getOverallDofs(),
coarseSpaceMap[0]->getRankDofs(),
coarseSpaceMap[0]->getOverallDofs(),
&rhsCoarseSpace);
TEST_EXIT_DBG(vec)("No DOF vector defined!\n");
......
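For reference, VecCreateMPI (like MatCreateAIJ above) takes the rank-local size first and the global size second, so passing coarseSpaceMap[0]->getRankDofs() and getOverallDofs() lays the coarse-space vector out by rank. The sketch below shows the call shape with a stand-in local size, assuming PETSc >= 3.2 (where VecDestroy takes Vec*); error checking is omitted for brevity.

#include <petscvec.h>

int main(int argc, char **argv)
{
  PetscInitialize(&argc, &argv, NULL, NULL);

  PetscInt nRankDofs = 10; // stand-in for coarseSpaceMap[0]->getRankDofs()
  Vec rhsCoarseSpace;

  // Local size given explicitly; PETSC_DETERMINE lets PETSc sum the
  // per-rank sizes into the global size (the diff passes both explicitly).
  VecCreateMPI(PETSC_COMM_WORLD, nRankDofs, PETSC_DETERMINE, &rhsCoarseSpace);

  PetscInt nGlobal;
  VecGetSize(rhsCoarseSpace, &nGlobal);
  PetscPrintf(PETSC_COMM_WORLD, "global size: %d\n", (int) nGlobal);

  VecDestroy(&rhsCoarseSpace);
  PetscFinalize();
  return 0;
}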