Commit f99a87c8 authored by Thomas Witkowski

Does not work, so do not check out this revision, but still have fun committing this mess of code.

parent a1d45889
@@ -557,7 +557,8 @@ namespace AMDiS {
for (std::set<int>::iterator it = dirichletDofs.begin();
it != dirichletDofs.end(); ++it)
#ifdef HAVE_PARALLEL_DOMAIN_AMDIS
if (dofMap->isRankDof(*it))
// if (dofMap->isRankDof(*it))
if (dofMap->isSet(*it))
#endif
ins[*it][*it] = 1.0;
}
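A note on the switch above (an assumption drawn from the mapping API used elsewhere in this diff, not stated in the commit): isRankDof() tests whether the current rank owns a DOF, while isSet() only tests whether the DOF is contained in the rank's mapping.

// Assumed semantics of the two guards (FeSpaceDofMap, hedged):
//   dofMap->isRankDof(d)  ->  d is in the mapping AND owned by this rank
//   dofMap->isSet(d)      ->  d is in the mapping (owned or not)
// so after this change every rank that knows the Dirichlet DOF writes the
// unit row ins[d][d] = 1.0, not only the owning rank.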
@@ -100,10 +100,10 @@ namespace AMDiS {
MSG("ndof = %d\n", ndof);
// space dimension
int ndim = 2;
int ndim = mesh->getDim();
// mesh dimension
int meshdim = 2;
int meshdim = mesh->getDim();
// global index of subdomain
int isub = meshDistributor->getMpiRank();
@@ -121,8 +121,10 @@ namespace AMDiS {
MSG("local nnods %d ndofs %d\n", nnods, ndofs);
int nVertices = mesh->getGeo(VERTEX);
// Length of array inet
int linet = nelems * 3;
int linet = nelems * nVertices;
// Local array with indices of nodes on each element
int inet[linet];
@@ -132,8 +134,8 @@ namespace AMDiS {
("Should not happen!\n");
int localElIndex = mapElIndex[elInfo->getElement()->getIndex()];
for (int i = 0; i < 3; i++)
inet[localElIndex * 3 + i] = elInfo->getElement()->getDof(i, 0);
for (int i = 0; i < nVertices; i++)
inet[localElIndex * nVertices + i] = elInfo->getElement()->getDof(i, 0);
elInfo = stack.traverseNext(elInfo);
}
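The connectivity array is laid out row-major with nVertices entries per element; a short sketch of the indexing used in the loop above:

// inet layout as filled above (row-major):
//   inet[e * nVertices + i] == DOF index of vertex i of local element e
// With nVertices == mesh->getGeo(VERTEX) this covers 2D and 3D meshes,
// replacing the previously hard-coded 3 vertices per (triangular) element.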
@@ -141,7 +143,7 @@ namespace AMDiS {
// local array of number of nodes per element
int nnet[nelems];
for (int i = 0; i < nelems; i++)
nnet[i] = 3;
nnet[i] = nVertices;
// local array with number of DOFs per node.
int nndf[nnods];
@@ -172,7 +174,7 @@ namespace AMDiS {
int lxyz1 = nnods;
int lxyz2 = 2;
int lxyz2 = mesh->getDim();
// local array with coordinates of nodes
double xyz[lxyz1 * lxyz2];
@@ -185,6 +187,9 @@ namespace AMDiS {
xyz[i * nnods + j] = coordDofs[j][i];
}
// === Fill for Dirichlet boundary conditions. ===
// local array of indices denoting Dirichlet boundary data
int ifix[ndofs];
for (int i = 0; i < ndofs; i++)
@@ -195,6 +200,20 @@ namespace AMDiS {
for (int i = 0; i < ndofs; i++)
fixv[i] = 0.0;
for (int i = 0; i < rhsVec->getSize(); i++) {
map<DegreeOfFreedom, double>& dcValues =
rhsVec->getDOFVector(i)->getDirichletValues();
for (map<DegreeOfFreedom, double>::iterator it = dcValues.begin();
it != dcValues.end(); ++it) {
int index = it->first * nComponents + i;
TEST_EXIT_DBG(index < ndofs)("Should not happen!\n");
ifix[index] = 1;
fixv[index] = it->second;
}
}
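// Worked example of the interleaved numbering used above: for DOF d and
// solution component i (0 <= i < nComponents) the index is
//   index = d * nComponents + i
// e.g. with nComponents == 2, the Dirichlet value of DOF 5 in component 1
// lands at index 5 * 2 + 1 == 11: ifix[11] = 1; fixv[11] = value.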
// local rhs data
double rhs[ndofs];
for (int i = 0; i < nComponents; i++) {
@@ -233,6 +252,15 @@ namespace AMDiS {
// Matrix is assembled
int is_assembled_int = 0;
// User constraints
double user_constraints = 0.0;
int luser_constraints1 = 0;
int luser_constraints2 = 0;
/*
string tmp="";
@@ -318,13 +346,18 @@ namespace AMDiS {
&(j_sparse[0]),
&(a_sparse[0]),
&la,
&is_assembled_int);
&is_assembled_int,
&user_constraints,
&luser_constraints1,
&luser_constraints2);
int use_defaults_int = 1;
int parallel_division_int = 1;
int use_arithmetic_int = 0;
int use_adaptive_int = 0;
int use_user_constraint_int = 0;
// MSG("call to \"bddcml_setup_preconditioner\" with the following arguments (each in one line):\n");
// MSG(" %d\n", matrixtype);
// MSG(" %d\n", use_defaults_int);
@@ -336,7 +369,8 @@ namespace AMDiS {
&use_defaults_int,
&parallel_division_int,
&use_arithmetic_int,
&use_adaptive_int);
&use_adaptive_int,
&use_user_constraint_int);
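// Note: the extended calls above pass an empty user-constraints block:
// user_constraints is a dummy scalar, both length arguments are 0, and
// use_user_constraint_int == 0, so no user constraints are active yet.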
int method = 1;
double tol = 1.e-6;
@@ -410,21 +444,11 @@ namespace AMDiS {
for (cursor_type cursor = begin<row>(dmat->getBaseMatrix()),
cend = end<row>(dmat->getBaseMatrix()); cursor != cend; ++cursor) {
int rowIndex =
dofMap[feSpace][*cursor].global * nComponents + ithRowComponent;
for (icursor_type icursor = begin<nz>(cursor), icend = end<nz>(cursor);
icursor != icend; ++icursor) {
int colIndex =
dofMap[feSpace][col(*icursor)].global * nComponents + ithColComponent;
double val = value(*icursor);
// i_sparse.push_back(rowIndex);
// j_sparse.push_back(colIndex);
i_sparse.push_back(*cursor * nComponents + ithRowComponent);
j_sparse.push_back(col(*icursor) * nComponents + ithColComponent);
a_sparse.push_back(val);
a_sparse.push_back(value(*icursor));
}
}
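After this change the sparse triplets are built from rank-local indices interleaved with the component index, replacing the commented-out globally mapped variants:

// i_sparse entry: *cursor       * nComponents + ithRowComponent
// j_sparse entry: col(*icursor) * nComponents + ithColComponent
// a_sparse entry: value(*icursor)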
@@ -476,7 +476,7 @@ namespace AMDiS {
// and to remove the periodic boundary conditions on these objects.
if (initialized) {
setRankDofs(probStat);
setRankDofs(probStat, dofMap);
removePeriodicBoundaryConditions(probStat);
}
}
@@ -731,7 +731,8 @@ namespace AMDiS {
}
void MeshDistributor::setRankDofs(ProblemStatSeq *probStat)
void MeshDistributor::setRankDofs(ProblemStatSeq *probStat,
ParallelDofMapping &rankDofMap)
{
FUNCNAME("MeshDistributor::setRankDofs()");
@@ -745,7 +746,7 @@ namespace AMDiS {
TEST_EXIT(find(feSpaces.begin(), feSpaces.end(), rowFeSpace) != feSpaces.end())
("Should not happen!\n");
probStat->getSystemMatrix(i, j)->setDofMap(dofMap[rowFeSpace]);
probStat->getSystemMatrix(i, j)->setDofMap(rankDofMap[rowFeSpace]);
}
@@ -760,8 +761,8 @@ namespace AMDiS {
TEST_EXIT(find(feSpaces.begin(), feSpaces.end(), feSpace) != feSpaces.end())
("Should not happen!\n");
probStat->getRhsVector(i)->setDofMap(dofMap[feSpace]);
probStat->getSolution(i)->setDofMap(dofMap[feSpace]);
probStat->getRhsVector(i)->setDofMap(rankDofMap[feSpace]);
probStat->getSolution(i)->setDofMap(rankDofMap[feSpace]);
}
}
@@ -769,7 +770,7 @@ namespace AMDiS {
void MeshDistributor::setRankDofs()
{
for (unsigned int i = 0; i < problemStat.size(); i++)
setRankDofs(problemStat[i]);
setRankDofs(problemStat[i], dofMap);
}
@@ -347,6 +347,14 @@ namespace AMDiS {
DofComm &dcom,
const FiniteElemSpace *feSpace);
/// Sets \ref isRankDof to all matrices and rhs vectors in a given
/// stationary problem.
void setRankDofs(ProblemStatSeq *probStat, ParallelDofMapping &rankDofMap);
/// Sets \ref isRankDof to all matrices and rhs vectors in all
/// stationary problems.
void setRankDofs();
protected:
void addProblemStat(ProblemStatSeq *probStat);
@@ -396,14 +404,6 @@ namespace AMDiS {
*/
bool checkAndAdaptBoundary(RankToBoundMap &allBound);
/// Sets \ref isRankDof to all matrices and rhs vectors in a given
/// stationary problem.
void setRankDofs(ProblemStatSeq *probStat);
/// Sets \ref isRankDof to all matrices and rhs vectors in all
/// stationary problems.
void setRankDofs();
/// Removes all periodic boundary condition information from all matrices and
/// vectors of all stationary problems and from the mesh itself.
void removePeriodicBoundaryConditions();
@@ -30,20 +30,6 @@ namespace AMDiS {
ProblemStatSeq::buildAfterCoarsen(adaptInfo, flag,
assembleMatrix,
assembleVector);
#if (DEBUG != 0)
double vm, rss;
processMemUsage(vm, rss);
vm /= 1024.0;
rss /= 1024.0;
MSG("My memory usage is VM = %.1f MB RSS = %.1f MB\n", vm, rss);
mpi::globalAdd(vm);
mpi::globalAdd(rss);
MSG("Overall memory usage is VM = %.1f MB RSS = %.1f MB\n", vm, rss);
#endif
}
@@ -76,6 +76,24 @@ namespace AMDiS {
}
void PetscProblemStat::buildAfterCoarsen(AdaptInfo *adaptInfo, Flag flag,
bool assembleMatrix,
bool assembleVector)
{
FUNCNAME("ParallelProblemStatBase::buildAfterCoarsen()");
TEST_EXIT_DBG(MeshDistributor::globalMeshDistributor != NULL)
("Should not happen!\n");
MeshDistributor::globalMeshDistributor->checkMeshChange();
MeshDistributor::globalMeshDistributor->setRankDofs(this,
petscSolver->getInteriorMap());
ProblemStatSeq::buildAfterCoarsen(adaptInfo, flag,
assembleMatrix,
assembleVector);
}
void PetscProblemStat::solve(AdaptInfo *adaptInfo,
bool createMatrixData,
bool storeMatrixData)
@@ -49,6 +49,10 @@ namespace AMDiS {
delete petscSolver;
}
void buildAfterCoarsen(AdaptInfo *adaptInfo, Flag flag,
bool assembleMatrix,
bool assembleVector);
void initialize(Flag initFlag,
ProblemStatSeq *adoptProblem = NULL,
Flag adoptFlag = INIT_NOTHING);
@@ -85,6 +85,11 @@ namespace AMDiS {
/// Destroys all vector data structures.
virtual void destroyVectorData() = 0;
virtual ParallelDofMapping &getInteriorMap()
{
return meshDistributor->getDofMap();
}
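// Default: the interior map is the distributor's global DOF mapping;
// PetscSolverFeti overrides getInteriorMap() further down in this commit
// to return its rank-local mapping localDofMap instead, which
// PetscProblemStat::buildAfterCoarsen() hands to setRankDofs().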
virtual Flag getBoundaryDofRequirement()
{
return 0;
@@ -316,6 +316,7 @@ namespace AMDiS {
DofIndexSet primals;
for (DofContainerSet::iterator it = vertices.begin();
it != vertices.end(); ++it) {
if (dirichletRows[feSpace].count(**it))
continue;
@@ -340,9 +341,9 @@ namespace AMDiS {
// === create local indices of the primals starting at zero. ===
for (DofIndexSet::iterator it = primals.begin(); it != primals.end(); ++it)
if (meshDistributor->getDofMap()[feSpace].isRankDof(*it))
if (meshDistributor->getDofMap()[feSpace].isRankDof(*it)) {
primalDofMap[feSpace].insertRankDof(*it);
else
} else
primalDofMap[feSpace].insertNonRankDof(*it);
}
@@ -767,7 +768,7 @@ namespace AMDiS {
MatCreateAIJ(mpiCommGlobal,
nRankEdges, lagrangeMap.getRankDofs(),
nOverallEdges, lagrangeMap.getOverallDofs(),
1, PETSC_NULL, 1, PETSC_NULL,
2, PETSC_NULL, 2, PETSC_NULL,
&mat_augmented_lagrange);
MatSetOption(mat_augmented_lagrange, MAT_NEW_NONZERO_ALLOCATION_ERR, PETSC_FALSE);
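// Preallocation note: 2 expected nonzeros per row in both the diagonal and
// off-diagonal block (was 1); with MAT_NEW_NONZERO_ALLOCATION_ERR set to
// PETSC_FALSE, PETSc also allocates on the fly if this estimate is too low.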
@@ -918,9 +919,11 @@ namespace AMDiS {
if (creationMode == 0) {
// matK = inv(A_BB) A_BPi
Mat matK;
MSG("START SOLVE!\n");
petsc_helper::blockMatMatSolve(subdomain->getSolver(),
subdomain->getMatInteriorCoarse(),
matK);
MSG("END SOLVE!\n");
// mat_schur_primal = A_PiPi - A_PiB inv(A_BB) A_BPi
@@ -1459,7 +1462,7 @@ namespace AMDiS {
}
void PetscSolverFeti::dbgMatrix()
void PetscSolverFeti::dbgMatrix(Matrix<DOFMatrix*> *mat)
{
FUNCNAME("PetscSolverFeti::dbgMatrix()");
@@ -1531,6 +1534,37 @@ namespace AMDiS {
PetscViewerDestroy(&petscView);
}
bool checkInteriorMatrix = false;
Parameters::get("parallel->debug->check interior matrix",
checkInteriorMatrix);
if (checkInteriorMatrix) {
int nZeroRows = PetscSolverFetiDebug::testZeroRows(subdomain->getMatInterior());
MSG("Interior matrix has %d zero rows!\n", nZeroRows);
}
bool printDirichlet = false;
Parameters::get("parallel->debug->print dirichlet information",
printDirichlet);
if (printDirichlet) {
int nComponents = mat->getSize();
for (int i = 0; i < nComponents; i++) {
DOFMatrix *seqMat = (*mat)[i][i];
if (!seqMat)
continue;
const FiniteElemSpace *feSpace = seqMat->getRowFeSpace();
TEST_EXIT(feSpace)("Should not happen!\n");
std::set<DegreeOfFreedom> &dirichletRows = seqMat->getDirichletRows();
for (std::set<DegreeOfFreedom>::iterator it = dirichletRows.begin();
it != dirichletRows.end(); ++it) {
if (localDofMap[feSpace].isSet(*it)) {
MSG("Dirichlet dof %d in component %d with interior mat index %d\n",
*it, i, localDofMap.getMatIndex(i, *it));
}
}
}
}
int writeCoarseMatrix = 0;
Parameters::get("parallel->debug->write coarse matrix",
@@ -1744,7 +1778,7 @@ namespace AMDiS {
subdomain->fillPetscMatrix(mat);
dbgMatrix();
dbgMatrix(mat);
if (printTimings) {
MPI::COMM_WORLD.Barrier();
@@ -1778,7 +1812,7 @@ namespace AMDiS {
createFetiKsp(feSpaces);
// === If required, run debug tests. ===
dbgMatrix();
dbgMatrix(mat);
}
@@ -163,7 +163,7 @@ namespace AMDiS {
/// In debug modes, this function runs some debug tests on the FETI
/// matrices. In optimized mode, nothing is done here.
void dbgMatrix();
void dbgMatrix(Matrix<DOFMatrix*> *mat);
/** \brief
* Recovers AMDiS solution vector from PETSc's solution vectors of the
@@ -236,6 +236,11 @@ namespace AMDiS {
/// order of: 1/h^2
void createH2vec(DOFVector<double> &vec);
virtual ParallelDofMapping &getInteriorMap()
{
return localDofMap;
}
protected:
/// Mapping from primal DOF indices to a global index of primals.
ParallelDofMapping primalDofMap;
@@ -529,4 +529,38 @@ namespace AMDiS {
VecDestroy(&rhsVec);
}
int PetscSolverFetiDebug::testZeroRows(Mat mat)
{
FUNCNAME("PetscSolverFetiDebug::testZeroRows()");
int nZeroRows = 0;
int firstIndex, lastIndex;
MatGetOwnershipRange(mat, &firstIndex, &lastIndex);
for (int i = firstIndex; i < lastIndex; i++) {
PetscInt nCols;
const PetscScalar *vals;
MatGetRow(mat, i, &nCols, PETSC_NULL, &vals);
bool nonZeroValues = false;
for (int j = 0; j < nCols; j++) {
if (vals[j] != 0.0) {
nonZeroValues = true;
break;
}
}
if (!nonZeroValues) {
MSG("Zero matrix row for global row index %d\n", i);
nZeroRows++;
}
MatRestoreRow(mat, i, &nCols, PETSC_NULL, &vals);
}
return nZeroRows;
}
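Usage as wired up earlier in this commit, in PetscSolverFeti::dbgMatrix() (the init-file key syntax shown in the comment is an assumption):

// Enable the check via the parameter
//   parallel->debug->check interior matrix: 1
// after which dbgMatrix() reports:
//   int nZeroRows = PetscSolverFetiDebug::testZeroRows(subdomain->getMatInterior());
//   MSG("Interior matrix has %d zero rows!\n", nZeroRows);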
}
@@ -127,6 +127,16 @@ namespace AMDiS {
*/
static void debugFeti(PetscSolverFeti &feti,
Vec dbgRhsVec);
/** \brief
* This function checks a PETSc matrix for zero rows. Each zero-row index
* is printed to the screen.
*
* \param[in] mat PETSc matrix which should be checked for zero rows.
*
* \return int Number of zero rows in the matrix.
*/
static int testZeroRows(Mat mat);
};
}