
Commit a125ff82 authored by Thomas Witkowski

Blub, go home.

parent 0e951559
@@ -313,6 +313,11 @@ namespace AMDiS {
      needMatIndexFromGlobal = global;
    }

    inline bool isMatIndexFromGlobal()
    {
      return needMatIndexFromGlobal;
    }

    /// Access the DOF mapping for a given FE space.
    inline FeSpaceDofMap& operator[](const FiniteElemSpace* feSpace)
    {
@@ -436,6 +441,11 @@ namespace AMDiS {
    /// mapping from DOF indices to matrix row indices is defined on local
    /// or global DOF indices. If true, the mapping is specified and used on
    /// global DOF indices, otherwise on local ones.
    /// In most scenarios the mapping stored on local DOF indices is what we
    /// want to have. Only when periodic boundary conditions are used together
    /// with some global matrix approach must the matrix indices also be stored
    /// for DOFs that are not part of the local subdomain. In that case the
    /// mapping is stored on global DOF indices.
    bool needMatIndexFromGlobal;

    /// Maps from FE space pointers to DOF mappings.
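For orientation, a minimal sketch (not part of the commit) of how the new accessor is meant to be consulted when a DOF has to be translated into a PETSc matrix row index. isMatIndexFromGlobal() and getMatIndex() are the members shown in this diff; the template parameter and the helper name are placeholders.

// Sketch only: pick the global or the local DOF index depending on how the
// DOF-to-matrix-index mapping was built. "DofMap" stands for the parallel
// DOF mapping class extended above; matRowIndex() is a hypothetical helper.
template <typename DofMap>
int matRowIndex(DofMap& dofMap, int component, int localDof, int globalDof)
{
  return dofMap.isMatIndexFromGlobal()
    ? dofMap.getMatIndex(component, globalDof)   // periodic BCs + global matrix
    : dofMap.getMatIndex(component, localDof);   // the common, purely local case
}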
@@ -65,12 +65,6 @@ namespace AMDiS {
    VecCreateMPI(mpiCommGlobal, nRankRows, nOverallRows, &petscSolVec);

    int testddd = 1;
    Parameters::get("block size", testddd);
    if (testddd > 1)
      VecSetBlockSize(petscSolVec, testddd);

    // === Create PETSc matrix with the computed nnz data structure. ===
@@ -78,11 +72,6 @@ namespace AMDiS {
                     nOverallRows, nOverallRows,
                     0, d_nnz, 0, o_nnz, &matIntInt);

    if (testddd > 1) {
      MatSetBlockSize(matIntInt, testddd);
      MSG("MAT SET BLOCK SIZE: %d\n", testddd);
    }

#if (DEBUG != 0)
    MSG("Fill petsc matrix 1 needed %.5f seconds\n", MPI::Wtime() - wtime);
#endif
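The two argument lines kept above belong to the creation of the parallel matrix from the precomputed per-row nonzero counts d_nnz (diagonal block) and o_nnz (off-diagonal block). The call itself is cut off in this hunk, so purely as an illustration, this is the shape such a preallocated creation takes with PETSc's MatCreateAIJ; the exact function used in the source is not visible here, and the wrapper is hypothetical.

#include <petscmat.h>

// Illustration only: create a distributed AIJ matrix with exact per-row
// preallocation, matching the argument pattern visible in the hunk above
// (local sizes, global sizes, then the d_nnz/o_nnz arrays).
static PetscErrorCode createPreallocatedMatrix(MPI_Comm comm,
                                               PetscInt nRankRows, PetscInt nOverallRows,
                                               const PetscInt d_nnz[], const PetscInt o_nnz[],
                                               Mat *matIntInt)
{
  return MatCreateAIJ(comm,
                      nRankRows, nRankRows,        // locally owned rows/cols
                      nOverallRows, nOverallRows,  // global rows/cols
                      0, d_nnz,                    // diagonal-block nnz per row
                      0, o_nnz,                    // off-diagonal-block nnz per row
                      matIntInt);
}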
@@ -328,33 +317,40 @@ namespace AMDiS {
  {
    FUNCNAME("PetscSolverGlobalMatrix::fillPetscRhs()");

    if (coarseSpaceMap) {
      fillPetscRhsWithCoarseSpace(vec);
      return;
    }

    VecCreateMPI(mpiCommGlobal,
                 interiorMap->getRankDofs(),
                 nGlobalOverallInterior,
                 &rhsInterior);

    if (coarseSpaceMap)
      VecCreateMPI(mpiCommGlobal,
                   coarseSpaceMap->getRankDofs(),
                   coarseSpaceMap->getOverallDofs(),
                   &rhsCoarseSpace);

    TEST_EXIT_DBG(vec)("No DOF vector defined!\n");
    TEST_EXIT_DBG(interiorMap)("No parallel DOF map defined!\n");

    int nRankRows = interiorMap->getRankDofs();
    int nOverallRows = interiorMap->getOverallDofs();

    VecCreateMPI(mpiCommGlobal, nRankRows, nOverallRows, &rhsInterior);

    int testddd = 1;
    Parameters::get("block size", testddd);
    if (testddd > 1)
      VecSetBlockSize(rhsInterior, testddd);

    if (coarseSpaceMap) {
      fillPetscRhsWithCoarseSpace(vec);
    } else {
      // === Transfer values from DOF vector to the PETSc vector. ===
      for (int i = 0; i < vec->getSize(); i++)
        setDofVector(rhsInterior, vec->getDOFVector(i), i);
    }

    VecAssemblyBegin(rhsInterior);
    VecAssemblyEnd(rhsInterior);

    if (coarseSpaceMap) {
      VecAssemblyBegin(rhsCoarseSpace);
      VecAssemblyEnd(rhsCoarseSpace);
    }

    if (removeRhsNullSpace) {
      TEST_EXIT_DBG(coarseSpaceMap == NULL)("Not supported!\n");

      MSG("Remove constant null space from the RHS!\n");
      MatNullSpace sp;
      MatNullSpaceCreate(mpiCommGlobal, PETSC_TRUE, 0, PETSC_NULL, &sp);
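The reworked fillPetscRhs() follows the standard PETSc lifecycle for a distributed vector: create it from the rank-local and global sizes, add the entries, then assemble so that contributions added with ADD_VALUES for off-rank rows are exchanged. A minimal self-contained sketch of that lifecycle, using plain PETSc calls and a placeholder value instead of the AMDiS DOF vectors:

#include <petscvec.h>

// Sketch of the create / fill / assemble pattern used by fillPetscRhs().
static PetscErrorCode buildRhs(MPI_Comm comm, PetscInt nRankRows, PetscInt nOverallRows,
                               Vec *rhs)
{
  VecCreateMPI(comm, nRankRows, nOverallRows, rhs);

  PetscInt first, last;
  VecGetOwnershipRange(*rhs, &first, &last);
  for (PetscInt i = first; i < last; ++i) {
    PetscScalar value = 1.0;                 // placeholder for a DOF vector entry
    VecSetValue(*rhs, i, value, ADD_VALUES); // same insert mode as setDofVector() below
  }

  // Finish assembly; required before the vector can be used by the solver.
  VecAssemblyBegin(*rhs);
  VecAssemblyEnd(*rhs);
  return 0;
}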
@@ -368,18 +364,6 @@ namespace AMDiS {
  {
    FUNCNAME("SubDomainSolver::fillPetscRhs()");

    VecCreateMPI(mpiCommGlobal,
                 interiorMap->getRankDofs(),
                 nGlobalOverallInterior,
                 &rhsInterior);

    if (coarseSpaceMap)
      VecCreateMPI(mpiCommGlobal,
                   coarseSpaceMap->getRankDofs(),
                   coarseSpaceMap->getOverallDofs(),
                   &rhsCoarseSpace);

    for (int i = 0; i < vec->getSize(); i++) {
      const FiniteElemSpace *feSpace = vec->getDOFVector(i)->getFeSpace();
      DOFVector<double>::Iterator dofIt(vec->getDOFVector(i), USED_DOFS);
@@ -394,14 +378,6 @@ namespace AMDiS {
        }
      }
    }

    VecAssemblyBegin(rhsInterior);
    VecAssemblyEnd(rhsInterior);

    if (coarseSpaceMap) {
      VecAssemblyBegin(rhsCoarseSpace);
      VecAssemblyEnd(rhsCoarseSpace);
    }
  }
@@ -748,23 +724,26 @@ namespace AMDiS {
      DegreeOfFreedom globalRowDof =
        (*interiorMap)[feSpace][dofIt.getDOFIndex()].global;

      // Get PETSc's mat index of the row DOF.
      int index = interiorMap->getMatIndex(nRowVec, globalRowDof);
      int index = 0;
      if (interiorMap->isMatIndexFromGlobal())
        index = interiorMap->getMatIndex(nRowVec, globalRowDof);
      else
        index = interiorMap->getMatIndex(nRowVec, dofIt.getDOFIndex());

      if (perMap.isPeriodic(feSpace, globalRowDof)) {
        std::set<int>& perAsc = perMap.getAssociations(feSpace, globalRowDof);
        double value = *dofIt / (perAsc.size() + 1.0);
        VecSetValues(petscVec, 1, &index, &value, ADD_VALUES);
        VecSetValue(petscVec, index, value, ADD_VALUES);

        for (std::set<int>::iterator perIt = perAsc.begin();
             perIt != perAsc.end(); ++perIt) {
          int mappedDof = perMap.map(feSpace, *perIt, globalRowDof);
          int mappedIndex = interiorMap->getMatIndex(nRowVec, mappedDof);
          VecSetValues(petscVec, 1, &mappedIndex, &value, ADD_VALUES);
          VecSetValue(petscVec, mappedIndex, value, ADD_VALUES);
        }
      } else {
        // The DOF index is not periodic.
        double value = *dofIt;
        VecSetValues(petscVec, 1, &index, &value, ADD_VALUES);
        VecSetValue(petscVec, index, *dofIt, ADD_VALUES);
      }
    }
  }
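Two things change in this last hunk: single entries are now written with VecSetValue instead of a one-element VecSetValues call (same semantics), and for a periodic row DOF the contribution is split evenly over the primary matrix index and its periodic associations. The weight 1/(perAsc.size() + 1) means the shares distributed over the DOF's own row and its periodic copies sum back to the original value after ADD_VALUES assembly. A tiny standalone check of that arithmetic; the value and the association set are made up:

#include <cassert>
#include <cmath>
#include <iostream>
#include <set>

int main()
{
  double dofValue = 3.0;                // stands in for *dofIt
  std::set<int> perAsc = {7, 11, 42};   // hypothetical periodic associations

  // One share for the DOF itself plus one per periodic association.
  double share = dofValue / (perAsc.size() + 1.0);
  double assembled = share * (perAsc.size() + 1);  // total added across all copies

  assert(std::fabs(assembled - dofValue) < 1e-12);
  std::cout << "each copy receives " << share
            << ", total distributed value " << assembled << "\n";
  return 0;
}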