Commit 2e22f957 authored by Thomas Witkowski

PETSc global block solver should now work correctly.

parent ac22232d
......@@ -78,7 +78,7 @@ namespace AMDiS {
partitioner(NULL),
nRankDofs(0),
nOverallDofs(0),
rstart(0),
rStartDofs(0),
deserialized(false),
writeSerializationFile(false),
repartitioningAllowed(false),
......@@ -1837,13 +1837,13 @@ namespace AMDiS {
// Get displacement for global rank DOF ordering and the global number of DOFs.
nRankDofs = rankDofs.size();
mpi::getDofNumbering(mpiComm, nRankDofs, rstart, nOverallDofs);
mpi::getDofNumbering(mpiComm, nRankDofs, rStartDofs, nOverallDofs);
// Stores a new global index for every rank-owned DOF.
DofIndexMap rankDofsNewGlobalIndex;
for (int i = 0; i < nRankDofs; i++)
rankDofsNewGlobalIndex[rankDofs[i]] = i + rstart;
rankDofsNewGlobalIndex[rankDofs[i]] = i + rStartDofs;
// === Send and receive new DOF indices. ===
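
The renamed rStartDofs is the global index of the first DOF owned by this rank, as returned by mpi::getDofNumbering together with nOverallDofs. A minimal sketch of how such a numbering can be computed (assuming a plain MPI prefix sum; the actual helper may be implemented differently):

    #include <mpi.h>

    // nRankDofs:    number of DOFs owned by this rank (input)
    // rStartDofs:   global index of the first DOF owned by this rank (output)
    // nOverallDofs: total number of DOFs over all ranks (output)
    void getDofNumberingSketch(MPI_Comm comm, int nRankDofs,
                               int &rStartDofs, int &nOverallDofs)
    {
      // Exclusive prefix sum of the per-rank DOF counts gives the offset.
      rStartDofs = 0;
      MPI_Exscan(&nRankDofs, &rStartDofs, 1, MPI_INT, MPI_SUM, comm);

      int rank = 0;
      MPI_Comm_rank(comm, &rank);
      if (rank == 0)
        rStartDofs = 0;  // MPI_Exscan leaves the result on rank 0 undefined

      // The global number of DOFs is the sum over all ranks.
      MPI_Allreduce(&nRankDofs, &nOverallDofs, 1, MPI_INT, MPI_SUM, comm);
    }
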
......@@ -1906,7 +1906,7 @@ namespace AMDiS {
MSG("------------- Debug information -------------\n");
MSG("nRankDofs = %d\n", nRankDofs);
MSG("nOverallDofs = %d\n", nOverallDofs);
MSG("rstart %d\n", rstart);
MSG("rStartDofs %d\n", rStartDofs);
stringstream oss;
oss << debugOutputDir << "elementIndex-" << mpiRank << ".vtu";
......@@ -2170,7 +2170,7 @@ namespace AMDiS {
serialize(out, periodicDof);
serialize(out, periodicDofAssociations);
SerUtil::serialize(out, rstart);
SerUtil::serialize(out, rStartDofs);
SerUtil::serialize(out, macroElementNeighbours);
int nSize = allMacroElements.size();
......@@ -2231,7 +2231,7 @@ namespace AMDiS {
deserialize(in, periodicDof);
deserialize(in, periodicDofAssociations);
SerUtil::deserialize(in, rstart);
SerUtil::deserialize(in, rStartDofs);
SerUtil::deserialize(in, macroElementNeighbours);
int nSize = 0;
......
......@@ -136,6 +136,12 @@ namespace AMDiS {
return nRankDofs;
}
/// Returns \ref rStartDofs, the first global DOF index owned by the rank.
inline int getStartDofs()
{
return rStartDofs;
}
/// Returns \ref nOverallDofs, the global number of DOFs.
inline int getNumberOverallDofs()
{
......@@ -225,11 +231,6 @@ namespace AMDiS {
return lastMeshChangeIndex;
}
inline int getRstart()
{
return rstart;
}
inline int getMpiRank()
{
return mpiRank;
......@@ -521,6 +522,9 @@ namespace AMDiS {
/// Number of DOFs in the rank mesh.
int nRankDofs;
/// Index of the first global DOF owned by the rank.
int rStartDofs;
/// Number of DOFs in the whole domain.
int nOverallDofs;
......@@ -604,10 +608,6 @@ namespace AMDiS {
/// repartitioned.
vector<DOFVector<double>*> interchangeVectors;
/// Is the index of the first row of the linear system, which is owned by
/// the rank.
int rstart;
/** \brief
* If the problem definition has been read from a serialization file, this
* variable is true, otherwise it is false. This variable is used to stop the
......
......@@ -25,7 +25,8 @@ namespace AMDiS {
PetscProblemStat::PetscProblemStat(string nameStr,
ProblemIterationInterface *problemIteration)
: ParallelProblemStatBase(nameStr, problemIteration)
: ParallelProblemStatBase(nameStr, problemIteration),
deleteSolver(true)
{
FUNCNAME("PetscProblemStat::PetscProblemStat()");
......@@ -54,6 +55,14 @@ namespace AMDiS {
}
PetscProblemStat::PetscProblemStat(string nameStr,
PetscSolver *solver)
: ParallelProblemStatBase(nameStr, NULL),
petscSolver(solver),
deleteSolver(false)
{}
void PetscProblemStat::initialize(Flag initFlag,
ProblemStatSeq* adoptProblem,
Flag adoptFlag)
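
With the second constructor the caller keeps ownership of the passed solver (deleteSolver is set to false), while a solver created internally is still deleted in the destructor. A hypothetical usage sketch (the factory function is illustrative, not part of AMDiS):

    // The caller constructs and owns the PETSc solver object; the problem
    // class will not delete it because deleteSolver == false.
    PetscSolver *mySolver = createMyPetscSolver();   // hypothetical factory
    PetscProblemStat prob("myProblem", mySolver);
    // ... initialize(), assemble and solve as usual ...
    delete mySolver;                                 // caller is responsible for cleanup
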
......
......@@ -40,9 +40,13 @@ namespace AMDiS {
PetscProblemStat(std::string nameStr,
ProblemIterationInterface *problemIteration = NULL);
PetscProblemStat(std::string nameStr,
PetscSolver *solver);
~PetscProblemStat()
{
delete petscSolver;
if (deleteSolver)
delete petscSolver;
}
void initialize(Flag initFlag,
......@@ -55,6 +59,8 @@ namespace AMDiS {
protected:
PetscSolver *petscSolver;
bool deleteSolver;
};
typedef PetscProblemStat ParallelProblemStat;
......
......@@ -40,4 +40,35 @@ namespace AMDiS {
}
}
void PetscSolver::copyVec(Vec& originVec, Vec& destVec,
vector<int>& originIndex, vector<int>& destIndex)
{
FUNCNAME("PetscSolver::copyVec()");
IS originIs, destIs;
ISCreateGeneral(PETSC_COMM_WORLD,
originIndex.size(),
&(originIndex[0]),
PETSC_USE_POINTER,
&originIs);
ISCreateGeneral(PETSC_COMM_WORLD,
destIndex.size(),
&(destIndex[0]),
PETSC_USE_POINTER,
&destIs);
VecScatter scatter;
VecScatterCreate(originVec, originIs, destVec, destIs, &scatter);
VecScatterBegin(scatter, originVec, destVec,
INSERT_VALUES, SCATTER_FORWARD);
VecScatterEnd(scatter, originVec, destVec,
INSERT_VALUES, SCATTER_FORWARD);
ISDestroy(&originIs);
ISDestroy(&destIs);
VecScatterDestroy(&scatter);
}
}
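
A usage sketch for the new copyVec helper, which scatters selected global entries of one distributed vector into given global positions of another. The vectors, the solver pointer, and blockOffset are assumed to exist; the index choice is only illustrative:

    // Entry originIndex[k] of originVec is copied to entry destIndex[k] of destVec.
    std::vector<int> originIndex(nRankDofs), destIndex(nRankDofs);
    for (int i = 0; i < nRankDofs; i++) {
      originIndex[i] = rStartDofs + i;               // rank-owned entries of originVec
      destIndex[i]   = blockOffset + rStartDofs + i; // shifted target positions (blockOffset is illustrative)
    }
    solver->copyVec(originVec, destVec, originIndex, destIndex);
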
......@@ -90,6 +90,20 @@ namespace AMDiS {
bool iterationCounter = true,
bool residual = true);
/** \brief
* Copies between two PETSc vectors using different index sets for the
* origin and the destination vectors.
*
* \param[in]   originVec    The PETSc vector to copy from.
* \param[out]  destVec      The PETSc vector to copy to.
* \param[in] originIndex Set of global indices referring to the
* origin vector.
* \param[in] destIndex Set of global indices referring to the
* destination vector.
*/
void copyVec(Vec& originVec, Vec& destVec,
vector<int>& originIndex, vector<int>& destIndex);
protected:
MeshDistributor *meshDistributor;
......
......@@ -25,22 +25,14 @@ namespace AMDiS {
double wtime = MPI::Wtime();
nComponents = mat->getNumRows();
int nRankRows = meshDistributor->getNumberRankDofs() * nComponents;
int nOverallRows = meshDistributor->getNumberOverallDofs() * nComponents;
// === Create PETSc vector (solution and a temporary vector). ===
VecCreateMPI(PETSC_COMM_WORLD, nRankRows, nOverallRows, &petscSolVec);
VecCreateMPI(PETSC_COMM_WORLD, nRankRows, nOverallRows, &petscTmpVec);
int nRankRows = meshDistributor->getNumberRankDofs();
int nOverallRows = meshDistributor->getNumberOverallDofs();
#if (DEBUG != 0)
MSG("Fill petsc matrix 1 needed %.5f seconds\n", MPI::Wtime() - wtime);
#endif
nestMat = new Mat*[nComponents];
for (int i = 0; i < nComponents; i++)
nestMat[i] = new Mat[nComponents];
nestMat.resize(nComponents * nComponents);
// === Transfer values from DOF matrices to the PETSc matrix. ===
......@@ -51,16 +43,17 @@ namespace AMDiS {
nRankRows, nRankRows,
nOverallRows, nOverallRows,
30, PETSC_NULL, 30, PETSC_NULL,
&(nestMat[i][j]));
setDofMatrix(nestMat[i][j], (*mat)[i][j]);
&(nestMat[i * nComponents + j]));
setDofMatrix(nestMat[i * nComponents + j], (*mat)[i][j]);
MatAssemblyBegin(nestMat[i * nComponents + j], MAT_FINAL_ASSEMBLY);
MatAssemblyEnd(nestMat[i * nComponents + j], MAT_FINAL_ASSEMBLY);
} else {
nestMat[i][j] = PETSC_NULL;
nestMat[i * nComponents + j] = PETSC_NULL;
}
MatCreateNest(PETSC_COMM_WORLD,
nComponents, PETSC_NULL, nComponents, PETSC_NULL,
&(nestMat[0][0]), &petscMatrix);
&(nestMat[0]), &petscMatrix);
#if (DEBUG != 0)
MSG("Fill petsc matrix 2 needed %.5f seconds\n", MPI::Wtime() - wtime);
......@@ -71,12 +64,11 @@ namespace AMDiS {
// === Init PETSc solver. ===
KSPCreate(PETSC_COMM_WORLD, &solver);
KSPGetPC(solver, &pc);
KSPSetOperators(solver, petscMatrix, petscMatrix, SAME_NONZERO_PATTERN);
KSPSetTolerances(solver, 0.0, 1e-8, PETSC_DEFAULT, PETSC_DEFAULT);
KSPSetType(solver, KSPBCGS);
KSPSetFromOptions(solver);
PCSetFromOptions(pc);
KSPGetPC(solver, &pc);
setBlockPreconditioner(pc);
MSG("Fill petsc matrix needed %.5f seconds\n", MPI::Wtime() - wtime);
}
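
The former Mat** table is replaced by a flat vector in row-major order, so block (i, j) lives at nestMat[i * nComponents + j]; empty blocks stay PETSC_NULL and are treated as zero blocks by MatCreateNest. A minimal standalone sketch of that layout (sizes and the AIJ creation parameters are illustrative, and depending on the PETSc version the creation routine may be MatCreateMPIAIJ):

    int nComponents = 2;
    std::vector<Mat> blocks(nComponents * nComponents, PETSC_NULL);

    for (int i = 0; i < nComponents; i++) {
      // Only the diagonal blocks are created here; off-diagonal entries
      // remain PETSC_NULL and are interpreted as zero blocks.
      MatCreateAIJ(PETSC_COMM_WORLD, nRankRows, nRankRows,
                   nOverallRows, nOverallRows,
                   30, PETSC_NULL, 30, PETSC_NULL,
                   &blocks[i * nComponents + i]);
      MatAssemblyBegin(blocks[i * nComponents + i], MAT_FINAL_ASSEMBLY);
      MatAssemblyEnd(blocks[i * nComponents + i], MAT_FINAL_ASSEMBLY);
    }

    Mat blockMatrix;
    MatCreateNest(PETSC_COMM_WORLD, nComponents, PETSC_NULL,
                  nComponents, PETSC_NULL, &blocks[0], &blockMatrix);
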
......@@ -89,15 +81,22 @@ namespace AMDiS {
TEST_EXIT_DBG(vec)("NO DOF vector defined!\n");
nComponents = vec->getSize();
int nRankRows = meshDistributor->getNumberRankDofs() * nComponents;
int nOverallRows = meshDistributor->getNumberOverallDofs() * nComponents;
int nRankRows = meshDistributor->getNumberRankDofs();
int nOverallRows = meshDistributor->getNumberOverallDofs();
VecCreateMPI(PETSC_COMM_WORLD, nRankRows, nOverallRows, &petscRhsVec);
nestVec.resize(nComponents);
// === Transfer values from DOF vector to the PETSc vector. ===
for (int i = 0; i < nComponents; i++) {
VecCreateMPI(PETSC_COMM_WORLD, nRankRows, nOverallRows, &(nestVec[i]));
for (int i = 0; i < nComponents; i++)
setDofVector(petscRhsVec, vec->getDOFVector(i), nComponents, i);
setDofVector(nestVec[i], vec->getDOFVector(i));
VecAssemblyBegin(nestVec[i]);
VecAssemblyEnd(nestVec[i]);
}
VecCreateNest(PETSC_COMM_WORLD, nComponents, PETSC_NULL,
&(nestVec[0]), &petscRhsVec);
VecAssemblyBegin(petscRhsVec);
VecAssemblyEnd(petscRhsVec);
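
The right-hand side is now assembled as a nest vector of per-component vectors. A trimmed sketch of this construction (the real DOF transfer is replaced by VecSet, and the sizes are taken as given):

    std::vector<Vec> comps(nComponents);
    for (int i = 0; i < nComponents; i++) {
      VecCreateMPI(PETSC_COMM_WORLD, nRankRows, nOverallRows, &comps[i]);
      VecSet(comps[i], 0.0);            // stand-in for the per-component DOF vector transfer
      VecAssemblyBegin(comps[i]);
      VecAssemblyEnd(comps[i]);
    }

    Vec rhs;
    VecCreateNest(PETSC_COMM_WORLD, nComponents, PETSC_NULL, &comps[0], &rhs);
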
......@@ -110,21 +109,24 @@ namespace AMDiS {
FUNCNAME("PetscSolverGlobalBlockMatrix::solvePetscMatrix()");
// PETSc.
KSPSolve(solver, petscRhsVec, petscSolVec);
KSPSolve(solver, petscRhsVec, petscRhsVec);
// === Transfer values from PETSc's solution vectors to the DOF vectors. ===
int nRankDofs = meshDistributor->getNumberRankDofs();
PetscScalar *vecPointer;
VecGetArray(petscSolVec, &vecPointer);
for (int i = 0; i < nComponents; i++) {
DOFVector<double> &dofvec = *(vec.getDOFVector(i));
Vec tmp;
VecNestGetSubVec(petscRhsVec, i, &tmp);
int nRankDofs = meshDistributor->getNumberRankDofs();
PetscScalar *vecPointer;
VecGetArray(tmp, &vecPointer);
for (int j = 0; j < nRankDofs; j++)
dofvec[meshDistributor->mapLocalToDofIndex(j)] =
vecPointer[i * nComponents + j];
}
dofvec[meshDistributor->mapLocalToDofIndex(j)] = vecPointer[j];
VecRestoreArray(petscSolVec, &vecPointer);
VecRestoreArray(tmp, &vecPointer);
}
// === Synchronize DOFs at common DOFs, i.e., DOFs that correspond to ===
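
Since KSPSolve now writes the solution back into petscRhsVec, each component is read out of the nest vector separately. A compact sketch of that readback (the write-back through mapLocalToDofIndex is only indicated by a comment):

    for (int i = 0; i < nComponents; i++) {
      Vec sub;
      VecNestGetSubVec(petscRhsVec, i, &sub);   // component i of the solution

      PetscScalar *ptr;
      VecGetArray(sub, &ptr);
      for (int j = 0; j < nRankDofs; j++) {
        // ptr[j] holds the value of the j-th rank-owned DOF of component i;
        // the real code writes it back via mapLocalToDofIndex(j).
      }
      VecRestoreArray(sub, &ptr);
    }
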
......@@ -132,12 +134,10 @@ namespace AMDiS {
meshDistributor->synchVector(vec);
// Print iteration counter and residual norm of the solution.
printSolutionInfo(adaptInfo);
// === Destroy PETSc's variables. ===
VecDestroy(&petscRhsVec);
for (int i = 0; i < nComponents; i++)
VecDestroy(&(nestVec[i]));
}
......@@ -145,20 +145,12 @@ namespace AMDiS {
{
FUNCNAME("PetscSolverGlobalBlockMatrix::destroyMatrixData()");
for (unsigned int i = 0; i < nestMat.size(); i++)
if (nestMat[i] != PETSC_NULL)
MatDestroy(&(nestMat[i]));
MatDestroy(&petscMatrix);
KSPDestroy(&solver);
VecDestroy(&petscSolVec);
VecDestroy(&petscTmpVec);
for (int i = 0; i < nComponents; i++) {
for (int j = 0; j < nComponents; j++) {
if (nestMat[i][j] != PETSC_NULL)
MatDestroy(&(nestMat[i][j]));
}
delete[] nestMat[i];
}
delete[] nestMat;
}
......@@ -209,32 +201,22 @@ namespace AMDiS {
cols.push_back(colIndex);
values.push_back(value(*icursor));
}
MatSetValues(petscMatrix, 1, &rowIndex, cols.size(),
&(cols[0]), &(values[0]), ADD_VALUES);
MatSetValues(petscMat, 1, &rowIndex, cols.size(),
&(cols[0]), &(values[0]), ADD_VALUES);
}
}
void PetscSolverGlobalBlockMatrix::setDofVector(Vec& petscVec,
DOFVector<double>* vec,
int nComponents,
int row,
bool rankOnly)
DOFVector<double>* vec)
{
FUNCNAME("PetscSolverGlobalBlockMatrix::setDofVector()");
// Traverse all used DOFs in the dof vector.
DOFVector<double>::Iterator dofIt(vec, USED_DOFS);
for (dofIt.reset(); !dofIt.end(); ++dofIt) {
if (rankOnly && !meshDistributor->getIsRankDof(dofIt.getDOFIndex()))
continue;
// Calculate global row index of the DOF.
DegreeOfFreedom globalRowDof =
meshDistributor->mapLocalToGlobal(dofIt.getDOFIndex());
// Calculate PETSc index of the row DOF.
int index = nComponents * row + globalRowDof;
int index = meshDistributor->mapLocalToGlobal(dofIt.getDOFIndex());
double value = *dofIt;
VecSetValues(petscVec, 1, &index, &value, ADD_VALUES);
......
......@@ -52,11 +52,18 @@ namespace AMDiS {
void setDofMatrix(Mat& petscMat, DOFMatrix* mat);
/// Takes a DOF vector and sends its values to a given PETSc vector.
void setDofVector(Vec& petscVec, DOFVector<double>* vec,
int nComponents, int row, bool rankOnly = false);
void setDofVector(Vec& petscVec, DOFVector<double>* vec);
virtual void setBlockPreconditioner(PC &pc)
{
PCSetFromOptions(pc);
}
protected:
Mat **nestMat;
vector<Mat> nestMat;
vector<Vec> nestVec;
int nComponents;
};
......
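
The new virtual setBlockPreconditioner hook defaults to PCSetFromOptions, so a derived solver can install its own block preconditioner. A hypothetical override (the class name and the preconditioner choice are assumptions, not part of this commit):

    class MyBlockSolver : public PetscSolverGlobalBlockMatrix
    {
    protected:
      void setBlockPreconditioner(PC &pc)
      {
        // Use a fieldsplit preconditioner for the nested block matrix instead
        // of reading the choice from the PETSc options database.
        PCSetType(pc, PCFIELDSPLIT);
        PCFieldSplitSetType(pc, PC_COMPOSITE_ADDITIVE);
      }
    };
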
......@@ -66,9 +66,9 @@ namespace AMDiS {
#if (DEBUG != 0)
int a, b;
MatGetOwnershipRange(petscMatrix, &a, &b);
TEST_EXIT(a == meshDistributor->getRstart() * nComponents)
TEST_EXIT(a == meshDistributor->getStartDofs() * nComponents)
("Wrong matrix ownership range!\n");
TEST_EXIT(b == meshDistributor->getRstart() * nComponents + nRankRows)
TEST_EXIT(b == meshDistributor->getStartDofs() * nComponents + nRankRows)
("Wrong matrix ownership range!\n");
#endif
......@@ -552,7 +552,7 @@ namespace AMDiS {
// This is the local row index of the local PETSc matrix.
int localPetscRowIdx =
petscRowIdx - meshDistributor->getRstart() * nComponents;
petscRowIdx - meshDistributor->getStartDofs() * nComponents;
TEST_EXIT_DBG(localPetscRowIdx >= 0 && localPetscRowIdx < nRankRows)
("Should not happen! \n Debug info: localRowIdx = %d globalRowIndx = %d petscRowIdx = %d localPetscRowIdx = %d rStart = %d nCompontens = %d nRankRows = %d\n",
......@@ -560,7 +560,7 @@ namespace AMDiS {
meshDistributor->mapLocalToGlobal(*cursor),
petscRowIdx,
localPetscRowIdx,
meshDistributor->getRstart(),
meshDistributor->getStartDofs(),
nComponents,
nRankRows);
......@@ -574,8 +574,8 @@ namespace AMDiS {
if (value(*icursor) != 0.0 || petscRowIdx == petscColIdx) {
// The row DOF is a rank DOF; if the column is also a rank DOF,
// increment the d_nnz value for this row, otherwise the o_nnz value.
if (petscColIdx >= meshDistributor->getRstart() * nComponents &&
petscColIdx < meshDistributor->getRstart() * nComponents + nRankRows)
if (petscColIdx >= meshDistributor->getStartDofs() * nComponents &&
petscColIdx < meshDistributor->getStartDofs() * nComponents + nRankRows)
d_nnz[localPetscRowIdx]++;
else
o_nnz[localPetscRowIdx]++;
......@@ -626,14 +626,14 @@ namespace AMDiS {
int r = it->second[i].first;
int c = it->second[i].second;
int localRowIdx = r - meshDistributor->getRstart() * nComponents;
int localRowIdx = r - meshDistributor->getStartDofs() * nComponents;
TEST_EXIT_DBG(localRowIdx >= 0 && localRowIdx < nRankRows)
("Got row index %d/%d (nRankRows = %d) from rank %d. Should not happen!\n",
r, localRowIdx, nRankRows, it->first);
if (c < meshDistributor->getRstart() * nComponents ||
c >= meshDistributor->getRstart() * nComponents + nRankRows)
if (c < meshDistributor->getStartDofs() * nComponents ||
c >= meshDistributor->getStartDofs() * nComponents + nRankRows)
o_nnz[localRowIdx]++;
else
d_nnz[localRowIdx]++;
......
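
The hunks above only rename getRstart() to getStartDofs(); the classification itself is unchanged: a column index inside the rank's ownership range counts towards d_nnz (diagonal block), anything outside towards o_nnz. A condensed sketch of that test and the preallocation call it typically feeds (the call itself is an assumption about the surrounding code):

    int firstRow = meshDistributor->getStartDofs() * nComponents;  // first row owned by this rank
    int lastRow  = firstRow + nRankRows;                           // one past the last owned row

    if (petscColIdx >= firstRow && petscColIdx < lastRow)
      d_nnz[localPetscRowIdx]++;   // column lies in the rank's diagonal block
    else
      o_nnz[localPetscRowIdx]++;   // column is owned by another rank

    // Later the counters preallocate the distributed AIJ matrix:
    MatMPIAIJSetPreallocation(petscMatrix, 0, d_nnz, 0, o_nnz);
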