Commit 0d0838bd authored by Thomas Witkowski

Blub

parent 42237fb0
@@ -19,7 +19,9 @@ namespace AMDiS {
   using namespace std;
 
   ParallelCoarseSpaceMatVec::ParallelCoarseSpaceMatVec()
-    : lastMeshNnz(0),
+    : rStartInterior(0),
+      nGlobalOverallInterior(0),
+      lastMeshNnz(0),
       alwaysCreateNnzStructure(false)
   {
     Parameters::get("parallel->always create nnz structure",
@@ -61,6 +63,9 @@ namespace AMDiS {
     for (int i = 0; i < nCoarseMap + 1; i++)
       mat[i].resize(nCoarseMap + 1);
+    vecSol.resize(nCoarseMap + 1);
+    vecRhs.resize(nCoarseMap + 1);
+
     componentIthCoarseMap.resize(coarseSpaceMap.size());
     for (unsigned int i = 0; i < componentIthCoarseMap.size(); i++) {
       bool found = false;
@@ -81,6 +86,7 @@ namespace AMDiS {
   {
     FUNCNAME("ParallelCoarseSpaceMatVec::create()");
 
+    updateSubdomainData();
 
     // === If required, recompute non zero structure of the matrix. ===
@@ -115,84 +121,77 @@ namespace AMDiS {
             nnz[i][j].create(seqMat, mpiCommGlobal, rowMap, colMap, NULL,
                              meshDistributor->getElementObjectDb());
 
-            /*
-            nnzCoarse.create(mat, mpiCommGlobal, *coarseSpaceMap, NULL, meshDistributor->getElementObjectDb());
-            nnzCoarseInt.create(mat, mpiCommGlobal, *coarseSpaceMap, *interiorMap, NULL, meshDistributor->getElementObjectDb());
-            nnzIntCoarse.create(mat, mpiCommGlobal, *interiorMap, *coarseSpaceMap, NULL, meshDistributor->getElementObjectDb());
-            */
           }
         }
       }
     }
 
-    // === Create PETSc matrix with the computed nnz data structure. ===
+    // === Create PETSc matrices and PETSc vectors with the computed ===
+    // === nnz data structure. ===
 
     int nRankRows = interiorMap->getRankDofs();
     int nOverallRows = interiorMap->getOverallDofs();
 
    if (localMatrix) {
       MatCreateSeqAIJ(mpiCommLocal, nRankRows, nRankRows,
-                      100, PETSC_NULL,
+                      0, nnz[0][0].dnnz,
                       &mat[0][0]);
       MatSetOption(mat[0][0], MAT_NEW_NONZERO_ALLOCATION_ERR, PETSC_FALSE);
     } else {
       MatCreateAIJ(mpiCommGlobal, nRankRows, nRankRows,
                    nOverallRows, nOverallRows,
-                   100, PETSC_NULL,
-                   100, PETSC_NULL,
+                   0, nnz[0][0].dnnz, 0, nnz[0][0].onnz,
                    &mat[0][0]);
       MatSetOption(mat[0][0], MAT_NEW_NONZERO_ALLOCATION_ERR, PETSC_FALSE);
     }
 
+    VecCreateMPI(mpiCommGlobal, nRankRows, nOverallRows, &vecSol[0]);
+    VecCreateMPI(mpiCommGlobal, nRankRows, nOverallRows, &vecRhs[0]);
 
-    if (coarseSpaceMap.size()) {
-      int nCoarseMap = uniqueCoarseMap.size();
-      for (int i = 0; i < nCoarseMap; i++) {
-        ParallelDofMapping* cMap = uniqueCoarseMap[i];
-        int nRowsRankCoarse = cMap->getRankDofs();
-        int nRowsOverallCoarse = cMap->getOverallDofs();
+    int nCoarseMap = uniqueCoarseMap.size();
+    for (int i = 0; i < nCoarseMap; i++) {
+      ParallelDofMapping* cMap = uniqueCoarseMap[i];
+      int nRowsRankCoarse = cMap->getRankDofs();
+      int nRowsOverallCoarse = cMap->getOverallDofs();
 
-        MatCreateAIJ(mpiCommGlobal,
-                     nRowsRankCoarse, nRowsRankCoarse,
-                     nRowsOverallCoarse, nRowsOverallCoarse,
-                     100, PETSC_NULL, 100, PETSC_NULL,
-                     &mat[i + 1][i + 1]);
-        MSG("REMOVE THIS LINE WHEN FINISHED!\n");
-        MatSetOption(mat[i + 1][i + 1], MAT_NEW_NONZERO_ALLOCATION_ERR, PETSC_FALSE);
+      MatCreateAIJ(mpiCommGlobal,
+                   nRowsRankCoarse, nRowsRankCoarse,
+                   nRowsOverallCoarse, nRowsOverallCoarse,
+                   0, nnz[i + 1][i + 1].dnnz, 0, nnz[i + 1][i + 1].onnz,
+                   &mat[i + 1][i + 1]);
 
+      VecCreateMPI(mpiCommGlobal, nRowsRankCoarse, nRowsOverallCoarse,
+                   &vecSol[i + 1]);
+      VecCreateMPI(mpiCommGlobal, nRowsRankCoarse, nRowsOverallCoarse,
+                   &vecRhs[i + 1]);
 
-        for (int j = 0; j < nCoarseMap + 1; j++) {
-          int nRowsRankMat = (j == 0 ? nRankRows : uniqueCoarseMap[j - 1]->getRankDofs());
-          int nRowsOverallMat = (j == 0 ? nOverallRows : uniqueCoarseMap[j - 1]->getOverallDofs());
+      for (int j = 0; j < nCoarseMap + 1; j++) {
+        int nRowsRankMat = (j == 0 ? nRankRows : uniqueCoarseMap[j - 1]->getRankDofs());
+        int nRowsOverallMat = (j == 0 ? nOverallRows : uniqueCoarseMap[j - 1]->getOverallDofs());
 
-          MatCreateAIJ(mpiCommGlobal,
-                       nRowsRankCoarse, nRowsRankMat,
-                       nRowsOverallCoarse, nRowsOverallMat,
-                       100, PETSC_NULL, 100, PETSC_NULL,
-                       &mat[i + 1][j]);
-          MSG("REMOVE THIS LINE WHEN FINISHED!\n");
-          MatSetOption(mat[i + 1][j], MAT_NEW_NONZERO_ALLOCATION_ERR, PETSC_FALSE);
+        MatCreateAIJ(mpiCommGlobal,
+                     nRowsRankCoarse, nRowsRankMat,
+                     nRowsOverallCoarse, nRowsOverallMat,
+                     0, nnz[i + 1][j].dnnz, 0, nnz[i + 1][j].onnz,
+                     &mat[i + 1][j]);
 
-          MatCreateAIJ(mpiCommGlobal,
-                       nRowsRankMat, nRowsRankCoarse,
-                       nRowsOverallMat, nRowsOverallCoarse,
-                       100, PETSC_NULL, 100, PETSC_NULL,
-                       &mat[j][i + 1]);
-          MSG("REMOVE THIS LINE WHEN FINISHED!\n");
-          MatSetOption(mat[j][i + 1], MAT_NEW_NONZERO_ALLOCATION_ERR, PETSC_FALSE);
-        }
+        MatCreateAIJ(mpiCommGlobal,
+                     nRowsRankMat, nRowsRankCoarse,
+                     nRowsOverallMat, nRowsOverallCoarse,
+                     0, nnz[j][i + 1].dnnz, 0, nnz[j][i + 1].onnz,
+                     &mat[j][i + 1]);
       }
     }
   }
 
-  void ParallelCoarseSpaceMatVec::destroy()
+  void ParallelCoarseSpaceMatVec::matDestroy()
   {
-    FUNCNAME("ParallelCoarseSpaceMatVec::destroy()");
+    FUNCNAME("ParallelCoarseSpaceMatVec::matDestroy()");
 
     int nMatrix = mat.size();
     for (int i = 0; i < nMatrix; i++)
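
Note on the hunk above: the old code preallocated every PETSc matrix with a flat guess of 100 nonzeros per row (100, PETSC_NULL), whereas the new code hands PETSc the exact per-row counts computed in the MatrixNnzStructure objects (dnnz for the diagonal block, onnz for the off-diagonal block). The following standalone sketch is not AMDiS code and its sizes and counts are made up; it only illustrates the MatCreateAIJ preallocation pattern the diff switches to.

// sketch: exact preallocation of a distributed AIJ matrix (illustrative only)
#include <petsc.h>
#include <vector>

int main(int argc, char** argv)
{
  PetscInitialize(&argc, &argv, NULL, NULL);

  PetscInt nRankRows = 4;                    // rows owned by this rank (assumed)
  std::vector<PetscInt> dnnz(nRankRows, 3);  // nonzeros per row, same-rank columns
  std::vector<PetscInt> onnz(nRankRows, 1);  // nonzeros per row, other-rank columns

  Mat A;
  MatCreateAIJ(PETSC_COMM_WORLD, nRankRows, nRankRows,
               PETSC_DETERMINE, PETSC_DETERMINE,
               0, dnnz.data(), 0, onnz.data(), &A);
  // As in the diff, tolerate entries outside the preallocation instead of erroring.
  MatSetOption(A, MAT_NEW_NONZERO_ALLOCATION_ERR, PETSC_FALSE);

  // ... MatSetValues(), MatAssemblyBegin()/MatAssemblyEnd(), KSP solve ...

  MatDestroy(&A);
  PetscFinalize();
  return 0;
}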
@@ -201,9 +200,21 @@ namespace AMDiS {
   }
 
-  void ParallelCoarseSpaceMatVec::assembly()
+  void ParallelCoarseSpaceMatVec::vecDestroy()
   {
-    FUNCNAME("ParallelCoarseSpaceMatVec::assembly()");
+    FUNCNAME("ParallelCoarseSpaceMatVec::vecDestroy()");
+
+    int nVec = vecSol.size();
+    for (int i = 0; i < nVec; i++) {
+      VecDestroy(&vecSol[i]);
+      VecDestroy(&vecRhs[i]);
+    }
+  }
+
+  void ParallelCoarseSpaceMatVec::matAssembly()
+  {
+    FUNCNAME("ParallelCoarseSpaceMatVec::matAssembly()");
 
     int nMatrix = mat.size();
     for (int i = 0; i < nMatrix; i++) {
@@ -215,6 +226,30 @@ namespace AMDiS {
   }
 
+  void ParallelCoarseSpaceMatVec::vecRhsAssembly()
+  {
+    FUNCNAME("ParallelCoarseSpaceMatVec::vecRhsAssembly()");
+
+    int nVec = vecRhs.size();
+    for (int i = 0; i < nVec; i++) {
+      VecAssemblyBegin(vecRhs[i]);
+      VecAssemblyEnd(vecRhs[i]);
+    }
+  }
+
+  void ParallelCoarseSpaceMatVec::vecSolAssembly()
+  {
+    FUNCNAME("ParallelCoarseSpaceMatVec::vecSolAssembly()");
+
+    int nVec = vecRhs.size();
+    for (int i = 0; i < nVec; i++) {
+      VecAssemblyBegin(vecSol[i]);
+      VecAssemblyEnd(vecSol[i]);
+    }
+  }
+
   bool ParallelCoarseSpaceMatVec::checkMeshChange(Matrix<DOFMatrix*> &seqMat,
                                                   bool localMatrix)
   {
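
For readers less familiar with PETSc: values written into a Vec only become usable after an assembly phase, and that is all the new vecRhsAssembly()/vecSolAssembly() wrappers above do for every stored right-hand-side and solution vector. A minimal standalone sketch of that pattern (not AMDiS code, sizes assumed):

#include <petsc.h>

int main(int argc, char** argv)
{
  PetscInitialize(&argc, &argv, NULL, NULL);

  Vec rhs;
  VecCreateMPI(PETSC_COMM_WORLD, 4, PETSC_DETERMINE, &rhs);  // 4 local entries (assumed)

  PetscInt rStart, rEnd;
  VecGetOwnershipRange(rhs, &rStart, &rEnd);
  for (PetscInt i = rStart; i < rEnd; i++)
    VecSetValue(rhs, i, 1.0, INSERT_VALUES);

  // Off-process insertions are exchanged between Begin and End.
  VecAssemblyBegin(rhs);
  VecAssemblyEnd(rhs);

  VecDestroy(&rhs);
  PetscFinalize();
  return 0;
}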
@@ -231,7 +266,6 @@ namespace AMDiS {
       interiorMap->setComputeMatIndex(!localMatrix);
       interiorMap->update(feSpaces);
 
-      // updateSubdomainData();
       lastMeshNnz = meshDistributor->getLastMeshChangeIndex();
 
       return true;
@@ -240,4 +274,28 @@ namespace AMDiS {
 
     return false;
   }
+
+
+  void ParallelCoarseSpaceMatVec::updateSubdomainData()
+  {
+    FUNCNAME("ParallelCoarseSpaceMatVec::updateSubdomainData()");
+
+    if (mpiCommLocal.Get_size() == 1) {
+      rStartInterior = 0;
+      nGlobalOverallInterior = interiorMap->getOverallDofs();
+    } else {
+      int groupRowsInterior = 0;
+      if (mpiCommLocal.Get_rank() == 0)
+        groupRowsInterior = interiorMap->getOverallDofs();
+
+      mpi::getDofNumbering(mpiCommGlobal, groupRowsInterior,
+                           rStartInterior, nGlobalOverallInterior);
+
+      int tmp = 0;
+      if (mpiCommLocal.Get_rank() == 0)
+        tmp = rStartInterior;
+
+      mpiCommLocal.Allreduce(&tmp, &rStartInterior, 1, MPI_INT, MPI_SUM);
+    }
+  }
 }
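
The Allreduce at the end of the new updateSubdomainData() is a broadcast in disguise: only local rank 0 contributes the offset it obtained from mpi::getDofNumbering(), all other ranks contribute 0, so the MPI_SUM reduction leaves every rank of the local communicator holding rank 0's value. A standalone sketch of just that idiom, written against the plain C MPI interface with an assumed offset of 42:

#include <mpi.h>
#include <cstdio>

int main(int argc, char** argv)
{
  MPI_Init(&argc, &argv);

  MPI_Comm localComm = MPI_COMM_WORLD;  // stands in for mpiCommLocal
  int localRank;
  MPI_Comm_rank(localComm, &localRank);

  int tmp = 0;
  if (localRank == 0)
    tmp = 42;  // offset known only to the subdomain's master rank (assumed value)

  // Summing {42, 0, 0, ...} gives 42 on every rank: Allreduce used as a broadcast.
  int rStartInterior = 0;
  MPI_Allreduce(&tmp, &rStartInterior, 1, MPI_INT, MPI_SUM, localComm);
  std::printf("rank %d: rStartInterior = %d\n", localRank, rStartInterior);

  MPI_Finalize();
  return 0;
}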
@@ -57,10 +57,17 @@ namespace AMDiS {
     void create(Matrix<DOFMatrix*>& seqMat);
 
-    /// Run PETSc's assembly routines.
-    void assembly();
+    /// Run PETSc's matrix assembly routines.
+    void matAssembly();
 
-    void destroy();
+    /// Run PETSc's vector assembly routines.
+    void vecRhsAssembly();
+
+    void vecSolAssembly();
+
+    void matDestroy();
+
+    void vecDestroy();
 
     bool checkMeshChange(Matrix<DOFMatrix*> &mat,
                          bool localMatrix = false);
@@ -108,9 +115,39 @@ namespace AMDiS {
       return mat[matIndex][0];
     }
 
+    inline Vec& getInteriorVecRhs()
+    {
+      return vecRhs[0];
+    }
+
+    inline Vec& getCoarseVecRhs(int coarseSpace = 0)
+    {
+      return vecRhs[coarseSpace + 1];
+    }
+
+    inline Vec& getInteriorVecSol()
+    {
+      return vecSol[0];
+    }
+
+    inline Vec& getCoarseVecSol(int coarseSpace = 0)
+    {
+      return vecSol[coarseSpace + 1];
+    }
+
+    inline int getStartInterior() const
+    {
+      return rStartInterior;
+    }
+
+  protected:
+    void updateSubdomainData();
+
   private:
     vector<vector<Mat> > mat;
 
+    vector<Vec> vecSol, vecRhs;
+
     vector<vector<MatrixNnzStructure> > nnz;
 
     ParallelDofMapping *interiorMap;
@@ -131,6 +168,10 @@ namespace AMDiS {
     MPI::Intracomm mpiCommGlobal;
 
+    int rStartInterior;
+
+    int nGlobalOverallInterior;
+
     /// Stores the mesh change index of the mesh the nnz structure was created for.
     /// Therefore, if the mesh change index is higher than this value, we have to create
     /// a new nnz structure for PETSc matrices, because the mesh has been changed and
......
@@ -60,30 +60,6 @@ namespace AMDiS {
   }
 
-  void PetscSolver::updateSubdomainData()
-  {
-    FUNCNAME("PetscSolver::updateSubdomainData()");
-
-    if (mpiCommLocal.Get_size() == 1) {
-      rStartInterior = 0;
-      nGlobalOverallInterior = interiorMap->getOverallDofs();
-    } else {
-      int groupRowsInterior = 0;
-      if (mpiCommLocal.Get_rank() == 0)
-        groupRowsInterior = interiorMap->getOverallDofs();
-
-      mpi::getDofNumbering(mpiCommGlobal, groupRowsInterior,
-                           rStartInterior, nGlobalOverallInterior);
-
-      int tmp = 0;
-      if (mpiCommLocal.Get_rank() == 0)
-        tmp = rStartInterior;
-
-      mpiCommLocal.Allreduce(&tmp, &rStartInterior, 1, MPI_INT, MPI_SUM);
-    }
-  }
-
 
   void PetscSolver::solve(Vec &rhs, Vec &sol)
   {
     FUNCNAME("PetscSolver::solve()");
......
@@ -168,12 +168,27 @@ namespace AMDiS {
       TEST_EXIT_DBG(coarseSpaceMap.size())
         ("Subdomain solver does not contain a coarse space!\n");
 
-      return rhsCoarseSpace;
+      return petscData.getCoarseVecRhs();
     }
 
     inline Vec& getRhsInterior()
     {
-      return rhsInterior;
+      return petscData.getInteriorVecRhs();
+    }
+
+    inline Vec& getSolCoarseSpace()
+    {
+      FUNCNAME("PetscSolver::getSolCoarseSpace()");
+
+      TEST_EXIT_DBG(coarseSpaceMap.size())
+        ("Subdomain solver does not contain a coarse space!\n");
+
+      return petscData.getCoarseVecSol();
+    }
+
+    inline Vec& getSolInterior()
+    {
+      return petscData.getInteriorVecSol();
     }
 
     inline Mat& getMatIntInt()
@@ -232,17 +247,11 @@ namespace AMDiS {
     /// Returns a vector with the FE spaces of all components of a system vector.
     vector<const FiniteElemSpace*> getFeSpaces(SystemVector *vec);
 
-    void updateSubdomainData();
-
   protected:
     MeshDistributor *meshDistributor;
 
     int subdomainLevel;
 
-    int rStartInterior;
-
-    int nGlobalOverallInterior;
-
     ParallelDofMapping *interiorMap;
 
     /// Parallel DOF mapping of the (optional) coarse space. Allows to define
@@ -259,12 +268,6 @@ namespace AMDiS {
     /// a coarse space defined).
     ParallelCoarseSpaceMatVec petscData;
 
-    /// PETSc's vector structures for the rhs vector, the solution vector and a
-    /// temporary vector for calculating the final residuum.
-    Vec rhsInterior;
-
-    Vec rhsCoarseSpace;
-
     /// PETSc solver object
     KSP kspInterior;
......
@@ -93,7 +93,7 @@ namespace AMDiS {
     MSG("Fill petsc matrix 2 needed %.5f seconds\n", MPI::Wtime() - wtime);
 #endif
 
-    petscData.assembly();
+    petscData.matAssembly();
 
     // === Init PETSc solver. ===
     KSPCreate(mpiCommGlobal, &kspInterior);
@@ -129,10 +129,9 @@ namespace AMDiS {
     }
 
     VecCreateNest(mpiCommGlobal, nComponents, PETSC_NULL,
-                  &(nestVec[0]), &rhsInterior);
+                  &(nestVec[0]), &(getRhsInterior()));
 
-    VecAssemblyBegin(rhsInterior);
-    VecAssemblyEnd(rhsInterior);
+    petscData.vecRhsAssembly();
   }
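
Context for the hunk above: the block solver's global right-hand side is a nested PETSc vector built from the per-component vectors, and the explicit VecAssemblyBegin/End pair on it is now routed through petscData.vecRhsAssembly(). A standalone sketch of VecCreateNest with two made-up sub-vectors (not AMDiS code):

#include <petsc.h>

int main(int argc, char** argv)
{
  PetscInitialize(&argc, &argv, NULL, NULL);

  Vec sub[2], nested;
  VecCreateMPI(PETSC_COMM_WORLD, 3, PETSC_DETERMINE, &sub[0]);  // component 0 (assumed size)
  VecCreateMPI(PETSC_COMM_WORLD, 5, PETSC_DETERMINE, &sub[1]);  // component 1 (assumed size)

  // NULL index sets: PETSc derives the default block layout for the nest vector.
  VecCreateNest(PETSC_COMM_WORLD, 2, NULL, sub, &nested);

  VecAssemblyBegin(nested);
  VecAssemblyEnd(nested);

  VecDestroy(&nested);
  VecDestroy(&sub[0]);
  VecDestroy(&sub[1]);
  PetscFinalize();
  return 0;
}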
@@ -145,10 +144,10 @@ namespace AMDiS {
     setBlockPreconditioner(pcInterior);
 
     const FiniteElemSpace *feSpace = meshDistributor->getFeSpace(0);
-    VecDuplicate(rhsInterior, &petscSolVec);
+    VecDuplicate(getRhsInterior(), &petscSolVec);
 
     // PETSc.
-    solve(rhsInterior, petscSolVec);
+    solve(getRhsInterior(), petscSolVec);
 
     // === Transfere values from PETSc's solution vectors to the DOF vectors. ===
     for (int i = 0; i < nComponents; i++) {
@@ -184,7 +183,7 @@ namespace AMDiS {
       if (nestMat[i] != PETSC_NULL)
         MatDestroy(&(nestMat[i]));
 
-    petscData.destroy();
+    petscData.matDestroy();
 
     KSPDestroy(&kspInterior);
   }
@@ -194,9 +193,7 @@ namespace AMDiS {
   {
     FUNCNAME("PetscSolverGlobalBlockMatrix::destroyVectorData()");
 
-    VecDestroy(&rhsInterior);
-    for (int i = 0; i < nComponents; i++)
-      VecDestroy(&(nestVec[i]));
+    petscData.vecDestroy();
 
     VecDestroy(&petscSolVec);
   }
......
@@ -21,44 +21,28 @@ namespace AMDiS {
   {
     FUNCNAME("PetscSolverGlobalMatrix::fillPetscMatrix()");
 
+    TEST_EXIT_DBG(meshDistributor)("No mesh distributor object defined!\n");
+    TEST_EXIT_DBG(interiorMap)("No parallel mapping object defined!\n");
+    TEST_EXIT_DBG(seqMat)("No DOF matrix defined!\n");
+
+    double wtime = MPI::Wtime();
+
     petscData.init(interiorMap, coarseSpaceMap,
                    subdomainLevel, mpiCommLocal, mpiCommGlobal,
                    meshDistributor);
+    petscData.create(*seqMat);