Commit ef9e1c0f authored by Thomas Witkowski

Work on usage of different FE spaces in parallel computations.

parent cfc99b53
@@ -1945,7 +1945,7 @@ namespace AMDiS {
// Stores for all rank owned DOFs a new global index.
DofIndexMap rankDofsNewGlobalIndex;
- for (int i = 0; i < dofFeData[feSpace].nRankDofs; i++)
+ for (unsigned int i = 0; i < rankDofs.size(); i++)
rankDofsNewGlobalIndex[rankDofs[i]] = i + dofFeData[feSpace].rStartDofs;
@@ -1990,7 +1990,8 @@ namespace AMDiS {
}
- // === Create now the local to global index and local to DOF index mappings. ===
+ // === Create now the local to global index and local to DOF ===
+ // === index mappings. ===
dofFeData[feSpace].mapLocalGlobalDofs.clear();
dofFeData[feSpace].mapLocalDofIndex.clear();
@@ -1999,11 +2000,11 @@ namespace AMDiS {
dofIt != rankDofsNewGlobalIndex.end(); ++dofIt)
dofFeData[feSpace].mapLocalGlobalDofs[*(dofIt->first)] = dofIt->second;
- for (int i = 0; i < dofFeData[feSpace].nRankDofs; i++)
+ for (unsigned int i = 0; i < rankDofs.size(); i++)
dofFeData[feSpace].mapLocalDofIndex[i] = *(rankDofs[i]);
- // === Update dof admins due to new number of DOFs. ===
+ // === Update DOF admins due to new number of DOFs. ===
lastMeshChangeIndex = mesh->getChangeIndex();
}
...
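The loops above now run over the collected rank-owned DOFs themselves instead of the cached nRankDofs counter, and give each owned DOF a contiguous global index starting at the rank's offset. A standalone sketch of that numbering idea, with plain ints standing in for the AMDiS DOF pointers:

    #include <map>
    #include <vector>

    // Each rank owns rankDofs.size() DOFs and receives a contiguous block of
    // global indices beginning at rStartDofs (illustrative types only).
    std::map<int, int> buildGlobalNumbering(const std::vector<int>& rankDofs,
                                            int rStartDofs)
    {
      std::map<int, int> newGlobalIndex;
      for (unsigned int i = 0; i < rankDofs.size(); i++)
        newGlobalIndex[rankDofs[i]] = i + rStartDofs;  // position + rank offset
      return newGlobalIndex;
    }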
@@ -167,7 +167,7 @@ namespace AMDiS {
return feSpaces;
}
- /// Returns \ref nRankDOFs, the number of DOFs in the rank mesh.
+ /// Returns the number of DOFs in rank's domain for a given FE space.
inline int getNumberRankDofs(const FiniteElemSpace *feSpace)
{
FUNCNAME("MeshDistributor::getNumberRankDofs()");
@@ -177,7 +177,21 @@ namespace AMDiS {
return dofFeData[feSpace].nRankDofs;
}
/// Returns the number of DOFs in rank's domain for a set of FE spaces.
inline int getNumberRankDofs(vector<const FiniteElemSpace*>& feSpaces)
{
FUNCNAME("MeshDistributor::getNumberRankDofs()");
int result = 0;
for (unsigned int i = 0; i < feSpaces.size(); i++) {
TEST_EXIT_DBG(dofFeData.count(feSpaces[i]))("Should not happen!\n");
result += dofFeData[feSpaces[i]].nRankDofs;
}
return result;
}
- /// Returns \ref rStartDofs, the first global DOF index owned by rank.
+ /// Returns the first global DOF index of an FE space, owned by rank.
inline int getStartDofs(const FiniteElemSpace *feSpace)
{
FUNCNAME("MeshDistributor::getStartDofs()");
@@ -187,7 +201,22 @@ namespace AMDiS {
return dofFeData[feSpace].rStartDofs;
}
/// Returns the first global DOF index for a set of FE spaces, owned by rank.
inline int getStartDofs(vector<const FiniteElemSpace*>& feSpaces)
{
FUNCNAME("MeshDistributor::getStartDofs()");
int result = 0;
for (unsigned int i = 0; i < feSpaces.size(); i++) {
TEST_EXIT_DBG(dofFeData.count(feSpaces[i]))("Should not happen!\n");
result += dofFeData[feSpaces[i]].rStartDofs;
}
return result;
}
- /// Returns \ref nOverallDofs, the global number of DOFs.
+ /// Returns the global number of DOFs for a given FE space.
inline int getNumberOverallDofs(const FiniteElemSpace *feSpace)
{
FUNCNAME("MeshDistributor::getNumberOverallDofs()");
@@ -197,6 +226,21 @@ namespace AMDiS {
return dofFeData[feSpace].nOverallDofs;
}
/// Returns the global number of DOFs for a set of FE spaces.
inline int getNumberOverallDofs(vector<const FiniteElemSpace*>& feSpaces)
{
FUNCNAME("MeshDistributor::getNumberOverallDofs()");
int result = 0;
for (unsigned int i = 0; i < feSpaces.size(); i++) {
TEST_EXIT_DBG(dofFeData.count(feSpaces[i]))("Should not happen!\n");
result += dofFeData[feSpaces[i]].nOverallDofs;
}
return result;
}
inline DofMapping& getMapLocalGlobalDofs(const FiniteElemSpace *feSpace)
{
FUNCNAME("MeshDistributor::getMapLocalGlobalDofs()");
...
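The three vector overloads added above simply sum the per-FE-space values, which is what the PETSc solver needs once several FE spaces share one global system. A usage sketch against this interface (not standalone; meshDistributor and the FE space pointers are assumed to exist already):

    // Illustrative component list, e.g. two velocity components and pressure.
    vector<const FiniteElemSpace*> feSpaces;
    feSpaces.push_back(velocityFeSpace);   // assumed FE space pointer
    feSpaces.push_back(velocityFeSpace);
    feSpaces.push_back(pressureFeSpace);   // assumed FE space pointer

    int nRankRows    = meshDistributor->getNumberRankDofs(feSpaces);    // rows owned by this rank
    int nOverallRows = meshDistributor->getNumberOverallDofs(feSpaces); // rows of the coupled system
    int firstRankRow = meshDistributor->getStartDofs(feSpaces);         // first row owned by this rank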
@@ -71,4 +71,64 @@ namespace AMDiS {
VecScatterDestroy(&scatter);
}
vector<const FiniteElemSpace*> PetscSolver::getFeSpaces(Matrix<DOFMatrix*> *mat)
{
FUNCNAME("PetscSolver::getFeSpaces()");
int nComponents = mat->getNumRows();
vector<const FiniteElemSpace*> result(nComponents);
for (int i = 0; i < nComponents; i++)
for (int j = 0; j < nComponents; j++)
if ((*mat)[i][j]) {
result[i] = (*mat)[i][j]->getRowFeSpace();
break;
}
#if (DEBUG != 0)
// === In debug mode, we test if all FE spaces of the matrix are also ===
// === considered by the mesh distributor. ===
checkFeSpaces(result);
#endif
return result;
}
vector<const FiniteElemSpace*> PetscSolver::getFeSpaces(SystemVector *vec)
{
FUNCNAME("PetscSolver::getFeSpaces()");
int nComponents = vec->getSize();
vector<const FiniteElemSpace*> result(nComponents);
for (int i = 0; i < nComponents; i++)
result[i] = vec->getFeSpace(i);
#if (DEBUG != 0)
// === In debug mode, we test if all FE spaces of the system vector are ===
// === also considered by the mesh distributor. ===
checkFeSpaces(result);
#endif
return result;
}
void PetscSolver::checkFeSpaces(vector<const FiniteElemSpace*>& feSpaces)
{
FUNCNAME("PetscSolver::checkFeSpaces()");
for (unsigned int i = 0; i < feSpaces.size(); i++) {
bool found = false;
for (unsigned int j = 0; j < meshDistributor->getFeSpaces().size(); j++)
if (feSpaces[i] == meshDistributor->getFeSpace(j)) {
found = true;
break;
}
TEST_EXIT(found)("FE spaces not found in mesh distributor!\n");
}
}
}
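getFeSpaces() returns one FE space per matrix row or vector component, so the same space may legitimately appear several times; summing the per-component counts then gives the row count of the coupled system. A minimal sketch of that relation (assumes an initialized MeshDistributor):

    // Equals meshDistributor->getNumberRankDofs(feSpaces): one block of rows
    // per component, even if several components use the same FE space.
    int countRankRows(MeshDistributor* md, vector<const FiniteElemSpace*>& feSpaces)
    {
      int rows = 0;
      for (unsigned int i = 0; i < feSpaces.size(); i++)
        rows += md->getNumberRankDofs(feSpaces[i]);
      return rows;
    }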
...
@@ -108,6 +108,16 @@ namespace AMDiS {
void copyVec(Vec& originVec, Vec& destVec,
vector<int>& originIndex, vector<int>& destIndex);
/// Checks if all given FE spaces are handled by the mesh distributor.
void checkFeSpaces(vector<const FiniteElemSpace*>& feSpaces);
/// Returns a vector with the FE spaces of each row in the matrix. Thus, the
/// resulting vector may contain the same FE space multiple times.
vector<const FiniteElemSpace*> getFeSpaces(Matrix<DOFMatrix*> *mat);
/// Returns a vector with the FE spaces of all components of a system vector.
vector<const FiniteElemSpace*> getFeSpaces(SystemVector *vec);
protected:
MeshDistributor *meshDistributor;
...
@@ -20,16 +20,14 @@ namespace AMDiS {
{
FUNCNAME("PetscSolverGlobalMatrix::fillPetscMatrix()");
- const FiniteElemSpace *feSpace = meshDistributor->getFeSpace(0);
TEST_EXIT_DBG(meshDistributor)("No mesh distributor object defined!\n");
TEST_EXIT_DBG(mat)("No DOF matrix defined!\n");
double wtime = MPI::Wtime();
- int nComponents = mat->getNumRows();
- int nRankRows = meshDistributor->getNumberRankDofs(feSpace) * nComponents;
- int nOverallRows =
-   meshDistributor->getNumberOverallDofs(feSpace) * nComponents;
+ vector<const FiniteElemSpace*> feSpaces = getFeSpaces(mat);
+ createGlobalDofMapping(feSpaces);
+ int nRankRows = meshDistributor->getNumberRankDofs(feSpaces);
+ int nOverallRows = meshDistributor->getNumberOverallDofs(feSpaces);
// === Create PETSc vector (solution and a temporary vector). ===
@@ -59,8 +57,13 @@ namespace AMDiS {
// === Create PETSc matrix with the computed nnz data structure. ===
- MatCreateMPIAIJ(PETSC_COMM_WORLD, nRankRows, nRankRows, nOverallRows, nOverallRows,
-   0, d_nnz, 0, o_nnz, &petscMatrix);
+ // MatCreateMPIAIJ(PETSC_COMM_WORLD, nRankRows, nRankRows,
+ //                 nOverallRows, nOverallRows,
+ //                 0, d_nnz, 0, o_nnz, &petscMatrix);
+ MatCreateMPIAIJ(PETSC_COMM_WORLD, nRankRows, nRankRows,
+                 nOverallRows, nOverallRows,
+                 30, PETSC_NULL, 30, PETSC_NULL, &petscMatrix);
#if (DEBUG != 0)
MSG("Fill petsc matrix 1 needed %.5f seconds\n", MPI::Wtime() - wtime);
@@ -69,15 +72,16 @@ namespace AMDiS {
#if (DEBUG != 0)
int a, b;
MatGetOwnershipRange(petscMatrix, &a, &b);
- TEST_EXIT(a == meshDistributor->getStartDofs(feSpace) * nComponents)
+ TEST_EXIT(a == meshDistributor->getStartDofs(feSpaces))
("Wrong matrix ownership range!\n");
- TEST_EXIT(b == meshDistributor->getStartDofs(feSpace) * nComponents + nRankRows)
+ TEST_EXIT(b == meshDistributor->getStartDofs(feSpaces) + nRankRows)
("Wrong matrix ownership range!\n");
#endif
// === Transfer values from DOF matrices to the PETSc matrix. ===
+ int nComponents = mat->getNumRows();
for (int i = 0; i < nComponents; i++)
for (int j = 0; j < nComponents; j++)
if ((*mat)[i][j])
@@ -110,22 +114,19 @@ namespace AMDiS {
void PetscSolverGlobalMatrix::fillPetscRhs(SystemVector *vec)
{
FUNCNAME("PetscSolverGlobalMatrix::fillPetscRhs()");
- const FiniteElemSpace *feSpace = meshDistributor->getFeSpace(0);
TEST_EXIT_DBG(vec)("No DOF vector defined!\n");
TEST_EXIT_DBG(meshDistributor)("No mesh distributor defined!\n");
- int nComponents = vec->getSize();
- int nRankRows = meshDistributor->getNumberRankDofs(feSpace) * nComponents;
- int nOverallRows = meshDistributor->getNumberOverallDofs(feSpace) * nComponents;
+ vector<const FiniteElemSpace*> feSpaces = getFeSpaces(vec);
+ int nRankRows = meshDistributor->getNumberRankDofs(feSpaces);
+ int nOverallRows = meshDistributor->getNumberOverallDofs(feSpaces);
VecCreateMPI(PETSC_COMM_WORLD, nRankRows, nOverallRows, &petscRhsVec);
// === Transfer values from DOF vector to the PETSc vector. ===
- for (int i = 0; i < nComponents; i++)
-   setDofVector(petscRhsVec, vec->getDOFVector(i), nComponents, i);
+ for (int i = 0; i < vec->getSize(); i++)
+   setDofVector(petscRhsVec, vec->getDOFVector(i), vec->getSize(), i);
VecAssemblyBegin(petscRhsVec);
VecAssemblyEnd(petscRhsVec);
@@ -155,15 +156,16 @@ namespace AMDiS {
KSPSolve(solver, petscRhsVec, petscSolVec);
// === Transfer values from PETSc's solution vectors to the DOF vectors. ===
- int nRankDofs = meshDistributor->getNumberRankDofs(feSpace);
PetscScalar *vecPointer;
VecGetArray(petscSolVec, &vecPointer);
+ int c = 0;
for (int i = 0; i < nComponents; i++) {
- DOFVector<double> &dofvec = *(vec.getDOFVector(i));
+ DOFVector<double> &dv = *(vec.getDOFVector(i));
+ int nRankDofs = meshDistributor->getNumberRankDofs(dv.getFeSpace());
for (int j = 0; j < nRankDofs; j++)
- dofvec[meshDistributor->mapLocalToDofIndex(feSpace, j)] =
-   vecPointer[j * nComponents + i];
+ dv[meshDistributor->mapLocalToDofIndex(feSpace, j)] = vecPointer[c++];
}
VecRestoreArray(petscSolVec, &vecPointer);
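The read-back loop above also reflects the new memory layout: previously the component values were interleaved (vecPointer[j * nComponents + i]); now each component's rank-owned DOFs form one contiguous block, so a single running counter walks the PETSc array. A small standalone illustration with made-up sizes:

    #include <cstdio>

    int main()
    {
      // Two components with three rank-owned DOFs each (illustrative numbers).
      int nComponents = 2, nRankDofs = 3;
      double petscArray[] = {0, 1, 2, 10, 11, 12};  // blocked storage

      int c = 0;
      for (int i = 0; i < nComponents; i++)
        for (int j = 0; j < nRankDofs; j++)
          // the old interleaved access would have been petscArray[j * nComponents + i]
          std::printf("component %d, local dof %d -> %g\n", i, j, petscArray[c++]);
      return 0;
    }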
@@ -200,8 +202,6 @@ namespace AMDiS {
{
FUNCNAME("PetscSolverGlobalMatrix::setDofMatrix()");
- const FiniteElemSpace *feSpace = meshDistributor->getFeSpace(0);
TEST_EXIT(mat)("No DOFMatrix!\n");
using mtl::tag::row; using mtl::tag::nz; using mtl::begin; using mtl::end;
@@ -229,7 +229,8 @@ namespace AMDiS {
cend = end<row>(mat->getBaseMatrix()); cursor != cend; ++cursor) {
// Global index of the current row DOF.
- int globalRowDof = meshDistributor->mapLocalToGlobal(feSpace, *cursor);
+ int globalRowDof =
+   meshDistributor->mapLocalToGlobal(mat->getRowFeSpace(), *cursor);
// Test if the current row DOF is a periodic DOF.
bool periodicRow = meshDistributor->isPeriodicDof(globalRowDof);
@@ -237,8 +238,9 @@ namespace AMDiS {
// === Row DOF index is not periodic. ===
// Calculate PETSc row index.
- int rowIndex = globalRowDof * dispMult + dispAddRow;
+ TEST_EXIT_DBG(dofToGlobalIndex.count(make_pair(*cursor, dispAddRow)))
+   ("Should not happen!\n");
+ int rowIndex = dofToGlobalIndex[make_pair(*cursor, dispAddRow)];
cols.clear();
values.clear();
@@ -248,11 +250,13 @@ namespace AMDiS {
// Global index of the current column index.
int globalColDof =
- meshDistributor->mapLocalToGlobal(feSpace, col(*icursor));
+ meshDistributor->mapLocalToGlobal(mat->getColFeSpace(), col(*icursor));
// Test if the current col DOF is a periodic DOF.
bool periodicCol = meshDistributor->isPeriodicDof(globalColDof);
// Calculate PETSc col index.
- int colIndex = globalColDof * dispMult + dispAddCol;
+ TEST_EXIT_DBG(dofToGlobalIndex.count(make_pair(col(*icursor), dispAddCol)))
+   ("Should not happen!\n");
+ int colIndex = dofToGlobalIndex[make_pair(col(*icursor), dispAddCol)];
// Ignore all zero entries, except if it is a diagonal entry.
if (value(*icursor) == 0.0 && rowIndex != colIndex)
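The pattern here and in the following hunks is the same: the PETSc row and column indices are no longer computed arithmetically from the global DOF index (globalDof * dispMult + dispAdd, which assumes every component has the same number of DOFs), but are looked up in the dofToGlobalIndex map keyed by (local DOF, component). A small standalone illustration of why the arithmetic version breaks down for mixed FE spaces (hypothetical sizes):

    #include <cstdio>

    int main()
    {
      // Interleaved numbering used before this commit: it implicitly assumes
      // that every component has the same number of global DOFs.
      int nComponents = 2;
      for (int globalDof = 0; globalDof < 3; globalDof++)
        for (int comp = 0; comp < nComponents; comp++)
          std::printf("old PETSc index of (dof %d, comp %d) = %d\n",
                      globalDof, comp, globalDof * nComponents + comp);
      // With different FE spaces the per-component DOF counts differ, so the
      // commit replaces this arithmetic with the dofToGlobalIndex lookup.
      return 0;
    }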
@@ -325,7 +329,8 @@ namespace AMDiS {
icursor != icend; ++icursor) {
// Global index of the current column index.
- int globalColDof = meshDistributor->mapLocalToGlobal(feSpace, col(*icursor));
+ int globalColDof =
+   meshDistributor->mapLocalToGlobal(mat->getColFeSpace(), col(*icursor));
// Ignore all zero entries, except if it is a diagonal entry.
if (value(*icursor) == 0.0 && globalRowDof != globalColDof)
@@ -427,7 +432,7 @@ namespace AMDiS {
{
FUNCNAME("PetscSolverGlobalMatrix::setDofVector()");
- const FiniteElemSpace *feSpace = meshDistributor->getFeSpace(0);
+ const FiniteElemSpace *feSpace = vec->getFeSpace();
// Traverse all used DOFs in the dof vector.
DOFVector<double>::Iterator dofIt(vec, USED_DOFS);
@@ -439,7 +444,9 @@ namespace AMDiS {
DegreeOfFreedom globalRowDof =
meshDistributor->mapLocalToGlobal(feSpace, dofIt.getDOFIndex());
// Calculate PETSc index of the row DOF.
- int index = globalRowDof * dispMult + dispAdd;
+ TEST_EXIT_DBG(dofToGlobalIndex.count(make_pair(dofIt.getDOFIndex(), dispAdd)))
+   ("Should not happen!\n");
+ int index = dofToGlobalIndex[make_pair(dofIt.getDOFIndex(), dispAdd)];
if (meshDistributor->isPeriodicDof(globalRowDof)) {
std::set<int>& perAsc = meshDistributor->getPerDofAssociations(globalRowDof);
@@ -652,4 +659,66 @@ namespace AMDiS {
d_nnz[i] = std::min(d_nnz[i], nRankRows);
}
void PetscSolverGlobalMatrix::createGlobalDofMapping(vector<const FiniteElemSpace*> &feSpaces)
{
FUNCNAME("PetscSolverGlobalMatrix::createGlobalDofMapping()");
int offset = meshDistributor->getStartDofs(feSpaces);
Mesh *mesh = meshDistributor->getMesh();
for (unsigned int i = 0; i < feSpaces.size(); i++) {
// === Create indices for all DOFs in rank's domain. ===
std::set<const DegreeOfFreedom*> rankDofSet;
mesh->getAllDofs(feSpaces[i], rankDofSet);
for (std::set<const DegreeOfFreedom*>::iterator it = rankDofSet.begin();
it != rankDofSet.end(); ++it)
if (meshDistributor->getIsRankDof(feSpaces[i], **it)) {
int localIndex = **it;
int globalIndex =
meshDistributor->mapLocalToGlobal(feSpaces[i], localIndex) -
meshDistributor->getStartDofs(feSpaces[i]) +
offset;
dofToGlobalIndex[make_pair(localIndex, i)] = globalIndex;
}
// === Communicate interior boundary DOFs between domains. ===
StdMpi<vector<int> > stdMpi(meshDistributor->getMpiComm());
for (DofComm::Iterator it(meshDistributor->getSendDofs(), feSpaces[i]);
!it.end(); it.nextRank()) {
vector<DegreeOfFreedom> sendGlobalDofs;
for (; !it.endDofIter(); it.nextDof()) {
int globalIndex = dofToGlobalIndex[make_pair(it.getDofIndex(), i)];
sendGlobalDofs.push_back(globalIndex);
}
stdMpi.send(it.getRank(), sendGlobalDofs);
}
for (DofComm::Iterator it(meshDistributor->getRecvDofs(), feSpaces[i]);
!it.end(); it.nextRank())
stdMpi.recv(it.getRank());
stdMpi.startCommunication();
for (DofComm::Iterator it(meshDistributor->getRecvDofs(), feSpaces[i]);
!it.end(); it.nextRank())
for (; !it.endDofIter(); it.nextDof()) {
DegreeOfFreedom localIndex = it.getDofIndex();
int globalIndex = stdMpi.getRecvData(it.getRank())[it.getDofCounter()];
dofToGlobalIndex[make_pair(localIndex, i)] = globalIndex;
}
// === Update offset. ===
offset += meshDistributor->getNumberRankDofs(feSpaces[i]);
}
}
}
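createGlobalDofMapping() gives every (local DOF index, component) pair its own global PETSc index: each component occupies one contiguous block per rank, starting at the accumulated offset, and DOFs on interior boundaries receive their index from the owning rank via StdMpi. A standalone sketch of the rank-owned part of that numbering (hypothetical sizes, MPI exchange omitted):

    #include <map>
    #include <utility>
    #include <vector>

    int main()
    {
      // Assumed data for one rank and two components: per-space global start of
      // the rank's block and the number of DOFs owned by the rank.
      std::vector<int> rStartDofs;  rStartDofs.push_back(100);  rStartDofs.push_back(40);
      std::vector<int> nRankDofs;   nRankDofs.push_back(50);    nRankDofs.push_back(20);

      // (local DOF index, component) -> global row in the coupled PETSc system.
      std::map<std::pair<int, int>, int> dofToGlobalIndex;

      int offset = rStartDofs[0] + rStartDofs[1];        // getStartDofs(feSpaces)
      for (int comp = 0; comp < 2; comp++) {
        for (int localDof = 0; localDof < nRankDofs[comp]; localDof++) {
          // Single-space global index (mapLocalToGlobal faked as start + position),
          // shifted into one contiguous block per component.
          int singleSpaceGlobal = rStartDofs[comp] + localDof;
          dofToGlobalIndex[std::make_pair(localDof, comp)] =
            singleSpaceGlobal - rStartDofs[comp] + offset;
        }
        offset += nRankDofs[comp];                       // next block starts here
      }
      return 0;
    }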
...
@@ -67,6 +67,8 @@ namespace AMDiS {
void setDofVector(Vec& petscVec, DOFVector<double>* vec,
int disMult = 1, int dispAdd = 0, bool rankOnly = false);
void createGlobalDofMapping(vector<const FiniteElemSpace*> &feSpaces);
protected:
/// Arrays defining the non-zero pattern of PETSc's matrix.
int *d_nnz, *o_nnz;
@@ -88,6 +90,8 @@ namespace AMDiS {
/// operators using DOFVectors from old timestep containing many zeros due to
/// some phase fields.
bool alwaysCreateNnzStructure;
map<pair<DegreeOfFreedom, int>, int> dofToGlobalIndex;
};
...