Liebe Gitlab-Nutzerin, lieber Gitlab-Nutzer, es ist nun möglich, sich mittels des ZIH-Logins/LDAP an unserem Dienst anzumelden. Eine Anmeldung hierüber erzeugt ein neues Konto. Das alte Konto ist über den Reiter "Standard" erreichbar. Die Administratoren

Dear Gitlab user, it is now possible to log in to our service using the ZIH login/LDAP. Logging in via this will create a new account. The old account can be accessed via the "Standard" tab. The administrators

Commit 23da3114 authored by Thomas Witkowski's avatar Thomas Witkowski
Browse files

Bugfix in parallel domain decomposition with higher order elements.

parent 5f6c2d39
......@@ -241,7 +241,7 @@ namespace AMDiS {
}
void ParallelDomainBase::setDofVector(DOFVector<double>* vec,
void ParallelDomainBase::setDofVector(Vec& petscVec, DOFVector<double>* vec,
int dispMult, int dispAdd)
{
DOFVector<double>::Iterator dofIt(vec, USED_DOFS);
......@@ -249,7 +249,7 @@ namespace AMDiS {
int index = mapLocalGlobalDOFs[dofIt.getDOFIndex()] * dispMult + dispAdd;
double value = *dofIt;
VecSetValues(petscRhsVec, 1, &index, &value, ADD_VALUES);
VecSetValues(petscVec, 1, &index, &value, ADD_VALUES);
}
}
......@@ -277,7 +277,7 @@ namespace AMDiS {
MatAssemblyBegin(petscMatrix, MAT_FINAL_ASSEMBLY);
MatAssemblyEnd(petscMatrix, MAT_FINAL_ASSEMBLY);
setDofVector(vec);
setDofVector(petscRhsVec, vec);
VecAssemblyBegin(petscRhsVec);
VecAssemblyEnd(petscRhsVec);
......@@ -512,7 +512,7 @@ namespace AMDiS {
MatAssemblyEnd(petscMatrix, MAT_FINAL_ASSEMBLY);
for (int i = 0; i < nComponents; i++)
setDofVector(vec->getDOFVector(i), nComponents, i);
setDofVector(petscRhsVec, vec->getDOFVector(i), nComponents, i);
VecAssemblyBegin(petscRhsVec);
VecAssemblyEnd(petscRhsVec);
......@@ -602,6 +602,15 @@ namespace AMDiS {
{
FUNCNAME("ParallelDomainBase::solvePetscMatrix()");
#if 0
// Set the old solution to be the initial guess for the PETSc solver.
for (int i = 0; i < nComponents; i++)
setDofVector(petscSolVec, vec->getDOFVector(i), nComponents, i);
VecAssemblyBegin(petscSolVec);
VecAssemblyEnd(petscSolVec);
#endif
KSP solver;
KSPCreate(PETSC_COMM_WORLD, &solver);
......@@ -612,6 +621,8 @@ namespace AMDiS {
KSPSetType(solver, KSPBCGS);
KSPMonitorSet(solver, myKSPMonitor, PETSC_NULL, 0);
KSPSetFromOptions(solver);
// Do not delete the solution vector, use it for the initial guess.
// KSPSetInitialGuessNonzero(solver, PETSC_TRUE);
KSPSolve(solver, petscRhsVec, petscSolVec);
......@@ -1188,10 +1199,11 @@ namespace AMDiS {
ElementDofIterator elDofIt(feSpace);
DofSet rankDofSet;
// The vertexDof list must be recreated from scratch. Otherwise, it may still
// map DOFs that have been removed (this can happen, e.g., when the mesh is
// refined: center DOFs of an element are not DOFs of its children).
DofToBool oldVertexDof = vertexDof;
vertexDof.clear();
TraverseStack stack;
......@@ -1237,7 +1249,7 @@ namespace AMDiS {
for (DofContainer::iterator iit = oldSendDofs[it->first].begin();
iit != oldSendDofs[it->first].end(); ++iit)
if (vertexDof[*iit])
if (oldVertexDof[*iit])
dofsToSend.push_back(*iit);
DofContainer dofs;
......@@ -1265,7 +1277,7 @@ namespace AMDiS {
for (DofContainer::iterator iit = oldRecvDofs[it->first].begin();
iit != oldRecvDofs[it->first].end(); ++iit)
if (vertexDof[*iit]) {
if (oldVertexDof[*iit]) {
dofsToRecv.push_back(*iit);
DofContainer::iterator eraseIt = find(rankDofs.begin(), rankDofs.end(), *iit);
......@@ -1334,7 +1346,6 @@ namespace AMDiS {
i++;
}
// === Send new DOF indices. ===
std::vector<int*> sendBuffers(sendDofs.size());
......@@ -1350,12 +1361,11 @@ namespace AMDiS {
sendBuffers[i] = new int[nSendDofs];
int c = 0;
for (DofContainer::iterator dofIt = sendIt->second.begin();
dofIt != sendIt->second.end(); ++dofIt) {
dofIt != sendIt->second.end(); ++dofIt)
sendBuffers[i][c++] = rankDofsNewGlobalIndex[*dofIt];
}
request[requestCounter++] =
mpiComm.Isend(sendBuffers[i], nSendDofs, MPI_INT, sendIt->first, 0);
mpiComm.Isend(sendBuffers[i], nSendDofs, MPI_INT, sendIt->first, 0);
}
i = 0;
......@@ -1363,9 +1373,9 @@ namespace AMDiS {
recvIt != recvDofs.end(); ++recvIt, i++) {
int nRecvDofs = recvIt->second.size();
recvBuffers[i] = new int[nRecvDofs];
request[requestCounter++] =
mpiComm.Irecv(recvBuffers[i], nRecvDofs, MPI_INT, recvIt->first, 0);
mpiComm.Irecv(recvBuffers[i], nRecvDofs, MPI_INT, recvIt->first, 0);
}
MPI::Request::Waitall(requestCounter, request);
......
......@@ -254,7 +254,8 @@ namespace AMDiS {
void setDofMatrix(DOFMatrix* mat, int dispMult = 1,
int dispAddRow = 0, int dispAddCol = 0);
void setDofVector(DOFVector<double>* vec, int disMult = 1, int dispAdd = 0);
void setDofVector(Vec& petscVec, DOFVector<double>* vec,
int disMult = 1, int dispAdd = 0);
void DbgCreateElementMap(ElementIdxToDofs &elMap);
......
Markdown is supported
0% or .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment