Commit c2460445 authored by Thomas Witkowski

Fixed parallel Dirichlet boundary conditions problem.

parent 9a40446c
......@@ -219,11 +219,6 @@ namespace AMDiS {
} else {
for (int j = 0; j < nCol; j++) {
DegreeOfFreedom col = colIndices[j];
// if (MPI::COMM_WORLD.Get_rank() == 0 && row <= 10 && col <= 10) {
// MSG("%d/%d entry: %e\n", row, col, elMat[i][j]);
// }
ins[row][col] += elMat[i][j];
}
}
......
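For context, the loop in this hunk scatters a dense element matrix into the global system by mapping local element DOFs to global row and column indices. Below is a minimal stand-alone sketch of the same accumulation pattern, using a std::map as a stand-in for the AMDiS matrix inserter ins; all names in the sketch are illustrative and not AMDiS API.

#include <map>
#include <utility>
#include <vector>

// Hypothetical stand-in for the matrix inserter: accumulates values at (row, col).
typedef std::map<std::pair<int, int>, double> SparseAccumulator;

// Scatter a dense element matrix into the global sparse structure.
// rowIndices/colIndices map local element DOFs to global DOF numbers.
void addElementMatrix(SparseAccumulator& ins,
                      const std::vector<int>& rowIndices,
                      const std::vector<int>& colIndices,
                      const std::vector<std::vector<double> >& elMat)
{
  for (size_t i = 0; i < rowIndices.size(); i++)
    for (size_t j = 0; j < colIndices.size(); j++)
      ins[std::make_pair(rowIndices[i], colIndices[j])] += elMat[i][j];
}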
......@@ -247,6 +247,15 @@ namespace AMDiS {
// owner of the row DOFs.
std::map<int, MatrixNnzEntry> sendMatrixEntry;
// First, create a MatrixNnzEntry object with 0 entries for every rank we send data to.
typedef std::map<int, DofContainer> RankToDofContainer;
RankToDofContainer& recvDofs = meshDistributor->getRecvDofs();
for (RankToDofContainer::iterator it = recvDofs.begin();
it != recvDofs.end(); ++it)
sendMatrixEntry[it->first].resize(0);
for (int i = 0; i < nComponents; i++) {
for (int j = 0; j < nComponents; j++) {
if ((*mat)[i][j]) {
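The loop added above guarantees that every rank listed in recvDofs gets a (possibly empty) entry in sendMatrixEntry, so the later exchange sends a message to each partner that expects one even when no matrix entries have to be transferred for it. The same idea in isolation, as a small sketch with illustrative names (not the AMDiS types):

#include <map>
#include <vector>

// Per-rank send buffers; the key is the destination rank.
typedef std::map<int, std::vector<int> > RankBuffers;

// Make sure every communication partner has a buffer, even an empty one,
// so that the message exchange stays symmetric: each rank that expects a
// message from us actually receives one.
void ensureEmptyBuffers(RankBuffers& sendBuffers,
                        const std::vector<int>& partnerRanks)
{
  for (size_t i = 0; i < partnerRanks.size(); i++)
    sendBuffers[partnerRanks[i]].resize(0);
}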
......@@ -347,6 +356,7 @@ namespace AMDiS {
stdMpi.recv(meshDistributor->getSendDofs());
stdMpi.startCommunication<int>(MPI_INT);
// === Evaluate the nnz structure this rank got from other ranks and add it to ===
// === the PETSc nnz data structure. ===
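The stdMpi object used above wraps the point-to-point exchange of these per-rank buffers. As a rough sketch of what such an exchange of variable-length integer messages involves, here is a two-phase version written directly against the MPI C API; the function name, the two-phase protocol, and the assumption that every rank has at least one communication partner are illustrative and do not describe the actual StdMpi implementation.

#include <mpi.h>
#include <map>
#include <vector>

// Two-phase exchange of variable-length integer messages: first the sizes
// are communicated, then the payloads. sendData maps destination rank to
// the data to send; recvRanks lists the ranks a message is expected from.
std::map<int, std::vector<int> >
exchangeIntData(std::map<int, std::vector<int> >& sendData,
                const std::vector<int>& recvRanks)
{
  typedef std::map<int, std::vector<int> >::iterator DataIter;

  std::vector<MPI_Request> req;
  std::vector<int> sendSizes(sendData.size());
  std::map<int, int> recvSizes;
  int dummy = 0;

  // Phase 1: exchange message sizes.
  int k = 0;
  for (DataIter it = sendData.begin(); it != sendData.end(); ++it, ++k) {
    sendSizes[k] = static_cast<int>(it->second.size());
    req.push_back(MPI_REQUEST_NULL);
    MPI_Isend(&sendSizes[k], 1, MPI_INT, it->first, 0, MPI_COMM_WORLD, &req.back());
  }
  for (size_t i = 0; i < recvRanks.size(); i++) {
    req.push_back(MPI_REQUEST_NULL);
    MPI_Irecv(&recvSizes[recvRanks[i]], 1, MPI_INT, recvRanks[i], 0,
              MPI_COMM_WORLD, &req.back());
  }
  MPI_Waitall(static_cast<int>(req.size()), &req[0], MPI_STATUSES_IGNORE);
  req.clear();

  // Phase 2: exchange the payloads. Empty messages are still sent, so every
  // expected receive is matched (the effect achieved above by creating
  // entries for all ranks in recvDofs).
  for (DataIter it = sendData.begin(); it != sendData.end(); ++it) {
    int* buf = it->second.empty() ? &dummy : &it->second[0];
    req.push_back(MPI_REQUEST_NULL);
    MPI_Isend(buf, static_cast<int>(it->second.size()), MPI_INT, it->first, 1,
              MPI_COMM_WORLD, &req.back());
  }
  std::map<int, std::vector<int> > recvData;
  for (size_t i = 0; i < recvRanks.size(); i++) {
    std::vector<int>& buf = recvData[recvRanks[i]];
    buf.resize(recvSizes[recvRanks[i]]);
    req.push_back(MPI_REQUEST_NULL);
    MPI_Irecv(buf.empty() ? &dummy : &buf[0], static_cast<int>(buf.size()),
              MPI_INT, recvRanks[i], 1, MPI_COMM_WORLD, &req.back());
  }
  MPI_Waitall(static_cast<int>(req.size()), &req[0], MPI_STATUSES_IGNORE);

  return recvData;
}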
......@@ -412,9 +422,11 @@ namespace AMDiS {
lastMeshNnz = meshDistributor->getLastMeshChangeIndex();
}
// === Create PETSc matrix with the computed nnz data structure. ===
MatCreateMPIAIJ(PETSC_COMM_WORLD, nRankRows, nRankRows, nOverallRows, nOverallRows,
// 0, PETSC_NULL, 0, PETSC_NULL, &petscMatrix);
0, d_nnz, 0, o_nnz, &petscMatrix);
#if (DEBUG != 0)
......
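The hunk above replaces the blanket preallocation (0, PETSC_NULL) with the per-row counts d_nnz and o_nnz computed from the local and received entries. Below is a compact sketch of how such counts can be derived and passed to MatCreateMPIAIJ, assuming the rank owns a nonempty contiguous block of rows and the matching block of columns and that each (row, col) position appears only once in the list; the function and variable names are illustrative.

#include <petscmat.h>
#include <utility>
#include <vector>

// Sketch: build d_nnz/o_nnz preallocation arrays for a PETSc MPIAIJ matrix
// from the global (row, col) nonzero positions owned by this rank.
// firstRow and nRankRows describe the rank's contiguous row/column block.
void createPreallocatedMatrix(MPI_Comm comm, int firstRow, int nRankRows,
                              int nOverallRows,
                              const std::vector<std::pair<int, int> >& entries,
                              Mat* petscMatrix)
{
  std::vector<PetscInt> d_nnz(nRankRows, 0), o_nnz(nRankRows, 0);

  for (size_t k = 0; k < entries.size(); k++) {
    int localRow = entries[k].first - firstRow;
    int col = entries[k].second;
    // Columns inside the rank's own block belong to the diagonal part,
    // all other columns to the off-diagonal part.
    if (col >= firstRow && col < firstRow + nRankRows)
      d_nnz[localRow]++;
    else
      o_nnz[localRow]++;
  }

  MatCreateMPIAIJ(comm, nRankRows, nRankRows, nOverallRows, nOverallRows,
                  0, &d_nnz[0], 0, &o_nnz[0], petscMatrix);
}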