From 4098e9e10c75e8017dabc9e1699879f2c95b5cc7 Mon Sep 17 00:00:00 2001
From: Thomas Witkowski <thomas.witkowski@gmx.de>
Date: Wed, 7 Mar 2012 14:52:20 +0000
Subject: [PATCH] That's it for today.

---
 AMDiS/src/ProblemStat.cc                      | 11 +++++
 AMDiS/src/parallel/FeSpaceMapping.cc          | 48 +++++++++++++++++++
 AMDiS/src/parallel/FeSpaceMapping.h           |  4 ++
 AMDiS/src/parallel/MeshDistributor.cc         |  1 +
 AMDiS/src/parallel/PetscSolverFeti.cc         |  6 +--
 AMDiS/src/parallel/PetscSolverGlobalMatrix.cc |  3 +-
 6 files changed, 67 insertions(+), 6 deletions(-)

diff --git a/AMDiS/src/ProblemStat.cc b/AMDiS/src/ProblemStat.cc
index dfbb4a2d..992dcf77 100644
--- a/AMDiS/src/ProblemStat.cc
+++ b/AMDiS/src/ProblemStat.cc
@@ -870,6 +870,17 @@ namespace AMDiS {
     INFO(info, 8)("buildAfterCoarsen needed %.5f seconds\n",
                   TIME_USED(first, clock()));
 #endif
+
+#if 0
+    VtkWriter::writeFile(rhs, "rhs0.vtu");
+    for (int i = 0; i < nComponents; i++) {
+      double s = rhs->getDOFVector(i)->sum();
+      s /= -static_cast<double>(rhs->getDOFVector(i)->getUsedSize());
+      MSG("MOVE RHS: %f\n", s);
+      add(*(rhs->getDOFVector(i)), s, *(rhs->getDOFVector(i)));
+    }
+    VtkWriter::writeFile(rhs, "rhs1.vtu");
+#endif
   }
 
 
diff --git a/AMDiS/src/parallel/FeSpaceMapping.cc b/AMDiS/src/parallel/FeSpaceMapping.cc
index f48ece76..4e548659 100644
--- a/AMDiS/src/parallel/FeSpaceMapping.cc
+++ b/AMDiS/src/parallel/FeSpaceMapping.cc
@@ -41,6 +41,8 @@ namespace AMDiS {
 
     if (overlap)
       computeNonLocalIndices();
+
+    computeMatIndex(0);
   }
 
 
@@ -89,6 +91,52 @@ namespace AMDiS {
   }
 
 
+  void GlobalDofMap::computeMatIndex(int offset)
+  {
+    FUNCNAME("GlobalDofMap::computeMatIndex()");
+
+    map<DegreeOfFreedom, int> dofToMatIndex;
+
+    for (DofMapping::iterator it = dofMap.begin(); it != dofMap.end(); ++it) {
+      if (!nonRankDofs.count(it->first)) {
+        int globalMatIndex = it->second - rStartDofs + offset;
+        dofToMatIndex[it->first] = globalMatIndex;
+      }
+    }
+
+    if (!overlap)
+      return;
+
+    TEST_EXIT_DBG(sendDofs != NULL && recvDofs != NULL)
+      ("No communicator given!\n");
+
+    StdMpi<vector<DegreeOfFreedom> > stdMpi(*mpiComm);
+    for (DofComm::Iterator it(*sendDofs, feSpace); !it.end(); it.nextRank()) {
+      vector<DegreeOfFreedom> sendGlobalDofs;
+
+      for (; !it.endDofIter(); it.nextDof())
+        if (dofMap.count(it.getDofIndex()))
+          sendGlobalDofs.push_back(dofToMatIndex[it.getDofIndex()]);
+
+      stdMpi.send(it.getRank(), sendGlobalDofs);
+    }
+
+    for (DofComm::Iterator it(*recvDofs, feSpace); !it.end(); it.nextRank())
+      stdMpi.recv(it.getRank());
+
+    stdMpi.startCommunication();
+
+    {
+      int i = 0;
+      for (DofComm::Iterator it(*recvDofs, feSpace); !it.end(); it.nextRank())
+        for (; !it.endDofIter(); it.nextDof())
+          if (dofMap.count(it.getDofIndex()))
+            dofToMatIndex[it.getDofIndex()] =
+              stdMpi.getRecvData(it.getRank())[i++];
+    }
+  }
+
+
   void GlobalDofMap::print()
   {
     FUNCNAME("GlobalDofMap::print()");
diff --git a/AMDiS/src/parallel/FeSpaceMapping.h b/AMDiS/src/parallel/FeSpaceMapping.h
index 0170e3e3..d75e1a2e 100644
--- a/AMDiS/src/parallel/FeSpaceMapping.h
+++ b/AMDiS/src/parallel/FeSpaceMapping.h
@@ -46,6 +46,8 @@ namespace AMDiS {
     GlobalDofMap(MPI::Intracomm* m)
       : mpiComm(m),
         feSpace(NULL),
+        sendDofs(NULL),
+        recvDofs(NULL),
         nRankDofs(0),
         nOverallDofs(0),
         rStartDofs(0),
@@ -102,6 +104,8 @@ namespace AMDiS {
 
     void computeNonLocalIndices();
 
+    void computeMatIndex(int offset);
+
     void print();
 
     void setFeSpace(const FiniteElemSpace *fe)
diff --git a/AMDiS/src/parallel/MeshDistributor.cc b/AMDiS/src/parallel/MeshDistributor.cc
index 26423202..51e052e1 100644
--- a/AMDiS/src/parallel/MeshDistributor.cc
+++ b/AMDiS/src/parallel/MeshDistributor.cc
@@ -31,6 +31,7 @@
 #include "parallel/DofComm.h"
 #include "io/ElementFileWriter.h"
 #include "io/MacroInfo.h"
+#include "io/MacroWriter.h"
 #include "io/VtkWriter.h"
 #include "Mesh.h"
 #include "Traverse.h"
diff --git a/AMDiS/src/parallel/PetscSolverFeti.cc b/AMDiS/src/parallel/PetscSolverFeti.cc
index 8319276d..bf407e0a 100644
--- a/AMDiS/src/parallel/PetscSolverFeti.cc
+++ b/AMDiS/src/parallel/PetscSolverFeti.cc
@@ -260,8 +260,7 @@ namespace AMDiS {
     primalDofMap[feSpace].update();
 
     MSG("nRankPrimals = %d nOverallPrimals = %d\n",
-        primalDofMap[feSpace].nRankDofs,
-        primalDofMap[feSpace].nOverallDofs);
+        primalDofMap[feSpace].nRankDofs, primalDofMap[feSpace].nOverallDofs);
 
     TEST_EXIT_DBG(primals.size() == primalDofMap[feSpace].size())
       ("Number of primals %d, but number of global primals on this rank is %d!\n",
@@ -433,8 +432,7 @@ namespace AMDiS {
 
     localDofMap[feSpace].update(false);
 
-    TEST_EXIT_DBG(nLocalInterior +
-                  primalDofMap[feSpace].size() +
+    TEST_EXIT_DBG(nLocalInterior + primalDofMap[feSpace].size() +
                   dualDofMap[feSpace].size() ==
                   static_cast<unsigned int>(admin->getUsedDofs()))
       ("Should not happen!\n");
diff --git a/AMDiS/src/parallel/PetscSolverGlobalMatrix.cc b/AMDiS/src/parallel/PetscSolverGlobalMatrix.cc
index 888bc3e4..b5300c8d 100644
--- a/AMDiS/src/parallel/PetscSolverGlobalMatrix.cc
+++ b/AMDiS/src/parallel/PetscSolverGlobalMatrix.cc
@@ -129,7 +129,7 @@ namespace AMDiS {
   void PetscSolverGlobalMatrix::fillPetscRhs(SystemVector *vec)
   {
     FUNCNAME("PetscSolverGlobalMatrix::fillPetscRhs()");
-    
+
     TEST_EXIT_DBG(vec)("No DOF vector defined!\n");
     TEST_EXIT_DBG(meshDistributor)("No mesh distributor defined!\n");
 
@@ -211,7 +211,6 @@ namespace AMDiS {
     // Print iteration counter and residual norm of the solution.
     printSolutionInfo(adaptInfo);
 
-
     // === Destroy PETSc's variables. ===
 
     VecDestroy(&petscRhsVec);
-- 
GitLab