diff --git a/AMDiS/src/DOFMatrix.cc b/AMDiS/src/DOFMatrix.cc index 4740216e515175df0793e2c3dc1f1a9ce37b9875..96bc116748736d320ecd9d0ab972de0cb5f93255 100644 --- a/AMDiS/src/DOFMatrix.cc +++ b/AMDiS/src/DOFMatrix.cc @@ -57,7 +57,7 @@ namespace AMDiS { FUNCNAME("DOFMatrix::DOFMatrix()"); TEST_EXIT(rowFeSpace)("No fe space for row!\n"); - + if (!colFeSpace) colFeSpace = rowFeSpace; @@ -211,16 +211,18 @@ namespace AMDiS { using namespace mtl; #if 0 - std::cout << "----- PRINT MAT " << rowElInfo->getElement()->getIndex() << "--------" << std::endl; - std::cout << elMat << std::endl; - std::cout << "rows: "; - for (int i = 0; i < rowIndices.size(); i++) - std::cout << rowIndices[i] << " "; - std::cout << std::endl; - std::cout << "cols: "; - for (int i = 0; i < colIndices.size(); i++) - std::cout << colIndices[i] << " "; - std::cout << std::endl; + if (MPI::COMM_WORLD.Get_rank() == 0) { + std::cout << "----- PRINT MAT " << rowElInfo->getElement()->getIndex() << "--------" << std::endl; + std::cout << elMat << std::endl; + std::cout << "rows: "; + for (int i = 0; i < rowIndices.size(); i++) + std::cout << rowIndices[i] << " "; + std::cout << std::endl; + std::cout << "cols: "; + for (int i = 0; i < colIndices.size(); i++) + std::cout << colIndices[i] << " "; + std::cout << std::endl; + } #endif for (int i = 0; i < nRow; i++) { @@ -229,7 +231,7 @@ namespace AMDiS { BoundaryCondition *condition = bound ? boundaryManager->getBoundaryCondition(bound[i]) : NULL; - if (condition && condition->isDirichlet()) { + if (condition && condition->isDirichlet()) { if (condition->applyBoundaryCondition()) { #ifdef HAVE_PARALLEL_DOMAIN_AMDIS if ((*rankDofs)[rowIndices[i]]) @@ -241,8 +243,9 @@ namespace AMDiS { } else { for (int j = 0; j < nCol; j++) { DegreeOfFreedom col = colIndices[j]; - if (fabs(elMat[i][j]) > 1e-10) + if (fabs(elMat[i][j]) > 1e-10) { ins[row][col] += elMat[i][j]; + } } } } @@ -259,7 +262,9 @@ namespace AMDiS { {} - void DOFMatrix::assemble(double factor, ElInfo *elInfo, const BoundaryType *bound) + void DOFMatrix::assemble(double factor, + ElInfo *elInfo, + const BoundaryType *bound) { FUNCNAME("DOFMatrix::assemble()"); @@ -278,7 +283,9 @@ namespace AMDiS { } - void DOFMatrix::assemble(double factor, ElInfo *elInfo, const BoundaryType *bound, + void DOFMatrix::assemble(double factor, + ElInfo *elInfo, + const BoundaryType *bound, Operator *op) { FUNCNAME("DOFMatrix::assemble()"); diff --git a/AMDiS/src/ProblemStat.cc b/AMDiS/src/ProblemStat.cc index 3a8f5b32e849dd35ea5b02c71e6b45272137795d..689ca5f98e89bcbb95449d3be24597285b0c2447 100644 --- a/AMDiS/src/ProblemStat.cc +++ b/AMDiS/src/ProblemStat.cc @@ -1356,6 +1356,10 @@ namespace AMDiS { { FUNCNAME("ProblemStat::addMatrixOperator()"); + TEST_EXIT(i < nComponents && j < nComponents) + ("Cannot add matrix operator at position %d/%d. The stationary problem has only %d components!\n", + i, j, nComponents); + TEST_EXIT(!boundaryConditionSet) ("Do not add operators after boundary conditions were set!\n"); @@ -1402,6 +1406,10 @@ namespace AMDiS { { FUNCNAME("ProblemStat::addVectorOperator()"); + TEST_EXIT(i < nComponents) + ("Cannot add vector operator at position %d. 
The stationary problem has only %d components!\n", + i, nComponents); + TEST_EXIT(!boundaryConditionSet) ("Do not add operators after boundary conditions were set!\n"); diff --git a/AMDiS/src/SubAssembler.h b/AMDiS/src/SubAssembler.h index ceaff305fe43306da74a1c8ddef1aa86d5070b6a..662f070cdc6a99b8819ba10e5c724d3a06cf4281 100644 --- a/AMDiS/src/SubAssembler.h +++ b/AMDiS/src/SubAssembler.h @@ -64,17 +64,13 @@ namespace AMDiS { /// Destructor virtual ~SubAssembler() {} - /** \brief - * Calculates the element matrix for elInfo and adds it to mat. Memory - * for mat must be provided by the caller. - */ + /// Calculates the element matrix for elInfo and adds it to mat. Memory for + /// mat must be provided by the caller. virtual void calculateElementMatrix(const ElInfo *elInfo, ElementMatrix& mat) = 0; - /** \brief - * Calculates the element vector for elInfo and adds it to vec. Memory - * for vec must be provided by the caller. - */ + /// Calculates the element vector for elInfo and adds it to vec. Memory for + /// vec must be provided by the caller. virtual void calculateElementVector(const ElInfo *elInfo, ElementVector& vec) = 0; @@ -104,10 +100,8 @@ namespace AMDiS { WorldVector<double>* getCoordsAtQPs(const ElInfo* elInfo, Quadrature *quad = NULL); - /** \brief - * DOFVector dv evaluated at quadrature points. - * Used by \ref OperatorTerm::initElement(). - */ + /// DOFVector dv evaluated at quadrature points. + /// Used by \ref OperatorTerm::initElement(). template<typename T> void getVectorAtQPs(DOFVectorBase<T>* dv, const ElInfo* elInfo, @@ -122,10 +116,8 @@ namespace AMDiS { Quadrature *quad, mtl::dense_vector<T>& vecAtQPs); - /** \brief - * Gradients of DOFVector dv evaluated at quadrature points. - * Used by \ref OperatorTerm::initElement(). - */ + /// Gradients of DOFVector dv evaluated at quadrature points. + /// Used by \ref OperatorTerm::initElement(). template<typename T> void getGradientsAtQPs( DOFVectorBase<T>* dv, const ElInfo* elInfo, @@ -139,11 +131,10 @@ namespace AMDiS { Quadrature *quad, mtl::dense_vector<typename GradientType<T>::type>& grdAtQPs); - /** \brief - * The comp'th component of the derivative of DOFVector dv evaluated at quadrature points. - * Used by \ref OperatorTerm::initElement(). - * Attention: not caching at the moment! Using cache if gradients for read but not for write - */ + /// The comp'th component of the derivative of DOFVector dv evaluated at + /// quadrature points. Used by \ref OperatorTerm::initElement(). + /// Attention: not caching at the moment! Using cache if gradients for read + /// but not for write. template<typename T> void getDerivativeAtQPs(DOFVectorBase<T>* dv, const ElInfo* elInfo, @@ -159,11 +150,9 @@ namespace AMDiS { int comp, mtl::dense_vector<T>& grdAtQPs); - /** \brief - * Called once for each ElInfo when \ref calculateElementMatrix() or - * \ref calculateElementVector() is called for the first time for this - * Element. - */ + /// Called once for each ElInfo when \ref calculateElementMatrix() or + /// \ref calculateElementVector() is called for the first time for this + /// Element. virtual void initElement(const ElInfo *smallElInfo, const ElInfo *largeElInfo = NULL, Quadrature *quad = NULL); @@ -202,16 +191,12 @@ namespace AMDiS { /// Column FiniteElemSpace. const FiniteElemSpace *colFeSpace; - /** \brief - * Number of rows of the element matrix and length of the element - * vector. Is equal to the number of row basis functions - */ + /// Number of rows of the element matrix and length of the element + /// vector. 
Is equal to the number of row basis functions int nRow; - /** \brief - * Number of columns of the element matrix. Is equal to the number - * of column basis functions - */ + /// Number of columns of the element matrix. Is equal to the number + /// of column basis functions int nCol; /// Used for \ref getVectorAtQPs() and \ref getGradientsAtQPs(). @@ -230,16 +215,15 @@ namespace AMDiS { std::map<const void*, ValuesAtQPs* > cachedValuesAtQPs; std::map<const void*, ValuesAtQPs* > cachedGradientsAtQPs; - /** \brief - * Set and updated by \ref initElement() for each ElInfo. - * coordsAtQPs[i] points to the coordinates of the i-th quadrature point. - */ + /// Set and updated by \ref initElement() for each ElInfo. + /// coordsAtQPs[i] points to the coordinates of the i-th quadrature point. WorldVector<double> *coordsAtQPs; /// Used for \ref getCoordsAtQPs(). bool coordsValid; - /// Used for \ref getCoordsAtQP(). Stores the number of allocated WorldVectors. + /// Used for \ref getCoordsAtQP(). Stores the number of allocated + /// WorldVectors. int coordsNumAllocated; /// Quadrature object to be used for assembling. diff --git a/AMDiS/src/Tetrahedron.cc b/AMDiS/src/Tetrahedron.cc index 130f78203136c068238fdb786839f8e7dfbe90cf..c590c795ce6f68038d0f97bfc397069f55cedd04 100644 --- a/AMDiS/src/Tetrahedron.cc +++ b/AMDiS/src/Tetrahedron.cc @@ -376,6 +376,48 @@ namespace AMDiS { return; break; case EDGE: + { + // === Create boundary information objects for the child elements. === + + BoundaryObject nextBound0 = bound; + prepareNextBound(nextBound0, 0); + + BoundaryObject nextBound1 = bound; + prepareNextBound(nextBound1, 1); + + // === Check for boundary on the child elements. === + + if ((nextBound0.ithObj >= 0 || nextBound1.ithObj >= 0) && child[0]) { + // So, the edge is contained in at least one of the children and the + // element is also refined. Then we have to go down further in the + // refinement hierarchy. + if (bound.reverseMode) { + if (nextBound1.ithObj >= 0) + child[1]->getHigherOrderDofs(feSpace, nextBound1, dofs); + if (nextBound0.ithObj >= 0) + child[0]->getHigherOrderDofs(feSpace, nextBound0, dofs); + } else { + if (nextBound0.ithObj >= 0) + child[0]->getHigherOrderDofs(feSpace, nextBound0, dofs); + if (nextBound1.ithObj >= 0) + child[1]->getHigherOrderDofs(feSpace, nextBound1, dofs); + } + } else { + // Either the edge is not contained in further refined children, or + // the element is not refined further on this edge. Then we can get + // all the DOFs on this edge. 
+ + ElementDofIterator elDofIter(feSpace, true); + elDofIter.reset(this); + do { + if (elDofIter.getCurrentPos() == 1 && + elDofIter.getCurrentElementPos() == bound.ithObj) + dofs.push_back(elDofIter.getDofPtr()); + } while(elDofIter.next()); + } + } + break; case FACE: { @@ -403,10 +445,8 @@ namespace AMDiS { elDofIter.reset(this); do { if (elDofIter.getCurrentPos() == 2 && - elDofIter.getCurrentElementPos() == bound.ithObj) { - ERROR_EXIT("Check this, if it will really work!\n"); + elDofIter.getCurrentElementPos() == bound.ithObj) dofs.push_back(elDofIter.getDofPtr()); - } } while(elDofIter.next()); } } diff --git a/AMDiS/src/Tetrahedron.h b/AMDiS/src/Tetrahedron.h index 492c928736b2ec29cc36f734639c5b88e60e1c71..b801be84e414b8db99a87059509d5d7331976684 100644 --- a/AMDiS/src/Tetrahedron.h +++ b/AMDiS/src/Tetrahedron.h @@ -324,10 +324,10 @@ namespace AMDiS { static const int sideOfChild[3][2][4]; /** \brief - * edgeOfChild[elType][i][j] is the local edge number of the j-th edge within the - * i-th children of an element of elType. If the value is -1, the edge is not - * included in the element's child. Note that the 0 edge is included in both - * children only by its half. + * edgeOfChild[elType][i][j] is the local edge number of the j-th edge within + * the i-th children of an element of elType. If the value is -1, the edge is + * not included in the element's child. Note that the 0 edge is included in + * both children only by its half. */ static const int edgeOfChild[3][2][6]; diff --git a/AMDiS/src/Triangle.cc b/AMDiS/src/Triangle.cc index f502c91ed5aa060e2158736556eea660ddf95cd8..8fd3b78065f82ddf999c7b5924890075f79ffb2d 100644 --- a/AMDiS/src/Triangle.cc +++ b/AMDiS/src/Triangle.cc @@ -169,7 +169,7 @@ namespace AMDiS { break; default: ERROR_EXIT("Should never happen!\n"); - } + } } diff --git a/AMDiS/src/ZeroOrderAssembler.cc b/AMDiS/src/ZeroOrderAssembler.cc index b9b3d189972f59e955cc15a063ab3650eb86ea8e..a83b09023074249c389fe7c38f6e8ddc3e67ec47 100644 --- a/AMDiS/src/ZeroOrderAssembler.cc +++ b/AMDiS/src/ZeroOrderAssembler.cc @@ -77,9 +77,9 @@ namespace AMDiS { newAssembler = new StandardZOA(op, assembler, quad); } else { if (pwConst) - newAssembler = new PrecalcZOA(op, assembler, quad); + newAssembler = new PrecalcZOA(op, assembler, quad); else - newAssembler = new FastQuadZOA(op, assembler, quad); + newAssembler = new FastQuadZOA(op, assembler, quad); } subAssemblers->push_back(newAssembler); diff --git a/AMDiS/src/parallel/ElementObjectData.cc b/AMDiS/src/parallel/ElementObjectData.cc index 7dd8f0495540048f667f6783b1b66153ed5fd58b..177d5a5b8e36fa0d5a01bc51af81d72f7d8b240b 100644 --- a/AMDiS/src/parallel/ElementObjectData.cc +++ b/AMDiS/src/parallel/ElementObjectData.cc @@ -85,9 +85,12 @@ namespace AMDiS { periodicFaces[make_pair(face0, face1)] = elInfo->getBoundary(i); /// Add all three vertices of the face to be periodic. 
- periodicVertices[make_pair(face0.get<0>(), face1.get<0>())] = boundaryType; - periodicVertices[make_pair(face0.get<1>(), face1.get<1>())] = boundaryType; - periodicVertices[make_pair(face0.get<2>(), face1.get<2>())] = boundaryType; + periodicVertices[make_pair(face0.get<0>(), face1.get<0>())] = + boundaryType; + periodicVertices[make_pair(face0.get<1>(), face1.get<1>())] = + boundaryType; + periodicVertices[make_pair(face0.get<2>(), face1.get<2>())] = + boundaryType; periodicDofAssoc[face0.get<0>()].insert(boundaryType); periodicDofAssoc[face0.get<1>()].insert(boundaryType); @@ -131,8 +134,8 @@ namespace AMDiS { TEST_EXIT_DBG(mesh)("Mesh not set!\n"); - // === Return, if there are no periodic vertices, i.e., there are no no === - // === periodic boundaries in the mesh. === + // === Return, if there are no periodic vertices, i.e., there are no === + // === periodic boundaries in the mesh. === if (periodicVertices.size() == 0) return; @@ -141,9 +144,9 @@ namespace AMDiS { // === Get all vertex DOFs that have multiple periodic associations. === // We group all vertices together, that have either two or three periodic - // associations. For rectangular domains in 2D, the four corner vertices have all - // two periodic associations. For box domains in 3D, the eight corner vertices - // have all three periodic associations. + // associations. For rectangular domains in 2D, the four corner vertices + // all have two periodic associations. For box domains in 3D, the eight + // corner vertices all have three periodic associations. vector<DegreeOfFreedom> multPeriodicDof2, multPeriodicDof3; for (map<DegreeOfFreedom, std::set<BoundaryType> >::iterator it = periodicDofAssoc.begin(); diff --git a/AMDiS/src/parallel/ElementObjectData.h b/AMDiS/src/parallel/ElementObjectData.h index 2921318dd8fa1250eb66f78da2f05a3be050285b..42123d2b2b69486351b4db68603f76a082ddb49a 100644 --- a/AMDiS/src/parallel/ElementObjectData.h +++ b/AMDiS/src/parallel/ElementObjectData.h @@ -126,11 +126,11 @@ namespace AMDiS { /** \brief * Creates final data of the periodic boundaries. Must be called after all * elements of the mesh are added to the object database. Then this functions - * search for interectly connected vertices in periodic boundaries. This is only - * the case, if there are more than one boundary conditions. Then, e.g., in 2D, - * all edges of a square are iterectly connected. In 3D, if the macro mesh is a - * box, all eight vertex nodes and always four of the 12 edges are iterectly - * connected. + * search for indirectly connected vertices in periodic boundaries. This is + * only the case if there is more than one boundary condition. Then, e.g., + * in 2D, all edges of a square are indirectly connected. In 3D, if the macro + * mesh is a box, all eight vertex nodes and always four of the 12 edges are + * indirectly connected. */ void createPeriodicData(const FiniteElemSpace *feSpace); diff --git a/AMDiS/src/parallel/MeshDistributor.cc b/AMDiS/src/parallel/MeshDistributor.cc index 121b6449bcadaf92424af6960d80b05d0ab0badc..9b0d6349a17ddee4edf403db0114399d844bb926 100644 --- a/AMDiS/src/parallel/MeshDistributor.cc +++ b/AMDiS/src/parallel/MeshDistributor.cc @@ -289,17 +289,17 @@ namespace AMDiS { // We have to remove the VertexVectors, which contain periodic assoiciations, - // because they are not valid anymore after some macro elements have been removed - // and the corresponding DOFs were deleted. 
+ // because they are not valid anymore after some macro elements have been + // removed and the corresponding DOFs were deleted. for (map<BoundaryType, VertexVector*>::iterator it = mesh->getPeriodicAssociations().begin(); it != mesh->getPeriodicAssociations().end(); ++it) const_cast<DOFAdmin&>(mesh->getDofAdmin(0)).removeDOFContainer(dynamic_cast<DOFContainer*>(it->second)); updateLocalGlobalNumbering(); - // === In 3D we have to make some test, if the resulting mesh is valid. If === - // === it is not valid, there is no possiblity yet to fix this problem, just === - // === exit with an error message. === + // === In 3D we have to run some tests to check whether the resulting === + // === mesh is valid. If it is not, there is no possibility yet to fix === + // === this problem, so we just exit with an error message. === check3dValidMesh(); @@ -320,7 +320,7 @@ namespace AMDiS { // === Create periodic DOF mapping, if there are periodic boundaries. === - createPeriodicMap(feSpaces[0]); + createPeriodicMap(); #if (DEBUG != 0) ParallelDebug::testPeriodicBoundary(*this); #endif @@ -335,25 +335,20 @@ namespace AMDiS { refineManager->globalRefine(mesh, globalRefinement); updateLocalGlobalNumbering(); - // === Update periodic mapping, if there are periodic boundaries. === - createPeriodicMap(feSpaces[0]); + createPeriodicMap(); #if (DEBUG != 0) ParallelDebug::testPeriodicBoundary(*this); #endif } - - /// === Set DOF rank information to all matrices and vectors. === - + // Set DOF rank information to all matrices and vectors. setRankDofs(); - - // === Remove periodic boundary conditions in sequential problem definition. === - + // Remove periodic boundary conditions in sequential problem definition. removePeriodicBoundaryConditions(); initialized = true; @@ -364,6 +359,9 @@ namespace AMDiS { { FUNCNAME("MeshDistributor::addProblemStat()"); + TEST_EXIT_DBG(probStat->getFeSpaces().size()) + ("No FE spaces in stationary problem!\n"); + // === Add FE spaces from stationary problem to mesh distributor. === for (unsigned int i = 0; i < probStat->getFeSpaces().size(); i++) { @@ -845,21 +843,20 @@ namespace AMDiS { #endif - // === Because the mesh has been changed, update the DOF numbering and mappings. === - + // Because the mesh has been changed, update the DOF numbering and mappings. updateLocalGlobalNumbering(); + // Update periodic mapping, if there are periodic boundaries. + createPeriodicMap(); - // === Update periodic mapping, if there are periodic boundaries. === - - createPeriodicMap(feSpaces[0]); #if (DEBUG != 0) ParallelDebug::testPeriodicBoundary(*this); #endif - // === The mesh has changed, so check if it is required to repartition the mesh. === + // === The mesh has changed, so check if it is required to repartition === + // === the mesh. === nMeshChangesAfterLastRepartitioning++; @@ -900,7 +897,8 @@ namespace AMDiS { maxDofs = std::max(maxDofs, nDofsInRank[i]); } int avrgDofs = nOverallDofs / mpiSize; - double imbalance = (static_cast<double>(maxDofs - avrgDofs) / avrgDofs) * 100.0; + double imbalance = + (static_cast<double>(maxDofs - avrgDofs) / avrgDofs) * 100.0; MSG("Imbalancing factor: %.1f\%\n", imbalance); } @@ -1202,7 +1200,8 @@ namespace AMDiS { it != newMacroEl.end(); ++it) { MacroElement *mel = *it; - // First, reset all neighbour relations. The correct neighbours will be set later. + // First, reset all neighbour relations. The correct neighbours will be + // set later. 
for (int i = 0; i < mesh->getGeo(NEIGH); i++) mel->setNeighbour(i, NULL); @@ -1259,7 +1258,8 @@ namespace AMDiS { stdMpi2.startCommunication(); - // === Adapte the new macro elements due to the received mesh structure codes. === + // === Adapt the new macro elements due to the received mesh === + // === structure codes. === for (map<int, vector<int> >::iterator it = partitioner->getRecvElements().begin(); it != partitioner->getRecvElements().end(); ++it) { @@ -1348,7 +1348,7 @@ namespace AMDiS { // === Update periodic mapping, if there are periodic boundaries. === - createPeriodicMap(feSpaces[0]); + createPeriodicMap(); #if (DEBUG != 0) @@ -1435,25 +1435,21 @@ namespace AMDiS { macroElIndexMap[el->getIndex()] = el; macroElIndexTypeMap[el->getIndex()] = elInfo->getType(); - // === Add all sub object of the element to the variable elObjects. === + // Add all sub objects of the element to the variable elObjects. elObjects.addElement(elInfo); elInfo = stack.traverseNext(elInfo); } - // === Create periodic data, if there are periodic boundary conditions. === - if (elObjects.hasPeriodicData()) { - TEST_EXIT(feSpaces.size() == 1) - ("Sebastian: Na, dass funktioniert auch noch nicht mit mehreren FE spaces. Du weisst schon, wen du jetzt mobben kannst :)!\n"); - } + // Create periodic data, if there are periodic boundary conditions. elObjects.createPeriodicData(feSpaces[0]); - // === Create data about the reverse modes of neighbouring elements. === + // Create data about the reverse modes of neighbouring elements. elObjects.createReverseModeData(feSpaces[0], macroElIndexMap, macroElIndexTypeMap); - // === Create mesh element data for this rank. === + // Create mesh element data for this rank. elObjects.createRankData(partitionMap); } @@ -1482,7 +1478,8 @@ namespace AMDiS { int owner = elObjects.getIterateOwner(); ElementObjectData& rankBoundEl = objData[mpiRank]; - TEST_EXIT_DBG(macroElIndexMap[rankBoundEl.elIndex])("Should not happen!\n"); + TEST_EXIT_DBG(macroElIndexMap[rankBoundEl.elIndex]) + ("Should not happen!\n"); AtomicBoundary bound; bound.rankObj.el = macroElIndexMap[rankBoundEl.elIndex]; @@ -1555,12 +1552,8 @@ namespace AMDiS { if (elObjects.isInRank(it->first.first, mpiRank) == false) continue; - TEST_EXIT(feSpaces.size() == 1)("Does not work for multiple fe spaces!\n"); - WorldVector<double> c0, c1; - mesh->getDofIndexCoords(it->first.first, feSpaces[0], c0); - mesh->getDofIndexCoords(it->first.second, feSpaces[0], c1); - - ElementObjectData& perDofEl0 = elObjects.getElementsInRank(it->first.first)[mpiRank]; + ElementObjectData& perDofEl0 = + elObjects.getElementsInRank(it->first.first)[mpiRank]; for (map<int, ElementObjectData>::iterator elIt = elObjects.getElementsInRank(it->first.second).begin(); elIt != elObjects.getElementsInRank(it->first.second).end(); ++elIt) { @@ -1665,16 +1658,19 @@ namespace AMDiS { b = bound; if (mpiRank > otherElementRank) - b.neighObj.reverseMode = elObjects.getFaceReverseMode(perFaceEl0, perFaceEl1); + b.neighObj.reverseMode = + elObjects.getFaceReverseMode(perFaceEl0, perFaceEl1); else - b.rankObj.reverseMode = elObjects.getFaceReverseMode(perFaceEl0, perFaceEl1); + b.rankObj.reverseMode = + elObjects.getFaceReverseMode(perFaceEl0, perFaceEl1); } } - // === Once we have this information, we must care about the order of the atomic === - // === bounds in the three boundary handling object. Eventually all the bound- === - // === aries have to be in the same order on both ranks that share the bounday. 
=== + // === Once we have this information, we must care about the order of the === + // === atomic bounds in the three boundary handling objects. Eventually === + // === all the boundaries have to be in the same order on both ranks that === + // === share the boundary. === StdMpi<vector<AtomicBoundary> > stdMpi(mpiComm); stdMpi.send(myIntBoundary.boundary); @@ -1682,16 +1678,17 @@ namespace AMDiS { stdMpi.startCommunication(); - // === The information about all neighbouring boundaries has been received. So === - // === the rank tests if its own atomic boundaries are in the same order. If === - // === not, the atomic boundaries are swaped to the correct order. === + // === The information about all neighbouring boundaries has been === + // === received. So the rank tests if its own atomic boundaries are in === + // === the same order. If not, the atomic boundaries are swapped to the === + // === correct order. === for (RankToBoundMap::iterator rankIt = otherIntBoundary.boundary.begin(); rankIt != otherIntBoundary.boundary.end(); ++rankIt) { - // === We have received from rank "rankIt->first" the ordered list of element === - // === indices. Now, we have to sort the corresponding list in this rank to === - // === get the same order. === + // === We have received from rank "rankIt->first" the ordered list of === + // === element indices. Now, we have to sort the corresponding list in === + // === this rank to get the same order. === for (unsigned int j = 0; j < rankIt->second.size(); j++) { @@ -1706,7 +1703,8 @@ namespace AMDiS { if ((rankIt->second)[k].neighObj == recvedBound) break; - // The element must always be found, because the list is just in another order. + // The element must always be found, because the list is just in + // another order. TEST_EXIT_DBG(k < rankIt->second.size())("Should never happen!\n"); // Swap the current with the found element. @@ -1747,8 +1745,10 @@ namespace AMDiS { continue; for (unsigned int j = 0; j < rankIt->second.size(); j++) { - BoundaryObject &recvRankObj = stdMpi.getRecvData()[rankIt->first][j].rankObj; - BoundaryObject &recvNeighObj = stdMpi.getRecvData()[rankIt->first][j].neighObj; + BoundaryObject &recvRankObj = + stdMpi.getRecvData()[rankIt->first][j].rankObj; + BoundaryObject &recvNeighObj = + stdMpi.getRecvData()[rankIt->first][j].neighObj; if (periodicBoundary.boundary[rankIt->first][j].neighObj != recvRankObj || periodicBoundary.boundary[rankIt->first][j].rankObj != recvNeighObj) { @@ -1827,16 +1827,13 @@ namespace AMDiS { } } } else { - for (InteriorBoundary::iterator it(myIntBoundary); !it.end(); ++it) { + for (InteriorBoundary::iterator it(myIntBoundary); !it.end(); ++it) it->rankObj.el->getAllDofs(feSpace, it->rankObj, sendDofs.getDofCont(it.getRank(), feSpace)); - - } - for (InteriorBoundary::iterator it(otherIntBoundary); !it.end(); ++it) { + for (InteriorBoundary::iterator it(otherIntBoundary); !it.end(); ++it) it->rankObj.el->getAllDofs(feSpace, it->rankObj, recvDofs.getDofCont(it.getRank(), feSpace)); - } } // === Delete all empty DOF send and recv positions === @@ -2008,32 +2005,41 @@ namespace AMDiS { } - void MeshDistributor::createPeriodicMap(const FiniteElemSpace *feSpace) + void MeshDistributor::createPeriodicMap() { FUNCNAME("MeshDistributor::createPeriodicMap()"); if (periodicBoundary.boundary.size() == 0) return; - TEST_EXIT(feSpaces.size() > 1) - ("Nanu, schon wieder Arbeit fuer den Thomas! 
Auch das hier muss geaendert werden fuer den Einsatz von unterschiedlichen FE Dingern!\n"); - - // Clear all periodic dof mappings calculated before. We do it from scratch. - periodicDof.clear(); + // Clear all periodic DOF mappings calculated before. We do it from scratch. + periodicDofs.clear(); + periodicDofMap.clear(); periodicDofAssociations.clear(); + for (unsigned int i = 0; i < feSpaces.size(); i++) + createPeriodicMap(feSpaces[i]); + } + + + void MeshDistributor::createPeriodicMap(const FiniteElemSpace *feSpace) + { + FUNCNAME("MeshDistributor::createPeriodicMap()"); + StdMpi<vector<int> > stdMpi(mpiComm, false); - // === Each rank traverse its periodic boundaries and sends the DOF indices === - // === to the rank "on the other side" of the periodic boundary. === + // === Each rank traverse its periodic boundaries and sends the DOF === + // === indices to the rank "on the other side" of the periodic boundary. === - RankToDofContainer rankPeriodicDofs; map<int, vector<int> > rankToDofType; for (RankToBoundMap::iterator it = periodicBoundary.boundary.begin(); it != periodicBoundary.boundary.end(); ++it) { if (it->first == mpiRank) { + // Here we have a periodic boundary within rank's subdomain. So we can + // compute the periodic mappings without communication. + TEST_EXIT_DBG(it->second.size() % 2 == 0)("Should not happen!\n"); for (unsigned int i = 0; i < it->second.size(); i++) { @@ -2058,16 +2064,18 @@ namespace AMDiS { DegreeOfFreedom globalDof1 = dofFeData[feSpace].mapLocalGlobalDofs[*(dofs1[j])]; - if (periodicDofAssociations[globalDof0].count(type) == 0) { - periodicDof[type][globalDof0] = globalDof1; - periodicDofAssociations[globalDof0].insert(type); + if (periodicDofAssociations[feSpace][globalDof0].count(type) == 0) { + periodicDofMap[feSpace][type][globalDof0] = globalDof1; + periodicDofAssociations[feSpace][globalDof0].insert(type); } } } } else { + // Here we have a periodic boundary between two ranks. + // Create DOF indices on the boundary. - DofContainer& dofs = rankPeriodicDofs[it->first]; + DofContainer& dofs = periodicDofs.getDofCont(it->first, feSpace); for (vector<AtomicBoundary>::iterator boundIt = it->second.begin(); boundIt != it->second.end(); ++boundIt) { @@ -2092,30 +2100,30 @@ namespace AMDiS { stdMpi.startCommunication(); - // === The rank has received the dofs from the rank on the other side of === + // === The rank has received the DOFs from the rank on the other side of === // === the boundary. Now it can use them to create the mapping between === - // === the periodic dofs in this rank and the corresponding periodic === - // === dofs from the other ranks. === + // === the periodic DOFs in this rank and the corresponding periodic === + // === DOFs from the other ranks. === for (RankToBoundMap::iterator it = periodicBoundary.boundary.begin(); it != periodicBoundary.boundary.end(); ++it) { - DofContainer& dofs = rankPeriodicDofs[it->first]; + DofContainer& dofs = periodicDofs.getDofCont(it->first, feSpace); vector<int>& types = rankToDofType[it->first]; TEST_EXIT_DBG(dofs.size() == types.size())("Should not happen!\n"); - // Added the received dofs to the mapping. + // Added the received DOFs to the mapping. 
for (unsigned int i = 0; i < dofs.size(); i++) { int globalDofIndex = dofFeData[feSpace].mapLocalGlobalDofs[*(dofs[i])]; int mapGlobalDofIndex = stdMpi.getRecvData(it->first)[i]; BoundaryType type = types[i]; - // Check if this global dof with the corresponding boundary type was + // Check if this global DOF with the corresponding boundary type was // not added before by another periodic boundary from other rank. - if (periodicDofAssociations[globalDofIndex].count(type) == 0) { - periodicDof[type][globalDofIndex] = mapGlobalDofIndex; - periodicDofAssociations[globalDofIndex].insert(type); + if (periodicDofAssociations[feSpace][globalDofIndex].count(type) == 0) { + periodicDofMap[feSpace][type][globalDofIndex] = mapGlobalDofIndex; + periodicDofAssociations[feSpace][globalDofIndex].insert(type); } } } @@ -2143,15 +2151,16 @@ namespace AMDiS { DegreeOfFreedom globalDof = dofFeData[feSpace].mapLocalGlobalDofs[*dofs[i]]; - TEST_EXIT_DBG(periodicDofAssociations.count(globalDof)) + TEST_EXIT_DBG(periodicDofAssociations[feSpace].count(globalDof)) ("Should hot happen!\n"); - for (std::set<BoundaryType>::iterator perAscIt = periodicDofAssociations[globalDof].begin(); - perAscIt != periodicDofAssociations[globalDof].end(); ++perAscIt) + for (std::set<BoundaryType>::iterator perAscIt = periodicDofAssociations[feSpace][globalDof].begin(); + perAscIt != periodicDofAssociations[feSpace][globalDof].end(); ++perAscIt) if (*perAscIt >= -3) { - TEST_EXIT_DBG(periodicDof[*perAscIt].count(globalDof) == 1) + TEST_EXIT_DBG(periodicDofMap[feSpace][*perAscIt].count(globalDof) == 1) ("Should not happen!\n"); - perObjMap[*perAscIt][globalDof] = periodicDof[*perAscIt][globalDof]; + perObjMap[*perAscIt][globalDof] = + periodicDofMap[feSpace][*perAscIt][globalDof]; } } } @@ -2168,11 +2177,11 @@ namespace AMDiS { perIt != it->second.end(); ++perIt) { for (DofMapping::iterator dofIt = perIt->second.begin(); dofIt != perIt->second.end(); ++dofIt) { - TEST_EXIT_DBG(periodicDof[perIt->first].count(dofIt->second) == 0 || - periodicDof[perIt->first][dofIt->second] == dofIt->first) + TEST_EXIT_DBG(periodicDofMap[feSpace][perIt->first].count(dofIt->second) == 0 || + periodicDofMap[feSpace][perIt->first][dofIt->second] == dofIt->first) ("Should not happen!\n"); - periodicDof[perIt->first][dofIt->second] = dofIt->first; + periodicDofMap[feSpace][perIt->first][dofIt->second] = dofIt->first; } } } @@ -2266,9 +2275,11 @@ namespace AMDiS { SerUtil::serialize(out, dofFeData[feSpaces[i]].mapLocalDofIndex); } + for (unsigned int i = 0; i < nFeSpace; i++) + serialize(out, periodicDofMap[feSpaces[i]]); - serialize(out, periodicDof); - serialize(out, periodicDofAssociations); + for (unsigned int i = 0; i < nFeSpace; i++) + serialize(out, periodicDofAssociations[feSpaces[i]]); SerUtil::serialize(out, macroElementNeighbours); @@ -2342,8 +2353,11 @@ namespace AMDiS { } - deserialize(in, periodicDof); - deserialize(in, periodicDofAssociations); + for (unsigned int i = 0; i < nFeSpace; i++) + deserialize(in, periodicDofMap[feSpaces[i]]); + + for (unsigned int i = 0; i < nFeSpace; i++) + deserialize(in, periodicDofAssociations[feSpaces[i]]); SerUtil::deserialize(in, macroElementNeighbours); diff --git a/AMDiS/src/parallel/MeshDistributor.h b/AMDiS/src/parallel/MeshDistributor.h index bcfbfd27081d868118ae64f1d105661eda542c00..8953f16b022e47b47c61e1d0ea085365b943cb85 100644 --- a/AMDiS/src/parallel/MeshDistributor.h +++ b/AMDiS/src/parallel/MeshDistributor.h @@ -278,47 +278,57 @@ namespace AMDiS { } /// Returns the periodic mapping for all 
boundary DOFs in rank. - inline PeriodicDofMap& getPeriodicMapping() + inline PeriodicDofMap& getPeriodicMapping(const FiniteElemSpace *feSpace) { - return periodicDof; + return periodicDofMap[feSpace]; } - /// Returns for a global dof index its periodic mapping for a given - /// boundary type. - inline int getPeriodicMapping(int globalDofIndex, BoundaryType type) + /// Returns for a global DOF index of a given FE space its periodic mapping + /// for a given boundary type. + inline int getPeriodicMapping(const FiniteElemSpace *feSpace, + BoundaryType type, + int globalDofIndex) { FUNCNAME("MeshDistributor::getPeriodicMapping()"); - TEST_EXIT_DBG(periodicDof[type].count(globalDofIndex) == 1) + TEST_EXIT_DBG(periodicDofMap.count(feSpace))("Should not happen!\n"); + TEST_EXIT_DBG(periodicDofMap[feSpace][type].count(globalDofIndex) == 1) ("There is no periodic association for global DOF %d for boundary type %d!\n", globalDofIndex, type); - return periodicDof[type][globalDofIndex]; + return periodicDofMap[feSpace][type][globalDofIndex]; } /// For a given global DOF index, this function returns the set of periodic /// associations, i.e., the boundary types the DOF is associated to, for /// this DOF. - inline std::set<BoundaryType>& getPerDofAssociations(int globalDofIndex) - { - TEST_EXIT_DBG(periodicDofAssociations.count(globalDofIndex)) + inline std::set<BoundaryType>& getPerDofAssociations(const FiniteElemSpace* feSpace, + int globalDofIndex) + { + FUNCNAME("MeshDistributor::getPerDofAssociations()"); + + TEST_EXIT_DBG(periodicDofAssociations.count(feSpace)) + ("Should not happen!\n"); + TEST_EXIT_DBG(periodicDofAssociations[feSpace].count(globalDofIndex)) ("Should not happen!\n"); - return periodicDofAssociations[globalDofIndex]; + return periodicDofAssociations[feSpace][globalDofIndex]; } /// Returns true, if the DOF (global index) is a periodic DOF. - inline bool isPeriodicDof(int globalDofIndex) + inline bool isPeriodicDof(const FiniteElemSpace *feSpace, int globalDofIndex) { - return (periodicDofAssociations.count(globalDofIndex) > 0 && - periodicDofAssociations[globalDofIndex].size() > 0); + return (periodicDofAssociations[feSpace].count(globalDofIndex) > 0 && + periodicDofAssociations[feSpace][globalDofIndex].size() > 0); } - /// Returns true, if the DOF (global index) is a periodic DOF for the given - /// boundary type. - inline bool isPeriodicDof(int globalDofIndex, BoundaryType type) + /// Returns true, if the DOF (global index) of a given FE space is a + /// periodic DOF for the given boundary type. + inline bool isPeriodicDof(const FiniteElemSpace *feSpace, + BoundaryType type, + int globalDofIndex) { - return (periodicDof[type].count(globalDofIndex) > 0); + return (periodicDofMap[feSpace][type].count(globalDofIndex) > 0); } DofComm& getSendDofs() @@ -331,6 +341,11 @@ namespace AMDiS { return recvDofs; } + DofComm& getPeriodicDofs() + { + return periodicDofs; + } + /// Return true, if the given DOF is owned by the rank. If false, the DOF /// is in rank's partition, but is owned by some other rank. inline bool getIsRankDof(const FiniteElemSpace *feSpace, DegreeOfFreedom dof) @@ -470,11 +485,15 @@ namespace AMDiS { /// changed. void updateLocalGlobalNumbering(const FiniteElemSpace *feSpace); + /// Calls \ref createPeriodicMap(feSpace) for all FE spaces that are + /// handled by the mesh distributor. + void createPeriodicMap(); + /** \brief - * Creates to all dofs in rank's partition that are on a periodic boundary - * the mapping from dof index to the other periodic dof indices. 
This - * information is stored in \ref periodicDof. - */ + * Creates, for a specific FE space, to all DOFs in rank's partition that + * are on a periodic boundary the mapping from dof index to the other + * periodic dof indices. This information is stored in \ref periodicDofMap. + */ void createPeriodicMap(const FiniteElemSpace *feSpace); /** \brief @@ -683,13 +702,21 @@ namespace AMDiS { */ DofComm recvDofs; + /** \brief + * This map contains on each rank a list of DOFs along the interior bound- + * aries to communicate with other ranks. The DOF indices are given in rank's + * local numbering. Periodic boundaries within one subdomain are not + * considered here. + */ + DofComm periodicDofs; + /** \brief * If periodic boundaries are used, this map stores, for each periodic * boundary type, for all DOFs in rank's partition (that are on periodic * boundaries), the corresponding mapped periodic DOFs. The mapping is * defined by using global DOF indices. */ - PeriodicDofMap periodicDof; + PeriodicDofMapFeSpace periodicDofMap; /** \brief * If periodic boundaries are used, this map stores to each periodic DOF in @@ -698,7 +725,7 @@ namespace AMDiS { * with all boundaries being periodic, the four corners are associated by * two different boundaries. */ - map<int, std::set<BoundaryType> > periodicDofAssociations; + map<const FiniteElemSpace*, map<DegreeOfFreedom, std::set<BoundaryType> > > periodicDofAssociations; /// This set of values must be interchanged between ranks when the mesh is diff --git a/AMDiS/src/parallel/ParallelDebug.cc b/AMDiS/src/parallel/ParallelDebug.cc index f61f32de9c166d55036c01abccdc63f369b4c495..7bda65f6a96f26a3e63e8e0989f71b1000988b25 100644 --- a/AMDiS/src/parallel/ParallelDebug.cc +++ b/AMDiS/src/parallel/ParallelDebug.cc @@ -145,15 +145,21 @@ namespace AMDiS { { FUNCNAME("ParallelDebug::testPeriodicBoundary()"); - return; + for (unsigned int i = 0; i < pdb.feSpaces.size(); i++) + testPeriodicBoundary(pdb, pdb.feSpaces[i]); + } + - TEST_EXIT(pdb.feSpaces.size() == 1)("Shoudl not happen!\n"); + void ParallelDebug::testPeriodicBoundary(MeshDistributor &pdb, + const FiniteElemSpace *feSpace) + { + FUNCNAME("ParallelDebug::testPeriodicBoundary()"); // === 1. check: All periodic DOFs should have at least a correct number === // === of periodic associations. === - for (map<int, std::set<BoundaryType> >::iterator it = pdb.periodicDofAssociations.begin(); - it != pdb.periodicDofAssociations.end(); ++it) { + for (map<int, std::set<BoundaryType> >::iterator it = pdb.periodicDofAssociations[feSpace].begin(); + it != pdb.periodicDofAssociations[feSpace].end(); ++it) { WorldVector<double> c; pdb.mesh->getDofIndexCoords(it->first, pdb.feSpaces[0], c); int nAssoc = it->second.size(); @@ -172,7 +178,7 @@ namespace AMDiS { for (int i = 1; i < pdb.mpiSize; i++) stdMpi.recv(i); } else { - stdMpi.send(0, pdb.periodicDof); + stdMpi.send(0, pdb.periodicDofMap[feSpace]); } stdMpi.startCommunication(); @@ -184,7 +190,7 @@ namespace AMDiS { if (pdb.mpiRank == 0) { // Stores to each rank the periodic DOF mappings of this rank. map<int, PeriodicDofMap> rankToMaps; - PeriodicDofMap dofMap = pdb.periodicDof; + PeriodicDofMap dofMap = pdb.periodicDofMap[feSpace]; rankToMaps[0] = dofMap; for (int i = 1; i < pdb.mpiSize; i++) { @@ -243,9 +249,10 @@ namespace AMDiS { TEST_EXIT(foundError == 0)("Error found on at least on rank!\n"); - // === 3. 
check: On all edge and face periodic DOFs, at least on coordinate of === - // === each periodic DOF pair must be equal (at least as long we consider === - // === periodic boundaries only on rectangulars and boxes. === + // === 3. check: On all edge and face periodic DOFs, at least one === + // === coordinate component of each periodic DOF pair must be equal === + // === (at least as long as we consider periodic boundaries only on === + // === rectangles and boxes). === RankToCoords sendCoords; map<int, vector<BoundaryType> > rankToDofType; @@ -261,11 +268,11 @@ namespace AMDiS { continue; DofContainer dofs; - boundIt->rankObj.el->getAllDofs(pdb.feSpaces[0], boundIt->rankObj, dofs); + boundIt->rankObj.el->getAllDofs(feSpace, boundIt->rankObj, dofs); for (unsigned int i = 0; i < dofs.size(); i++) { WorldVector<double> c; - pdb.mesh->getDofIndexCoords(*(dofs[i]), pdb.feSpaces[0], c); + pdb.mesh->getDofIndexCoords(*(dofs[i]), feSpace, c); sendCoords[it->first].push_back(c); rankToDofType[it->first].push_back(boundIt->type); } @@ -298,8 +305,7 @@ namespace AMDiS { if (c0[j] == c1[j]) nEqual++; - if ((rankToDofType[it->first][i] >= -3 && nEqual < 2) || - (rankToDofType[it->first][i] < -3 && nEqual == 0)) { + if (nEqual == 0) { MSG("[DBG] %d-ith periodic DOF in boundary between ranks %d <-> %d is not correct!\n", i, pdb.mpiRank, it->first); MSG("[DBG] Coords on rank %d: %f %f %f\n", @@ -472,10 +478,14 @@ namespace AMDiS { CoordsIndexMap coordsToIndex; DOFIterator<WorldVector<double> > it(&coords, USED_DOFS); - for (it.reset(); !it.end(); ++it) + for (it.reset(); !it.end(); ++it) { coordsToIndex[(*it)] = pdb.dofFeData[feSpace].mapLocalGlobalDofs[it.getDOFIndex()]; +// MSG(" CHECK FOR DOF %d AT COORDS %f %f %f\n", +// coordsToIndex[(*it)], (*it)[0], (*it)[1], (*it)[2]); + } + StdMpi<CoordsIndexMap> stdMpi(pdb.mpiComm, true); for (DofComm::Iterator it(pdb.sendDofs, feSpace); !it.end(); it.nextRank()) stdMpi.send(it.getRank(), coordsToIndex); @@ -499,7 +509,7 @@ namespace AMDiS { oss << coordsIt->first[i] << " "; oss << " do not fit together on rank " << pdb.getMpiRank() << " (global index: " - << coordsToIndex[coordsIt->first] << " and on rank " + << coordsToIndex[coordsIt->first] << ") and on rank " << it.getRank() << " (global index: " << coordsIt->second << ")"; MSG("[DBG] %s\n", oss.str().c_str()); @@ -676,8 +686,8 @@ namespace AMDiS { if (rank == -1 || pdb.mpiRank == rank) { cout << "====== DOF MAP PERIODIC ====== " << endl; - for (PeriodicDofMap::iterator it = pdb.periodicDof.begin(); - it != pdb.periodicDof.end(); ++it) { + for (PeriodicDofMap::iterator it = pdb.periodicDofMap.begin(); + it != pdb.periodicDofMap.end(); ++it) { cout << "DOF MAP " << it->first << ": "; for (std::set<DegreeOfFreedom>::iterator dofit = it->second.begin(); dofit != it->second.end(); ++dofit) diff --git a/AMDiS/src/parallel/ParallelDebug.h b/AMDiS/src/parallel/ParallelDebug.h index e6069cc79ab3ee150add4026efbb70f5cb30026b..467277871e43006f900268ed9dcc509f5f586594 100644 --- a/AMDiS/src/parallel/ParallelDebug.h +++ b/AMDiS/src/parallel/ParallelDebug.h @@ -52,6 +52,15 @@ namespace AMDiS { */ static void testPeriodicBoundary(MeshDistributor &pdb); + /** \brief + * Test if all periodic boundaries are set in a consistent way on all ranks. + * + * \param[in] pdb Parallel problem definition used for debugging. + * \param[in] feSpace FE space for which the DOFs are tested. 
+ */ + static void testPeriodicBoundary(MeshDistributor &pdb, + const FiniteElemSpace *feSpace); + /** \brief * This function is used for debugging only. It traverses all interior boundaries * and compares the DOF indices on them with the dof indices of the boundarys diff --git a/AMDiS/src/parallel/ParallelTypes.h b/AMDiS/src/parallel/ParallelTypes.h index 5f2c69c3b9eb6e45aa4dca57ee6b47422f4378dc..f96734ea2eeb7dee3e4734816fa8341a362f0e45 100644 --- a/AMDiS/src/parallel/ParallelTypes.h +++ b/AMDiS/src/parallel/ParallelTypes.h @@ -65,6 +65,8 @@ namespace AMDiS { /// Mapps a boundar type, i.e., a boundary identifier index, to a periodic /// DOF mapping. typedef map<BoundaryType, DofMapping> PeriodicDofMap; + + typedef map<const FiniteElemSpace*, PeriodicDofMap> PeriodicDofMapFeSpace; typedef vector<MeshStructure> MeshCodeVec; } diff --git a/AMDiS/src/parallel/PetscSolverGlobalMatrix.cc b/AMDiS/src/parallel/PetscSolverGlobalMatrix.cc index 195b0a9a65d3adc268b3a942e0c08b796b0b2824..3e092f9bfe7291bc1ba6d0968aff630cc532a822 100644 --- a/AMDiS/src/parallel/PetscSolverGlobalMatrix.cc +++ b/AMDiS/src/parallel/PetscSolverGlobalMatrix.cc @@ -57,9 +57,9 @@ namespace AMDiS { // === Create PETSc matrix with the computed nnz data structure. === - MatCreateMPIAIJ(PETSC_COMM_WORLD, nRankRows, nRankRows, - nOverallRows, nOverallRows, - 0, d_nnz, 0, o_nnz, &petscMatrix); + MatCreateMPIAIJ(PETSC_COMM_WORLD, nRankRows, nRankRows, + nOverallRows, nOverallRows, + 0, d_nnz, 0, o_nnz, &petscMatrix); #if (DEBUG != 0) MSG("Fill petsc matrix 1 needed %.5f seconds\n", MPI::Wtime() - wtime); @@ -217,26 +217,30 @@ namespace AMDiS { vector<int> globalCols; - // === Traverse all rows of the dof matrix and insert row wise the values === // === to the PETSc matrix. === for (cursor_type cursor = begin<row>(mat->getBaseMatrix()), cend = end<row>(mat->getBaseMatrix()); cursor != cend; ++cursor) { + const FiniteElemSpace *rowFe = mat->getRowFeSpace(); + const FiniteElemSpace *colFe = mat->getColFeSpace(); + // Global index of the current row DOF. int globalRowDof = - meshDistributor->mapLocalToGlobal(mat->getRowFeSpace(), *cursor); + meshDistributor->mapLocalToGlobal(rowFe, *cursor); // Test if the current row DOF is a periodic DOF. - bool periodicRow = meshDistributor->isPeriodicDof(globalRowDof); + bool periodicRow = meshDistributor->isPeriodicDof(rowFe, globalRowDof); + + // MSG("ROW %d %d is per %d\n", *cursor, globalRowDof, periodicRow); if (!periodicRow) { // === Row DOF index is not periodic. === // Calculate PETSc row index. - TEST_EXIT_DBG(dofToGlobalIndex.count(make_pair(*cursor, dispAddRow))) + TEST_EXIT_DBG(dofToGlobalIndex.count(make_pair(globalRowDof, dispAddRow))) ("Should not happen!\n"); - int rowIndex = dofToGlobalIndex[make_pair(*cursor, dispAddRow)]; + int rowIndex = dofToGlobalIndex[make_pair(globalRowDof, dispAddRow)]; cols.clear(); values.clear(); @@ -246,13 +250,13 @@ namespace AMDiS { // Global index of the current column index. int globalColDof = - meshDistributor->mapLocalToGlobal(mat->getColFeSpace(), col(*icursor)); + meshDistributor->mapLocalToGlobal(colFe, col(*icursor)); // Test if the current col dof is a periodic dof. - bool periodicCol = meshDistributor->isPeriodicDof(globalColDof); + bool periodicCol = meshDistributor->isPeriodicDof(colFe, globalColDof); // Calculate PETSc col index. 
- TEST_EXIT_DBG(dofToGlobalIndex.count(make_pair(col(*icursor), dispAddCol))) + TEST_EXIT_DBG(dofToGlobalIndex.count(make_pair(globalColDof, dispAddCol))) ("Should not happen!\n"); - int colIndex = dofToGlobalIndex[make_pair(col(*icursor), dispAddCol)]; + int colIndex = dofToGlobalIndex[make_pair(globalColDof, dispAddCol)]; // Ignore all zero entries, expect it is a diagonal entry. if (value(*icursor) == 0.0 && rowIndex != colIndex) @@ -268,7 +272,7 @@ namespace AMDiS { // Create set of all periodic associations of the column index. std::set<int> perAsc; std::set<int>& perColAsc = - meshDistributor->getPerDofAssociations(globalColDof); + meshDistributor->getPerDofAssociations(colFe, globalColDof); for (std::set<int>::iterator it = perColAsc.begin(); it != perColAsc.end(); ++it) if (*it >= -3) @@ -293,16 +297,20 @@ namespace AMDiS { int nCols = static_cast<int>(newCols.size()); for (int i = 0; i < nCols; i++) { - TEST_EXIT_DBG(meshDistributor->isPeriodicDof(newCols[i], *it)) + TEST_EXIT_DBG(meshDistributor->isPeriodicDof(colFe, *it, newCols[i])) ("Wrong periodic DOF associations at boundary %d with DOF %d!\n", *it, newCols[i]); - newCols.push_back(meshDistributor->getPeriodicMapping(newCols[i], *it)); + newCols.push_back(meshDistributor->getPeriodicMapping(colFe, *it, newCols[i])); } } for (unsigned int i = 0; i < newCols.size(); i++) { - cols.push_back(newCols[i] * dispMult + dispAddCol); + TEST_EXIT_DBG(dofToGlobalIndex.count(make_pair(newCols[i], dispAddCol))) + ("Cannot find index for DOF %d at Component %d\n", + newCols[i], dispAddCol); + + cols.push_back(dofToGlobalIndex[make_pair(newCols[i], dispAddCol)]); values.push_back(scaledValue); } } @@ -326,7 +334,7 @@ namespace AMDiS { // Global index of the current column index. int globalColDof = - meshDistributor->mapLocalToGlobal(mat->getColFeSpace(), col(*icursor)); + meshDistributor->mapLocalToGlobal(colFe, col(*icursor)); // Ignore all zero entries, expect it is a diagonal entry. if (value(*icursor) == 0.0 && globalRowDof != globalColDof) @@ -338,9 +346,9 @@ namespace AMDiS { std::set<int> perAsc; - if (meshDistributor->isPeriodicDof(globalColDof)) { + if (meshDistributor->isPeriodicDof(colFe, globalColDof)) { std::set<int>& perColAsc = - meshDistributor->getPerDofAssociations(globalColDof); + meshDistributor->getPerDofAssociations(colFe, globalColDof); for (std::set<int>::iterator it = perColAsc.begin(); it != perColAsc.end(); ++it) if (*it >= -3) @@ -348,7 +356,7 @@ namespace AMDiS { } std::set<int>& perRowAsc = - meshDistributor->getPerDofAssociations(globalRowDof); + meshDistributor->getPerDofAssociations(rowFe, globalRowDof); for (std::set<int>::iterator it = perRowAsc.begin(); it != perRowAsc.end(); ++it) if (*it >= -3) @@ -367,20 +375,21 @@ namespace AMDiS { // First, add the original entry. entry.push_back(make_pair(globalRowDof, globalColDof)); - // Then, traverse the periodic associations of the row and column indices - // and create the corresponding entries. + // Then, traverse the periodic associations of the row and column + // indices and create the corresponding entries. 
for (std::set<int>::iterator it = perAsc.begin(); it != perAsc.end(); ++it) { int nEntry = static_cast<int>(entry.size()); for (int i = 0; i < nEntry; i++) { int perRowDof = 0; - if (meshDistributor->getPeriodicMapping()[*it].count(entry[i].first)) - perRowDof = meshDistributor->getPeriodicMapping(entry[i].first, *it); + + if (meshDistributor->isPeriodicDof(rowFe, *it, entry[i].first)) + perRowDof = meshDistributor->getPeriodicMapping(rowFe, *it, entry[i].first); else perRowDof = entry[i].first; int perColDof; - if (meshDistributor->getPeriodicMapping()[*it].count(entry[i].second)) - perColDof = meshDistributor->getPeriodicMapping(entry[i].second, *it); + if (meshDistributor->isPeriodicDof(colFe, *it, entry[i].second)) + perColDof = meshDistributor->getPeriodicMapping(colFe, *it, entry[i].second); else perColDof = entry[i].second; @@ -392,14 +401,23 @@ namespace AMDiS { // === Translate the matrix entries to PETSc's matrix. - for (vector<pair<int, int> >::iterator eIt = entry.begin(); - eIt != entry.end(); ++eIt) { + + for (unsigned int i = 0; i < entry.size(); i++) { // Calculate row and column indices of the PETSc matrix. - int rowIndex = eIt->first * dispMult + dispAddRow; - int colIndex = eIt->second * dispMult + dispAddCol; - colsMap[rowIndex].push_back(colIndex); - valsMap[rowIndex].push_back(scaledValue); + TEST_EXIT_DBG(dofToGlobalIndex.count(make_pair(entry[i].first, dispAddRow))) + ("Cannot find index for DOF %d at Component %d\n", + entry[i].first, dispAddRow); + + TEST_EXIT_DBG(dofToGlobalIndex.count(make_pair(entry[i].second, dispAddCol))) + ("Cannot find index for DOF %d at Component %d\n", + entry[i].second, dispAddCol); + + int i0 = dofToGlobalIndex[make_pair(entry[i].first, dispAddRow)]; + int i1 = dofToGlobalIndex[make_pair(entry[i].second, dispAddCol)]; + + colsMap[i0].push_back(i1); + valsMap[i0].push_back(scaledValue); } } @@ -440,18 +458,22 @@ namespace AMDiS { DegreeOfFreedom globalRowDof = meshDistributor->mapLocalToGlobal(feSpace, dofIt.getDOFIndex()); // Calculate PETSc index of the row DOF. 
- TEST_EXIT_DBG(dofToGlobalIndex.count(make_pair(dofIt.getDOFIndex(), dispAdd))) + TEST_EXIT_DBG(dofToGlobalIndex.count(make_pair(globalRowDof, dispAdd))) ("Should not happen!\n"); - int index = dofToGlobalIndex[make_pair(dofIt.getDOFIndex(), dispAdd)]; + int index = dofToGlobalIndex[make_pair(globalRowDof, dispAdd)]; - if (meshDistributor->isPeriodicDof(globalRowDof)) { - std::set<int>& perAsc = meshDistributor->getPerDofAssociations(globalRowDof); + if (meshDistributor->isPeriodicDof(feSpace, globalRowDof)) { + std::set<int>& perAsc = + meshDistributor->getPerDofAssociations(feSpace, globalRowDof); double value = *dofIt / (perAsc.size() + 1.0); VecSetValues(petscVec, 1, &index, &value, ADD_VALUES); for (std::set<int>::iterator perIt = perAsc.begin(); perIt != perAsc.end(); ++perIt) { - int mappedDof = meshDistributor->getPeriodicMapping(globalRowDof, *perIt); - int mappedIndex = mappedDof * dispMult + dispAdd; + int mappedDof = + meshDistributor->getPeriodicMapping(feSpace, *perIt, globalRowDof); + TEST_EXIT_DBG(dofToGlobalIndex.count(make_pair(mappedDof, dispAdd))) + ("Should not happen!\n"); + int mappedIndex = dofToGlobalIndex[make_pair(mappedDof, dispAdd)]; VecSetValues(petscVec, 1, &mappedIndex, &value, ADD_VALUES); } } else { @@ -541,8 +563,11 @@ namespace AMDiS { for (cursor_type cursor = begin<row>(bmat), cend = end<row>(bmat); cursor != cend; ++cursor) { + int globalRowDof = + meshDistributor->mapLocalToGlobal(feSpaces[i], *cursor); + // The corresponding global matrix row index of the current row DOF. - int petscRowIdx = dofToGlobalIndex[make_pair(*cursor, i)]; + int petscRowIdx = dofToGlobalIndex[make_pair(globalRowDof, i)]; if (meshDistributor->getIsRankDof(feSpaces[i], *cursor)) { @@ -566,7 +591,9 @@ namespace AMDiS { // Traverse all non zero entries in this row. for (icursor_type icursor = begin<nz>(cursor), icend = end<nz>(cursor); icursor != icend; ++icursor) { - int petscColIdx = dofToGlobalIndex[make_pair(col(*icursor), j)]; + int globalColDof = + meshDistributor->mapLocalToGlobal(feSpaces[j], col(*icursor)); + int petscColIdx = dofToGlobalIndex[make_pair(globalColDof, j)]; if (value(*icursor) != 0.0 || petscRowIdx == petscColIdx) { // The row DOF is a rank DOF, if also the column is a rank DOF, @@ -592,7 +619,9 @@ namespace AMDiS { for (icursor_type icursor = begin<nz>(cursor), icend = end<nz>(cursor); icursor != icend; ++icursor) { if (value(*icursor) != 0.0) { - int petscColIdx = dofToGlobalIndex[make_pair(col(*icursor), j)]; + int globalColDof = + meshDistributor->mapLocalToGlobal(feSpaces[j], col(*icursor)); + int petscColIdx = dofToGlobalIndex[make_pair(globalColDof, j)]; sendMatrixEntry[sendToRank]. push_back(make_pair(petscRowIdx, petscColIdx)); @@ -657,6 +686,8 @@ namespace AMDiS { int offset = meshDistributor->getStartDofs(feSpaces); Mesh *mesh = meshDistributor->getMesh(); + dofToGlobalIndex.clear(); + for (unsigned int i = 0; i < feSpaces.size(); i++) { // === Create indices for all DOFs in rank' domain. 
=== @@ -665,17 +696,16 @@ namespace AMDiS { for (std::set<const DegreeOfFreedom*>::iterator it = rankDofSet.begin(); it != rankDofSet.end(); ++it) if (meshDistributor->getIsRankDof(feSpaces[i], **it)) { - int localIndex = **it; - int globalIndex = - meshDistributor->mapLocalToGlobal(feSpaces[i], localIndex) - - meshDistributor->getStartDofs(feSpaces[i]) + - offset; + int globalIndex = + meshDistributor->mapLocalToGlobal(feSpaces[i], **it); + int globalMatIndex = + globalIndex - meshDistributor->getStartDofs(feSpaces[i]) + offset; - dofToGlobalIndex[make_pair(localIndex, i)] = globalIndex; + dofToGlobalIndex[make_pair(globalIndex, i)] = globalMatIndex; } - // === Communicated interior boundary DOFs between domains. === + // === Communicate interior boundary DOFs between domains. === StdMpi<vector<int> > stdMpi(meshDistributor->getMpiComm()); @@ -684,8 +714,10 @@ namespace AMDiS { vector<DegreeOfFreedom> sendGlobalDofs; for (; !it.endDofIter(); it.nextDof()) { - int globalIndex = dofToGlobalIndex[make_pair(it.getDofIndex(), i)]; - sendGlobalDofs.push_back(globalIndex); + int globalIndex = + meshDistributor->mapLocalToGlobal(feSpaces[i], it.getDofIndex()); + int globalMatIndex = dofToGlobalIndex[make_pair(globalIndex, i)]; + sendGlobalDofs.push_back(globalMatIndex); } stdMpi.send(it.getRank(), sendGlobalDofs); @@ -700,11 +732,46 @@ namespace AMDiS { for (DofComm::Iterator it(meshDistributor->getRecvDofs(), feSpaces[i]); !it.end(); it.nextRank()) for (; !it.endDofIter(); it.nextDof()) { - DegreeOfFreedom localIndex = it.getDofIndex(); - int globalIndex = stdMpi.getRecvData(it.getRank())[it.getDofCounter()]; - dofToGlobalIndex[make_pair(localIndex, i)] = globalIndex; + int globalIndex = meshDistributor->mapLocalToGlobal(feSpaces[i], it.getDofIndex()); + int globalMatIndex = stdMpi.getRecvData(it.getRank())[it.getDofCounter()]; + + dofToGlobalIndex[make_pair(globalIndex, i)] = globalMatIndex; + } + + + // === Communicate DOFs on periodic boundaries. === + + stdMpi.clear(); + + for (DofComm::Iterator it(meshDistributor->getPeriodicDofs(), feSpaces[i]); + !it.end(); it.nextRank()) { + vector<DegreeOfFreedom> sendGlobalDofs; + + for (; !it.endDofIter(); it.nextDof()) { + int ind0 = meshDistributor->mapLocalToGlobal(feSpaces[i], it.getDofIndex()); + TEST_EXIT_DBG(dofToGlobalIndex.count(make_pair(ind0, i))) + ("Canot find index for DOF %d/%d at component %d\n", it.getDofIndex(), ind0, i); + + int ind1 = dofToGlobalIndex[make_pair(ind0, i)]; + + sendGlobalDofs.push_back(ind0); + sendGlobalDofs.push_back(ind1); + } + + stdMpi.send(it.getRank(), sendGlobalDofs); + stdMpi.recv(it.getRank()); + } + + stdMpi.startCommunication(); + + for (DofComm::Iterator it(meshDistributor->getPeriodicDofs(), feSpaces[i]); + !it.end(); it.nextRank()) + for (; !it.endDofIter(); it.nextDof()) { + int ind0 = stdMpi.getRecvData(it.getRank())[it.getDofCounter() * 2]; + int ind1 = stdMpi.getRecvData(it.getRank())[it.getDofCounter() * 2 + 1]; + + dofToGlobalIndex[make_pair(ind0, i)] = ind1; } - // === Update offset. 
=== diff --git a/AMDiS/src/parallel/PetscSolverGlobalMatrix.h b/AMDiS/src/parallel/PetscSolverGlobalMatrix.h index 9c3e95a039eb6beb9af0ab7aa39c203af8c4fec7..948d652f412bbdf35651dc26a82ebe2b5b316f99 100644 --- a/AMDiS/src/parallel/PetscSolverGlobalMatrix.h +++ b/AMDiS/src/parallel/PetscSolverGlobalMatrix.h @@ -23,6 +23,7 @@ #ifndef AMDIS_PETSC_SOLVER_GLOBAL_MATRIX_H #define AMDIS_PETSC_SOLVER_GLOBAL_MATRIX_H +#include <boost/tuple/tuple.hpp> #include "AMDiS_fwd.h" #include "parallel/PetscSolver.h" @@ -30,7 +31,6 @@ namespace AMDiS { using namespace std; - class PetscSolverGlobalMatrix : public PetscSolver { public:
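As a usage illustration of the per-FE-space periodic mapping interface declared in MeshDistributor.h above, the sketch below queries all periodic partners of one global DOF. It is not part of the patch: the free function and its name are hypothetical, while the MeshDistributor member functions (isPeriodicDof, getPerDofAssociations, getPeriodicMapping) are exactly the ones added in this change.

#include <iostream>
#include <set>
#include "parallel/MeshDistributor.h"

// Hypothetical helper (not part of the patch): print, for one FE space, all
// periodic partners of a global DOF via the new per-FE-space interface.
void printPeriodicPartners(AMDiS::MeshDistributor &md,
                           const AMDiS::FiniteElemSpace *feSpace,
                           int globalDofIndex)
{
  // Nothing to do if this DOF lies on no periodic boundary of the FE space.
  if (!md.isPeriodicDof(feSpace, globalDofIndex))
    return;

  // All periodic boundary types the DOF is associated with; a corner DOF of
  // a periodic box, for example, carries several associations.
  std::set<AMDiS::BoundaryType> &assoc =
    md.getPerDofAssociations(feSpace, globalDofIndex);

  for (std::set<AMDiS::BoundaryType>::iterator it = assoc.begin();
       it != assoc.end(); ++it)
    if (md.isPeriodicDof(feSpace, *it, globalDofIndex))
      std::cout << "DOF " << globalDofIndex << " maps to DOF "
                << md.getPeriodicMapping(feSpace, *it, globalDofIndex)
                << " across periodic boundary " << *it << std::endl;
}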