diff --git a/AMDiS/src/parallel/PetscSolverGlobalMatrix.cc b/AMDiS/src/parallel/PetscSolverGlobalMatrix.cc index 3e092f9bfe7291bc1ba6d0968aff630cc532a822..02840c8c80c739413f28e37a4feb2e920a4db475 100644 --- a/AMDiS/src/parallel/PetscSolverGlobalMatrix.cc +++ b/AMDiS/src/parallel/PetscSolverGlobalMatrix.cc @@ -58,7 +58,7 @@ namespace AMDiS { // === Create PETSc matrix with the computed nnz data structure. === MatCreateMPIAIJ(PETSC_COMM_WORLD, nRankRows, nRankRows, - nOverallRows, nOverallRows, + nOverallRows, nOverallRows, 0, d_nnz, 0, o_nnz, &petscMatrix); #if (DEBUG != 0) @@ -232,15 +232,11 @@ namespace AMDiS { // Test if the current row DOF is a periodic DOF. bool periodicRow = meshDistributor->isPeriodicDof(rowFe, globalRowDof); - // MSG("ROW %d %d is per %d\n", *cursor, globalRowDof, periodicRow); - if (!periodicRow) { // === Row DOF index is not periodic. === - // Calculate PETSc row index. - TEST_EXIT_DBG(dofToGlobalIndex.count(make_pair(globalRowDof, dispAddRow))) - ("Should not happen!\n"); - int rowIndex = dofToGlobalIndex[make_pair(globalRowDof, dispAddRow)]; + // Get PETSc's mat row index. + int rowIndex = dofToMatIndex.get(dispAddRow, globalRowDof); cols.clear(); values.clear(); @@ -253,10 +249,8 @@ namespace AMDiS { meshDistributor->mapLocalToGlobal(colFe, col(*icursor)); // Test if the current col dof is a periodic dof. bool periodicCol = meshDistributor->isPeriodicDof(colFe, globalColDof); - // Calculate PETSc col index. - TEST_EXIT_DBG(dofToGlobalIndex.count(make_pair(globalColDof, dispAddCol))) - ("Should not happen!\n"); - int colIndex = dofToGlobalIndex[make_pair(globalColDof, dispAddCol)]; + // Get PETSc's mat col index. + int colIndex = dofToMatIndex.get(dispAddCol, globalColDof); // Ignore all zero entries, expect it is a diagonal entry. 
if (value(*icursor) == 0.0 && rowIndex != colIndex) @@ -306,11 +300,7 @@ namespace AMDiS { } for (unsigned int i = 0; i < newCols.size(); i++) { - TEST_EXIT_DBG(dofToGlobalIndex.count(make_pair(newCols[i], dispAddCol))) - ("Cannot find index for DOF %d at Component %d\n", - newCols[i], dispAddCol); - - cols.push_back(dofToGlobalIndex[make_pair(newCols[i], dispAddCol)]); + cols.push_back(dofToMatIndex.get(dispAddCol, newCols[i])); values.push_back(scaledValue); } } @@ -401,23 +391,12 @@ namespace AMDiS { // === Translate the matrix entries to PETSc's matrix. - for (unsigned int i = 0; i < entry.size(); i++) { - // Calculate row and column indices of the PETSc matrix. + int rowIdx = dofToMatIndex.get(dispAddRow, entry[i].first); + int colIdx = dofToMatIndex.get(dispAddCol, entry[i].second); - TEST_EXIT_DBG(dofToGlobalIndex.count(make_pair(entry[i].first, dispAddRow))) - ("Cannot find index for DOF %d at Component %d\n", - entry[i].first, dispAddRow); - - TEST_EXIT_DBG(dofToGlobalIndex.count(make_pair(entry[i].second, dispAddCol))) - ("Cannot find index for DOF %d at Component %d\n", - entry[i].second, dispAddCol); - - int i0 = dofToGlobalIndex[make_pair(entry[i].first, dispAddRow)]; - int i1 = dofToGlobalIndex[make_pair(entry[i].second, dispAddCol)]; - - colsMap[i0].push_back(i1); - valsMap[i0].push_back(scaledValue); + colsMap[rowIdx].push_back(colIdx); + valsMap[rowIdx].push_back(scaledValue); } } @@ -457,10 +436,8 @@ namespace AMDiS { // Calculate global row index of the DOF. DegreeOfFreedom globalRowDof = meshDistributor->mapLocalToGlobal(feSpace, dofIt.getDOFIndex()); - // Calculate PETSc index of the row DOF. - TEST_EXIT_DBG(dofToGlobalIndex.count(make_pair(globalRowDof, dispAdd))) - ("Should not happen!\n"); - int index = dofToGlobalIndex[make_pair(globalRowDof, dispAdd)]; + // Get PETSc's mat index of the row DOF. 
+ int index = dofToMatIndex.get(dispAdd, globalRowDof); if (meshDistributor->isPeriodicDof(feSpace, globalRowDof)) { std::set<int>& perAsc = @@ -471,9 +448,7 @@ namespace AMDiS { for (std::set<int>::iterator perIt = perAsc.begin(); perIt != perAsc.end(); ++perIt) { int mappedDof = meshDistributor->getPeriodicMapping(feSpace, *perIt, globalRowDof); - TEST_EXIT_DBG(dofToGlobalIndex.count(make_pair(mappedDof, dispAdd))) - ("Should not happen!\n"); - int mappedIndex = dofToGlobalIndex[make_pair(mappedDof, dispAdd)]; + int mappedIndex = dofToMatIndex.get(dispAdd, mappedDof); VecSetValues(petscVec, 1, &mappedIndex, &value, ADD_VALUES); } } else { @@ -567,7 +542,7 @@ namespace AMDiS { meshDistributor->mapLocalToGlobal(feSpaces[i], *cursor); // The corresponding global matrix row index of the current row DOF. - int petscRowIdx = dofToGlobalIndex[make_pair(globalRowDof, i)]; + int petscRowIdx = dofToMatIndex.get(i, globalRowDof); if (meshDistributor->getIsRankDof(feSpaces[i], *cursor)) { @@ -593,7 +568,7 @@ namespace AMDiS { icend = end(cursor); icursor != icend; ++icursor) { int globalColDof = meshDistributor->mapLocalToGlobal(feSpaces[j], col(*icursor)); - int petscColIdx = dofToGlobalIndex[make_pair(globalColDof, j)]; + int petscColIdx = dofToMatIndex.get(j, globalColDof); if (value(*icursor) != 0.0 || petscRowIdx == petscColIdx) { // The row DOF is a rank DOF, if also the column is a rank DOF, @@ -621,7 +596,7 @@ namespace AMDiS { if (value(*icursor) != 0.0) { int globalColDof = meshDistributor->mapLocalToGlobal(feSpaces[j], col(*icursor)); - int petscColIdx = dofToGlobalIndex[make_pair(globalColDof, j)]; + int petscColIdx = dofToMatIndex.get(j, globalColDof); sendMatrixEntry[sendToRank]. 
push_back(make_pair(petscRowIdx, petscColIdx)); @@ -686,7 +661,7 @@ namespace AMDiS { int offset = meshDistributor->getStartDofs(feSpaces); Mesh *mesh = meshDistributor->getMesh(); - dofToGlobalIndex.clear(); + dofToMatIndex.clear(); for (unsigned int i = 0; i < feSpaces.size(); i++) { @@ -701,7 +676,7 @@ namespace AMDiS { int globalMatIndex = globalIndex - meshDistributor->getStartDofs(feSpaces[i]) + offset; - dofToGlobalIndex[make_pair(globalIndex, i)] = globalMatIndex; + dofToMatIndex.add(i, globalIndex, globalMatIndex); } @@ -716,7 +691,7 @@ namespace AMDiS { for (; !it.endDofIter(); it.nextDof()) { int globalIndex = meshDistributor->mapLocalToGlobal(feSpaces[i], it.getDofIndex()); - int globalMatIndex = dofToGlobalIndex[make_pair(globalIndex, i)]; + int globalMatIndex = dofToMatIndex.get(i, globalIndex); sendGlobalDofs.push_back(globalMatIndex); } @@ -735,7 +710,7 @@ namespace AMDiS { int globalIndex = meshDistributor->mapLocalToGlobal(feSpaces[i], it.getDofIndex()); int globalMatIndex = stdMpi.getRecvData(it.getRank())[it.getDofCounter()]; - dofToGlobalIndex[make_pair(globalIndex, i)] = globalMatIndex; + dofToMatIndex.add(i, globalIndex, globalMatIndex); } @@ -749,10 +724,7 @@ namespace AMDiS { for (; !it.endDofIter(); it.nextDof()) { int ind0 = meshDistributor->mapLocalToGlobal(feSpaces[i], it.getDofIndex()); - TEST_EXIT_DBG(dofToGlobalIndex.count(make_pair(ind0, i))) - ("Canot find index for DOF %d/%d at component %d\n", it.getDofIndex(), ind0, i); - - int ind1 = dofToGlobalIndex[make_pair(ind0, i)]; + int ind1 = dofToMatIndex.get(i, ind0); sendGlobalDofs.push_back(ind0); sendGlobalDofs.push_back(ind1); @@ -770,7 +742,7 @@ namespace AMDiS { int ind0 = stdMpi.getRecvData(it.getRank())[it.getDofCounter() * 2]; int ind1 = stdMpi.getRecvData(it.getRank())[it.getDofCounter() * 2 + 1]; - dofToGlobalIndex[make_pair(ind0, i)] = ind1; + dofToMatIndex.add(i, ind0, ind1); } // === Update offset. 
=== diff --git a/AMDiS/src/parallel/PetscSolverGlobalMatrix.h b/AMDiS/src/parallel/PetscSolverGlobalMatrix.h index 948d652f412bbdf35651dc26a82ebe2b5b316f99..b9e827146a69770d225ef6112f77164035e584eb 100644 --- a/AMDiS/src/parallel/PetscSolverGlobalMatrix.h +++ b/AMDiS/src/parallel/PetscSolverGlobalMatrix.h @@ -31,6 +31,54 @@ namespace AMDiS { using namespace std; + /** \brief + * Defines for each system component a mapping for sets of global DOF indices + * to global matrix indices. The mapping is defined for all DOFs in rank's + * subdomain. When periodic boundary conditions are used, then the mapping + * stores also information for the periodic associations of rank's DOF on + * periodic boundaries. + */ + class DofToMatIndex + { + public: + DofToMatIndex() {} + + /// Reset the data structure. + inline void clear() + { + data.clear(); + } + + /// Add a new mapping. + inline void add(int component, DegreeOfFreedom dof, int globalMatIndex) + { + data[component][dof] = globalMatIndex; + } + + /// Map a global DOF index to the global matrix index for a specific + /// system component number. + inline int get(int component, DegreeOfFreedom dof) + { + FUNCNAME("DofToMatIndex::get()"); + + TEST_EXIT_DBG(data.count(component)) + ("No mapping data for component %d available!\n", component); + + TEST_EXIT_DBG(data[component].count(dof)) + ("Mapping for DOF %d in component %d does not exist!\n", + dof, component); + + return data[component][dof]; + } + + private: + /// The mapping data. For each system component there is a specific map that + /// maps global DOF indices to global matrix indices. + map<int, map<DegreeOfFreedom, int> > data; + }; + + + class PetscSolverGlobalMatrix : public PetscSolver { public: @@ -91,7 +139,9 @@ namespace AMDiS { /// some phase fields. bool alwaysCreateNnzStructure; - map<pair<DegreeOfFreedom, int>, int> dofToGlobalIndex; + /// Mapping from global DOF indices to global matrix indices under + /// consideration of possibly multiple components. + DofToMatIndex dofToMatIndex; };