From a125ff82d28fe62ad01675e593e62247ec48cf57 Mon Sep 17 00:00:00 2001
From: Thomas Witkowski
Date: Tue, 15 May 2012 15:11:51 +0000
Subject: [PATCH] Blub, go home.

---
 AMDiS/src/parallel/ParallelDofMapping.h       | 10 +++
 AMDiS/src/parallel/PetscSolverGlobalMatrix.cc | 87 +++++++------------
 2 files changed, 43 insertions(+), 54 deletions(-)

diff --git a/AMDiS/src/parallel/ParallelDofMapping.h b/AMDiS/src/parallel/ParallelDofMapping.h
index 700520ef..02be39fa 100644
--- a/AMDiS/src/parallel/ParallelDofMapping.h
+++ b/AMDiS/src/parallel/ParallelDofMapping.h
@@ -313,6 +313,11 @@ namespace AMDiS {
       needMatIndexFromGlobal = global;
     }
 
+    inline bool isMatIndexFromGlobal()
+    {
+      return needMatIndexFromGlobal;
+    }
+
     /// Access the DOF mapping for a given FE space.
     inline FeSpaceDofMap& operator[](const FiniteElemSpace* feSpace)
     {
@@ -436,6 +441,11 @@ namespace AMDiS {
     /// mapping from DOF indices to matrix row indices is defined on local
     /// or global DOF indices. If true, the mapping is to specify and to use
     /// on global ones, otherwise on local DOF indices.
+    /// In most scenarios, storing the mapping on local DOF indices is what
+    /// we want. Only when periodic boundary conditions are used together
+    /// with some global matrix approach must the matrix indices also be
+    /// stored for DOFs that are not part of the local subdomain. In that
+    /// case, the mapping is stored on global DOF indices.
     bool needMatIndexFromGlobal;
 
     /// Maps from FE space pointers to DOF mappings.
diff --git a/AMDiS/src/parallel/PetscSolverGlobalMatrix.cc b/AMDiS/src/parallel/PetscSolverGlobalMatrix.cc
index aa99d09e..02f4449c 100644
--- a/AMDiS/src/parallel/PetscSolverGlobalMatrix.cc
+++ b/AMDiS/src/parallel/PetscSolverGlobalMatrix.cc
@@ -65,12 +65,6 @@ namespace AMDiS {
 
     VecCreateMPI(mpiCommGlobal, nRankRows, nOverallRows, &petscSolVec);
 
-    int testddd = 1;
-    Parameters::get("block size", testddd);
-    if (testddd > 1)
-      VecSetBlockSize(petscSolVec, testddd);
-
-
     // === Create PETSc matrix with the computed nnz data structure. ===
 
@@ -78,11 +72,6 @@ namespace AMDiS {
                  nOverallRows, nOverallRows,
                  0, d_nnz, 0, o_nnz, &matIntInt);
 
-    if (testddd > 1) {
-      MatSetBlockSize(matIntInt, testddd);
-      MSG("MAT SET BLOCK SIZE: %d\n", testddd);
-    }
-
 #if (DEBUG != 0)
     MSG("Fill petsc matrix 1 needed %.5f seconds\n", MPI::Wtime() - wtime);
 #endif
@@ -328,33 +317,40 @@ namespace AMDiS {
   {
     FUNCNAME("PetscSolverGlobalMatrix::fillPetscRhs()");
 
-    if (coarseSpaceMap) {
-      fillPetscRhsWithCoarseSpace(vec);
-      return;
-    }
+    VecCreateMPI(mpiCommGlobal,
+                 interiorMap->getRankDofs(),
+                 nGlobalOverallInterior,
+                 &rhsInterior);
+
+    if (coarseSpaceMap)
+      VecCreateMPI(mpiCommGlobal,
+                   coarseSpaceMap->getRankDofs(),
+                   coarseSpaceMap->getOverallDofs(),
+                   &rhsCoarseSpace);
+
     TEST_EXIT_DBG(vec)("No DOF vector defined!\n");
     TEST_EXIT_DBG(interiorMap)("No parallel DOF map defined!\n");
+
-    int nRankRows = interiorMap->getRankDofs();
-    int nOverallRows = interiorMap->getOverallDofs();
-
-    VecCreateMPI(mpiCommGlobal, nRankRows, nOverallRows, &rhsInterior);
-
-    int testddd = 1;
-    Parameters::get("block size", testddd);
-    if (testddd > 1)
-      VecSetBlockSize(rhsInterior, testddd);
-
-
-    // === Transfer values from DOF vector to the PETSc vector. ===
-    for (int i = 0; i < vec->getSize(); i++)
-      setDofVector(rhsInterior, vec->getDOFVector(i), i);
+    if (coarseSpaceMap) {
+      fillPetscRhsWithCoarseSpace(vec);
+    } else {
+      // === Transfer values from DOF vector to the PETSc vector. ===
+      for (int i = 0; i < vec->getSize(); i++)
+        setDofVector(rhsInterior, vec->getDOFVector(i), i);
+    }
 
     VecAssemblyBegin(rhsInterior);
     VecAssemblyEnd(rhsInterior);
 
+    if (coarseSpaceMap) {
+      VecAssemblyBegin(rhsCoarseSpace);
+      VecAssemblyEnd(rhsCoarseSpace);
+    }
+
     if (removeRhsNullSpace) {
+      TEST_EXIT_DBG(coarseSpaceMap == NULL)("Not supported!\n");
+
       MSG("Remove constant null space from the RHS!\n");
       MatNullSpace sp;
       MatNullSpaceCreate(mpiCommGlobal, PETSC_TRUE, 0, PETSC_NULL, &sp);
@@ -368,18 +364,6 @@ namespace AMDiS {
   {
     FUNCNAME("SubDomainSolver::fillPetscRhs()");
 
-    VecCreateMPI(mpiCommGlobal,
-                 interiorMap->getRankDofs(),
-                 nGlobalOverallInterior,
-                 &rhsInterior);
-
-    if (coarseSpaceMap)
-      VecCreateMPI(mpiCommGlobal,
-                   coarseSpaceMap->getRankDofs(),
-                   coarseSpaceMap->getOverallDofs(),
-                   &rhsCoarseSpace);
-
-
     for (int i = 0; i < vec->getSize(); i++) {
       const FiniteElemSpace *feSpace = vec->getDOFVector(i)->getFeSpace();
       DOFVector<double>::Iterator dofIt(vec->getDOFVector(i), USED_DOFS);
@@ -394,14 +378,6 @@ namespace AMDiS {
         }
       }
     }
-
-    VecAssemblyBegin(rhsInterior);
-    VecAssemblyEnd(rhsInterior);
-
-    if (coarseSpaceMap) {
-      VecAssemblyBegin(rhsCoarseSpace);
-      VecAssemblyEnd(rhsCoarseSpace);
-    }
   }
@@ -748,23 +724,26 @@ namespace AMDiS {
       DegreeOfFreedom globalRowDof =
        (*interiorMap)[feSpace][dofIt.getDOFIndex()].global;
       // Get PETSc's mat index of the row DOF.
-      int index = interiorMap->getMatIndex(nRowVec, globalRowDof);
+      int index = 0;
+      if (interiorMap->isMatIndexFromGlobal())
+        index = interiorMap->getMatIndex(nRowVec, globalRowDof);
+      else
+        index = interiorMap->getMatIndex(nRowVec, dofIt.getDOFIndex());
 
       if (perMap.isPeriodic(feSpace, globalRowDof)) {
        std::set<int>& perAsc = perMap.getAssociations(feSpace, globalRowDof);
        double value = *dofIt / (perAsc.size() + 1.0);
-       VecSetValues(petscVec, 1, &index, &value, ADD_VALUES);
+       VecSetValue(petscVec, index, value, ADD_VALUES);
 
        for (std::set<int>::iterator perIt = perAsc.begin();
             perIt != perAsc.end(); ++perIt) {
          int mappedDof = perMap.map(feSpace, *perIt, globalRowDof);
          int mappedIndex = interiorMap->getMatIndex(nRowVec, mappedDof);
-         VecSetValues(petscVec, 1, &mappedIndex, &value, ADD_VALUES);
+         VecSetValue(petscVec, mappedIndex, value, ADD_VALUES);
        }
       } else {
        // The DOF index is not periodic.
-       double value = *dofIt;
-       VecSetValues(petscVec, 1, &index, &value, ADD_VALUES);
+       VecSetValue(petscVec, index, *dofIt, ADD_VALUES);
       }
     }
   }
-- 
GitLab
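
The new ParallelDofMapping::isMatIndexFromGlobal() accessor lets setDofVector() decide whether a PETSc row index is looked up by the global or by the local DOF index. The minimal, self-contained sketch below only illustrates that decision; DofMapping, its members, and the single-argument getMatIndex() are hypothetical stand-ins, not the real AMDiS API.

// Sketch of the local/global row-index switch (hypothetical DofMapping,
// not the actual ParallelDofMapping class).
#include <iostream>
#include <map>

struct DofMapping {
  // If true, matrix indices are keyed by global DOF indices (needed for
  // periodic boundaries with a global matrix); otherwise by local ones.
  bool needMatIndexFromGlobal = false;

  std::map<int, int> globalToMatIndex; // keyed by global DOF index
  std::map<int, int> localToMatIndex;  // keyed by local DOF index

  bool isMatIndexFromGlobal() const { return needMatIndexFromGlobal; }

  int getMatIndex(int dof) const {
    const std::map<int, int>& m =
        needMatIndexFromGlobal ? globalToMatIndex : localToMatIndex;
    return m.at(dof);
  }
};

int main() {
  DofMapping map;
  map.needMatIndexFromGlobal = false;
  map.localToMatIndex[3] = 42;    // local DOF 3  -> PETSc row 42
  map.globalToMatIndex[103] = 42; // same DOF addressed globally

  int localDof = 3;
  int globalDof = 103;
  // Mirrors the patched setDofVector(): pick the key by the mapping mode.
  int index = map.isMatIndexFromGlobal() ? map.getMatIndex(globalDof)
                                         : map.getMatIndex(localDof);
  std::cout << "PETSc row index: " << index << "\n";
  return 0;
}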
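
In setDofVector(), a periodic DOF's value is split evenly over the DOF itself and all of its periodic associations, and each share is added into the right-hand-side vector with ADD_VALUES. The sketch below reproduces just that splitting rule with plain C++ containers; the rhs map and the example indices are illustrative assumptions, not PETSc's VecSetValue().

// Sketch of the value-splitting rule for periodic DOFs; a std::map stands
// in for the PETSc RHS vector, operator+= emulates ADD_VALUES.
#include <iostream>
#include <map>
#include <set>

int main() {
  // Hypothetical data: the DOF's own matrix row is 5, and its periodic
  // associations map it to rows 12 and 19.
  int rowIndex = 5;
  std::set<int> mappedIndices = {12, 19};
  double dofValue = 3.0;

  std::map<int, double> rhs; // sparse stand-in for the PETSc RHS vector

  // Split the value over the DOF and all of its periodic copies, exactly
  // like value = *dofIt / (perAsc.size() + 1.0) in the patch.
  double value = dofValue / (mappedIndices.size() + 1.0);

  rhs[rowIndex] += value;          // VecSetValue(..., ADD_VALUES) on the row
  for (int mapped : mappedIndices)
    rhs[mapped] += value;          // one share per periodic copy

  for (const auto& entry : rhs)
    std::cout << "row " << entry.first << " += " << entry.second << "\n";
  return 0;
}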