//
// Software License for AMDiS
//
// Copyright (c) 2010 Dresden University of Technology
// All rights reserved.
// Authors: Simon Vey, Thomas Witkowski et al.
//
// This file is part of AMDiS
//
// See also license.opensource.txt in the distribution.


#include "parallel/PetscSolverGlobalBlockMatrix.h"
#include "parallel/StdMpi.h"
#include "parallel/MpiHelper.h"

namespace AMDiS {

  void PetscSolverGlobalBlockMatrix::fillPetscMatrix(Matrix<DOFMatrix*> *mat)
  {
    FUNCNAME("PetscSolverGlobalBlockMatrix::fillPetscMatrix()");

    TEST_EXIT_DBG(meshDistributor)("No mesh distributor object defined!\n");
    TEST_EXIT_DBG(mat)("No DOF matrix defined!\n");

    double wtime = MPI::Wtime();

    const FiniteElemSpace *feSpace = meshDistributor->getFeSpace(0);
    nComponents = mat->getNumRows();
    int nRankRows = meshDistributor->getNumberRankDofs(feSpace);
    int nOverallRows = meshDistributor->getNumberOverallDofs(feSpace);

#if (DEBUG != 0)
    MSG("Fill petsc matrix 1 needed %.5f seconds\n", MPI::Wtime() - wtime);
#endif

    nestMat.resize(nComponents * nComponents);

    // === Transfer values from DOF matrices to the PETSc matrix. ===

    // Create one distributed AIJ matrix per non-empty block; empty blocks
    // are set to PETSC_NULL so that MatCreateNest treats them as zero blocks.
    for (int i = 0; i < nComponents; i++)
      for (int j = 0; j < nComponents; j++)
        if ((*mat)[i][j]) {
          MatCreateMPIAIJ(PETSC_COMM_WORLD,
                          nRankRows, nRankRows,
                          nOverallRows, nOverallRows,
                          30, PETSC_NULL, 30, PETSC_NULL,
                          &(nestMat[i * nComponents + j]));

          setDofMatrix(nestMat[i * nComponents + j], (*mat)[i][j]);

          MatAssemblyBegin(nestMat[i * nComponents + j], MAT_FINAL_ASSEMBLY);
          MatAssemblyEnd(nestMat[i * nComponents + j], MAT_FINAL_ASSEMBLY);
        } else {
          nestMat[i * nComponents + j] = PETSC_NULL;
        }

    MatCreateNest(PETSC_COMM_WORLD,
                  nComponents, PETSC_NULL, nComponents, PETSC_NULL,
                  &(nestMat[0]), &petscMatrix);

#if (DEBUG != 0)
    MSG("Fill petsc matrix 2 needed %.5f seconds\n", MPI::Wtime() - wtime);
#endif

    MatAssemblyBegin(petscMatrix, MAT_FINAL_ASSEMBLY);
    MatAssemblyEnd(petscMatrix, MAT_FINAL_ASSEMBLY);


    // === Init PETSc solver. ===

    KSPCreate(PETSC_COMM_WORLD, &solver);
    KSPSetOperators(solver, petscMatrix, petscMatrix, SAME_NONZERO_PATTERN);
    KSPSetFromOptions(solver);
    KSPGetPC(solver, &pc);
    setBlockPreconditioner(pc);

    MSG("Fill petsc matrix needed %.5f seconds\n", MPI::Wtime() - wtime);
  }


  void PetscSolverGlobalBlockMatrix::fillPetscRhs(SystemVector *vec)
  {
    FUNCNAME("PetscSolverGlobalBlockMatrix::fillPetscRhs()");

    TEST_EXIT_DBG(vec)("No DOF vector defined!\n");

    nComponents = vec->getSize();
    const FiniteElemSpace *feSpace = meshDistributor->getFeSpace(0);
    int nRankRows = meshDistributor->getNumberRankDofs(feSpace);
    int nOverallRows = meshDistributor->getNumberOverallDofs(feSpace);

    nestVec.resize(nComponents);

    // === Transfer values from the DOF vectors to the nested PETSc vector. ===

    for (int i = 0; i < nComponents; i++) {
      VecCreateMPI(PETSC_COMM_WORLD, nRankRows, nOverallRows, &(nestVec[i]));

      setDofVector(nestVec[i], vec->getDOFVector(i));

      VecAssemblyBegin(nestVec[i]);
      VecAssemblyEnd(nestVec[i]);
    }

    VecCreateNest(PETSC_COMM_WORLD, nComponents, PETSC_NULL,
                  &(nestVec[0]), &petscRhsVec);

    VecAssemblyBegin(petscRhsVec);
    VecAssemblyEnd(petscRhsVec);
  }


  void PetscSolverGlobalBlockMatrix::solvePetscMatrix(SystemVector &vec,
                                                      AdaptInfo *adaptInfo)
  {
    FUNCNAME("PetscSolverGlobalBlockMatrix::solvePetscMatrix()");

    const FiniteElemSpace *feSpace = meshDistributor->getFeSpace(0);
    VecDuplicate(petscRhsVec, &petscSolVec);

    // Solve the linear system with PETSc.
    KSPSolve(solver, petscRhsVec, petscSolVec);

    // === Transfer values from PETSc's solution vectors to the DOF vectors. ===
    for (int i = 0; i < nComponents; i++) {
      DOFVector<double> &dofvec = *(vec.getDOFVector(i));

      Vec tmp;
      VecNestGetSubVec(petscSolVec, i, &tmp);

      int nRankDofs = meshDistributor->getNumberRankDofs(feSpace);
      PetscScalar *vecPointer;
      VecGetArray(tmp, &vecPointer);

      for (int j = 0; j < nRankDofs; j++)
        dofvec[meshDistributor->mapLocalToDofIndex(feSpace, j)] = vecPointer[j];

      VecRestoreArray(tmp, &vecPointer);
    }


    // === Synchronize DOFs at common DOFs, i.e., DOFs that correspond to ===
    // === more than one partition.                                       ===
    meshDistributor->synchVector(vec);


    // === Destroy PETSc's variables. ===

    VecDestroy(&petscRhsVec);
    for (int i = 0; i < nComponents; i++)
      VecDestroy(&(nestVec[i]));

    VecDestroy(&petscSolVec);
  }


  void PetscSolverGlobalBlockMatrix::destroyMatrixData()
  {
    FUNCNAME("PetscSolverGlobalBlockMatrix::destroyMatrixData()");

    for (unsigned int i = 0; i < nestMat.size(); i++)
      if (nestMat[i] != PETSC_NULL)
        MatDestroy(&(nestMat[i]));

    MatDestroy(&petscMatrix);
    KSPDestroy(&solver);
  }


  void PetscSolverGlobalBlockMatrix::setDofMatrix(Mat& petscMat, DOFMatrix* mat)
  {
    FUNCNAME("PetscSolverGlobalBlockMatrix::setDofMatrix()");

    TEST_EXIT(mat)("No DOFMatrix!\n");
    TEST_EXIT(petscMat)("No PETSc matrix!\n");

    const FiniteElemSpace *feSpace = meshDistributor->getFeSpace(0);

    using mtl::tag::row; using mtl::tag::nz; using mtl::begin; using mtl::end;
    namespace traits = mtl::traits;
    typedef DOFMatrix::base_matrix_type Matrix;

    traits::col<Matrix>::type col(mat->getBaseMatrix());
    traits::const_value<Matrix>::type value(mat->getBaseMatrix());

    typedef traits::range_generator<row, Matrix>::type cursor_type;
    typedef traits::range_generator<nz, cursor_type>::type icursor_type;

    vector<int> cols;
    vector<double> values;
    cols.reserve(300);
    values.reserve(300);

    // === Traverse all rows of the DOF matrix and insert the values row ===
    // === wise into the PETSc matrix.                                   ===

    for (cursor_type cursor = begin<row>(mat->getBaseMatrix()),
           cend = end<row>(mat->getBaseMatrix()); cursor != cend; ++cursor) {

      // Global index of the current row DOF.
      int rowIndex = meshDistributor->mapLocalToGlobal(feSpace, *cursor);

      cols.clear();
      values.clear();

      for (icursor_type icursor = begin<nz>(cursor), icend = end<nz>(cursor);
           icursor != icend; ++icursor) {
        // Global index of the current column DOF.
        int colIndex = meshDistributor->mapLocalToGlobal(feSpace, col(*icursor));

        // Ignore all zero entries, except diagonal entries.
        if (value(*icursor) == 0.0 && rowIndex != colIndex)
          continue;

        cols.push_back(colIndex);
        values.push_back(value(*icursor));
      }

      MatSetValues(petscMat, 1, &rowIndex, cols.size(),
                   &(cols[0]), &(values[0]), ADD_VALUES);
    }
  }


  void PetscSolverGlobalBlockMatrix::setDofVector(Vec& petscVec,
                                                  DOFVector<double>* vec)
  {
    FUNCNAME("PetscSolverGlobalBlockMatrix::setDofVector()");

    const FiniteElemSpace *feSpace = meshDistributor->getFeSpace(0);

    // Traverse all used DOFs in the DOF vector.
    DOFVector<double>::Iterator dofIt(vec, USED_DOFS);
    for (dofIt.reset(); !dofIt.end(); ++dofIt) {
      int index = meshDistributor->mapLocalToGlobal(feSpace, dofIt.getDOFIndex());
      double value = *dofIt;

      VecSetValues(petscVec, 1, &index, &value, ADD_VALUES);
    }
  }

}