/******************************************************************************
 *
 * AMDiS - Adaptive multidimensional simulations
 *
 * Copyright (C) 2013 Dresden University of Technology. All Rights Reserved.
 * Web: https://fusionforge.zih.tu-dresden.de/projects/amdis
 *
 * Authors:
 * Simon Vey, Thomas Witkowski, Andreas Naumann, Simon Praetorius, et al.
 *
 * This file is provided AS IS with NO WARRANTY OF ANY KIND, INCLUDING THE
 * WARRANTY OF DESIGN, MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE.
 *
 *
 * This file is part of AMDiS
 *
 * See also license.opensource.txt in the distribution.
 *
 ******************************************************************************/



/** \file PetscHelper.h */

#ifndef AMDIS_PETSCHELPER_H
#define AMDIS_PETSCHELPER_H

#include <mpi.h>
#include <map>
#include <vector>
#include <petsc.h>
#include "AMDiS_fwd.h"

namespace AMDiS
{
  namespace Parallel
  {
38

39
    /** \brief
40
    * In this namespace, we collect several auxiliary functions for using
41
    * PETSc in AMDiS. Many of these function may be replaced by new PETSc
42 43 44 45 46 47
    * function in upcoming versions.
    */
    namespace petsc_helper
    {

      /// Defines a PETSc matrix column wise
48 49
      typedef std::pair<std::vector<int>, std::vector<double> > SparseCol;
      typedef std::map<int, SparseCol> PetscMatCol;
50 51 52 53 54 55 56 57 58 59 60 61 62 63 64 65 66 67 68 69 70 71 72 73 74 75 76 77 78 79 80 81 82 83 84 85 86 87 88

      /** \brief
      * Returns for a distributed matrix on each rank the local matrix in a
      * sparce column format.
      *
      * \param[in]   mat     PETSc distributerd matrix.
      * \param[out]  matCol  The sparse column represenation of the local matrix.
      */
      void getMatLocalColumn(Mat mat, PetscMatCol &matCol);

      /** \brief
      * Set a local column vector in a distributed matrix.
      *
      * \param[out]  mat     Distributed matrix.
      * \param[in]   column  Column index.
      * \param[in]   vec     Column vector.
      */
      void setMatLocalColumn(Mat mat, int column, Vec vec);

      /** \brief
      * Create a local PETSc vector representing the column of a matrix
      * stored in \ref PetscMatCol type format.
      *
      * \param[in]   column   Sparse column representation.
      * \param[out]  vec      Vector representing one column of the matrix.
      */
      void getColumnVec(const SparseCol &matCol, Vec vec);

      /** \brief
      * Computes the matrix matrix product inv(A) B = C. Matrices B and C
      * are distributed matrices. Matrix A is a local matrix on each MPI
      * task. The overall number of rows of local matrices A must be the
      * number of distriubted rows in B.
      *
      * \param[in]   mpiComm  MPI Communicator object (must fit with ksp)
      * \param[in]   ksp      inv(A) matrix given by a PETSc solver object.
      * \param[in]   mat0     matrix B
      * \param[out]  mat1     resulting matrix C, is created inside the function
      */
89 90 91
      void blockMatMatSolve(MPI::Intracomm mpiComm,
			    KSP ksp,
			    Mat mat0,
92 93 94 95 96 97 98 99 100 101
			    Mat &mat1);

      /** \brief
      * Converts a 2x2 nested matrix to a MATAIJ matrix (thus not nested).
      *
      * \param[in]  matNest  nested input matrix
      * \param[out] mat      matrix of type MATAIJ, created inside this function.
      */
      void matNestConvert(Mat matNest, Mat &mat);

102
      void setSolverWithLu(KSP ksp,
103
			  const char* kspPrefix,
104 105
			  KSPType kspType,
			  PCType pcType,
106 107 108 109 110
			  const MatSolverPackage matSolverPackage,
			  PetscReal rtol = PETSC_DEFAULT,
			  PetscReal atol = PETSC_DEFAULT,
			  PetscInt maxIt = PETSC_DEFAULT);

111
      void setSolver(KSP ksp,
112
		    const char* kspPrefix,
113 114
		    KSPType kspType,
		    PCType pcType,
115 116 117
		    PetscReal rtol = PETSC_DEFAULT,
		    PetscReal atol = PETSC_DEFAULT,
		    PetscInt maxIt = PETSC_DEFAULT);
118

119
      void createSolver(MPI::Intracomm comm, KSP &ksp, Mat m, std::string kspPrefix = "", int info = 0);
120

121
    } // end namespace petsc_helper
122

123
  } // end namespace Parallel


  // functions for PETSc API changes
  namespace petsc
  {
    /// Prints the PETSc options database to \p viewer, hiding the extra
    /// options-object parameter introduced with PETSc 3.7.
    inline PetscErrorCode options_view(PetscViewer viewer)
    {
#if (PETSC_VERSION_MINOR >= 7)
      // PETSc >= 3.7: first argument selects the options object (global here).
      return PetscOptionsView(PETSC_NULL, viewer);
#else
      return PetscOptionsView(viewer);
#endif
    }

    /// Inserts the options given in \p in_str into the (global) PETSc
    /// options database; wraps the signature change of PETSc 3.7.
    inline PetscErrorCode options_insert_string(const char in_str[])
    {
#if (PETSC_VERSION_MINOR < 7)
      return PetscOptionsInsertString(in_str);
#else
      // PETSc >= 3.7 takes the options object as first argument.
      return PetscOptionsInsertString(PETSC_NULL, in_str);
#endif
    }

    /// Attaches the system matrix \p Amat and preconditioner matrix \p Pmat
    /// to \p ksp; the nonzero-pattern flag was dropped in PETSc 3.5.
    inline PetscErrorCode ksp_set_operators(KSP ksp, Mat Amat,Mat Pmat)
    {
#if (PETSC_VERSION_MINOR < 5)
      return KSPSetOperators(ksp, Amat, Pmat, SAME_NONZERO_PATTERN);
#else
      return KSPSetOperators(ksp, Amat, Pmat);
#endif
    }

    /// Retrieves the system matrix and preconditioner matrix attached to
    /// \p ksp; counterpart to ksp_set_operators, covering the PETSc 3.5
    /// signature change (nonzero-pattern flag removed).
    inline PetscErrorCode ksp_get_operators(KSP ksp, Mat *Amat,Mat *Pmat)
    {
#if (PETSC_VERSION_MINOR >= 5)
      return KSPGetOperators(ksp, Amat, Pmat);
#else
      return KSPGetOperators(ksp, Amat, Pmat, SAME_NONZERO_PATTERN);
#endif
    }

    /// Registers \p monitor as convergence monitor on \p ksp, bridging the
    /// PETSc 3.7 API change that requires a PetscViewerAndFormat context.
    template <class Monitor>
    inline PetscErrorCode ksp_monitor_set(KSP ksp, Monitor monitor)
    {
#if (PETSC_VERSION_MINOR >= 7)
      // PETSc >= 3.7: monitors receive a viewer/format context that must be
      // created here and destroyed by KSPMonitorSet via the destroy callback.
      PetscViewerAndFormat *vf;
      PetscErrorCode ierr;
      ierr = PetscViewerAndFormatCreate(PETSC_VIEWER_STDOUT_WORLD,PETSC_VIEWER_DEFAULT,&vf);CHKERRQ(ierr);
      // Casts adapt the template parameter to the exact function-pointer
      // signature KSPMonitorSet expects in this PETSc version.
      ierr = KSPMonitorSet(ksp,(PetscErrorCode (*)(KSP,PetscInt,PetscReal,void*))monitor,vf,(PetscErrorCode (*)(void**))PetscViewerAndFormatDestroy);CHKERRQ(ierr);
      return ierr;
#else
      // Older PETSc: no context object and no destroy routine needed.
      return KSPMonitorSet(ksp, monitor, PETSC_NULL, PETSC_NULL);
#endif
    }

    /// Creates vectors \p right / \p left compatible with \p mat;
    /// MatGetVecs was renamed to MatCreateVecs in PETSc 3.6.
    inline PetscErrorCode mat_create_vecs(Mat mat,Vec *right,Vec *left)
    {
#if (PETSC_VERSION_MINOR < 6)
      return MatGetVecs(mat, right, left);
#else
      return MatCreateVecs(mat, right, left);
#endif
    }

    /// Removes the null space \p sp from vector \p vec; the trailing output
    /// argument of MatNullSpaceRemove was dropped in PETSc 3.5.
    inline PetscErrorCode mat_nullspace_remove(MatNullSpace sp,Vec vec)
    {
#if (PETSC_VERSION_MINOR < 5)
      return MatNullSpaceRemove(sp, vec, PETSC_NULL);
#else
      return MatNullSpaceRemove(sp, vec);
#endif
    }

  } // end namespace petsc
} // end namespace AMDiS

#endif