// ============================================================================
// ==                                                                        ==
// == AMDiS - Adaptive multidimensional simulations                          ==
// ==                                                                        ==
// ==  http://www.amdis-fem.org                                              ==
// ==                                                                        ==
// ============================================================================
//
// Software License for AMDiS
//
// Copyright (c) 2010 Dresden University of Technology 
// All rights reserved.
// Authors: Simon Vey, Thomas Witkowski et al.
//
// This file is part of AMDiS
//
// See also license.opensource.txt in the distribution.


/** \file PetscSolver.h */

#ifndef AMDIS_PETSC_SOLVER_H
#define AMDIS_PETSC_SOLVER_H

#include <set>
#include <map>
#include <mpi.h>

#include "AMDiS_fwd.h"
#include "Global.h"
#include "Initfile.h"
#include "DOFMatrix.h"
#include "parallel/MeshDistributor.h"

#include <petsc.h>
#include <petscsys.h>
#include <petscao.h>
#include <petscksp.h>

namespace AMDiS {

  using namespace std;

  class PetscSolver
  {
48
  public:
49
    PetscSolver();
50

51
    virtual ~PetscSolver() {}
52

53 54 55
    void setMeshDistributor(MeshDistributor *m,
			    MPI::Intracomm mpiComm0,
			    MPI::Intracomm mpiComm1)
56 57
    {
      meshDistributor = m;
58 59 60
      mpiCommGlobal = mpiComm0;
      mpiCommLocal = mpiComm1; 
      mpiRank = mpiCommGlobal.Get_rank();
61 62
    }

63 64 65 66 67 68 69 70
    void setLevel(int l) 
    {
      subdomainLevel = l;
    }

    void setDofMapping(ParallelDofMapping *interiorDofs,
		       ParallelDofMapping *coarseDofs = NULL);

71
    /** \brief
72 73
     * Create a PETSc matrix. The given DOF matrices are used to create the nnz 
     * structure of the PETSc matrix and the values are transfered to it.
74 75
     *
     * \param[in] mat
76 77 78 79 80 81
     */
    virtual void fillPetscMatrix(Matrix<DOFMatrix*> *mat) = 0;

    /** \brief
     * Create a PETSc vector and fills it with the rhs values of the system.
     *
82 83
     * \param[in] vec
     */
84
    virtual void fillPetscRhs(SystemVector *vec) = 0;
85

86 87
    /// Use PETSc to solve the linear system of equations
    virtual void solvePetscMatrix(SystemVector &vec, AdaptInfo *adaptInfo) = 0;
88

89 90 91 92
    virtual void solve(Vec &rhs, Vec &sol);

    virtual void solveGlobal(Vec &rhs, Vec &sol);

93 94 95
    /// Destroys all matrix data structures.
    virtual void destroyMatrixData() = 0;

96 97 98
    /// Detroys all vector data structures.
    virtual void destroyVectorData() = 0;

Thomas Witkowski's avatar
Thomas Witkowski committed
99 100 101 102 103
    virtual Flag getBoundaryDofRequirement()
    {
      return 0;
    }

104 105
    KSP getSolver() 
    { 
106
      return kspInterior; 
107
    }
108

109 110
    PC getPc() 
    { 
111
      return pcInterior; 
112
    }
113

114 115 116 117 118
    void setKspPrefix(std::string s)
    {
      kspPrefix = s;
    }

Thomas Witkowski's avatar
Thomas Witkowski committed
119
    void setRemoveRhsNullspace(bool b)
120
    {
Thomas Witkowski's avatar
Thomas Witkowski committed
121
      removeRhsNullspace = b;
122 123
    }

124 125 126 127 128 129
    /// Adds a new vector to the basis of the operator's nullspace.
    void addNullspaceVector(SystemVector *vec)
    {
      nullspace.push_back(vec);
    }

130 131 132
    inline bool isCoarseSpace(const FiniteElemSpace *feSpace, 
			      DegreeOfFreedom dof)
    {
133
      FUNCNAME("PetscSolver::isCoarseSpace()");
134 135 136
      
      if (coarseSpaceMap == NULL)
	return false;
137 138 139 140 141 142

      return (*coarseSpaceMap)[feSpace].isSet(dof);
    }

    inline Vec& getRhsCoarseSpace()
    {
143
      FUNCNAME("PetscSolver::getRhsCoarseSpace()");
144 145 146 147 148 149 150 151 152 153 154 155 156 157 158 159 160 161
      TEST_EXIT_DBG(coarseSpaceMap)
	("Subdomain solver does not contain a coarse space!\n");

      return rhsCoarseSpace;
    }

    inline Vec& getRhsInterior()
    {
      return rhsInterior;
    }

    inline Mat& getMatIntInt()
    {
      return matIntInt;
    }

    inline Mat& getMatCoarseCoarse()
    {
162
      FUNCNAME("PetscSolver::getMatCoarseCoarse()");
163 164 165 166 167
      TEST_EXIT_DBG(coarseSpaceMap)
	("Subdomain solver does not contain a coarse space!\n");

      return matCoarseCoarse;
    }
168

169 170
    inline Mat& getMatIntCoarse()
    {
171
      FUNCNAME("PetscSolver::getMatIntCoarse()");
172 173 174 175 176 177 178 179
      TEST_EXIT_DBG(coarseSpaceMap)
	("Subdomain solver does not contain a coarse space!\n");

      return matIntCoarse;
    }

    inline Mat& getMatCoarseInt()
    {
180
      FUNCNAME("PetscSolver::getMatCoarseInt()");
181 182 183 184 185 186 187
      TEST_EXIT_DBG(coarseSpaceMap)
	("Subdomain solver does not contain a coarse space!\n");

      return matCoarseInt;
    }

  protected:
188 189 190 191 192 193 194 195 196 197 198 199 200 201
    /** \brief
     * Copies between to PETSc vectors by using different index sets for the
     * origin and the destination vectors.
     *
     * \param[in]   originVec    The PETSc vector from which we copy from.
     * \param[out]  destVec      The PETSc vector we copy too.
     * \param[in]   originIndex  Set of global indices referring to the 
     *                           origin vector.
     * \param[in]   destIndex    Set of global indices referring to the
     *                           destination vector.
     */
    void copyVec(Vec& originVec, Vec& destVec, 
		 vector<int>& originIndex, vector<int>& destIndex);

202 203 204 205 206 207 208 209 210 211
    /// Checks if all given FE spaces are handled by the mesh distributor.
    void checkFeSpaces(vector<const FiniteElemSpace*>& feSpaces);

    /// Returns a vector with the FE spaces of each row in the matrix. Thus, the
    /// resulting vector may contain the same FE space multiple times.
    vector<const FiniteElemSpace*> getFeSpaces(Matrix<DOFMatrix*> *mat);

    /// Returns a vector with the FE spaces of all components of a system vector.
    vector<const FiniteElemSpace*> getFeSpaces(SystemVector *vec);

Thomas Witkowski's avatar
Thomas Witkowski committed
212 213
    void updateSubdomainData();

214
  protected:
215
    MeshDistributor *meshDistributor;
216

217 218 219 220 221 222 223 224 225
    int subdomainLevel;

    int rStartInterior;

    int nGlobalOverallInterior;

    ParallelDofMapping *interiorMap;

    ParallelDofMapping* coarseSpaceMap;
226

227
    int mpiRank;
228

229
    MPI::Intracomm mpiCommGlobal;
230

231
    MPI::Intracomm mpiCommLocal;
232

233
    /// Petsc's matrix structure.
234
    Mat matIntInt, matCoarseCoarse, matIntCoarse, matCoarseInt;
235

236 237
    /// PETSc's vector structures for the rhs vector, the solution vector and a
    /// temporary vector for calculating the final residuum.
238 239 240
    Vec rhsInterior;

    Vec rhsCoarseSpace;
241

242
    /// PETSc solver object
243
    KSP kspInterior;
244

245
    /// PETSc preconditioner object
246
    PC pcInterior;
247

248 249 250
    /// A set of vectors that span the null space of the operator.
    vector<SystemVector*> nullspace;

251 252
    /// KSP database prefix
    string kspPrefix;
253 254 255

    /// If true, the constant null space is projected out of the RHS vector. It
    /// depends on the specific PETSc solver if it considers this value.
Thomas Witkowski's avatar
Thomas Witkowski committed
256 257 258
    bool removeRhsNullspace;

    bool hasConstantNullspace;
259 260
  };

} // namespace AMDiS

#endif