// ============================================================================
// ==                                                                        ==
// == AMDiS - Adaptive multidimensional simulations                          ==
// ==                                                                        ==
// ==  http://www.amdis-fem.org                                              ==
// ==                                                                        ==
// ============================================================================
//
// Software License for AMDiS
//
// Copyright (c) 2010 Dresden University of Technology 
// All rights reserved.
// Authors: Simon Vey, Thomas Witkowski et al.
//
// This file is part of AMDiS
//
// See also license.opensource.txt in the distribution.


/** \file PetscSolver.h */

#ifndef AMDIS_PETSC_SOLVER_H
#define AMDIS_PETSC_SOLVER_H

#include <set>
#include <map>
#include <mpi.h>

#include "AMDiS_fwd.h"
#include "Global.h"
#include "Initfile.h"
#include "DOFMatrix.h"
#include "parallel/MeshDistributor.h"
#include "parallel/ParallelCoarseSpaceMatVec.h"
#include <petsc.h>
#include <petscsys.h>
#include <petscao.h>
#include <petscksp.h>

namespace AMDiS {

  using namespace std;


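  /**
   * \brief Abstract interface for solving the distributed linear systems of a
   * parallel AMDiS computation with PETSc. Derived classes create the PETSc
   * matrix and rhs vector from the AMDiS DOF matrices and system vectors,
   * solve the resulting system with a KSP solver, and may make use of an
   * optional coarse space.
   */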
  class PetscSolver
  {
  public:
    PetscSolver();

    virtual ~PetscSolver() {}

    void setMeshDistributor(MeshDistributor *m,
			    MPI::Intracomm mpiComm0,
			    MPI::Intracomm mpiComm1)
    {
      meshDistributor = m;
      mpiCommGlobal = mpiComm0;
      mpiCommLocal = mpiComm1; 
      mpiRank = mpiCommGlobal.Get_rank();
    }

    void setLevel(int l) 
    {
      subdomainLevel = l;
    }

    /// Set parallel DOF mapping for the interior DOFs.
    void setDofMapping(ParallelDofMapping *interiorDofs)
    {
      interiorMap = interiorDofs;
    }
    
    void setCoarseSpaceDofMapping(ParallelDofMapping *coarseDofs, 
				  int component = -1);

    /** \brief
     * Creates a PETSc matrix. The given DOF matrices are used to create the
     * nnz structure of the PETSc matrix, and their values are transferred to it.
     *
     * \param[in] mat
     */
    virtual void fillPetscMatrix(Matrix<DOFMatrix*> *mat) = 0;

    /** \brief
     * Creates a PETSc vector and fills it with the rhs values of the system.
     *
     * \param[in] vec
     */
    virtual void fillPetscRhs(SystemVector *vec) = 0;

    /// Use PETSc to solve the linear system of equations
    virtual void solvePetscMatrix(SystemVector &vec, AdaptInfo *adaptInfo) = 0;

    virtual void solve(Vec &rhs, Vec &sol);

    virtual void solveGlobal(Vec &rhs, Vec &sol);

    /// Destroys all matrix data structures.
    virtual void destroyMatrixData() = 0;

    /// Destroys all vector data structures.
    virtual void destroyVectorData() = 0;
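
    /* Rough sketch of the fill/solve/destroy cycle defined by the pure
     * virtual methods above, assuming a concrete subclass and hypothetical
     * variable names (systemMat, rhsVec, solution, adaptInfo); an
     * illustration, not a verbatim excerpt from AMDiS:
     *
     *   solver->fillPetscMatrix(&systemMat);             // matrix + nnz structure
     *   solver->fillPetscRhs(&rhsVec);                   // rhs vector
     *   solver->solvePetscMatrix(solution, adaptInfo);   // solve and write back
     *   solver->destroyVectorData();
     *   solver->destroyMatrixData();
     */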

    virtual Flag getBoundaryDofRequirement()
    {
      return 0;
    }

    KSP getSolver() 
    { 
      return kspInterior; 
    }

    PC getPc() 
    { 
      return pcInterior; 
    }

    void setKspPrefix(std::string s)
    {
      kspPrefix = s;
    }
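
    /* Example (an assumption about how the prefix is applied to the KSP
     * object via PETSc's options database): after setKspPrefix("interior_"),
     * run-time options such as
     *
     *   -interior_ksp_type gmres -interior_ksp_rtol 1e-8
     *
     * would address this solver's interior KSP object. */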

    void setRemoveRhsNullspace(bool b)
    {
      removeRhsNullspace = b;
    }

    /// Adds a new vector to the basis of the operator's nullspace.
    void addNullspaceVector(SystemVector *vec)
    {
      nullspace.push_back(vec);
    }

    /// Sets the nullspace to be constant for some specific components.
    void setConstantNullspace(vector<int> &components)
    {
      constNullspaceComponent = components;
    }

    /// Sets the nullspace to be constant for a specific component.
    void setConstantNullspace(int component)
    {
      constNullspaceComponent.clear();
      constNullspaceComponent.push_back(component);
    }
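
    /* Usage sketch with a hypothetical component numbering: for a saddle
     * point system in which, say, the pressure component 1 is determined only
     * up to a constant, one could call
     *
     *   solver->setConstantNullspace(1);
     *   solver->setRemoveRhsNullspace(true);
     *
     * so that the singular operator can be handled by the solver. */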

    inline bool isCoarseSpace(int component,
			      const FiniteElemSpace *feSpace, 
			      DegreeOfFreedom dof)
    {
      FUNCNAME("PetscSolver::isCoarseSpace()");
      
      if (coarseSpaceMap.empty())
	return false;

      TEST_EXIT_DBG(coarseSpaceMap.count(component))
	("Component %d has no coarse space defined!\n", component);

      return (*(coarseSpaceMap[component]))[feSpace].isSet(dof);
    }

    inline Vec& getRhsCoarseSpace()
    {
      FUNCNAME("PetscSolver::getRhsCoarseSpace()");

      TEST_EXIT_DBG(coarseSpaceMap.size())
	("Subdomain solver does not contain a coarse space!\n");

      return petscData.getCoarseVecRhs();
    }

    inline Vec& getRhsInterior()
    {
      return petscData.getInteriorVecRhs();
    }

    inline Vec& getSolCoarseSpace()
    {
      FUNCNAME("PetscSolver::getSolCoarseSpace()");

      TEST_EXIT_DBG(coarseSpaceMap.size())
	("Subdomain solver does not contain a coarse space!\n");

      return petscData.getCoarseVecSol();
    }

    inline Vec& getSolInterior()
    {
      return petscData.getInteriorVecSol();
    }

    inline Mat& getMatIntInt()
    {
      return petscData.getInteriorMat();
    }

    inline Mat& getMatCoarseCoarse()
    {
      FUNCNAME("PetscSolver::getMatCoarseCoarse()");

      TEST_EXIT_DBG(coarseSpaceMap.size())
	("Subdomain solver does not contain a coarse space!\n");

      return petscData.getCoarseMat(0);
    }

    inline Mat& getMatIntCoarse()
    {
      FUNCNAME("PetscSolver::getMatIntCoarse()");

      TEST_EXIT_DBG(coarseSpaceMap.size())
	("Subdomain solver does not contain a coarse space!\n");

      return petscData.getIntCoarseMat();
    }

    inline Mat& getMatCoarseInt()
    {
      FUNCNAME("PetscSolver::getMatCoarseInt()");

      TEST_EXIT_DBG(coarseSpaceMap.size())
	("Subdomain solver does not contain a coarse space!\n");
      
      return petscData.getCoarseIntMat();
    }
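
    /* Sketch of the block structure implied by the accessors above when a
     * coarse space is defined (interior and coarse space unknowns):
     *
     *   [ getMatIntInt()     getMatIntCoarse()    ] [ sol_int    ]   [ rhs_int    ]
     *   [ getMatCoarseInt()  getMatCoarseCoarse() ] [ sol_coarse ] = [ rhs_coarse ]
     *
     * The corresponding vectors are returned by getSolInterior()/getRhsInterior()
     * and getSolCoarseSpace()/getRhsCoarseSpace(). */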

  protected:
    /** \brief
     * Copies data between two PETSc vectors using different index sets for
     * the origin and the destination vector.
     *
     * \param[in]   originVec    The PETSc vector we copy from.
     * \param[out]  destVec      The PETSc vector we copy to.
     * \param[in]   originIndex  Set of global indices referring to the 
     *                           origin vector.
     * \param[in]   destIndex    Set of global indices referring to the
     *                           destination vector.
     */
    void copyVec(Vec& originVec, Vec& destVec, 
		 vector<int>& originIndex, vector<int>& destIndex);
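
    /* Illustration with hypothetical index sets: for originIndex = {4, 9} and
     * destIndex = {0, 1}, the values at the global positions 4 and 9 of
     * originVec are written to the global positions 0 and 1 of destVec, i.e.,
     * the i-th origin index is presumed to correspond to the i-th destination
     * index. */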

    /// Checks if all given FE spaces are handled by the mesh distributor.
    void checkFeSpaces(vector<const FiniteElemSpace*>& feSpaces);

    /// Returns a vector with the FE spaces of all components of a system vector.
    vector<const FiniteElemSpace*> getFeSpaces(SystemVector *vec);

  protected:
    MeshDistributor *meshDistributor;

    int subdomainLevel;

    ParallelDofMapping *interiorMap;

    /// Parallel DOF mapping of the (optional) coarse space. Allows defining
    /// different coarse spaces for different components.
    map<int, ParallelDofMapping*> coarseSpaceMap;    

    int mpiRank;

    MPI::Intracomm mpiCommGlobal;

    MPI::Intracomm mpiCommLocal;

    /// PETSc matrices and vectors (possibly block structured if a coarse
    /// space is defined).
    ParallelCoarseSpaceMatVec petscData;

    /// PETSc solver object
    KSP kspInterior;

    /// PETSc preconditioner object
    PC pcInterior;

    /// A set of vectors that span the null space of the operator.
    vector<SystemVector*> nullspace;

    /// KSP database prefix
    string kspPrefix;

    /// If true, the constant null space is projected out of the RHS vector.
    /// Whether this value is considered depends on the specific PETSc solver.
    bool removeRhsNullspace;

    bool hasConstantNullspace;

    vector<int> constNullspaceComponent;
  };


} // namespace AMDiS

#endif