
/******************************************************************************
 *
 * AMDiS - Adaptive multidimensional simulations
 *
 * Copyright (C) 2013 Dresden University of Technology. All Rights Reserved.
 * Web: https://fusionforge.zih.tu-dresden.de/projects/amdis
 *
 * Authors:
 * Simon Vey, Thomas Witkowski, Andreas Naumann, Simon Praetorius, et al.
 *
 * This file is provided AS IS with NO WARRANTY OF ANY KIND, INCLUDING THE
 * WARRANTY OF DESIGN, MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE.
 *
 *
 * This file is part of AMDiS
 *
 * See also license.opensource.txt in the distribution.
 *
 ******************************************************************************/



/** \file PetscHelper.h */

#ifndef AMDIS_PETSCHELPER_H
#define AMDIS_PETSCHELPER_H

#include <mpi.h>
#include <map>
#include <vector>
#include <petsc.h>
#include "AMDiS_fwd.h"
#include "utility/PetscWrapper.h"

namespace AMDiS
{
  namespace Parallel
  {

    /** \brief
    * In this namespace, we collect several auxiliary functions for using
    * PETSc in AMDiS. Many of these functions may be replaced by new PETSc
    * functions in upcoming versions.
    */
    namespace petsc_helper
    {

      /// Column-wise representation of a PETSc matrix: \ref SparseCol pairs
      /// the row indices and values of one column, and \ref PetscMatCol maps
      /// column indices to their sparse columns.
      typedef std::pair<std::vector<int>, std::vector<double> > SparseCol;
      typedef std::map<int, SparseCol> PetscMatCol;
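
      /* Layout sketch (illustrative, not prescribed by AMDiS): for a column
       * index "col" with local nonzero entries, matCol[col].first holds the
       * local row indices and matCol[col].second the matching values:
       *
       *   PetscMatCol matCol;
       *   // ... fill, e.g. via getMatLocalColumn(mat, matCol) ...
       *   int    row = matCol[col].first[k];   // row index of k-th entry
       *   double val = matCol[col].second[k];  // value of entry (row, col)
       */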

      /** \brief
      * Returns, for a distributed matrix, the local matrix on each rank in
      * a sparse column format.
      *
      * \param[in]   mat     PETSc distributed matrix.
      * \param[out]  matCol  The sparse column representation of the local matrix.
      */
      void getMatLocalColumn(Mat mat, PetscMatCol &matCol);

      /** \brief
      * Set a local column vector in a distributed matrix.
      *
      * \param[out]  mat     Distributed matrix.
      * \param[in]   column  Column index.
      * \param[in]   vec     Column vector.
      */
      void setMatLocalColumn(Mat mat, int column, Vec vec);

      /** \brief
      * Create a local PETSc vector representing the column of a matrix
      * stored in \ref PetscMatCol type format.
      *
      * \param[in]   matCol   Sparse column representation.
      * \param[out]  vec      Vector representing one column of the matrix.
      */
      void getColumnVec(const SparseCol &matCol, Vec vec);
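
      /* Usage sketch, assuming a distributed matrix "mat" and a sequential
       * vector with one entry per local row (all names are illustrative):
       * extract the local columns, turn one column into a vector, and write
       * it back into another column of the matrix.
       *
       *   PetscMatCol matCol;
       *   petsc_helper::getMatLocalColumn(mat, matCol);
       *
       *   Vec colVec;
       *   VecCreateSeq(PETSC_COMM_SELF, nLocalRows, &colVec);
       *   petsc_helper::getColumnVec(matCol[col], colVec);
       *
       *   petsc_helper::setMatLocalColumn(mat, otherCol, colVec);
       *   VecDestroy(&colVec);
       */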

      /** \brief
      * Computes the matrix-matrix product inv(A) B = C. Matrices B and C
      * are distributed matrices. Matrix A is a local matrix on each MPI
      * task. The overall number of rows of the local matrices A must equal
      * the number of distributed rows in B.
      *
      * \param[in]   mpiComm  MPI communicator object (must match the one of ksp)
      * \param[in]   ksp      inv(A), given as a PETSc solver object.
      * \param[in]   mat0     matrix B
      * \param[out]  mat1     resulting matrix C, created inside the function
      */
      void blockMatMatSolve(MPI::Intracomm mpiComm,
			    KSP ksp,
			    Mat mat0,
			    Mat &mat1);
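
      /* Usage sketch (assumed, with illustrative names): "kspA" is a KSP
       * already set up for the local matrices A, "matB" a distributed
       * matrix; the result matrix is created inside the call and must be
       * destroyed by the caller.
       *
       *   Mat matC;
       *   petsc_helper::blockMatMatSolve(MPI::COMM_WORLD, kspA, matB, matC);
       *   // ... use matC = inv(A) B ...
       *   MatDestroy(&matC);
       */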

      /** \brief
      * Converts a 2x2 nested matrix to a MATAIJ matrix (thus not nested).
      *
      * \param[in]  matNest  nested input matrix
      * \param[out] mat      matrix of type MATAIJ, created inside this function.
      */
      void matNestConvert(Mat matNest, Mat &mat);
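
      /* Usage sketch (illustrative names): flatten a 2x2 MATNEST matrix
       * into a monolithic MATAIJ matrix, which is created inside the call.
       *
       *   Mat matAij;
       *   petsc_helper::matNestConvert(matNest, matAij);
       *   // ... use matAij ...
       *   MatDestroy(&matAij);
       */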

      /// Set the KSP and PC types of a solver and attach an external LU
      /// factorization package, with optional convergence tolerances.
      void setSolverWithLu(KSP ksp,
			  const char* kspPrefix,
			  KSPType kspType,
			  PCType pcType,
			  const MatSolverPackage matSolverPackage,
			  PetscReal rtol = PETSC_DEFAULT,
			  PetscReal atol = PETSC_DEFAULT,
			  PetscInt maxIt = PETSC_DEFAULT);
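
      /* Example call (an assumed configuration, not prescribed by AMDiS):
       * a direct LU solve via MUMPS as the preconditioner of a KSPPREONLY
       * solver.
       *
       *   petsc_helper::setSolverWithLu(ksp, "schur_", KSPPREONLY, PCLU,
       *                                 MATSOLVERMUMPS);
       */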

      /// Set the KSP and PC types of a solver, with optional convergence
      /// tolerances.
      void setSolver(KSP ksp,
		    const char* kspPrefix,
		    KSPType kspType,
		    PCType pcType,
		    PetscReal rtol = PETSC_DEFAULT,
		    PetscReal atol = PETSC_DEFAULT,
		    PetscInt maxIt = PETSC_DEFAULT);
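
      /* Example call (an assumed configuration): GMRES with a block-Jacobi
       * preconditioner and a relative tolerance of 1e-8.
       *
       *   petsc_helper::setSolver(ksp, "interior_", KSPGMRES, PCBJACOBI,
       *                           1.e-8);
       */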

      /// Create a KSP solver object for matrix \p m, with an optional
      /// options prefix and verbosity level \p info.
      void createSolver(MPI::Intracomm comm, KSP &ksp, Mat m, std::string kspPrefix = "", int info = 0);
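
      /* Usage sketch (illustrative names): create a solver for matrix "m",
       * solve once, and clean up.
       *
       *   KSP ksp;
       *   petsc_helper::createSolver(MPI::COMM_WORLD, ksp, m, "outer_");
       *   KSPSetFromOptions(ksp);
       *   KSPSolve(ksp, rhsVec, solVec);
       *   KSPDestroy(&ksp);
       */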

    } // end namespace petsc_helper

  } // end namespace Parallel
} // end namespace AMDiS