//
// Software License for AMDiS
//
// Copyright (c) 2010 Dresden University of Technology 
// All rights reserved.
// Authors: Simon Vey, Thomas Witkowski et al.
//
// This file is part of AMDiS
//
// See also license.opensource.txt in the distribution.


#include "parallel/PetscSolverGlobalBlockMatrix.h"
#include "parallel/StdMpi.h"
#include "parallel/MpiHelper.h"

namespace AMDiS {

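  // fillPetscMatrix() creates one distributed PETSc matrix per non-empty
  // component block and combines them into a nested (MATNEST) matrix, so
  // a block preconditioner can later operate on the individual sub-matrices.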
  void PetscSolverGlobalBlockMatrix::fillPetscMatrix(Matrix<DOFMatrix*> *mat)
  {
    FUNCNAME("PetscSolverGlobalBlockMatrix::fillPetscMatrix()");

    TEST_EXIT_DBG(meshDistributor)("No mesh distributor object defined!\n");
    TEST_EXIT_DBG(mat)("No DOF matrix defined!\n");

    double wtime = MPI::Wtime();
    nComponents = mat->getNumRows();
    int nRankRows = meshDistributor->getNumberRankDofs();
    int nOverallRows = meshDistributor->getNumberOverallDofs();

#if (DEBUG != 0)
    MSG("Fill petsc matrix 1 needed %.5f seconds\n", MPI::Wtime() - wtime);
#endif

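    // nestMat stores the nComponents x nComponents sub-matrices in
    // row-major order; empty blocks are represented by PETSC_NULL.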
    nestMat.resize(nComponents * nComponents);

    // === Transfer values from DOF matrices to the PETSc matrix. === 

    for (int i = 0; i < nComponents; i++)
      for (int j = 0; j < nComponents; j++)
	if ((*mat)[i][j]) {
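	  // Create a distributed AIJ matrix for this block. The value 30 is
	  // a rough preallocation of nonzeros per row for both the diagonal
	  // and the off-diagonal part, not an exact count.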
	  MatCreateMPIAIJ(PETSC_COMM_WORLD,
			  nRankRows, nRankRows,
			  nOverallRows, nOverallRows,
			  30, PETSC_NULL, 30, PETSC_NULL,
			  &(nestMat[i * nComponents + j]));
	  setDofMatrix(nestMat[i * nComponents + j], (*mat)[i][j]);
	  MatAssemblyBegin(nestMat[i * nComponents + j], MAT_FINAL_ASSEMBLY);
	  MatAssemblyEnd(nestMat[i * nComponents + j], MAT_FINAL_ASSEMBLY);
	} else {
	  nestMat[i * nComponents + j] = PETSC_NULL;
	}

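    // Combine the sub-matrices into one nested PETSc matrix. Passing
    // PETSC_NULL for the index sets lets PETSc derive the row and column
    // layouts from the sub-matrices.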
    MatCreateNest(PETSC_COMM_WORLD, 
		  nComponents, PETSC_NULL, nComponents, PETSC_NULL, 
		  &(nestMat[0]), &petscMatrix);

#if (DEBUG != 0)
    MSG("Fill petsc matrix 2 needed %.5f seconds\n", MPI::Wtime() - wtime);
#endif

    MatAssemblyBegin(petscMatrix, MAT_FINAL_ASSEMBLY);
    MatAssemblyEnd(petscMatrix, MAT_FINAL_ASSEMBLY);

    // === Init PETSc solver. ===
    KSPCreate(PETSC_COMM_WORLD, &solver);
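    // The nest matrix serves as both system matrix and preconditioner
    // matrix; SAME_NONZERO_PATTERN allows PETSc to reuse symbolic data
    // between successive solves.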
    KSPSetOperators(solver, petscMatrix, petscMatrix, SAME_NONZERO_PATTERN); 
    KSPSetFromOptions(solver);

    KSPGetPC(solver, &pc);
    setBlockPreconditioner(pc);

    MSG("Fill petsc matrix needed %.5f seconds\n", MPI::Wtime() - wtime);
  }


  void PetscSolverGlobalBlockMatrix::fillPetscRhs(SystemVector *vec)
  {
    FUNCNAME("PetscSolverGlobalBlockMatrix::fillPetscRhs()");

    TEST_EXIT_DBG(vec)("No DOF vector defined!\n");

    nComponents = vec->getSize();
    int nRankRows = meshDistributor->getNumberRankDofs();
    int nOverallRows = meshDistributor->getNumberOverallDofs();

    nestVec.resize(nComponents);

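    // Create one distributed PETSc vector per solution component and fill
    // it from the corresponding DOF vector.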
    for (int i = 0; i < nComponents; i++) {
      VecCreateMPI(PETSC_COMM_WORLD, nRankRows, nOverallRows, &(nestVec[i]));

      setDofVector(nestVec[i], vec->getDOFVector(i));
      
      VecAssemblyBegin(nestVec[i]);
      VecAssemblyEnd(nestVec[i]);
    }

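    // Combine the component vectors into one nested vector that matches
    // the block structure of the nest matrix.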
    VecCreateNest(PETSC_COMM_WORLD, nComponents, PETSC_NULL, 
		  &(nestVec[0]), &petscRhsVec);

    VecAssemblyBegin(petscRhsVec);
    VecAssemblyEnd(petscRhsVec);
  }


  void PetscSolverGlobalBlockMatrix::solvePetscMatrix(SystemVector &vec, 
						      AdaptInfo *adaptInfo)
  {
    FUNCNAME("PetscSolverGlobalBlockMatrix::solvePetscMatrix()");

    // Solve the system. The rhs vector is reused as the solution vector,
    // so PETSc overwrites it with the result.
    KSPSolve(solver, petscRhsVec, petscRhsVec);

    // === Transfer values from PETSc's solution vectors to the DOF vectors. ===
    for (int i = 0; i < nComponents; i++) {
      DOFVector<double> &dofvec = *(vec.getDOFVector(i));

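      // Extract the sub-vector that holds the solution of component i.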
      Vec tmp;
      VecNestGetSubVec(petscRhsVec, i, &tmp);

      int nRankDofs = meshDistributor->getNumberRankDofs();
      PetscScalar *vecPointer;
      VecGetArray(tmp, &vecPointer);

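      // Copy the local part of the PETSc solution back into the DOF
      // vector, mapping each local PETSc index to the corresponding
      // DOF index.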
      for (int j = 0; j < nRankDofs; j++)
	dofvec[meshDistributor->mapLocalToDofIndex(j)] = vecPointer[j]; 

      VecRestoreArray(tmp, &vecPointer);
    }


    // === Synchronize DOFs at common DOFs, i.e., DOFs that correspond to ===
    // === more than one partition.                                       ===
    meshDistributor->synchVector(vec);


    // === Destroy PETSc's variables. ===
    VecDestroy(&petscRhsVec);
    for (int i = 0; i < nComponents; i++)
      VecDestroy(&(nestVec[i]));
  }


  void PetscSolverGlobalBlockMatrix::destroyMatrixData()
  {
    FUNCNAME("PetscSolverGlobalBlockMatrix::destroyMatrixData()");

    for (unsigned int i = 0; i < nestMat.size(); i++)
      if (nestMat[i] != PETSC_NULL)
	MatDestroy(&(nestMat[i]));

    MatDestroy(&petscMatrix);
    KSPDestroy(&solver);
  }


  void PetscSolverGlobalBlockMatrix::setDofMatrix(Mat& petscMat, DOFMatrix* mat)
  {
    FUNCNAME("PetscSolverGlobalBlockMatrix::setDofMatrix()");

    TEST_EXIT(mat)("No DOFMatrix!\n");
    TEST_EXIT(petscMat)("No PETSc matrix!\n");

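    // The underlying DOF matrix is an MTL4 matrix; set up property maps
    // and cursor types to traverse its nonzero entries row by row.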
    using mtl::tag::row; using mtl::tag::nz; using mtl::begin; using mtl::end;
    namespace traits = mtl::traits;
    typedef DOFMatrix::base_matrix_type Matrix;

    traits::col<Matrix>::type col(mat->getBaseMatrix());
    traits::const_value<Matrix>::type value(mat->getBaseMatrix());

    typedef traits::range_generator<row, Matrix>::type cursor_type;
    typedef traits::range_generator<nz, cursor_type>::type icursor_type;

    vector<int> cols;
    vector<double> values;
    cols.reserve(300);
    values.reserve(300);
    
    // === Traverse all rows of the DOF matrix and insert the values  ===
    // === row-wise into the PETSc matrix.                            ===

    for (cursor_type cursor = begin<row>(mat->getBaseMatrix()), 
	   cend = end<row>(mat->getBaseMatrix()); cursor != cend; ++cursor) {

      // Global index of the current row DOF.
      int rowIndex = meshDistributor->mapLocalToGlobal(*cursor);

      cols.clear();
      values.clear();
      
      for (icursor_type icursor = begin<nz>(cursor), icend = end<nz>(cursor); 
	   icursor != icend; ++icursor) {	
	// Global index of the current column DOF.
	int colIndex = meshDistributor->mapLocalToGlobal(col(*icursor));
	
	// Ignore all zero entries, except diagonal entries.
	if (value(*icursor) == 0.0 && rowIndex != colIndex)
	  continue;
	
	// Store the global column index and the corresponding value.
	cols.push_back(colIndex);
	values.push_back(value(*icursor));
      }

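      // Insert the whole row with one call; ADD_VALUES accumulates
      // contributions to the same entry from different ranks during
      // matrix assembly.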
      MatSetValues(petscMat, 1, &rowIndex, cols.size(), 
 		   &(cols[0]), &(values[0]), ADD_VALUES);	
    }
  }
  
  
  void PetscSolverGlobalBlockMatrix::setDofVector(Vec& petscVec, 
						  DOFVector<double>* vec)
  {
    FUNCNAME("PetscSolverGlobalBlockMatrix::setDofVector()");

    // Traverse all used DOFs in the dof vector.
    DOFVector<double>::Iterator dofIt(vec, USED_DOFS);
    for (dofIt.reset(); !dofIt.end(); ++dofIt) {
      int index = meshDistributor->mapLocalToGlobal(dofIt.getDOFIndex());
      double value = *dofIt;

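      // With ADD_VALUES, contributions to DOFs shared along partition
      // boundaries are summed during vector assembly.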
      VecSetValues(petscVec, 1, &index, &value, ADD_VALUES);
    }
  }

}