//
// Software License for AMDiS
//
// Copyright (c) 2010 Dresden University of Technology 
// All rights reserved.
// Authors: Simon Vey, Thomas Witkowski et al.
//
// This file is part of AMDiS
//
// See also license.opensource.txt in the distribution.


#include "parallel/PetscSolver.h"
#include "parallel/StdMpi.h"
#include "parallel/ParallelDebug.h"

#include "DOFVector.h"
#include "Debug.h"
#include "SystemVector.h"

#include "petscksp.h"

namespace AMDiS {
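
  // myKSPMonitor() is registered via KSPMonitorSet() in solvePetscMatrix();
  // PETSc calls it once per KSP iteration. It prints the residual norm only
  // every 100th iteration and only on MPI rank 0, to keep the output of
  // large parallel runs readable.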

  PetscErrorCode myKSPMonitor(KSP ksp, PetscInt iter, PetscReal rnorm, void *)
  {
    if (iter % 100 == 0 && MPI::COMM_WORLD.Get_rank() == 0)
      std::cout << "[0]  Petsc-Iteration " << iter << ": " << rnorm << std::endl;

    return 0;
  }
 
  void PetscSolver::solve(AdaptInfo *adaptInfo, bool fixedMatrix)
  {
    FUNCNAME("PetscSolver::solve()");

    TEST_EXIT(meshDistributor)("Should not happen!\n");

#ifdef _OPENMP
    double wtime = omp_get_wtime();
#endif
    clock_t first = clock();

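    // Note: the matrix and rhs are reassembled on every call; the
    // fixedMatrix argument is not evaluated here.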
    fillPetscMatrix(systemMatrix, rhs);
    solvePetscMatrix(*solution, adaptInfo);   

#ifdef _OPENMP
    INFO(info, 8)("solution of discrete system needed %.5f seconds system time / %.5f seconds wallclock time\n",
		   TIME_USED(first, clock()),
		   omp_get_wtime() - wtime);
#else
    INFO(info, 8)("solution of discrete system needed %.5f seconds\n",
		   TIME_USED(first, clock()));
#endif    
  }


  void PetscSolver::setDofMatrix(DOFMatrix* mat, int dispMult, 
				 int dispAddRow, int dispAddCol)
  {
    FUNCNAME("PetscSolver::setDofMatrix()");

    TEST_EXIT(mat)("No DOFMatrix!\n");

    using mtl::tag::row; using mtl::tag::nz; using mtl::begin; using mtl::end;
    namespace traits = mtl::traits;
    typedef DOFMatrix::base_matrix_type Matrix;

    traits::col<Matrix>::type col(mat->getBaseMatrix());
    traits::const_value<Matrix>::type value(mat->getBaseMatrix());

    typedef traits::range_generator<row, Matrix>::type cursor_type;
    typedef traits::range_generator<nz, cursor_type>::type icursor_type;

    std::vector<int> cols;
    std::vector<double> values;
    cols.reserve(300);
    values.reserve(300);
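    // 300 is presumably a generous upper bound on the number of entries per
    // row; reserving it up front avoids reallocations in the loop below.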

    // === Traverse all rows of the dof matrix and insert row wise the values ===
    // === to the petsc matrix.                                               ===

    for (cursor_type cursor = begin<row>(mat->getBaseMatrix()), 
	   cend = end<row>(mat->getBaseMatrix()); cursor != cend; ++cursor) {

      cols.clear();
      values.clear();

      // Global index of the current row dof.
      int globalRowDof = meshDistributor->mapLocalToGlobal(*cursor);
      // Test if the current row dof is a periodic dof.
      bool periodicRow = meshDistributor->isPeriodicDof(globalRowDof);

      // Calculate petsc row index.
      int rowIndex = globalRowDof * dispMult + dispAddRow;


      // === Traverse all non zero entries of the row and produce vector cols ===
      // === with the column indices of all row entries and vector values     ===
      // === with the corresponding values.                                   ===

      for (icursor_type icursor = begin<nz>(cursor), icend = end<nz>(cursor); 
	   icursor != icend; ++icursor) {
	
	// Global index of the current column dof.
	int globalColDof = meshDistributor->mapLocalToGlobal(col(*icursor));
	// Calculate the exact position of the column index in the petsc matrix.
	int colIndex = globalColDof * dispMult + dispAddCol;

	// Set only non zero values.
	if (value(*icursor) != 0.0 || rowIndex == colIndex) {

	  // If the current row is not periodic, but the current dof index is periodic,
	  // we have to duplicate the value to the other corresponding periodic columns.
 	  if (!periodicRow && meshDistributor->isPeriodicDof(globalColDof)) {
	    // The value is assigned to n matrix entries; therefore, every
	    // entry gets only 1/n of the original value.
	    std::set<int>& perAsc = meshDistributor->getPerDofAssociations(globalColDof);
	    double scalFactor = 1.0 / (perAsc.size() + 1.0);

	    // Insert original entry.
 	    cols.push_back(colIndex);
 	    values.push_back(value(*icursor) * scalFactor);

	    // Insert the periodic entries.
	    for (std::set<int>::iterator perIt = perAsc.begin(); perIt != perAsc.end(); ++perIt) {
	      int mappedDof = meshDistributor->getPeriodicMapping(*perIt, globalColDof);
	      cols.push_back(mappedDof * dispMult + dispAddCol);
 	      values.push_back(value(*icursor) * scalFactor);
	    }

 	  } else {	    
	    // The column dof index is not periodic, so simply add the entry.
	    cols.push_back(colIndex);
	    values.push_back(value(*icursor));
	  }
	}
      }


      // === Up to now we have assembled one row. Now the row must be sent ===
      // === to the corresponding rows of the petsc matrix.                ===

      if (periodicRow) {
	// The row dof is periodic, so send the row to all corresponding
	// periodic rows.
	std::set<int>& perAsc = meshDistributor->getPerDofAssociations(globalRowDof);

	double scalFactor = 1.0 / (perAsc.size() + 1.0);

	for (unsigned int i = 0; i < values.size(); i++)
	  values[i] *= scalFactor;	

	// Send the main row to the petsc matrix.
	MatSetValues(petscMatrix, 1, &rowIndex, cols.size(), 
		     &(cols[0]), &(values[0]), ADD_VALUES);	
 
	for (std::set<int>::iterator perIt = perAsc.begin(); perIt != perAsc.end(); ++perIt) {
	  std::vector<int> perCols;
	  perCols.reserve(300);
	  std::vector<double> perValues;
	  perValues.reserve(300);
	  for (unsigned int i = 0; i < cols.size(); i++) {
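	    // Recover the global column DOF from the petsc column index
	    // (inverse of: colIndex = globalColDof * dispMult + dispAddCol).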
	    int tmp = (cols[i] - dispAddCol) / dispMult;

	    if (meshDistributor->isPeriodicDof(tmp, *perIt))
	      perCols.push_back((meshDistributor->getPeriodicMapping(*perIt, tmp) * dispMult) + dispAddCol);
	    else
	      perCols.push_back(cols[i]);
	    
	    perValues.push_back(values[i]);
	  }

	  int perRowIndex = (meshDistributor->getPeriodicMapping(*perIt, globalRowDof) * dispMult) + dispAddRow;

	  MatSetValues(petscMatrix, 1, &perRowIndex, perCols.size(), 
		       &(perCols[0]), &(perValues[0]), ADD_VALUES);
	}

      } else {
	// The row dof is not periodic, simply send the row to the petsc matrix.
	MatSetValues(petscMatrix, 1, &rowIndex, cols.size(), 
		     &(cols[0]), &(values[0]), ADD_VALUES);
      }    
    }
  }


  void PetscSolver::setDofVector(Vec& petscVec, DOFVector<double>* vec, 
				 int dispMult, int dispAdd)
  {
    FUNCNAME("PetscSolver::setDofVector()");

    // Traverse all used dofs in the dof vector.
    DOFVector<double>::Iterator dofIt(vec, USED_DOFS);
    for (dofIt.reset(); !dofIt.end(); ++dofIt) {
      // Calculate global row index of the dof.
      DegreeOfFreedom globalRowDof = 
	meshDistributor->mapLocalToGlobal(dofIt.getDOFIndex());
      // Calculate petsc index of the row dof.
      int index = globalRowDof * dispMult + dispAdd;

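      // A periodic dof appears in several rows of the global system, so its
      // value is distributed equally among the original row and all
      // associated periodic rows (each receives 1/(n+1) of the value).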
      if (meshDistributor->isPeriodicDof(globalRowDof)) {
	std::set<int>& perAsc = meshDistributor->getPerDofAssociations(globalRowDof);
	double value = *dofIt / (perAsc.size() + 1.0);
	VecSetValues(petscVec, 1, &index, &value, ADD_VALUES);

	for (std::set<int>::iterator perIt = perAsc.begin(); perIt != perAsc.end(); ++perIt) {
	  int mappedDof = meshDistributor->getPeriodicMapping(*perIt, globalRowDof);
	  int mappedIndex = mappedDof * dispMult + dispAdd;
	  VecSetValues(petscVec, 1, &mappedIndex, &value, ADD_VALUES);
	}
      } else {
	// The dof index is not periodic.
	double value = *dofIt;
	VecSetValues(petscVec, 1, &index, &value, ADD_VALUES);
      }
    }
  }


  void PetscSolver::createPetscNnzStructure(Matrix<DOFMatrix*> *mat)
  {
    FUNCNAME("PetscSolver::createPetscNnzStructure()");

    TEST_EXIT_DBG(!d_nnz)("There is something wrong!\n");
    TEST_EXIT_DBG(!o_nnz)("There is something wrong!\n");

    int nRankRows = meshDistributor->getNumberRankDofs() * nComponents;
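
    // For each local row, d_nnz counts the nonzeros in the diagonal block
    // (columns owned by this rank) and o_nnz those in the off-diagonal
    // block (columns owned by other ranks), as expected by MatCreateMPIAIJ().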
    d_nnz = new int[nRankRows];
    o_nnz = new int[nRankRows];
    for (int i = 0; i < nRankRows; i++) {
      d_nnz[i] = 0;
      o_nnz[i] = 0;
    }

    using mtl::tag::row; using mtl::tag::nz; using mtl::begin; using mtl::end;
    namespace traits = mtl::traits;
    typedef DOFMatrix::base_matrix_type Matrix;
    typedef std::vector<std::pair<int, int> > MatrixNnzEntry;

    // Stores, for each rank, a list of nnz entries (i.e., pairs of row and
    // column indices) that this rank will send to it. These nnz entries are
    // assembled on this rank, but because the row DOFs are not owned by this
    // rank, they must be sent to the owner of the row DOFs.
    std::map<int, MatrixNnzEntry> sendMatrixEntry;


    // First, create for every rank we send data to a MatrixNnzEntry object
    // with 0 entries.
    typedef std::map<int, DofContainer> RankToDofContainer;
    RankToDofContainer& recvDofs = meshDistributor->getRecvDofs();
    for (RankToDofContainer::iterator it = recvDofs.begin();
	 it != recvDofs.end(); ++it)
      sendMatrixEntry[it->first].resize(0);
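    // (Presumably this guarantees that each communication partner receives
    // a message even if no nnz entries are produced for it below.)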


    for (int i = 0; i < nComponents; i++) {
      for (int j = 0; j < nComponents; j++) {
 	if ((*mat)[i][j]) {
	  Matrix bmat = (*mat)[i][j]->getBaseMatrix();

	  traits::col<Matrix>::type col(bmat);
	  traits::const_value<Matrix>::type value(bmat);
	  
	  typedef traits::range_generator<row, Matrix>::type cursor_type;
	  typedef traits::range_generator<nz, cursor_type>::type icursor_type;
	  
	  for (cursor_type cursor = begin<row>(bmat), 
		 cend = end<row>(bmat); cursor != cend; ++cursor) {

	    // Map the local row number to the global DOF index and create from it
	    // the global PETSc row index of this DOF.
	    int petscRowIdx = 
	      meshDistributor->mapLocalToGlobal(*cursor) * nComponents + i;

	    if (meshDistributor->getIsRankDof(*cursor)) {

	      // === The current row DOF is a rank dof, so create the corresponding ===
	      // === nnz values directly on rank's nnz data.                        ===

	      // This is the local row index of the local PETSc matrix.
	      int localPetscRowIdx = 
		petscRowIdx - meshDistributor->getRstart() * nComponents;

	      TEST_EXIT_DBG(localPetscRowIdx >= 0 && localPetscRowIdx < nRankRows)
		("Should not happen!\n Debug info: localRowIdx = %d   globalRowIdx = %d   petscRowIdx = %d   localPetscRowIdx = %d   rStart = %d   nComponents = %d   nRankRows = %d\n",
		 *cursor, meshDistributor->mapLocalToGlobal(*cursor), petscRowIdx, localPetscRowIdx, meshDistributor->getRstart(), nComponents, nRankRows);
	      
	      // Traverse all non zero entries in this row.
	      for (icursor_type icursor = begin<nz>(cursor), 
		     icend = end<nz>(cursor); icursor != icend; ++icursor) {
		int petscColIdx = 
		  meshDistributor->mapLocalToGlobal(col(*icursor)) * nComponents + j;

		if (value(*icursor) != 0.0 || petscRowIdx == petscColIdx) {
		  // The row DOF is a rank DOF; if the column is also a rank DOF,
		  // increment the d_nnz value for this row, otherwise the o_nnz value.
		  if (petscColIdx >= meshDistributor->getRstart() * nComponents && 
		      petscColIdx < meshDistributor->getRstart() * nComponents + nRankRows)
		    d_nnz[localPetscRowIdx]++;
		  else
		    o_nnz[localPetscRowIdx]++;
		}    
	      }
	    } else {
	      typedef std::map<int, DofContainer> RankToDofContainer;

	      // === The current row DOF is not a rank dof, i.e., it is       ===
	      // === assembled on this rank but belongs to the matrix of      ===
	      // === another rank. Therefore, the corresponding nnz structure ===
	      // === of this row must also be sent to that rank.              ===

	      // Find out which rank is the owner of this DOF.
	      int sendToRank = -1;
	      for (RankToDofContainer::iterator it = meshDistributor->getRecvDofs().begin();
		   it != meshDistributor->getRecvDofs().end(); ++it) {
		for (DofContainer::iterator dofIt = it->second.begin();
		     dofIt != it->second.end(); ++dofIt) {
		  if (**dofIt == *cursor) {
		    sendToRank = it->first;
		    break;
		  }
		}

		if (sendToRank != -1)
		  break;
	      }

	      TEST_EXIT_DBG(sendToRank != -1)("Should not happen!\n");

	      // Send all non zero entries to the owner of the row DOF.
	      for (icursor_type icursor = begin<nz>(cursor), 
		     icend = end<nz>(cursor); icursor != icend; ++icursor) {
		if (value(*icursor) != 0.0) {
		  int petscColIdx = 
		    meshDistributor->mapLocalToGlobal(col(*icursor)) * nComponents + j;
		  
		  sendMatrixEntry[sendToRank].
		    push_back(std::make_pair(petscRowIdx, petscColIdx));
		}
	      }

	    } // if (isRankDof[*cursor]) ... else ...
	  } // for each row in mat[i][j]
	} // if mat[i][j]
      } 
    }

    // === Send and recv the nnz row structure to/from other ranks. ===

    StdMpi<MatrixNnzEntry> stdMpi(meshDistributor->getMpiComm(), true);
    stdMpi.send(sendMatrixEntry);
    stdMpi.recv(meshDistributor->getSendDofs());
    stdMpi.startCommunication<int>(MPI_INT);


    // === Evaluate the nnz structure this rank got from other ranks and add it to ===
    // === the PETSc nnz data structure.                                           ===

    for (std::map<int, MatrixNnzEntry>::iterator it = stdMpi.getRecvData().begin();
	 it != stdMpi.getRecvData().end(); ++it) {
      if (it->second.size() > 0) {
	for (unsigned int i = 0; i < it->second.size(); i++) {
	  int r = it->second[i].first;
	  int c = it->second[i].second;

	  int localRowIdx = r - meshDistributor->getRstart() * nComponents;

	  TEST_EXIT_DBG(localRowIdx >= 0 && localRowIdx < nRankRows)
	    ("Got row index %d/%d (nRankRows = %d) from rank %d. Should not happen!\n",
	     r, localRowIdx, nRankRows, it->first);
	  
	  if (c < meshDistributor->getRstart() * nComponents || 
	      c >= meshDistributor->getRstart() * nComponents + nRankRows)
	    o_nnz[localRowIdx]++;
	  else
	    d_nnz[localRowIdx]++;
	}
      }
    }

    // The above algorithm for calculating the number of nnz per row over-
    // approximates the value, i.e., the result is always equal to or larger
    // than the real number of nnz values in the global parallel matrix. For
    // small matrices the problem may arise that the result is larger than
    // the number of elements in a row. This is fixed in the following.

    if (nRankRows < 100) 
      for (int i = 0; i < nRankRows; i++)
	d_nnz[i] = std::min(d_nnz[i], nRankRows);
  }


  void PetscSolver::fillPetscMatrix(Matrix<DOFMatrix*> *mat, SystemVector *vec)
  {
    FUNCNAME("PetscSolver::fillPetscMatrix()");

    clock_t first = clock();
    int nRankRows = meshDistributor->getNumberRankDofs() * nComponents;
    int nOverallRows = meshDistributor->getNumberOverallDofs() * nComponents;

    // === Create PETSc vector (rhs, solution and a temporary vector). ===

    VecCreate(PETSC_COMM_WORLD, &petscRhsVec);
    VecSetSizes(petscRhsVec, nRankRows, nOverallRows);
    VecSetType(petscRhsVec, VECMPI);

    VecCreate(PETSC_COMM_WORLD, &petscSolVec);
    VecSetSizes(petscSolVec, nRankRows, nOverallRows);
    VecSetType(petscSolVec, VECMPI);

    VecCreate(PETSC_COMM_WORLD, &petscTmpVec);
    VecSetSizes(petscTmpVec, nRankRows, nOverallRows);
    VecSetType(petscTmpVec, VECMPI);

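    // Determine whether the mesh has changed on ANY rank since the nnz
    // structure was last computed: the Allreduce sums the per-rank change
    // flags, and any nonzero result forces the preallocation data to be
    // rebuilt below.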
    int recvAllValues = 0;
    int sendValue = static_cast<int>(meshDistributor->getLastMeshChangeIndex() != lastMeshNnz);
    meshDistributor->getMpiComm().Allreduce(&sendValue, &recvAllValues, 1, MPI_INT, MPI_SUM);

    if (!d_nnz || recvAllValues != 0) {
      if (d_nnz) {
	delete [] d_nnz;
	d_nnz = NULL;
	delete [] o_nnz;
	o_nnz = NULL;
      }

      createPetscNnzStructure(mat);
      lastMeshNnz = meshDistributor->getLastMeshChangeIndex();
    }


    // === Create PETSc matrix with the computed nnz data structure. ===
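    // Note: the scalar preallocation arguments (the zeros below) are ignored
    // by PETSc whenever the per-row arrays d_nnz/o_nnz are supplied.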

    MatCreateMPIAIJ(PETSC_COMM_WORLD, nRankRows, nRankRows, nOverallRows, nOverallRows,
		    0, d_nnz, 0, o_nnz, &petscMatrix);
    
#if (DEBUG != 0)
    INFO(info, 8)("Fill petsc matrix 1 needed %.5f seconds\n", TIME_USED(first, clock()));
#endif

#if (DEBUG != 0)
    int a, b;
    MatGetOwnershipRange(petscMatrix, &a, &b);
    TEST_EXIT(a == meshDistributor->getRstart() * nComponents)
      ("Wrong matrix ownership range!\n");
    TEST_EXIT(b == meshDistributor->getRstart() * nComponents + nRankRows)
      ("Wrong matrix ownership range!\n");
#endif


    // === Transfer values from DOF matrices to the PETSc matrix. === 

    for (int i = 0; i < nComponents; i++)
      for (int j = 0; j < nComponents; j++)
	if ((*mat)[i][j])
	  setDofMatrix((*mat)[i][j], nComponents, i, j);	
	
#if (DEBUG != 0)
    INFO(info, 8)("Fill petsc matrix 2 needed %.5f seconds\n", TIME_USED(first, clock()));
#endif

    MatAssemblyBegin(petscMatrix, MAT_FINAL_ASSEMBLY);
    MatAssemblyEnd(petscMatrix, MAT_FINAL_ASSEMBLY);

#if (DEBUG != 0)
    INFO(info, 8)("Fill petsc matrix 3 needed %.5f seconds\n", TIME_USED(first, clock()));
#endif

    // === Transfer values from DOF vector to the PETSc vector. === 

    for (int i = 0; i < nComponents; i++)
      setDofVector(petscRhsVec, vec->getDOFVector(i), nComponents, i);

    VecAssemblyBegin(petscRhsVec);
    VecAssemblyEnd(petscRhsVec);

    INFO(info, 8)("Fill petsc matrix needed %.5f seconds\n", TIME_USED(first, clock()));
  }


  void PetscSolver::solvePetscMatrix(SystemVector &vec, AdaptInfo *adaptInfo)
  {
    FUNCNAME("PetscSolver::solvePetscMatrix()");

#if 0
    // Set the old solution to be the initial guess for the petsc solver.
    for (int i = 0; i < nComponents; i++)
      setDofVector(petscSolVec, vec.getDOFVector(i), nComponents, i);

    VecAssemblyBegin(petscSolVec);
    VecAssemblyEnd(petscSolVec);
#endif

    // === Init Petsc solver. ===

    KSP solver;
    KSPCreate(PETSC_COMM_WORLD, &solver);
    KSPSetOperators(solver, petscMatrix, petscMatrix, SAME_NONZERO_PATTERN); 
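    // rtol = 0.0 disables the relative criterion: iterate until the absolute
    // residual norm drops below 1e-8 (the remaining parameters keep their
    // PETSc defaults).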
    KSPSetTolerances(solver, 0.0, 1e-8, PETSC_DEFAULT, PETSC_DEFAULT);
    KSPSetType(solver, KSPBCGS);
    KSPMonitorSet(solver, myKSPMonitor, PETSC_NULL, 0);
    KSPSetFromOptions(solver);
    // Do not delete the solution vector, use it for the initial guess.
    //    KSPSetInitialGuessNonzero(solver, PETSC_TRUE);


    // === Run Petsc. ===

    KSPSolve(solver, petscRhsVec, petscSolVec);


    // === Transfer values from petsc's solution vector to the dof vectors. ===

    PetscScalar *vecPointer;
    VecGetArray(petscSolVec, &vecPointer);

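    // The petsc vector stores the solution components interleaved per DOF
    // (index = dof * nComponents + component), matching the layout used for
    // assembly in setDofMatrix() and setDofVector().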
    int nRankDofs = meshDistributor->getNumberRankDofs();
    for (int i = 0; i < nComponents; i++) {
      DOFVector<double> &dofvec = *(vec.getDOFVector(i));
      for (int j = 0; j < nRankDofs; j++)
	dofvec[meshDistributor->mapLocalToDofIndex(j)] = 
	  vecPointer[j * nComponents + i]; 
    }

    VecRestoreArray(petscSolVec, &vecPointer);


    // === Synchronize the solution at shared dofs, i.e., dofs that belong ===
    // === to more than one partition.                                     ===
    meshDistributor->synchVector(vec);


    // === Print information about the solution process. ===

    int iterations = 0;
    KSPGetIterationNumber(solver, &iterations);
    MSG("  Number of iterations: %d\n", iterations);
    adaptInfo->setSolverIterations(iterations);

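    // Compute the true residual A*x - b and report its 2-norm as an
    // a-posteriori check of the solver result.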
    double norm = 0.0;
    MatMult(petscMatrix, petscSolVec, petscTmpVec);
    VecAXPY(petscTmpVec, -1.0, petscRhsVec);
    VecNorm(petscTmpVec, NORM_2, &norm);
    MSG("  Residual norm: %e\n", norm);


    // === Destroy Petsc's variables. ===

    MatDestroy(petscMatrix);
    VecDestroy(petscRhsVec);
    VecDestroy(petscSolVec);
    VecDestroy(petscTmpVec);
    KSPDestroy(solver);
  }

}