#include "parallel/PetscSolver.h"
#include "parallel/StdMpi.h"
#include "parallel/ParallelDebug.h"
#include "DOFVector.h"
#include "Debug.h"
#include "SystemVector.h"

#include "petscksp.h"

namespace AMDiS {

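  // KSP monitor callback (registered below via KSPMonitorSet): on rank 0, print
  // the current residual norm every 100th iteration.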
  PetscErrorCode myKSPMonitor(KSP ksp, PetscInt iter, PetscReal rnorm, void *)
  {    
    if (iter % 100 == 0 && MPI::COMM_WORLD.Get_rank() == 0)
      std::cout << "[0]  Petsc-Iteration " << iter << ": " << rnorm << std::endl;

    return 0;
  }
 

  void PetscSolver::solve(AdaptInfo *adaptInfo, bool fixedMatrix)
  {
    FUNCNAME("PetscSolver::solve()");

    TEST_EXIT(meshDistributor)("Should not happen!\n");

#ifdef _OPENMP
    double wtime = omp_get_wtime();
#endif
    clock_t first = clock();

    fillPetscMatrix(systemMatrix, rhs);
    solvePetscMatrix(*solution, adaptInfo);   

#ifdef _OPENMP
    INFO(info, 8)("solution of discrete system needed %.5f seconds system time / %.5f seconds wallclock time\n",
		   TIME_USED(first, clock()),
		   omp_get_wtime() - wtime);
#else
    INFO(info, 8)("solution of discrete system needed %.5f seconds\n",
		   TIME_USED(first, clock()));
#endif    
  }


  void PetscSolver::setDofMatrix(DOFMatrix* mat, int dispMult, 
				 int dispAddRow, int dispAddCol)
  {
    FUNCNAME("PetscSolver::setDofMatrix()");

    TEST_EXIT(mat)("No DOFMatrix!\n");

    using mtl::tag::row; using mtl::tag::nz; using mtl::begin; using mtl::end;
    namespace traits= mtl::traits;
    typedef DOFMatrix::base_matrix_type Matrix;

    traits::col<Matrix>::type col(mat->getBaseMatrix());
    traits::const_value<Matrix>::type value(mat->getBaseMatrix());

    typedef traits::range_generator<row, Matrix>::type cursor_type;
    typedef traits::range_generator<nz, cursor_type>::type icursor_type;

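    // Column indices and values of the current matrix row; reserve some space
    // up front to avoid frequent reallocations.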
    std::vector<int> cols;
    std::vector<double> values;
    cols.reserve(300);
    values.reserve(300);

    // === Traverse all rows of the dof matrix and insert the values row-wise ===
    // === into the petsc matrix.                                             ===

    for (cursor_type cursor = begin<row>(mat->getBaseMatrix()), 
	   cend = end<row>(mat->getBaseMatrix()); cursor != cend; ++cursor) {

      cols.clear();
      values.clear();

      // Global index of the current row dof.
      int globalRowDof = meshDistributor->mapLocalToGlobal(*cursor);
      // Test if the current row dof is a periodic dof.
      bool periodicRow = meshDistributor->isPeriodicDof(globalRowDof);
      // Calculate petsc row index.
      int rowIndex = globalRowDof * dispMult + dispAddRow;


      // === Traverse all non zero entries of the row and produce vector cols ===
      // === with the column indices of all row entries and vector values     ===
      // === with the corresponding values.                                   ===

      for (icursor_type icursor = begin<nz>(cursor), icend = end<nz>(cursor); 
	   icursor != icend; ++icursor) {
	
	// Global index of the current column dof.
	int globalColDof = meshDistributor->mapLocalToGlobal(col(*icursor));
	// Calculate the exact position of the column index in the petsc matrix.
	int colIndex = globalColDof * dispMult + dispAddCol;

	// Insert only non-zero values (the diagonal entry is always set).
	if (value(*icursor) != 0.0 || rowIndex == colIndex) {

	  // If the current row is not periodic, but the current column dof is periodic,
	  // we have to duplicate the value to all corresponding periodic columns.
 	  if (!periodicRow && meshDistributor->isPeriodicDof(globalColDof)) {
	    // The value is assigned to n matrix entries; therefore, every entry
	    // gets only 1/n of the original value.
	    std::set<int>& perAsc = meshDistributor->getPerDofAssociations(globalColDof);
	    double scalFactor = 1.0 / (perAsc.size() + 1.0);

	    // Insert original entry.
 	    cols.push_back(colIndex);
 	    values.push_back(value(*icursor) * scalFactor);

	    // Insert the periodic entries.
	    for (std::set<int>::iterator perIt = perAsc.begin(); perIt != perAsc.end(); ++perIt) {
	      int mappedDof = meshDistributor->getPeriodicMapping(*perIt, globalColDof);
	      cols.push_back(mappedDof * dispMult + dispAddCol);
 	      values.push_back(value(*icursor) * scalFactor);
	    }

 	  } else {	    
	    // The column dof index is not periodic, so simply add the entry.
	    cols.push_back(colIndex);
	    values.push_back(value(*icursor));
	  }
	}
      }


      // === Up to now we have assembled one row. Now, the row must be sent to the ===
      // === corresponding rows of the petsc matrix.                               ===
     
      if (periodicRow) {
	// The row dof is periodic, so the row must be added to all corresponding periodic rows.
	std::set<int>& perAsc = meshDistributor->getPerDofAssociations(globalRowDof);

	double scalFactor = 1.0 / (perAsc.size() + 1.0);

	for (unsigned int i = 0; i < values.size(); i++)
	  values[i] *= scalFactor;	

	// Send the main row to the petsc matrix.
	MatSetValues(petscMatrix, 1, &rowIndex, cols.size(), 
		     &(cols[0]), &(values[0]), ADD_VALUES);	
 
	for (std::set<int>::iterator perIt = perAsc.begin(); perIt != perAsc.end(); ++perIt) {
	  std::vector<int> perCols;
	  perCols.reserve(300);
	  std::vector<double> perValues;
	  perValues.reserve(300);
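	  // Map each column of the assembled row to its periodic partner with
	  // respect to this periodic association; columns without a partner are
	  // kept unchanged.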
	  for (unsigned int i = 0; i < cols.size(); i++) {
	    int tmp = (cols[i] - dispAddCol) / dispMult;

	    if (meshDistributor->isPeriodicDof(tmp, *perIt))
	      perCols.push_back((meshDistributor->getPeriodicMapping(*perIt, tmp) * dispMult) + dispAddCol);
	    else
	      perCols.push_back(cols[i]);
	    
	    perValues.push_back(values[i]);
	  }

	  int perRowIndex = (meshDistributor->getPeriodicMapping(*perIt, globalRowDof) * dispMult) + dispAddRow;

	  MatSetValues(petscMatrix, 1, &perRowIndex, perCols.size(), 
		       &(perCols[0]), &(perValues[0]), ADD_VALUES);
	}

      } else {
	// The row dof is not periodic, simply send the row to the petsc matrix.
	MatSetValues(petscMatrix, 1, &rowIndex, cols.size(), 
		     &(cols[0]), &(values[0]), ADD_VALUES);
      }    
    }
  }


  void PetscSolver::setDofVector(Vec& petscVec, DOFVector<double>* vec, 
				 int dispMult, int dispAdd)
  {
    FUNCNAME("PetscSolver::setDofVector()");

    // Traverse all used dofs in the dof vector.
    DOFVector<double>::Iterator dofIt(vec, USED_DOFS);
    for (dofIt.reset(); !dofIt.end(); ++dofIt) {
      // Calculate global row index of the dof.
      DegreeOfFreedom globalRowDof = 
	meshDistributor->mapLocalToGlobal(dofIt.getDOFIndex());
      // Calculate petsc index of the row dof.
      int index = globalRowDof * dispMult + dispAdd;

      if (meshDistributor->isPeriodicDof(globalRowDof)) {
	std::set<int>& perAsc = meshDistributor->getPerDofAssociations(globalRowDof);
	double value = *dofIt / (perAsc.size() + 1.0);
	VecSetValues(petscVec, 1, &index, &value, ADD_VALUES);

	for (std::set<int>::iterator perIt = perAsc.begin(); perIt != perAsc.end(); ++perIt) {
	  int mappedDof = meshDistributor->getPeriodicMapping(*perIt, globalRowDof);
	  int mappedIndex = mappedDof * dispMult + dispAdd;
	  VecSetValues(petscVec, 1, &mappedIndex, &value, ADD_VALUES);
	}
      } else {
	// The dof index is not periodic.
	double value = *dofIt;
	VecSetValues(petscVec, 1, &index, &value, ADD_VALUES);
      }
    }
  }


  void PetscSolver::createPetscNnzStructure(Matrix<DOFMatrix*> *mat)
  {
    FUNCNAME("PetscSolver::createPetscNnzStructure()");

    TEST_EXIT_DBG(!d_nnz)("There is something wrong!\n");
    TEST_EXIT_DBG(!o_nnz)("There is something wrong!\n");

    int nRankRows = meshDistributor->getNumberRankDofs() * nComponents;
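    // Number of nonzeros per local row in the diagonal block (columns owned by
    // this rank) and in the off-diagonal block, as required for PETSc's MPIAIJ
    // preallocation.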
    d_nnz = new int[nRankRows];
    o_nnz = new int[nRankRows];
    for (int i = 0; i < nRankRows; i++) {
      d_nnz[i] = 0;
      o_nnz[i] = 0;
    }

    using mtl::tag::row; using mtl::tag::nz; using mtl::begin; using mtl::end;
    namespace traits = mtl::traits;
    typedef DOFMatrix::base_matrix_type Matrix;
    typedef std::vector<std::pair<int, int> > MatrixNnzEntry;

    // Stores, for each rank this rank sends data to, a list of nnz entries (i.e.,
    // pairs of row and column index). These nnz entries are assembled on this rank,
    // but because the row DOFs are not owned by this rank, they will be sent to the
    // owner of the row DOFs.
    std::map<int, MatrixNnzEntry> sendMatrixEntry;


    // First, create for all ranks we send data to an empty MatrixNnzEntry object.
    typedef std::map<int, DofContainer> RankToDofContainer;
    RankToDofContainer& recvDofs = meshDistributor->getRecvDofs();
    for (RankToDofContainer::iterator it = recvDofs.begin();
	 it != recvDofs.end(); ++it)
      sendMatrixEntry[it->first].resize(0);


    for (int i = 0; i < nComponents; i++) {
      for (int j = 0; j < nComponents; j++) {
 	if ((*mat)[i][j]) {
	  Matrix bmat = (*mat)[i][j]->getBaseMatrix();

	  traits::col<Matrix>::type col(bmat);
	  traits::const_value<Matrix>::type value(bmat);
	  
	  typedef traits::range_generator<row, Matrix>::type cursor_type;
	  typedef traits::range_generator<nz, cursor_type>::type icursor_type;
	  
	  for (cursor_type cursor = begin<row>(bmat), 
		 cend = end<row>(bmat); cursor != cend; ++cursor) {

	    // Map the local row number to the global DOF index and create from it
	    // the global PETSc row index of this DOF.
	    int petscRowIdx = 
	      meshDistributor->mapLocalToGlobal(*cursor) * nComponents + i;

	    if (meshDistributor->getIsRankDof(*cursor)) {

	      // === The current row DOF is a rank dof, so create the corresponding ===
	      // === nnz values directly on rank's nnz data.                        ===

	      // This is the local row index of the local PETSc matrix.
	      int localPetscRowIdx = 
		petscRowIdx - meshDistributor->getRstart() * nComponents;

	      TEST_EXIT_DBG(localPetscRowIdx >= 0 && localPetscRowIdx < nRankRows)
		("Should not happen! \n Debug info: localRowIdx = %d   globalRowIdx = %d   petscRowIdx = %d   localPetscRowIdx = %d   rStart = %d   nComponents = %d   nRankRows = %d\n",
		 *cursor, meshDistributor->mapLocalToGlobal(*cursor), petscRowIdx, localPetscRowIdx, meshDistributor->getRstart(), nComponents, nRankRows);
	      
	      // Traverse all non zero entries in this row.
	      for (icursor_type icursor = begin<nz>(cursor), 
		     icend = end<nz>(cursor); icursor != icend; ++icursor) {
		int petscColIdx = 
		  meshDistributor->mapLocalToGlobal(col(*icursor)) * nComponents + j;

		if (value(*icursor) != 0.0 || petscRowIdx == petscColIdx) {
		  // The row DOF is a rank DOF. If the column is also a rank DOF,
		  // increment the d_nnz value for this row, otherwise the o_nnz value.
		  if (petscColIdx >= meshDistributor->getRstart() * nComponents && 
		      petscColIdx < meshDistributor->getRstart() * nComponents + nRankRows)
		    d_nnz[localPetscRowIdx]++;
		  else
		    o_nnz[localPetscRowIdx]++;
		}    
	      }
	    } else {
	      typedef std::map<int, DofContainer> RankToDofContainer;

	      // === The current row DOF is not a rank dof, i.e., it is assembled on ===
	      // === this rank but belongs to the matrix rows of another rank. So we ===
	      // === also need to send the corresponding nnz structure of this row   ===
	      // === to that rank.                                                   ===

	      // Find out which rank owns this DOF.
	      int sendToRank = -1;
	      for (RankToDofContainer::iterator it = meshDistributor->getRecvDofs().begin();
		   it != meshDistributor->getRecvDofs().end(); ++it) {
		for (DofContainer::iterator dofIt = it->second.begin();
		     dofIt != it->second.end(); ++dofIt) {
		  if (**dofIt == *cursor) {
		    sendToRank = it->first;
		    break;
		  }
		}

		if (sendToRank != -1)
		  break;
	      }

	      TEST_EXIT_DBG(sendToRank != -1)("Should not happen!\n");

	      // Send all non-zero entries to the owner of the row DOF.
	      for (icursor_type icursor = begin<nz>(cursor), 
		     icend = end<nz>(cursor); icursor != icend; ++icursor) {
		if (value(*icursor) != 0.0) {
		  int petscColIdx = 
		    meshDistributor->mapLocalToGlobal(col(*icursor)) * nComponents + j;
		  
		  sendMatrixEntry[sendToRank].
		    push_back(std::make_pair(petscRowIdx, petscColIdx));
		}
	      }

	    } // if (isRankDof[*cursor]) ... else ...
	  } // for each row in mat[i][j]
	} // if mat[i][j]
      } 
    }

    // === Send and recv the nnz row structure to/from other ranks. ===

    StdMpi<MatrixNnzEntry> stdMpi(meshDistributor->getMpiComm(), true);
    stdMpi.send(sendMatrixEntry);
    stdMpi.recv(meshDistributor->getSendDofs());
    stdMpi.startCommunication<int>(MPI_INT);


    // === Evaluate the nnz structure this rank got from other ranks and add it to ===
    // === the PETSc nnz data structure.                                           ===

    for (std::map<int, MatrixNnzEntry>::iterator it = stdMpi.getRecvData().begin();
	 it != stdMpi.getRecvData().end(); ++it) {
      if (it->second.size() > 0) {
	for (unsigned int i = 0; i < it->second.size(); i++) {
	  int r = it->second[i].first;
	  int c = it->second[i].second;

	  int localRowIdx = r - meshDistributor->getRstart() * nComponents;

	  TEST_EXIT_DBG(localRowIdx >= 0 && localRowIdx < nRankRows)
	    ("Got row index %d/%d (nRankRows = %d) from rank %d. Should not happen!\n",
	     r, localRowIdx, nRankRows, it->first);
	  
	  if (c < meshDistributor->getRstart() * nComponents || 
	      c >= meshDistributor->getRstart() * nComponents + nRankRows)
	    o_nnz[localRowIdx]++;
	  else
	    d_nnz[localRowIdx]++;
	}
      }
    }

    // The above algorithm for calculating the number of nnz per row over-
    // approximates the value, i.e., the number is always equal to or larger than
    // the real number of nnz values in the global parallel matrix. For small
    // matrices, it may happen that the result is larger than the number of
    // elements in a row. This is fixed in the following.

    if (nRankRows < 100) 
      for (int i = 0; i < nRankRows; i++)
	d_nnz[i] = std::min(d_nnz[i], nRankRows);
  }


  void PetscSolver::fillPetscMatrix(Matrix<DOFMatrix*> *mat, SystemVector *vec)
  {
    FUNCNAME("PetscSolver::fillPetscMatrix()");

    clock_t first = clock();
    int nRankRows = meshDistributor->getNumberRankDofs() * nComponents;
    int nOverallRows = meshDistributor->getNumberOverallDofs() * nComponents;

    // === Create PETSc vector (rhs, solution and a temporary vector). ===

    VecCreate(PETSC_COMM_WORLD, &petscRhsVec);
    VecSetSizes(petscRhsVec, nRankRows, nOverallRows);
    VecSetType(petscRhsVec, VECMPI);

    VecCreate(PETSC_COMM_WORLD, &petscSolVec);
    VecSetSizes(petscSolVec, nRankRows, nOverallRows);
    VecSetType(petscSolVec, VECMPI);

    VecCreate(PETSC_COMM_WORLD, &petscTmpVec);
    VecSetSizes(petscTmpVec, nRankRows, nOverallRows);
    VecSetType(petscTmpVec, VECMPI);

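    // Check on all ranks whether the mesh has changed since the nnz structure was
    // computed the last time. If it has changed on any rank (or if no structure
    // exists yet), the nnz structure must be recomputed.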
    int recvAllValues = 0;
    int sendValue = static_cast<int>(meshDistributor->getLastMeshChangeIndex() != lastMeshNnz);
    meshDistributor->getMpiComm().Allreduce(&sendValue, &recvAllValues, 1, MPI_INT, MPI_SUM);

    if (!d_nnz || recvAllValues != 0) {
      if (d_nnz) {
	delete [] d_nnz;
	d_nnz = NULL;
	delete [] o_nnz;
	o_nnz = NULL;
      }

      createPetscNnzStructure(mat);
      lastMeshNnz = meshDistributor->getLastMeshChangeIndex();
    }


    // === Create PETSc matrix with the computed nnz data structure. ===

    MatCreateMPIAIJ(PETSC_COMM_WORLD, nRankRows, nRankRows, nOverallRows, nOverallRows,
		    0, d_nnz, 0, o_nnz, &petscMatrix);
    
#if (DEBUG != 0)
    INFO(info, 8)("Fill petsc matrix 1 needed %.5f seconds\n", TIME_USED(first, clock()));
#endif

#if (DEBUG != 0)
    int a, b;
    MatGetOwnershipRange(petscMatrix, &a, &b);
    TEST_EXIT(a == meshDistributor->getRstart() * nComponents)
      ("Wrong matrix ownership range!\n");
    TEST_EXIT(b == meshDistributor->getRstart() * nComponents + nRankRows)
      ("Wrong matrix ownership range!\n");
#endif


    // === Transfer values from DOF matrices to the PETSc matrix. === 

    for (int i = 0; i < nComponents; i++)
      for (int j = 0; j < nComponents; j++)
	if ((*mat)[i][j])
	  setDofMatrix((*mat)[i][j], nComponents, i, j);	
	
#if (DEBUG != 0)
    INFO(info, 8)("Fill petsc matrix 2 needed %.5f seconds\n", TIME_USED(first, clock()));
#endif

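    // Finish the matrix assembly; values that were set for rows owned by other
    // ranks are communicated to their owners here.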
    MatAssemblyBegin(petscMatrix, MAT_FINAL_ASSEMBLY);
    MatAssemblyEnd(petscMatrix, MAT_FINAL_ASSEMBLY);

#if (DEBUG != 0)
    INFO(info, 8)("Fill petsc matrix 3 needed %.5f seconds\n", TIME_USED(first, clock()));
#endif

    // === Transfer values from DOF vector to the PETSc vector. === 

    for (int i = 0; i < nComponents; i++)
      setDofVector(petscRhsVec, vec->getDOFVector(i), nComponents, i);

    VecAssemblyBegin(petscRhsVec);
    VecAssemblyEnd(petscRhsVec);

    INFO(info, 8)("Fill petsc matrix needed %.5f seconds\n", TIME_USED(first, clock()));
  }


  void PetscSolver::solvePetscMatrix(SystemVector &vec, AdaptInfo *adaptInfo)
  {
    FUNCNAME("PetscSolver::solvePetscMatrix()");

#if 0
    // Set the old solution to be the initial guess for the petsc solver.
    for (int i = 0; i < nComponents; i++)
      setDofVector(petscSolVec, vec->getDOFVector(i), nComponents, i);

    VecAssemblyBegin(petscSolVec);
    VecAssemblyEnd(petscSolVec);
#endif

    // === Init Petsc solver. ===

    KSP solver;
    KSPCreate(PETSC_COMM_WORLD, &solver);
    KSPSetOperators(solver, petscMatrix, petscMatrix, SAME_NONZERO_PATTERN); 
    KSPSetTolerances(solver, 0.0, 1e-8, PETSC_DEFAULT, PETSC_DEFAULT);
    KSPSetType(solver, KSPBCGS);
    KSPMonitorSet(solver, myKSPMonitor, PETSC_NULL, 0);
    KSPSetFromOptions(solver);
    // Do not delete the solution vector, use it for the initial guess.
    //    KSPSetInitialGuessNonzero(solver, PETSC_TRUE);


    // === Run Petsc. ===

    KSPSolve(solver, petscRhsVec, petscSolVec);


    // === Transfer values from PETSc's solution vector to the dof vectors. ===

    PetscScalar *vecPointer;
    VecGetArray(petscSolVec, &vecPointer);

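    // The local part of the PETSc solution vector stores the components of each
    // dof interleaved; copy the values back into the separate dof vectors.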
    int nRankDofs = meshDistributor->getNumberRankDofs();
    for (int i = 0; i < nComponents; i++) {
      DOFVector<double> &dofvec = *(vec.getDOFVector(i));
      for (int j = 0; j < nRankDofs; j++)
	dofvec[meshDistributor->mapLocalToDofIndex(j)] = 
	  vecPointer[j * nComponents + i]; 
    }

    VecRestoreArray(petscSolVec, &vecPointer);


    // === Synchronize the solution at common dofs, i.e., dofs that correspond ===
    // === to more than one partition.                                         ===
    meshDistributor->synchVector(vec);


    // === Print information about solution process. ===

    int iterations = 0;
    KSPGetIterationNumber(solver, &iterations);
    MSG("  Number of iterations: %d\n", iterations);
    adaptInfo->setSolverIterations(iterations);

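    // Compute the residual norm ||A*x - b|| of the computed solution.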
    double norm = 0.0;
    MatMult(petscMatrix, petscSolVec, petscTmpVec);
    VecAXPY(petscTmpVec, -1.0, petscRhsVec);
    VecNorm(petscTmpVec, NORM_2, &norm);
    MSG("  Residual norm: %e\n", norm);


    // === Destroy Petsc's variables. ===

    MatDestroy(petscMatrix);
    VecDestroy(petscRhsVec);
    VecDestroy(petscSolVec);
    VecDestroy(petscTmpVec);
    KSPDestroy(solver);
  }

}