//
// Software License for AMDiS
//
// Copyright (c) 2010 Dresden University of Technology 
// All rights reserved.
// Authors: Simon Vey, Thomas Witkowski et al.
//
// This file is part of AMDiS
//
// See also license.opensource.txt in the distribution.


#include <vector>
#include <set>

#include "parallel/PetscSolver.h"
#include "parallel/StdMpi.h"
#include "parallel/ParallelDebug.h"
#include "DOFVector.h"
#include "Debug.h"
#include "SystemVector.h"

#include "petscksp.h"

namespace AMDiS {
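
  // Monitor callback registered via KSPMonitorSet() below: on rank 0, print
  // the current residual norm every 100th Krylov iteration.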

  PetscErrorCode myKSPMonitor(KSP ksp, PetscInt iter, PetscReal rnorm, void *)
  {    
    if (iter % 100 == 0 && MPI::COMM_WORLD.Get_rank() == 0)
      std::cout << "[0]  Petsc-Iteration " << iter << ": " << rnorm << std::endl;

    return 0;
  }
 
  void PetscSolver::solve(AdaptInfo *adaptInfo, bool fixedMatrix)
  {
    FUNCNAME("PetscSolver::solve()");

    TEST_EXIT(meshDistributor)("Should not happen!\n");

    double wtime = MPI::Wtime();

    fillPetscMatrix(systemMatrix, rhs);
    solvePetscMatrix(*solution, adaptInfo);   

    INFO(info, 8)("solution of discrete system needed %.5f seconds\n", 
		  MPI::Wtime() - wtime);
  }


  void PetscSolver::setDofMatrix(DOFMatrix* mat, int dispMult, 
				 int dispAddRow, int dispAddCol)
  {
    FUNCNAME("PetscSolver::setDofMatrix()");

    TEST_EXIT(mat)("No DOFMatrix!\n");

    typedef std::map<DegreeOfFreedom, DegreeOfFreedom> DofMapping;
    typedef std::map<BoundaryType, DofMapping> PeriodicDofMap;
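
    // Gather the periodic DOF mappings of all ranks on rank 0, merge them
    // into one global mapping and broadcast the result, so that every rank
    // can resolve periodic associations of DOFs it does not own itself.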

    StdMpi<PeriodicDofMap> stdMpi(meshDistributor->getMpiComm());
    if (meshDistributor->getMpiRank() == 0) {
      for (int i = 1; i < meshDistributor->getMpiSize(); i++)
	stdMpi.recv(i);
    } else {
      stdMpi.send(0, meshDistributor->getPeriodicMapping());
    }
    stdMpi.startCommunication<int>(MPI_INT);



    StdMpi<PeriodicDofMap> stdMpi2(meshDistributor->getMpiComm());
    PeriodicDofMap overallDofMap;

    if (meshDistributor->getMpiRank() == 0) {
      overallDofMap = meshDistributor->getPeriodicMapping();
      for (int i = 1; i < meshDistributor->getMpiSize(); i++) {
	PeriodicDofMap &rankDofMap = stdMpi.getRecvData(i);
	for (PeriodicDofMap::iterator it = rankDofMap.begin(); it != rankDofMap.end(); ++it) {
	  for (DofMapping::iterator dofIt = it->second.begin(); dofIt != it->second.end(); ++dofIt) {
	    if (overallDofMap[it->first].count(dofIt->first) == 1) {
	      TEST_EXIT_DBG(overallDofMap[it->first][dofIt->first] == dofIt->second)
		("Should not happen!\n");
	    } else {
	      overallDofMap[it->first][dofIt->first] = dofIt->second;
	    }
	  }
	}
      }

      for (int i = 1; i < meshDistributor->getMpiSize(); i++)
	stdMpi2.send(i, overallDofMap);
    } else {
      stdMpi2.recv(0);
    }


    stdMpi2.startCommunication<int>(MPI_INT);
    
    if (meshDistributor->getMpiRank() > 0)
      overallDofMap = stdMpi2.getRecvData(0);
   

    using mtl::tag::row; using mtl::tag::nz; using mtl::begin; using mtl::end;
    namespace traits = mtl::traits;
    typedef DOFMatrix::base_matrix_type Matrix;

    traits::col<Matrix>::type col(mat->getBaseMatrix());
    traits::const_value<Matrix>::type value(mat->getBaseMatrix());

    typedef traits::range_generator<row, Matrix>::type cursor_type;
    typedef traits::range_generator<nz, cursor_type>::type icursor_type;

    std::vector<int> cols;
    std::vector<double> values;
    cols.reserve(300);
    values.reserve(300);
    
    std::vector<int> globalCols;

    // === Traverse all rows of the DOF matrix and insert the values row-wise ===
    // === into the PETSc matrix.                                             ===

    for (cursor_type cursor = begin<row>(mat->getBaseMatrix()), 
	   cend = end<row>(mat->getBaseMatrix()); cursor != cend; ++cursor) {

      // Global index of the current row dof.
      int globalRowDof = meshDistributor->mapLocalToGlobal(*cursor);
      // Test if the current row dof is a periodic dof.
      bool periodicRow = meshDistributor->isPeriodicDof(globalRowDof);

      if (!periodicRow) {
	// Calculate petsc row index.
	int rowIndex = globalRowDof * dispMult + dispAddRow;

	for (icursor_type icursor = begin<nz>(cursor), icend = end<nz>(cursor); 
	     icursor != icend; ++icursor) {

	  // Global index of the current column DOF.
	  int globalColDof = meshDistributor->mapLocalToGlobal(col(*icursor));
	  // Test if the current col dof is a periodic dof.
	  bool periodicCol = meshDistributor->isPeriodicDof(globalColDof);

	  if (!periodicCol) {
	    // Calculate the exact position of the column index in the petsc matrix.
	    int colIndex = globalColDof * dispMult + dispAddCol;
	    MatSetValue(petscMatrix, rowIndex, colIndex, value(*icursor), ADD_VALUES);
	  } else {
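	    // Collect the periodic associations of the column DOF. Only
	    // boundary types >= -3 are taken into account; more negative
	    // types are (presumably) indirect associations that must not be
	    // expanded here.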
	    std::set<int>& perColAsc = meshDistributor->getPerDofAssociations(globalColDof);
	    std::set<int> perAsc;
	    for (std::set<int>::iterator it = perColAsc.begin(); it != perColAsc.end(); ++it)
	      if (*it >= -3)
		perAsc.insert(*it);
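
	    // Every periodic association doubles the number of column images
	    // created below, and the value is scaled by 0.5 per association,
	    // so the total assembled contribution remains unchanged.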
	    double scaledValue = value(*icursor) * std::pow(0.5, static_cast<double>(perAsc.size()));
	    std::vector<int> newCols;
	    newCols.push_back(globalColDof);
	    
	    for (std::set<int>::iterator it = perAsc.begin(); it != perAsc.end(); ++it) {
	      int nCols = static_cast<int>(newCols.size());
	      for (int i = 0; i < nCols; i++) {
		TEST_EXIT(overallDofMap[*it].count(newCols[i]) > 0)("Should not happen: %d %d\n", *it, newCols[i]);
		newCols.push_back(overallDofMap[*it][newCols[i]]);
	      }
	    }

	    for (unsigned int i = 0; i < newCols.size(); i++) {
	      int colIndex = newCols[i] * dispMult + dispAddCol;
	      MatSetValue(petscMatrix, rowIndex, colIndex, scaledValue, ADD_VALUES);
	    }
	  }
	}
      } else {
	for (icursor_type icursor = begin<nz>(cursor), icend = end<nz>(cursor); 
	     icursor != icend; ++icursor) {	  

	  // Global index of the current column DOF.
	  int globalColDof = meshDistributor->mapLocalToGlobal(col(*icursor));

	  std::set<int>& perColAsc = meshDistributor->getPerDofAssociations(globalColDof);
	  std::set<int> perAsc;
	  for (std::set<int>::iterator it = perColAsc.begin(); it != perColAsc.end(); ++it)
	    if (*it >= -3)
	      perAsc.insert(*it);

	  std::set<int>& perRowAsc = meshDistributor->getPerDofAssociations(globalRowDof);
	  for (std::set<int>::iterator it = perRowAsc.begin(); it != perRowAsc.end(); ++it)
	    if (*it >= -3)
	      perAsc.insert(*it);

	  double scaledValue = value(*icursor) * std::pow(0.5, static_cast<double>(perAsc.size()));
	  std::vector<std::pair<int, int> > entry;
	  entry.push_back(std::make_pair(globalRowDof, globalColDof));
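
	  // Expand the entry to all of its periodic images: every association
	  // maps the row and column DOFs to their periodic counterparts; if an
	  // association provides no mapping for a DOF, its original index is kept.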

	  for (std::set<int>::iterator it = perAsc.begin(); it != perAsc.end(); ++it) {
	    int nEntry = static_cast<int>(entry.size());
	    for (int i = 0; i < nEntry; i++) {
	      int perRowDof = 0;
	      if (overallDofMap[*it].count(entry[i].first) > 0)
		perRowDof = overallDofMap[*it][entry[i].first];
	      else
		perRowDof = entry[i].first;

	      int perColDof;
	      if (overallDofMap[*it].count(entry[i].second) > 0)
		perColDof = overallDofMap[*it][entry[i].second];
	      else
		perColDof = entry[i].second;

	      entry.push_back(std::make_pair(perRowDof, perColDof));
	    }
	  }

	  for (std::vector<std::pair<int, int> >::iterator eIt = entry.begin(); 
	       eIt != entry.end(); ++eIt) {
	    // Calculate petsc row index.
	    int rowIndex = eIt->first * dispMult + dispAddRow;
	    int colIndex = eIt->second * dispMult + dispAddCol;
	    MatSetValue(petscMatrix, rowIndex, colIndex, scaledValue, ADD_VALUES);
	  }
	}
      }
    }
  }


  void PetscSolver::setDofVector(Vec& petscVec, DOFVector<double>* vec, 
				 int dispMult, int dispAdd)
  {
    FUNCNAME("PetscSolver::setDofVector()");

    // Traverse all used dofs in the dof vector.
    DOFVector<double>::Iterator dofIt(vec, USED_DOFS);
    for (dofIt.reset(); !dofIt.end(); ++dofIt) {
      // Calculate global row index of the dof.
      DegreeOfFreedom globalRowDof = 
	meshDistributor->mapLocalToGlobal(dofIt.getDOFIndex());
      // Calculate petsc index of the row dof.
      int index = globalRowDof * dispMult + dispAdd;
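
      // A periodic DOF is shared with its periodic partners: distribute the
      // value evenly over the DOF itself and all associated periodic DOFs.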

      if (meshDistributor->isPeriodicDof(globalRowDof)) {
	std::set<int>& perAsc = meshDistributor->getPerDofAssociations(globalRowDof);
	double value = *dofIt / (perAsc.size() + 1.0);
	VecSetValues(petscVec, 1, &index, &value, ADD_VALUES);

	for (std::set<int>::iterator perIt = perAsc.begin(); perIt != perAsc.end(); ++perIt) {
	  int mappedDof = meshDistributor->getPeriodicMapping(*perIt, globalRowDof);
	  int mappedIndex = mappedDof * dispMult + dispAdd;
	  VecSetValues(petscVec, 1, &mappedIndex, &value, ADD_VALUES);
	}
      } else {
	// The dof index is not periodic.
	double value = *dofIt;
	VecSetValues(petscVec, 1, &index, &value, ADD_VALUES);
      }
    }
  }


  void PetscSolver::createPetscNnzStructure(Matrix<DOFMatrix*> *mat)
  {
    FUNCNAME("PetscSolver::createPetscNnzStructure()");

    TEST_EXIT_DBG(!d_nnz)("There is something wrong!\n");
    TEST_EXIT_DBG(!o_nnz)("There is something wrong!\n");

    int nRankRows = meshDistributor->getNumberRankDofs() * nComponents;
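
    // PETSc preallocation data for MatCreateMPIAIJ: for local row i, d_nnz[i]
    // counts the nonzeros in the diagonal block (columns owned by this rank)
    // and o_nnz[i] those in the off-diagonal block (columns of other ranks).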
    d_nnz = new int[nRankRows];
    o_nnz = new int[nRankRows];
    for (int i = 0; i < nRankRows; i++) {
      d_nnz[i] = 0;
      o_nnz[i] = 0;
    }

    using mtl::tag::row; using mtl::tag::nz; using mtl::begin; using mtl::end;
    namespace traits = mtl::traits;
    typedef DOFMatrix::base_matrix_type Matrix;
    typedef std::vector<std::pair<int, int> > MatrixNnzEntry;

    // Stores for each rank a list of nnz entries (i.e., pairs of row and
    // column index) that this rank will send to it. These nnz entries are
    // assembled on this rank, but since the row DOFs are owned by other
    // ranks, they must be sent to the owners of the row DOFs.
    std::map<int, MatrixNnzEntry> sendMatrixEntry;


    // First, create for every rank to which we send data a MatrixNnzEntry
    // object with zero entries.
    typedef std::map<int, DofContainer> RankToDofContainer;
    RankToDofContainer& recvDofs = meshDistributor->getRecvDofs();
    for (RankToDofContainer::iterator it = recvDofs.begin();
	 it != recvDofs.end(); ++it)
      sendMatrixEntry[it->first].resize(0);


    for (int i = 0; i < nComponents; i++) {
      for (int j = 0; j < nComponents; j++) {
 	if ((*mat)[i][j]) {
	  Matrix bmat = (*mat)[i][j]->getBaseMatrix();

	  traits::col<Matrix>::type col(bmat);
	  traits::const_value<Matrix>::type value(bmat);
	  
	  typedef traits::range_generator<row, Matrix>::type cursor_type;
	  typedef traits::range_generator<nz, cursor_type>::type icursor_type;
	  
	  for (cursor_type cursor = begin<row>(bmat), 
		 cend = end<row>(bmat); cursor != cend; ++cursor) {

	    // Map the local row number to the global DOF index and create from it
	    // the global PETSc row index of this DOF.
	    int petscRowIdx = 
	      meshDistributor->mapLocalToGlobal(*cursor) * nComponents + i;

	    if (meshDistributor->getIsRankDof(*cursor)) {

	      // === The current row DOF is a rank dof, so create the corresponding ===
	      // === nnz values directly on rank's nnz data.                        ===

	      // This is the local row index of the local PETSc matrix.
	      int localPetscRowIdx = 
		petscRowIdx - meshDistributor->getRstart() * nComponents;

	      TEST_EXIT_DBG(localPetscRowIdx >= 0 && localPetscRowIdx < nRankRows)
		("Should not happen! \n Debug info: localRowIdx = %d   globalRowIndx = %d   petscRowIdx = %d   localPetscRowIdx = %d   rStart = %d   nCompontens = %d   nRankRows = %d\n",
		 *cursor, meshDistributor->mapLocalToGlobal(*cursor), petscRowIdx, localPetscRowIdx, meshDistributor->getRstart(), nComponents, nRankRows);
	      
	      // Traverse all non zero entries in this row.
	      for (icursor_type icursor = begin<nz>(cursor), 
		     icend = end<nz>(cursor); icursor != icend; ++icursor) {
		int petscColIdx = 
		  meshDistributor->mapLocalToGlobal(col(*icursor)) * nComponents + j;

		if (value(*icursor) != 0.0 || petscRowIdx == petscColIdx) {
		  // The row DOF is a rank DOF; if the column is also a rank DOF,
		  // increment the d_nnz value for this row, otherwise the o_nnz value.
		  if (petscColIdx >= meshDistributor->getRstart() * nComponents && 
		      petscColIdx < meshDistributor->getRstart() * nComponents + nRankRows)
		    d_nnz[localPetscRowIdx]++;
		  else
		    o_nnz[localPetscRowIdx]++;
		}    
	      }
	    } else {
	      typedef std::map<int, DofContainer> RankToDofContainer;

	      // === The current row DOF is not a rank DOF, i.e., its entries ===
	      // === are assembled on this rank, but the row is owned by      ===
	      // === another rank. Hence, the nnz structure of this row must  ===
	      // === also be sent to the owning rank.                         ===

	      // Find out which rank owns this DOF.
	      int sendToRank = -1;
	      for (RankToDofContainer::iterator it = meshDistributor->getRecvDofs().begin();
		   it != meshDistributor->getRecvDofs().end(); ++it) {
		for (DofContainer::iterator dofIt = it->second.begin();
		     dofIt != it->second.end(); ++dofIt) {
		  if (**dofIt == *cursor) {
		    sendToRank = it->first;
		    break;
		  }
		}

		if (sendToRank != -1)
		  break;
	      }

	      TEST_EXIT_DBG(sendToRank != -1)("Should not happen!\n");

	      // Collect all non-zero entries of this row to be sent to the owner of the row DOF.
	      for (icursor_type icursor = begin<nz>(cursor), 
		     icend = end<nz>(cursor); icursor != icend; ++icursor) {
		if (value(*icursor) != 0.0) {
		  int petscColIdx = 
		    meshDistributor->mapLocalToGlobal(col(*icursor)) * nComponents + j;
		  
		  sendMatrixEntry[sendToRank].
		    push_back(std::make_pair(petscRowIdx, petscColIdx));
		}
	      }

	    } // if (isRankDof[*cursor]) ... else ...
	  } // for each row in mat[i][j]
	} // if mat[i][j]
      } 
    }

    // === Send and recv the nnz row structure to/from other ranks. ===

    StdMpi<MatrixNnzEntry> stdMpi(meshDistributor->getMpiComm(), true);
    stdMpi.send(sendMatrixEntry);
    stdMpi.recv(meshDistributor->getSendDofs());
    stdMpi.startCommunication<int>(MPI_INT);


    // === Evaluate the nnz structure this rank got from other ranks and add it to ===
    // === the PETSc nnz data structure.                                           ===

    for (std::map<int, MatrixNnzEntry>::iterator it = stdMpi.getRecvData().begin();
	 it != stdMpi.getRecvData().end(); ++it) {
      if (it->second.size() > 0) {
	for (unsigned int i = 0; i < it->second.size(); i++) {
	  int r = it->second[i].first;
	  int c = it->second[i].second;

	  int localRowIdx = r - meshDistributor->getRstart() * nComponents;

	  TEST_EXIT_DBG(localRowIdx >= 0 && localRowIdx < nRankRows)
	    ("Got row index %d/%d (nRankRows = %d) from rank %d. Should not happen!\n",
	     r, localRowIdx, nRankRows, it->first);
	  
	  if (c < meshDistributor->getRstart() * nComponents || 
	      c >= meshDistributor->getRstart() * nComponents + nRankRows)
	    o_nnz[localRowIdx]++;
	  else
	    d_nnz[localRowIdx]++;
	}
      }
    }

    // The above algorithm over-approximates the number of nnz per row, i.e.,
    // the computed number is always equal to or larger than the real number
    // of nnz values in the global parallel matrix. For small matrices the
    // problem may arise that the result is larger than the number of
    // elements in a row. This is fixed in the following.

    if (nRankRows < 100) 
      for (int i = 0; i < nRankRows; i++)
	d_nnz[i] = std::min(d_nnz[i], nRankRows);
  }


  void PetscSolver::fillPetscMatrix(Matrix<DOFMatrix*> *mat, SystemVector *vec)
  {
    FUNCNAME("PetscSolver::fillPetscMatrix()");

    double wtime = MPI::Wtime();
    int nRankRows = meshDistributor->getNumberRankDofs() * nComponents;
    int nOverallRows = meshDistributor->getNumberOverallDofs() * nComponents;

    // === Create the PETSc vectors (rhs, solution, and a temporary vector). ===

    VecCreate(PETSC_COMM_WORLD, &petscRhsVec);
    VecSetSizes(petscRhsVec, nRankRows, nOverallRows);
    VecSetType(petscRhsVec, VECMPI);

    VecCreate(PETSC_COMM_WORLD, &petscSolVec);
    VecSetSizes(petscSolVec, nRankRows, nOverallRows);
    VecSetType(petscSolVec, VECMPI);

    VecCreate(PETSC_COMM_WORLD, &petscTmpVec);
    VecSetSizes(petscTmpVec, nRankRows, nOverallRows);
    VecSetType(petscTmpVec, VECMPI);
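
    // === Decide whether the nnz structure must be recomputed. Every rank  ===
    // === compares its mesh change index with that of the last assembly;   ===
    // === the Allreduce makes this decision globally consistent.           ===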

    int recvAllValues = 0;
    int sendValue = static_cast<int>(meshDistributor->getLastMeshChangeIndex() != lastMeshNnz);
    meshDistributor->getMpiComm().Allreduce(&sendValue, &recvAllValues, 1, MPI_INT, MPI_SUM);

    if (!d_nnz || recvAllValues != 0) {
      if (d_nnz) {
	delete [] d_nnz;
	d_nnz = NULL;
	delete [] o_nnz;
	o_nnz = NULL;
      }

      createPetscNnzStructure(mat);
      lastMeshNnz = meshDistributor->getLastMeshChangeIndex();
    }


    // === Create PETSc matrix with the computed nnz data structure. ===

    MatCreateMPIAIJ(PETSC_COMM_WORLD, nRankRows, nRankRows, nOverallRows, nOverallRows,
		    0, d_nnz, 0, o_nnz, &petscMatrix);
    
#if (DEBUG != 0)
    INFO(info, 8)("Fill petsc matrix 1 needed %.5f seconds\n", MPI::Wtime() - wtime);
#endif

#if (DEBUG != 0)
    int a, b;
    MatGetOwnershipRange(petscMatrix, &a, &b);
    TEST_EXIT(a == meshDistributor->getRstart() * nComponents)
      ("Wrong matrix ownership range!\n");
    TEST_EXIT(b == meshDistributor->getRstart() * nComponents + nRankRows)
      ("Wrong matrix ownership range!\n");
#endif


    // === Transfer values from DOF matrices to the PETSc matrix. === 

    for (int i = 0; i < nComponents; i++)
      for (int j = 0; j < nComponents; j++)
	if ((*mat)[i][j])
	  setDofMatrix((*mat)[i][j], nComponents, i, j);

#if (DEBUG != 0)
    INFO(info, 8)("Fill petsc matrix 2 needed %.5f seconds\n", MPI::Wtime() - wtime);
#endif

    MatAssemblyBegin(petscMatrix, MAT_FINAL_ASSEMBLY);
    MatAssemblyEnd(petscMatrix, MAT_FINAL_ASSEMBLY);

#if (DEBUG != 0)
    INFO(info, 8)("Fill petsc matrix 3 needed %.5f seconds\n", 
		  TIME_USED(MPI::Wtime(), wtime));
#endif

    // === Transfer values from DOF vector to the PETSc vector. === 

    for (int i = 0; i < nComponents; i++)
      setDofVector(petscRhsVec, vec->getDOFVector(i), nComponents, i);

    VecAssemblyBegin(petscRhsVec);
    VecAssemblyEnd(petscRhsVec);

    INFO(info, 8)("Fill petsc matrix needed %.5f seconds\n", MPI::Wtime() - wtime);
  }


  void PetscSolver::solvePetscMatrix(SystemVector &vec, AdaptInfo *adaptInfo)
  {
    FUNCNAME("PetscSolver::solvePetscMatrix()");

#if 0
    // Set the old solution to be the initial guess for the PETSc solver.
    for (int i = 0; i < nComponents; i++)
      setDofVector(petscSolVec, vec.getDOFVector(i), nComponents, i);

    VecAssemblyBegin(petscSolVec);
    VecAssemblyEnd(petscSolVec);
#endif

    // === Init Petsc solver. ===

    KSP solver;
    KSPCreate(PETSC_COMM_WORLD, &solver);
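    // SAME_NONZERO_PATTERN: the sparsity pattern does not change between
    // solves, so PETSc may reuse preconditioner data.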
    KSPSetOperators(solver, petscMatrix, petscMatrix, SAME_NONZERO_PATTERN); 
    KSPSetTolerances(solver, 0.0, 1e-8, PETSC_DEFAULT, PETSC_DEFAULT);
    KSPSetType(solver, KSPBCGS);
    KSPMonitorSet(solver, myKSPMonitor, PETSC_NULL, 0);
    KSPSetFromOptions(solver);
    // Do not delete the solution vector, use it for the initial guess.
    //    KSPSetInitialGuessNonzero(solver, PETSC_TRUE);


    // === Run Petsc. ===

    KSPSolve(solver, petscRhsVec, petscSolVec);

    // === Transfer values from PETSc's solution vector to the DOF vectors. ===

    PetscScalar *vecPointer;
    VecGetArray(petscSolVec, &vecPointer);

    int nRankDofs = meshDistributor->getNumberRankDofs();
    for (int i = 0; i < nComponents; i++) {
      DOFVector<double> &dofvec = *(vec.getDOFVector(i));
      for (int j = 0; j < nRankDofs; j++)
	dofvec[meshDistributor->mapLocalToDofIndex(j)] = 
	  vecPointer[j * nComponents + i]; 
    }

    VecRestoreArray(petscSolVec, &vecPointer);


    // === Synchronize the values of common DOFs, i.e., DOFs that belong to ===
    // === more than one partition.                                         ===
    meshDistributor->synchVector(vec);


    // === Print information about solution process. ===

    int iterations = 0;
    KSPGetIterationNumber(solver, &iterations);
    MSG("  Number of iterations: %d\n", iterations);
    adaptInfo->setSolverIterations(iterations);
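
    // === Compute and print the norm of the final residual, A*x - b. ===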

    double norm = 0.0;
    MatMult(petscMatrix, petscSolVec, petscTmpVec);
    VecAXPY(petscTmpVec, -1.0, petscRhsVec);
    VecNorm(petscTmpVec, NORM_2, &norm);
    MSG("  Residual norm: %e\n", norm);


    // === Destroy Petsc's variables. ===

    MatDestroy(petscMatrix);
    VecDestroy(petscRhsVec);
    VecDestroy(petscSolVec);
    VecDestroy(petscTmpVec);
    KSPDestroy(solver);
  }

}