/******************************************************************************
 *
 * AMDiS - Adaptive multidimensional simulations
 *
 * Copyright (C) 2013 Dresden University of Technology. All Rights Reserved.
 * Web: https://fusionforge.zih.tu-dresden.de/projects/amdis
 *
 * Authors: 
 * Simon Vey, Thomas Witkowski, Andreas Naumann, Simon Praetorius, et al.
 *
 * This file is provided AS IS with NO WARRANTY OF ANY KIND, INCLUDING THE
 * WARRANTY OF DESIGN, MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE.
 *
 *
 * This file is part of AMDiS
 *
 * See also license.opensource.txt in the distribution.
 * 
 ******************************************************************************/


#include "parallel/PetscSolverGlobalBlockMatrix.h"
#include "parallel/StdMpi.h"
#include "parallel/MpiHelper.h"

using namespace std;

namespace AMDiS { namespace Parallel {

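  // Creates one distributed PETSc matrix per block pair, copies the values
  // of the sequential DOF matrices into the blocks, and combines them into
  // a nested matrix (MATNEST) that serves as the interior system matrix.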
  void PetscSolverGlobalBlockMatrix::fillPetscMatrix(Matrix<DOFMatrix*> *seqMat)
  {
    FUNCNAME("PetscSolverGlobalBlockMatrix::fillPetscMatrix()");

    TEST_EXIT_DBG(meshDistributor)("No mesh distributor object defined!\n");
    TEST_EXIT_DBG(interiorMap)("No parallel mapping object defined!\n");
    TEST_EXIT_DBG(seqMat)("No DOF matrix defined!\n");

    double wtime = MPI::Wtime();

    prepare();

    const FiniteElemSpace *feSpace = componentSpaces[0];
    nComponents = seqMat->getNumRows();
    int nRankRows = (*interiorMap)[feSpace].nRankDofs;
    int nOverallRows = (*interiorMap)[feSpace].nOverallDofs;

#if (DEBUG != 0)
    MSG("Fill petsc matrix 1 needed %.5f seconds\n", MPI::Wtime() - wtime);
#endif

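    // If no block structure has been specified, fall back to one block per
    // component.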
    if (nBlocks == -1) {
      nBlocks = nComponents;
      for (int i = 0; i < nBlocks; i++)
	componentInBlock[i] = i;
    }

    vector<int> compNthInBlock(nComponents, 0);
    vector<int> blockSize(nBlocks, 0);

    for (int i = 0; i < nComponents; i++) {
      compNthInBlock[i] = blockSize[componentInBlock[i]];
      blockSize[componentInBlock[i]]++;
    }

    nestMat.resize(nBlocks * nBlocks);

    // === Transfer values from DOF matrices to the PETSc matrix. === 

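    // Create one distributed AIJ matrix per block pair. The preallocation of
    // 300 nonzeros per row (scaled by the block size) is a fixed, generous
    // estimate rather than an exact count.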
    for (int i = 0; i < nBlocks; i++)
      for (int j = 0; j < nBlocks; j++)
	MatCreateAIJ(domainComm,
		     nRankRows * blockSize[i], nRankRows * blockSize[j],
		     nOverallRows * blockSize[i], nOverallRows * blockSize[j],
		     300 * blockSize[i], PETSC_NULL, 
		     300 * blockSize[j], PETSC_NULL,
		     &(nestMat[i * nBlocks + j]));
			
    for (int i = 0; i < nComponents; i++)
      for (int j = 0; j < nComponents; j++)
	if ((*seqMat)[i][j]) {
	  int idx = componentInBlock[i] * nBlocks + componentInBlock[j];
	  setDofMatrix(nestMat[idx], (*seqMat)[i][j], 
		       compNthInBlock[i], compNthInBlock[j]);
	}

    for (int i = 0; i < nBlocks; i++) {
      for (int j = 0; j < nBlocks; j++) {
	int idx = i * nBlocks + j;
	if (nestMat[idx]) {
	  MatAssemblyBegin(nestMat[idx], MAT_FINAL_ASSEMBLY);
	  MatAssemblyEnd(nestMat[idx], MAT_FINAL_ASSEMBLY);
	}
      }
    }

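    // Combine the per-block matrices into one nested matrix.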
    MatCreateNest(domainComm, nBlocks, PETSC_NULL, nBlocks, PETSC_NULL,
		  &(nestMat[0]), &getMatInterior());

#if (DEBUG != 0)
    MSG("Fill petsc matrix 2 needed %.5f seconds\n", MPI::Wtime() - wtime);
#endif

    matAssembly();

    
    /// initPreconditioner(...)

    // === Init PETSc solver and preconditioner objects. ===

    initSolver(kspInterior);
    KSPGetPC(kspInterior, &pcInterior);
    initPreconditioner(pcInterior);

    MSG("Fill petsc matrix needed %.5f seconds\n", MPI::Wtime() - wtime);
  }


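  // Creates one distributed PETSc vector per solution component, fills it
  // with the values of the corresponding DOF vector, and combines the parts
  // into a nested vector (VECNEST) that serves as the right-hand side.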
  void PetscSolverGlobalBlockMatrix::fillPetscRhs(SystemVector *vec)
  {
    FUNCNAME("PetscSolverGlobalBlockMatrix::fillPetscRhs()");

    TEST_EXIT_DBG(vec)("No DOF vector defined!\n");

    nComponents = vec->getSize();
    const FiniteElemSpace *feSpace = componentSpaces[0];
    int nRankRows = (*interiorMap)[feSpace].nRankDofs;
    int nOverallRows = (*interiorMap)[feSpace].nOverallDofs;

    nestVec.resize(nComponents);

    for (int i = 0; i < nComponents; i++) {
      VecCreateMPI(domainComm, nRankRows, nOverallRows, &(nestVec[i]));

      setDofVector(nestVec[i], vec->getDOFVector(i));
      
      VecAssemblyBegin(nestVec[i]);
      VecAssemblyEnd(nestVec[i]);
    }

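    // Combine the per-component vectors into one nested right-hand side vector.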
    VecCreateNest(domainComm, nComponents, PETSC_NULL, 
		  &(nestVec[0]), &(getVecRhsInterior()));

    vecRhsAssembly();
  }


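  // Creates the interior KSP object, attaches the nested system matrix, and
  // applies all runtime options registered under the solver's option prefix.
  // Note: the SAME_NONZERO_PATTERN argument follows the KSPSetOperators()
  // signature of PETSc versions prior to 3.5.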
  void PetscSolverGlobalBlockMatrix::initSolver(KSP &ksp)
  {
    FUNCNAME("PetscSolverGlobalBlockMatrix::initSolver()");

    KSPCreate(domainComm, &ksp);
    KSPSetOperators(ksp, getMatInterior(), getMatInterior(), 
		    SAME_NONZERO_PATTERN); 
    KSPSetOptionsPrefix(ksp, kspPrefix.c_str());
    KSPSetFromOptions(ksp);
  }


  void PetscSolverGlobalBlockMatrix::exitSolver(KSP ksp)
  {
    FUNCNAME("PetscSolverGlobalBlockMatrix::exitSolver()");

    KSPDestroy(&ksp);
  }


  void PetscSolverGlobalBlockMatrix::initPreconditioner(PC pc)
  {
    FUNCNAME("PetscSolverGlobalBlockMatrix::initPreconditioner()");

    PCSetFromOptions(pc);
  }


  void PetscSolverGlobalBlockMatrix::exitPreconditioner(PC pc)
  {
    FUNCNAME("PetscSolverGlobalBlockMatrix::exitPreconditioner()");
  }


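  // Solves the nested interior system: the current content of vec is copied
  // into a PETSc vector, the Krylov solver is run, and the solution is
  // copied back into the DOF vectors of all components.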
  void PetscSolverGlobalBlockMatrix::solvePetscMatrix(SystemVector &vec, 
						      AdaptInfo *adaptInfo)
  {
    FUNCNAME("PetscSolverGlobalBlockMatrix::solvePetscMatrix()");

    const FiniteElemSpace *feSpace = componentSpaces[0];
    VecDuplicate(getVecRhsInterior(), &petscSolVec);
    
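    // Copy the current values of the system vector into the PETSc solution
    // vector; they act as the initial guess if the KSP is configured to use
    // a nonzero initial guess.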
    for (int i = 0; i < vec.getSize(); i++)
    {
      Vec tmp;
      VecNestGetSubVec(petscSolVec, i, &tmp);
      setDofVector(tmp, vec.getDOFVector(i));
      VecAssemblyBegin(tmp); 
      VecAssemblyEnd(tmp);
    }

    // Solve the linear system with PETSc.
    solve(getVecRhsInterior(), petscSolVec);

    // === Transfer values from PETSc's solution vectors to the DOF vectors. ===
    for (int i = 0; i < nComponents; i++) {
      DOFVector<double> &dofvec = *(vec.getDOFVector(i));

      Vec tmp;
      VecNestGetSubVec(petscSolVec, i, &tmp);

//       int nRankDofs = (*interiorMap)[feSpace].nRankDofs;
      PetscScalar *vecPointer;
      VecGetArray(tmp, &vecPointer);

      DofMap& d = (*interiorMap)[feSpace].getMap();
      for (DofMap::iterator it = d.begin(); it != d.end(); ++it)
	if (it->second.local != -1)
	  dofvec[it->first] = vecPointer[it->second.local];

      VecRestoreArray(tmp, &vecPointer);
    }

    VecDestroy(&petscSolVec);

    // === Synchronize DOFs at common DOFs, i.e., DOFs that correspond to ===
    // === more than one partition.                                       ===
    meshDistributor->synchVector(vec);
  }


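  // Destroys all nested sub-matrices and the outer nest matrix, and frees
  // the solver and preconditioner objects.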
  void PetscSolverGlobalBlockMatrix::destroyMatrixData()
  {
    FUNCNAME("PetscSolverGlobalBlockMatrix::destroyMatrixData()");

    for (unsigned int i = 0; i < nestMat.size(); i++)
      if (nestMat[i] != PETSC_NULL)
	MatDestroy(&(nestMat[i]));

    matDestroy();

    exitPreconditioner(pcInterior);

    exitSolver(kspInterior);
  }


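  // Destroys the per-component right-hand side vectors and the outer nest
  // vector.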
  void PetscSolverGlobalBlockMatrix::destroyVectorData()
  {
    FUNCNAME("PetscSolverGlobalBlockMatrix::destroyVectorData()");

    for (unsigned int i = 0; i < nestVec.size(); i++)
      VecDestroy(&(nestVec[i]));
      
    vecDestroy();
  }


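  // Copies one sequential DOF matrix into the given PETSc block matrix.
  // dispRowBlock and dispColBlock are the positions of the component within
  // its row and column block; they are translated into global index offsets.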
  void PetscSolverGlobalBlockMatrix::setDofMatrix(Mat& petscMat, 
						  DOFMatrix* seqMat,
						  int dispRowBlock, 
						  int dispColBlock)
  {
    FUNCNAME("PetscSolverGlobalBlockMatrix::setDofMatrix()");

    TEST_EXIT(petscMat)("No PETSc matrix!\n");
    TEST_EXIT(seqMat)("No DOFMatrix!\n");

    const FiniteElemSpace *feSpace = componentSpaces[0];

    using mtl::tag::row; using mtl::tag::nz; using mtl::begin; using mtl::end;
    namespace traits = mtl::traits;
    typedef DOFMatrix::base_matrix_type Matrix;

    traits::col<Matrix>::type col(seqMat->getBaseMatrix());
    traits::const_value<Matrix>::type value(seqMat->getBaseMatrix());

    typedef traits::range_generator<row, Matrix>::type cursor_type;
    typedef traits::range_generator<nz, cursor_type>::type icursor_type;

    int dispRowIndex = (*interiorMap)[feSpace].nRankDofs * dispRowBlock;
    int dispColIndex = (*interiorMap)[feSpace].nRankDofs * dispColBlock;

    vector<int> cols;
    vector<double> values;
    cols.reserve(3000);
    values.reserve(3000);
    
    // === Traverse all rows of the DOF matrix and insert the values row-wise ===
    // === into the PETSc matrix.                                             ===

    for (cursor_type cursor = begin<row>(seqMat->getBaseMatrix()), 
	   cend = end<row>(seqMat->getBaseMatrix()); cursor != cend; ++cursor) {

      // Global index of the current row DOF.
      int rowIndex = (*interiorMap)[feSpace][cursor.value()].global + dispRowIndex;

      cols.clear();
      values.clear();
      
      for (icursor_type icursor = begin<nz>(cursor), icend = end<nz>(cursor); 
	   icursor != icend; ++icursor) {	
	// Global index of the current column DOF.
	int colIndex = (*interiorMap)[feSpace][col(*icursor)].global + dispColIndex;
	
	// Ignore all zero entries, except diagonal entries.
	if (value(*icursor) == 0.0 && rowIndex != colIndex)
	  continue;
	
	// Calculate the exact position of the column index in the PETSc matrix.
	cols.push_back(colIndex);
	values.push_back(value(*icursor));
      }

      MatSetValues(petscMat, 1, &rowIndex, cols.size(), 
		   &(cols[0]), &(values[0]), ADD_VALUES);
    }
  }
  
  
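  // Copies all used entries of a sequential DOF vector into the given PETSc
  // vector, translating local DOF indices into global ones via interiorMap.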
  void PetscSolverGlobalBlockMatrix::setDofVector(Vec& petscVec, 
						  DOFVector<double>* vec)
  {
    FUNCNAME("PetscSolverGlobalBlockMatrix::setDofVector()");

    const FiniteElemSpace *feSpace = componentSpaces[0];

    // Traverse all used DOFs in the DOF vector.
    DOFVector<double>::Iterator dofIt(vec, USED_DOFS);
    for (dofIt.reset(); !dofIt.end(); ++dofIt) {
      int index = (*interiorMap)[feSpace][dofIt.getDOFIndex()].global;
      double value = *dofIt;

      VecSetValues(petscVec, 1, &index, &value, ADD_VALUES);
    }
  }

} } // end namespace Parallel, AMDiS