/******************************************************************************
 *
 * AMDiS - Adaptive multidimensional simulations
 *
 * Copyright (C) 2013 Dresden University of Technology. All Rights Reserved.
 * Web: https://fusionforge.zih.tu-dresden.de/projects/amdis
 *
 * Authors: 
 * Simon Vey, Thomas Witkowski, Andreas Naumann, Simon Praetorius, et al.
 *
 * This file is provided AS IS with NO WARRANTY OF ANY KIND, INCLUDING THE
 * WARRANTY OF DESIGN, MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE.
 *
 *
 * This file is part of AMDiS
 *
 * See also license.opensource.txt in the distribution.
 * 
 ******************************************************************************/


#include "parallel/PetscSolverGlobalBlockMatrix.h"
#include "parallel/StdMpi.h"
#include "parallel/MpiHelper.h"

namespace AMDiS { namespace Parallel {

  void PetscSolverGlobalBlockMatrix::fillPetscMatrix(Matrix<DOFMatrix*> *seqMat)
  {
    FUNCNAME("PetscSolverGlobalBlockMatrix::fillPetscMatrix()");

    TEST_EXIT_DBG(meshDistributor)("No mesh distributor object defined!\n");
    TEST_EXIT_DBG(interiorMap)("No parallel mapping object defined!\n");
    TEST_EXIT_DBG(seqMat)("No DOF matrix defined!\n");

    double wtime = MPI::Wtime();

    prepare();

    const FiniteElemSpace *feSpace = componentSpaces[0];
    nComponents = seqMat->getNumRows();
    int nRankRows = (*interiorMap)[feSpace].nRankDofs;
    int nOverallRows = (*interiorMap)[feSpace].nOverallDofs;

#if (DEBUG != 0)
    MSG("Fill petsc matrix 1 needed %.5f seconds\n", MPI::Wtime() - wtime);
#endif

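    // If no block structure has been specified, each component forms its own
    // block, e.g., for three components: componentInBlock = {0, 1, 2}.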
    if (nBlocks == -1) {
      nBlocks = nComponents;
      for (int i = 0; i < nBlocks; i++)
	componentInBlock[i] = i;
    }

    vector<int> compNthInBlock(nComponents, 0);
    vector<int> blockSize(nBlocks, 0);

    for (int i = 0; i < nComponents; i++) {
      compNthInBlock[i] = blockSize[componentInBlock[i]];
      blockSize[componentInBlock[i]]++;
    }

    nestMat.resize(nBlocks * nBlocks);

    // === Create the PETSc matrices for all blocks. ===

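    // Create a distributed AIJ matrix for each pair of blocks. The
    // preallocation of 300 nonzeros per row, scaled by the block size, is a
    // rough upper-bound heuristic.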
    for (int i = 0; i < nBlocks; i++)
      for (int j = 0; j < nBlocks; j++)
	MatCreateAIJ(domainComm,
		     nRankRows * blockSize[i], nRankRows * blockSize[j],
		     nOverallRows * blockSize[i], nOverallRows * blockSize[j],
		     300 * blockSize[i], PETSC_NULL, 
		     300 * blockSize[j], PETSC_NULL,
		     &(nestMat[i * nBlocks + j]));
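    // === Transfer values from the DOF matrices to the PETSc block matrices. ===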
    for (int i = 0; i < nComponents; i++)
      for (int j = 0; j < nComponents; j++)
	if ((*seqMat)[i][j]) {
	  int idx = componentInBlock[i] * nBlocks + componentInBlock[j];
	  setDofMatrix(nestMat[idx], (*seqMat)[i][j], 
		       compNthInBlock[i], compNthInBlock[j]);
	}

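    // Finish the assembly of all created block matrices.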
    for (int i = 0; i < nBlocks; i++) {
      for (int j = 0; j < nBlocks; j++) {
	int idx = i * nBlocks + j;
	if (nestMat[idx]) {
	  MatAssemblyBegin(nestMat[idx], MAT_FINAL_ASSEMBLY);
	  MatAssemblyEnd(nestMat[idx], MAT_FINAL_ASSEMBLY);
	}
      }
    }

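    // Combine the assembled blocks into one nested PETSc matrix.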
    MatCreateNest(domainComm, nBlocks, PETSC_NULL, nBlocks, PETSC_NULL,
		  &(nestMat[0]), &getMatInterior());

#if (DEBUG != 0)
    MSG("Fill petsc matrix 2 needed %.5f seconds\n", MPI::Wtime() - wtime);
#endif

    matAssembly();

    

    // === Init PETSc solver and preconditioner objects. ===

    initSolver(kspInterior);
    KSPGetPC(kspInterior, &pcInterior);
    initPreconditioner(pcInterior);

    MSG("Fill petsc matrix needed %.5f seconds\n", MPI::Wtime() - wtime);
  }


  void PetscSolverGlobalBlockMatrix::fillPetscRhs(SystemVector *vec)
  {
    FUNCNAME("PetscSolverGlobalBlockMatrix::fillPetscRhs()");

    TEST_EXIT_DBG(vec)("No DOF vector defined!\n");

    nComponents = vec->getSize();
    const FiniteElemSpace *feSpace = componentSpaces[0];
    int nRankRows = (*interiorMap)[feSpace].nRankDofs;
    int nOverallRows = (*interiorMap)[feSpace].nOverallDofs;

    nestVec.resize(nComponents);

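    // Create one distributed PETSc vector per component and copy the rhs
    // values into it.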
    for (int i = 0; i < nComponents; i++) {
      VecCreateMPI(domainComm, nRankRows, nOverallRows, &(nestVec[i]));

      setDofVector(nestVec[i], vec->getDOFVector(i));
      
      VecAssemblyBegin(nestVec[i]);
      VecAssemblyEnd(nestVec[i]);
    }

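    // Combine the component vectors into one nested rhs vector.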
    VecCreateNest(domainComm, nComponents, PETSC_NULL, 
		  &(nestVec[0]), &(getVecRhsInterior()));

    vecRhsAssembly();
  }


  void PetscSolverGlobalBlockMatrix::initSolver(KSP &ksp)
  {
    FUNCNAME("PetscSolverGlobalBlockMatrix::initSolver()");

    KSPCreate(domainComm, &ksp);
    KSPSetOperators(ksp, getMatInterior(), getMatInterior(), 
		    SAME_NONZERO_PATTERN); 
    KSPSetOptionsPrefix(ksp, kspPrefix.c_str());
    KSPSetFromOptions(ksp);
  }


  void PetscSolverGlobalBlockMatrix::exitSolver(KSP ksp)
  {
    FUNCNAME("PetscSolverGlobalBlockMatrix::exitSolver()");

    KSPDestroy(&ksp);
  }


  void PetscSolverGlobalBlockMatrix::initPreconditioner(PC pc)
  {
    FUNCNAME("PetscSolverGlobalBlockMatrix::initPreconditioner()");

    PCSetFromOptions(pc);
  }


  void PetscSolverGlobalBlockMatrix::exitPreconditioner(PC pc)
  {
    FUNCNAME("PetscSolverGlobalBlockMatrix::exitPreconditioner()");
  }


  void PetscSolverGlobalBlockMatrix::solvePetscMatrix(SystemVector &vec, 
						      AdaptInfo *adaptInfo)
  {
    FUNCNAME("PetscSolverGlobalBlockMatrix::solvePetscMatrix()");

    const FiniteElemSpace *feSpace = componentSpaces[0];
    VecDuplicate(getVecRhsInterior(), &petscSolVec);
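    // Fill the PETSc solution vector with the current values of the DOF
    // vectors; these act as initial guess if a nonzero initial guess is
    // enabled on the KSP.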
    
    for (int i = 0; i < vec.getSize(); i++)
    {
      Vec tmp;
      VecNestGetSubVec(petscSolVec, i, &tmp);
      setDofVector(tmp, vec.getDOFVector(i));
      VecAssemblyBegin(tmp); 
      VecAssemblyEnd(tmp);
    }

    // Solve the linear system with PETSc.
    solve(getVecRhsInterior(), petscSolVec);

    // === Transfer values from PETSc's solution vectors to the DOF vectors. ===
    for (int i = 0; i < nComponents; i++) {
      DOFVector<double> &dofvec = *(vec.getDOFVector(i));

      Vec tmp;
      VecNestGetSubVec(petscSolVec, i, &tmp);

//       int nRankDofs = (*interiorMap)[feSpace].nRankDofs;
      PetscScalar *vecPointer;
      VecGetArray(tmp, &vecPointer);

      DofMap& d = (*interiorMap)[feSpace].getMap();
      for (DofMap::iterator it = d.begin(); it != d.end(); ++it)
	if (it->second.local != -1)
	  dofvec[it->first] = vecPointer[it->second.local];

      VecRestoreArray(tmp, &vecPointer);
    }

    VecDestroy(&petscSolVec);

    // === Synchronize the solution at shared DOFs, i.e., DOFs that belong ===
    // === to more than one partition.                                     ===
    meshDistributor->synchVector(vec);
  }


  void PetscSolverGlobalBlockMatrix::destroyMatrixData()
  {
    FUNCNAME("PetscSolverGlobalBlockMatrix::destroyMatrixData()");

    for (unsigned int i = 0; i < nestMat.size(); i++)
      if (nestMat[i] != PETSC_NULL)
	MatDestroy(&(nestMat[i]));

    matDestroy();

    exitPreconditioner(pcInterior);

    exitSolver(kspInterior);
  }


  void PetscSolverGlobalBlockMatrix::destroyVectorData()
  {
    FUNCNAME("PetscSolverGlobalBlockMatrix::destroyVectorData()");

    for (unsigned int i = 0; i < nestVec.size(); i++)
      VecDestroy(&(nestVec[i]));
      
    vecDestroy();
  }


  void PetscSolverGlobalBlockMatrix::setDofMatrix(Mat& petscMat, 
						  DOFMatrix* seqMat,
						  int dispRowBlock, 
						  int dispColBlock)
  {
    FUNCNAME("PetscSolverGlobalBlockMatrix::setDofMatrix()");

    TEST_EXIT(petscMat)("No PETSc matrix!\n");
    TEST_EXIT(seqMat)("No DOFMatrix!\n");

    const FiniteElemSpace *feSpace = componentSpaces[0];

    using mtl::tag::row; using mtl::tag::nz; using mtl::begin; using mtl::end;
    namespace traits = mtl::traits;
    typedef DOFMatrix::base_matrix_type Matrix;

    traits::col<Matrix>::type col(seqMat->getBaseMatrix());
    traits::const_value<Matrix>::type value(seqMat->getBaseMatrix());

    typedef traits::range_generator<row, Matrix>::type cursor_type;
    typedef traits::range_generator<nz, cursor_type>::type icursor_type;

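    // Offsets of this component's rows and columns within its block: each
    // preceding component in the same block occupies nRankDofs rows.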
    int dispRowIndex = (*interiorMap)[feSpace].nRankDofs * dispRowBlock;
    int dispColIndex = (*interiorMap)[feSpace].nRankDofs * dispColBlock;

    vector<int> cols;
    vector<double> values;
    cols.reserve(3000);
    values.reserve(3000);
    
    // === Traverse all rows of the DOF matrix and insert the values row-wise ===
    // === into the PETSc matrix.                                             ===

    for (cursor_type cursor = begin<row>(seqMat->getBaseMatrix()), 
	   cend = end<row>(seqMat->getBaseMatrix()); cursor != cend; ++cursor) {

      // Global index of the current row DOF.
      int rowIndex = (*interiorMap)[feSpace][cursor.value()].global + dispRowIndex;
      cols.clear();
      values.clear();
      
      for (icursor_type icursor = begin<nz>(cursor), icend = end<nz>(cursor); 
	   icursor != icend; ++icursor) {	
	// Global index of the current column DOF.
	int colIndex = (*interiorMap)[feSpace][col(*icursor)].global + dispColIndex;
	
	// Ignore all zero entries, except for diagonal entries.
	if (value(*icursor) == 0.0 && rowIndex != colIndex)
	  continue;
	
	// Collect the column index and value for the row-wise insertion below.
	cols.push_back(colIndex);
	values.push_back(value(*icursor));
      }

      MatSetValues(petscMat, 1, &rowIndex, cols.size(), 
 		   &(cols[0]), &(values[0]), ADD_VALUES);	
    }
  }
  
  
  void PetscSolverGlobalBlockMatrix::setDofVector(Vec& petscVec, 
						  DOFVector<double>* vec)
  {
    FUNCNAME("PetscSolverGlobalBlockMatrix::setDofVector()");

    const FiniteElemSpace *feSpace = componentSpaces[0];

    // Traverse all used DOFs in the dof vector.
    DOFVector<double>::Iterator dofIt(vec, USED_DOFS);
    for (dofIt.reset(); !dofIt.end(); ++dofIt) {
      int index = (*interiorMap)[feSpace][dofIt.getDOFIndex()].global;
      double value = *dofIt;

      VecSetValues(petscVec, 1, &index, &value, ADD_VALUES);
    }
  }

} } // end namespace Parallel, AMDiS