//
// Software License for AMDiS
//
// Copyright (c) 2010 Dresden University of Technology 
// All rights reserved.
// Authors: Simon Vey, Thomas Witkowski et al.
//
// This file is part of AMDiS
//
// See also license.opensource.txt in the distribution.


#include "parallel/PetscSolverGlobalBlockMatrix.h"
#include "parallel/StdMpi.h"
#include "parallel/MpiHelper.h"

namespace AMDiS {

  void PetscSolverGlobalBlockMatrix::fillPetscMatrix(Matrix<DOFMatrix*> *seqMat)
  {
    FUNCNAME("PetscSolverGlobalBlockMatrix::fillPetscMatrix()");

    TEST_EXIT_DBG(meshDistributor)("No mesh distributor object defined!\n");
    TEST_EXIT_DBG(interiorMap)("No parallel mapping object defined!\n");
    TEST_EXIT_DBG(seqMat)("No DOF matrix defined!\n");

    double wtime = MPI::Wtime();

    createMatVec(*seqMat);

    const FiniteElemSpace *feSpace = meshDistributor->getFeSpace(0);
    nComponents = seqMat->getNumRows();
    int nRankRows = (*interiorMap)[feSpace].nRankDofs;
    int nOverallRows = (*interiorMap)[feSpace].nOverallDofs;

#if (DEBUG != 0)
    MSG("Fill petsc matrix 1 needed %.5f seconds\n", MPI::Wtime() - wtime);
#endif

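    // If no block structure has been set, fall back to one block per component.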
    if (nBlocks == -1) {
      nBlocks = nComponents;
      for (int i = 0; i < nBlocks; i++)
	componentInBlock[i] = i;
    }

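    // For every component, store its position inside its block and count the block sizes.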
    vector<int> compNthInBlock(nComponents, 0);
    vector<int> blockSize(nBlocks, 0);

    for (int i = 0; i < nComponents; i++) {
      compNthInBlock[i] = blockSize[componentInBlock[i]];
      blockSize[componentInBlock[i]]++;
    }

    nestMat.resize(nBlocks * nBlocks);

    // === Transfer values from DOF matrices to the PETSc matrix. === 

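    // Create one distributed AIJ matrix per block pair; the preallocation assumes
    // roughly 30 nonzeros per row and component in the diagonal and off-diagonal parts.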
    for (int i = 0; i < nBlocks; i++)
      for (int j = 0; j < nBlocks; j++)
	MatCreateAIJ(mpiCommGlobal,
		     nRankRows * blockSize[i], nRankRows * blockSize[j],
		     nOverallRows * blockSize[i], nOverallRows * blockSize[j],
		     30 * blockSize[i], PETSC_NULL, 
		     30 * blockSize[j], PETSC_NULL,
		     &(nestMat[i * nBlocks + j]));
			
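    // Insert the entries of each DOF matrix into the block it belongs to.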
    for (int i = 0; i < nComponents; i++)
      for (int j = 0; j < nComponents; j++)
	if ((*seqMat)[i][j]) {
	  int idx = componentInBlock[i] * nBlocks + componentInBlock[j];
	  setDofMatrix(nestMat[idx], (*seqMat)[i][j], 
		       compNthInBlock[i], compNthInBlock[j]);
	}

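    // Finalize the assembly of all non-empty blocks.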
    for (int i = 0; i < nBlocks; i++) {
      for (int j = 0; j < nBlocks; j++) {
	int idx = i * nBlocks + j;
	if (nestMat[idx]) {
	  MatAssemblyBegin(nestMat[idx], MAT_FINAL_ASSEMBLY);
	  MatAssemblyEnd(nestMat[idx], MAT_FINAL_ASSEMBLY);
	}
      }
    }	  
	

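    // Combine the individual blocks into one nested PETSc matrix (MATNEST).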
    MatCreateNest(mpiCommGlobal, nBlocks, PETSC_NULL, nBlocks, PETSC_NULL,
		  &(nestMat[0]), &getMatInterior());

#if (DEBUG != 0)
    MSG("Fill petsc matrix 2 needed %.5f seconds\n", MPI::Wtime() - wtime);
#endif

    matAssembly();

    // === Init PETSc solver. ===
    KSPCreate(mpiCommGlobal, &kspInterior);
    KSPSetOperators(kspInterior, getMatInterior(), getMatInterior(), 
		    SAME_NONZERO_PATTERN); 
    KSPSetFromOptions(kspInterior);

    MSG("Fill petsc matrix needed %.5f seconds\n", MPI::Wtime() - wtime);
  }


  void PetscSolverGlobalBlockMatrix::fillPetscRhs(SystemVector *vec)
  {
    FUNCNAME("PetscSolverGlobalBlockMatrix::fillPetscRhs()");

    TEST_EXIT_DBG(vec)("No DOF vector defined!\n");

    nComponents = vec->getSize();
    const FiniteElemSpace *feSpace = meshDistributor->getFeSpace(0);
    int nRankRows = (*interiorMap)[feSpace].nRankDofs;
    int nOverallRows = (*interiorMap)[feSpace].nOverallDofs;

    nestVec.resize(nComponents);

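    // Create one distributed PETSc vector per component and fill it with the
    // rank-local DOF values.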
    for (int i = 0; i < nComponents; i++) {
      VecCreateMPI(mpiCommGlobal, nRankRows, nOverallRows, &(nestVec[i]));

      setDofVector(nestVec[i], vec->getDOFVector(i));
      
      VecAssemblyBegin(nestVec[i]);
      VecAssemblyEnd(nestVec[i]);
    }

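    // Combine the component vectors into one nested right-hand side vector.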
    VecCreateNest(mpiCommGlobal, nComponents, PETSC_NULL, 
		  &(nestVec[0]), &(getVecRhsInterior()));

    vecRhsAssembly();
  }


  void PetscSolverGlobalBlockMatrix::solvePetscMatrix(SystemVector &vec, 
						      AdaptInfo *adaptInfo)
  {
    FUNCNAME("PetscSolverGlobalBlockMatrix::solvePetscMatrix()");

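    // Attach the block preconditioner to the preconditioner object of the KSP solver.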
    KSPGetPC(kspInterior, &pcInterior);
    setBlockPreconditioner(pcInterior);

    const FiniteElemSpace *feSpace = meshDistributor->getFeSpace(0);
    VecDuplicate(getVecRhsInterior(), &petscSolVec);

    // Solve the linear system with PETSc.
    solve(getVecRhsInterior(), petscSolVec);

    // === Transfer values from PETSc's solution vectors to the DOF vectors. ===
    for (int i = 0; i < nComponents; i++) {
      DOFVector<double> &dofvec = *(vec.getDOFVector(i));

      Vec tmp;
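      // Get the sub-vector of the nested solution vector that corresponds to component i.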
      VecNestGetSubVec(petscSolVec, i, &tmp);

      int nRankDofs = (*interiorMap)[feSpace].nRankDofs;
      PetscScalar *vecPointer;
      VecGetArray(tmp, &vecPointer);

      DofMap& d = (*interiorMap)[feSpace].getMap();
      for (DofMap::iterator it = d.begin(); it != d.end(); ++it)
	if (it->second.local != -1)
	  dofvec[it->first] = vecPointer[it->second.local];

      VecRestoreArray(tmp, &vecPointer);
    }


    // === Synchronize DOFs at common DOFs, i.e., DOFs that correspond to ===
    // === more than one partition.                                       ===
    meshDistributor->synchVector(vec);
  }


  void PetscSolverGlobalBlockMatrix::destroyMatrixData()
  {
    FUNCNAME("PetscSolverGlobalBlockMatrix::destroyMatrixData()");

    for (unsigned int i = 0; i < nestMat.size(); i++)
      if (nestMat[i] != PETSC_NULL)
	MatDestroy(&(nestMat[i]));

    matDestroy();

    KSPDestroy(&kspInterior);
  }


  void PetscSolverGlobalBlockMatrix::destroyVectorData()
  {
    FUNCNAME("PetscSolverGlobalBlockMatrix::destroyVectorData()");

    vecDestroy();

    VecDestroy(&petscSolVec);
  }


  void PetscSolverGlobalBlockMatrix::setDofMatrix(Mat& petscMat, 
						  DOFMatrix* seqMat,
						  int dispRowBlock, 
						  int dispColBlock)
  {
    FUNCNAME("PetscSolverGlobalBlockMatrix::setDofMatrix()");

    TEST_EXIT(petscMat)("No PETSc matrix!\n");
    TEST_EXIT(seqMat)("No DOFMatrix!\n");

    const FiniteElemSpace *feSpace = meshDistributor->getFeSpace(0);

    using mtl::tag::row; using mtl::tag::nz; using mtl::begin; using mtl::end;
    namespace traits = mtl::traits;
    typedef DOFMatrix::base_matrix_type Matrix;

    traits::col<Matrix>::type col(seqMat->getBaseMatrix());
    traits::const_value<Matrix>::type value(seqMat->getBaseMatrix());

    typedef traits::range_generator<row, Matrix>::type cursor_type;
    typedef traits::range_generator<nz, cursor_type>::type icursor_type;

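    // Offsets of the component inside its block; each component occupies
    // nRankDofs consecutive rows/columns of the block.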
    int dispRowIndex = (*interiorMap)[feSpace].nRankDofs * dispRowBlock;
    int dispColIndex = (*interiorMap)[feSpace].nRankDofs * dispColBlock;

    vector<int> cols;
    vector<double> values;
    cols.reserve(300);
    values.reserve(300);
    
    // === Traverse all rows of the DOF matrix and insert the values row-wise ===
    // === into the PETSc matrix.                                             ===

    for (cursor_type cursor = begin<row>(seqMat->getBaseMatrix()), 
	   cend = end<row>(seqMat->getBaseMatrix()); cursor != cend; ++cursor) {

      // Global index of the current row DOF.
      int rowIndex = (*interiorMap)[feSpace][*cursor].global + dispRowIndex;

      cols.clear();
      values.clear();
      
      for (icursor_type icursor = begin<nz>(cursor), icend = end<nz>(cursor); 
	   icursor != icend; ++icursor) {	
	// Global index of the current column DOF.
	int colIndex = (*interiorMap)[feSpace][col(*icursor)].global + dispColIndex;
	
	// Ignore all zero entries, except if it is a diagonal entry.
	if (value(*icursor) == 0.0 && rowIndex != colIndex)
	  continue;
	
	// Remember the column index and value for the row-wise insertion below.
	cols.push_back(colIndex);
	values.push_back(value(*icursor));
      }

      MatSetValues(petscMat, 1, &rowIndex, cols.size(), 
 		   &(cols[0]), &(values[0]), ADD_VALUES);	
    }
  }
  
  
  void PetscSolverGlobalBlockMatrix::setDofVector(Vec& petscVec, 
						  DOFVector<double>* vec)
  {
    FUNCNAME("PetscSolverGlobalBlockMatrix::setDofVector()");

    const FiniteElemSpace *feSpace = meshDistributor->getFeSpace(0);

    // Traverse all used DOFs in the dof vector.
    DOFVector<double>::Iterator dofIt(vec, USED_DOFS);
    for (dofIt.reset(); !dofIt.end(); ++dofIt) {
      int index = (*interiorMap)[feSpace][dofIt.getDOFIndex()].global;
      double value = *dofIt;

      VecSetValues(petscVec, 1, &index, &value, ADD_VALUES);
    }
  }

}