/******************************************************************************
 *
 * AMDiS - Adaptive multidimensional simulations
 *
 * Copyright (C) 2013 Dresden University of Technology. All Rights Reserved.
 * Web: https://fusionforge.zih.tu-dresden.de/projects/amdis
 *
 * Authors:
 * Simon Vey, Thomas Witkowski, Andreas Naumann, Simon Praetorius, et al.
 *
 * This file is provided AS IS with NO WARRANTY OF ANY KIND, INCLUDING THE
 * WARRANTY OF DESIGN, MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE.
 *
 * This file is part of AMDiS
 *
 * See also license.opensource.txt in the distribution.
 *
 ******************************************************************************/


#include "parallel/PetscSolverGlobalBlockMatrix.h"
#include "parallel/StdMpi.h"
#include "parallel/MpiHelper.h"

using namespace std;

namespace AMDiS { namespace Parallel {

  void PetscSolverGlobalBlockMatrix::fillPetscMatrix(Matrix<DOFMatrix*> *seqMat)
  {
    FUNCNAME("PetscSolverGlobalBlockMatrix::fillPetscMatrix()");

    TEST_EXIT_DBG(meshDistributor)("No mesh distributor object defined!\n");
    TEST_EXIT_DBG(interiorMap)("No parallel mapping object defined!\n");
    TEST_EXIT_DBG(seqMat)("No DOF matrix defined!\n");

    double wtime = MPI::Wtime();

    prepare();

    const FiniteElemSpace *feSpace = componentSpaces[0];
    nComponents = seqMat->getNumRows();
    int nRankRows = (*interiorMap)[feSpace].nRankDofs;
    int nOverallRows = (*interiorMap)[feSpace].nOverallDofs;

#ifndef NDEBUG
    MSG("Fill petsc matrix 1 needed %.5f seconds\n", MPI::Wtime() - wtime);
#endif

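    // Without an explicitly prescribed block structure, every component
    // forms its own block.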
    if (nBlocks == -1) {
      nBlocks = nComponents;
      for (int i = 0; i < nBlocks; i++)
	componentInBlock[i] = i;
    }

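    // Determine for each component its position inside its block and the
    // number of components per block.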
    vector<int> compNthInBlock(nComponents, 0);
    vector<int> blockSize(nBlocks, 0);

    for (int i = 0; i < nComponents; i++) {
      compNthInBlock[i] = blockSize[componentInBlock[i]];
      blockSize[componentInBlock[i]]++;
    }

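    // The nested matrix holds one PETSc matrix per block pair.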
    nestMat.resize(nBlocks * nBlocks);

    // === Transfer values from DOF matrices to the PETSc matrix. ===

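    // Create one distributed AIJ matrix per block pair. The preallocation of
    // 300 nonzeros per row, scaled by the block size, is a rough upper bound.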
    for (int i = 0; i < nBlocks; i++)
      for (int j = 0; j < nBlocks; j++)
	MatCreateAIJ(domainComm,
		     nRankRows * blockSize[i], nRankRows * blockSize[j],
		     nOverallRows * blockSize[i], nOverallRows * blockSize[j],
		     300 * blockSize[i], PETSC_NULL,
		     300 * blockSize[j], PETSC_NULL,
		     &(nestMat[i * nBlocks + j]));

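    // Copy each nonzero component matrix into its position inside the
    // corresponding block.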
    for (int i = 0; i < nComponents; i++)
      for (int j = 0; j < nComponents; j++)
	if ((*seqMat)[i][j]) {
	  int idx = componentInBlock[i] * nBlocks + componentInBlock[j];
	  setDofMatrix(nestMat[idx], (*seqMat)[i][j],
		       compNthInBlock[i], compNthInBlock[j]);
	}

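    // Finalize the assembly of all block matrices.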
    for (int i = 0; i < nBlocks; i++) {
      for (int j = 0; j < nBlocks; j++) {
	int idx = i * nBlocks + j;
	if (nestMat[idx]) {
	  MatAssemblyBegin(nestMat[idx], MAT_FINAL_ASSEMBLY);
	  MatAssemblyEnd(nestMat[idx], MAT_FINAL_ASSEMBLY);
	}
      }
    }


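    // Combine the single blocks into one nested PETSc matrix.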
    MatCreateNest(domainComm, nBlocks, PETSC_NULL, nBlocks, PETSC_NULL,
		  &(nestMat[0]), &getMatInterior());

#ifndef NDEBUG
    MSG("Fill petsc matrix 2 needed %.5f seconds\n", MPI::Wtime() - wtime);
#endif

    matAssembly();


    /// initPreconditioner(...)

    // === Init PETSc solver and preconditioner objects. ===

    initSolver(kspInterior);
    KSPGetPC(kspInterior, &pcInterior);
    initPreconditioner(pcInterior);

    MSG("Fill petsc matrix needed %.5f seconds\n", MPI::Wtime() - wtime);
  }


  void PetscSolverGlobalBlockMatrix::fillPetscRhs(SystemVector *vec)
  {
    FUNCNAME("PetscSolverGlobalBlockMatrix::fillPetscRhs()");

    TEST_EXIT_DBG(vec)("No DOF vector defined!\n");

    nComponents = vec->getSize();
    const FiniteElemSpace *feSpace = componentSpaces[0];
    int nRankRows = (*interiorMap)[feSpace].nRankDofs;
    int nOverallRows = (*interiorMap)[feSpace].nOverallDofs;

    nestVec.resize(nComponents);

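    // Create one distributed vector per component and fill it from the
    // corresponding sequential DOF vector.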
    for (int i = 0; i < nComponents; i++) {
      VecCreateMPI(domainComm, nRankRows, nOverallRows, &(nestVec[i]));

      setDofVector(nestVec[i], vec->getDOFVector(i));

      VecAssemblyBegin(nestVec[i]);
      VecAssemblyEnd(nestVec[i]);
    }

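    // Combine the component vectors into one nested right-hand side vector.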
    VecCreateNest(domainComm, nComponents, PETSC_NULL,
		  &(nestVec[0]), &(getVecRhsInterior()));

    vecRhsAssembly();
  }


  void PetscSolverGlobalBlockMatrix::initSolver(KSP &ksp)
  {
    FUNCNAME("PetscSolverGlobalBlockMatrix::initSolver()");

    KSPCreate(domainComm, &ksp);
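    // Since PETSc 3.5, KSPSetOperators() no longer takes a MatStructure flag.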
#if (PETSC_VERSION_MINOR >= 5)
    KSPSetOperators(ksp, getMatInterior(), getMatInterior());
#else
    KSPSetOperators(ksp, getMatInterior(), getMatInterior(), SAME_NONZERO_PATTERN);
#endif
    KSPSetOptionsPrefix(ksp, kspPrefix.c_str());
    KSPSetFromOptions(ksp);
  }


  void PetscSolverGlobalBlockMatrix::exitSolver(KSP ksp)
  {
    FUNCNAME("PetscSolverGlobalBlockMatrix::exitSolver()");

    KSPDestroy(&ksp);
  }


  void PetscSolverGlobalBlockMatrix::initPreconditioner(PC pc)
  {
    FUNCNAME("PetscSolverGlobalBlockMatrix::initPreconditioner()");

    PCSetFromOptions(pc);
  }


  void PetscSolverGlobalBlockMatrix::exitPreconditioner(PC pc)
  {
    FUNCNAME("PetscSolverGlobalBlockMatrix::exitPreconditioner()");
  }


  void PetscSolverGlobalBlockMatrix::solvePetscMatrix(SystemVector &vec,
						      AdaptInfo *adaptInfo)
  {
    FUNCNAME("PetscSolverGlobalBlockMatrix::solvePetscMatrix()");

    const FiniteElemSpace *feSpace = componentSpaces[0];
    VecDuplicate(getVecRhsInterior(), &petscSolVec);

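    // Copy the DOF vectors into the PETSc solution vector; its content can
    // serve as the initial guess for the iterative solver.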
    for (int i = 0; i < vec.getSize(); i++)
    {
      Vec tmp;
      VecNestGetSubVec(petscSolVec, i, &tmp);
      setDofVector(tmp, vec.getDOFVector(i));
      VecAssemblyBegin(tmp);
      VecAssemblyEnd(tmp);
    }

    // Solve the system with PETSc.
    solve(getVecRhsInterior(), petscSolVec);

    // === Transfer values from PETSc's solution vectors to the DOF vectors. ===
    for (int i = 0; i < nComponents; i++) {
      DOFVector<double> &dofvec = *(vec.getDOFVector(i));

      Vec tmp;
      VecNestGetSubVec(petscSolVec, i, &tmp);

//       int nRankDofs = (*interiorMap)[feSpace].nRankDofs;
      PetscScalar *vecPointer;
      VecGetArray(tmp, &vecPointer);

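      // Copy the rank-local part of the PETSc solution back into the
      // sequential DOF vector.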
      DofMap& d = (*interiorMap)[feSpace].getMap();
      for (DofMap::iterator it = d.begin(); it != d.end(); ++it)
	if (it->second.local != -1)
	  dofvec[it->first] = vecPointer[it->second.local];

      VecRestoreArray(tmp, &vecPointer);
    }

    VecDestroy(&petscSolVec);

    // === Synchronize DOFs at common DOFs, i.e., DOFs that correspond to ===
    // === more than one partition.                                       ===
    meshDistributor->synchVector(vec);
  }


  void PetscSolverGlobalBlockMatrix::destroyMatrixData()
  {
    FUNCNAME("PetscSolverGlobalBlockMatrix::destroyMatrixData()");

    for (unsigned int i = 0; i < nestMat.size(); i++)
      if (nestMat[i] != PETSC_NULL)
	MatDestroy(&(nestMat[i]));

    matDestroy();

    exitPreconditioner(pcInterior);

    exitSolver(kspInterior);
  }


  void PetscSolverGlobalBlockMatrix::destroyVectorData()
  {
    FUNCNAME("PetscSolverGlobalBlockMatrix::destroyVectorData()");

    for (unsigned int i = 0; i < nestVec.size(); i++)
      VecDestroy(&(nestVec[i]));

    vecDestroy();
  }


  void PetscSolverGlobalBlockMatrix::setDofMatrix(Mat& petscMat,
						  DOFMatrix* seqMat,
						  int dispRowBlock,
						  int dispColBlock)
  {
    FUNCNAME("PetscSolverGlobalBlockMatrix::setDofMatrix()");

    TEST_EXIT(petscMat)("No PETSc matrix!\n");
    TEST_EXIT(seqMat)("No DOFMatrix!\n");

    const FiniteElemSpace *feSpace = componentSpaces[0];

    using mtl::tag::row; using mtl::tag::nz; using mtl::begin; using mtl::end;
    namespace traits = mtl::traits;
    typedef DOFMatrix::base_matrix_type Matrix;

    traits::col<Matrix>::type col(seqMat->getBaseMatrix());
    traits::const_value<Matrix>::type value(seqMat->getBaseMatrix());

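    // Cursor types for traversing the matrix row by row and, within a row,
    // over its nonzero entries.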
    typedef traits::range_generator<row, Matrix>::type cursor_type;
    typedef traits::range_generator<nz, cursor_type>::type icursor_type;

    int dispRowIndex = (*interiorMap)[feSpace].nRankDofs * dispRowBlock;
    int dispColIndex = (*interiorMap)[feSpace].nRankDofs * dispColBlock;

    vector<int> cols;
    vector<double> values;
    cols.reserve(3000);
    values.reserve(3000);

    // === Traverse all rows of the DOF matrix and insert the values row-wise ===
    // === into the PETSc matrix.                                             ===

    for (cursor_type cursor = begin<row>(seqMat->getBaseMatrix()),
	   cend = end<row>(seqMat->getBaseMatrix()); cursor != cend; ++cursor) {

      // Global index of the current row DOF.
      int rowIndex = (*interiorMap)[feSpace][cursor.value()].global + dispRowIndex;

      cols.clear();
      values.clear();

      for (icursor_type icursor = begin<nz>(cursor), icend = end<nz>(cursor);
	   icursor != icend; ++icursor) {
	// Global index of the current column DOF.
	int colIndex = (*interiorMap)[feSpace][col(*icursor)].global + dispColIndex;

	// Ignore all zero entries, except if it is a diagonal entry.
	if (value(*icursor) == 0.0 && rowIndex != colIndex)
	  continue;

	// Calculate the exact position of the column index in the PETSc matrix.
	cols.push_back(colIndex);
	values.push_back(value(*icursor));
      }

      MatSetValues(petscMat, 1, &rowIndex, cols.size(),
		   &(cols[0]), &(values[0]), ADD_VALUES);
    }
  }


  void PetscSolverGlobalBlockMatrix::setDofVector(Vec& petscVec,
						  DOFVector<double>* vec)
  {
    FUNCNAME("PetscSolverGlobalBlockMatrix::setDofVector()");

    const FiniteElemSpace *feSpace = componentSpaces[0];

    // Traverse all used DOFs in the dof vector.
    DOFVector<double>::Iterator dofIt(vec, USED_DOFS);
    for (dofIt.reset(); !dofIt.end(); ++dofIt) {
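      // Map the local DOF index to its global index in the parallel layout.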
      int index = (*interiorMap)[feSpace][dofIt.getDOFIndex()].global;
      double value = *dofIt;

      VecSetValues(petscVec, 1, &index, &value, ADD_VALUES);
    }
  }

} } // end namespace Parallel, AMDiS