//
// Software License for AMDiS
//
// Copyright (c) 2010 Dresden University of Technology 
// All rights reserved.
// Authors: Simon Vey, Thomas Witkowski et al.
//
// This file is part of AMDiS
//
// See also license.opensource.txt in the distribution.


#include "parallel/PetscSolverGlobalBlockMatrix.h"
#include "parallel/StdMpi.h"
#include "parallel/MpiHelper.h"

namespace AMDiS {

  void PetscSolverGlobalBlockMatrix::fillPetscMatrix(Matrix<DOFMatrix*> *mat)
  {
    FUNCNAME("PetscSolverGlobalBlockMatrix::fillPetscMatrix()");

    TEST_EXIT_DBG(meshDistributor)("No mesh distributor object defined!\n");
    TEST_EXIT_DBG(mat)("No DOF matrix defined!\n");

    double wtime = MPI::Wtime();

    const FiniteElemSpace *feSpace = meshDistributor->getFeSpace(0);
    nComponents = mat->getNumRows();

    int nRankRows = meshDistributor->getNumberRankDofs(feSpace);
    int nOverallRows = meshDistributor->getNumberOverallDofs(feSpace);

#if (DEBUG != 0)
    MSG("Fill petsc matrix 1 needed %.5f seconds\n", MPI::Wtime() - wtime);
#endif

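    // If no block structure has been prescribed, fall back to the trivial
    // splitting where each solution component forms its own block.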
    if (nBlocks == -1) {
      nBlocks = nComponents;
      for (int i = 0; i < nBlocks; i++)
	componentInBlock[i] = i;
    }

    vector<int> compNthInBlock(nComponents, 0);
    vector<int> blockSize(nBlocks, 0);

    for (int i = 0; i < nComponents; i++) {
      compNthInBlock[i] = blockSize[componentInBlock[i]];
      blockSize[componentInBlock[i]]++;
    }

    nestMat.resize(nBlocks * nBlocks);

    // === Transfer values from DOF matrices to the PETSc matrix. === 

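    // Create one distributed (MPIAIJ) matrix for every block pair. Local and
    // global sizes are scaled by the number of components in the block, and
    // 30 nonzeros per component row are preallocated for both the diagonal
    // and the off-diagonal part.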
    for (int i = 0; i < nBlocks; i++)
      for (int j = 0; j < nBlocks; j++)
	MatCreateMPIAIJ(mpiComm,
			nRankRows * blockSize[i], nRankRows * blockSize[j],
			nOverallRows * blockSize[i], nOverallRows * blockSize[j],
			30 * blockSize[i], PETSC_NULL,
			30 * blockSize[j], PETSC_NULL,
			&(nestMat[i * nBlocks + j]));

    for (int i = 0; i < nComponents; i++)
      for (int j = 0; j < nComponents; j++)
	if ((*mat)[i][j]) {
	  int idx = componentInBlock[i] * nBlocks + componentInBlock[j];
	  setDofMatrix(nestMat[idx], (*mat)[i][j], 
		       compNthInBlock[i], compNthInBlock[j]);
	}

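    // Finalize the assembly of all block matrices before they are combined
    // into the nested system matrix.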
    for (int i = 0; i < nBlocks; i++) {
      for (int j = 0; j < nBlocks; j++) {
	int idx = i * nBlocks + j;
	if (nestMat[idx]) {
	  MatAssemblyBegin(nestMat[idx], MAT_FINAL_ASSEMBLY);
	  MatAssemblyEnd(nestMat[idx], MAT_FINAL_ASSEMBLY);
	}
      }
    }

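    // Combine the block matrices into one nested (MATNEST) matrix that is
    // used as the operator of the Krylov solver.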
    MatCreateNest(mpiComm,
		  nBlocks, PETSC_NULL, nBlocks, PETSC_NULL,
		  &(nestMat[0]), &petscMatrix);

#if (DEBUG != 0)
    MSG("Fill petsc matrix 2 needed %.5f seconds\n", MPI::Wtime() - wtime);
#endif

    MatAssemblyBegin(petscMatrix, MAT_FINAL_ASSEMBLY);
    MatAssemblyEnd(petscMatrix, MAT_FINAL_ASSEMBLY);

    // === Init PETSc solver. ===
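    // The Krylov method and its parameters are chosen at runtime from the
    // PETSc options database (see KSPSetFromOptions).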
    KSPCreate(mpiComm, &solver);
    KSPSetOperators(solver, petscMatrix, petscMatrix, SAME_NONZERO_PATTERN); 
    KSPSetFromOptions(solver);

    MSG("Fill petsc matrix needed %.5f seconds\n", MPI::Wtime() - wtime);
  }


  void PetscSolverGlobalBlockMatrix::fillPetscRhs(SystemVector *vec)
  {
    FUNCNAME("PetscSolverGlobalBlockMatrix::fillPetscRhs()");

    TEST_EXIT_DBG(vec)("No DOF vector defined!\n");

    nComponents = vec->getSize();
    const FiniteElemSpace *feSpace = meshDistributor->getFeSpace(0);
    int nRankRows = meshDistributor->getNumberRankDofs(feSpace);
    int nOverallRows = meshDistributor->getNumberOverallDofs(feSpace);

    nestVec.resize(nComponents);

    for (int i = 0; i < nComponents; i++) {
      VecCreateMPI(mpiComm, nRankRows, nOverallRows, &(nestVec[i]));

      setDofVector(nestVec[i], vec->getDOFVector(i));
      
      VecAssemblyBegin(nestVec[i]);
      VecAssemblyEnd(nestVec[i]);
    }

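    // Combine the per-component vectors into one nested PETSc vector that
    // forms the right-hand side of the block system.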
    VecCreateNest(mpiComm, nComponents, PETSC_NULL,
		  &(nestVec[0]), &petscRhsVec);

    VecAssemblyBegin(petscRhsVec);
    VecAssemblyEnd(petscRhsVec);
  }


  void PetscSolverGlobalBlockMatrix::solvePetscMatrix(SystemVector &vec, 
						      AdaptInfo *adaptInfo)
  {
    FUNCNAME("PetscSolverGlobalBlockMatrix::solvePetscMatrix()");

    KSPGetPC(solver, &pc);
    setBlockPreconditioner(pc);

    const FiniteElemSpace *feSpace = meshDistributor->getFeSpace(0);
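    // The solution vector gets the same nested layout as the right-hand side.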
    VecDuplicate(petscRhsVec, &petscSolVec);

    // Solve the linear system with PETSc.
    KSPSolve(solver, petscRhsVec, petscSolVec);

    // === Transfer values from PETSc's solution vectors to the DOF vectors. ===
    for (int i = 0; i < nComponents; i++) {
      DOFVector<double> &dofvec = *(vec.getDOFVector(i));

      Vec tmp;
      VecNestGetSubVec(petscSolVec, i, &tmp);

      int nRankDofs = meshDistributor->getNumberRankDofs(feSpace);
      PetscScalar *vecPointer;
      VecGetArray(tmp, &vecPointer);

      for (int j = 0; j < nRankDofs; j++)
	dofvec[meshDistributor->mapLocalToDof(feSpace, j)] = vecPointer[j];

      VecRestoreArray(tmp, &vecPointer);
    }


    // === Synchronize the solution at common DOFs, i.e., DOFs that belong to ===
    // === more than one partition.                                           ===
    meshDistributor->synchVector(vec);
  }


  void PetscSolverGlobalBlockMatrix::destroyMatrixData()
  {
    FUNCNAME("PetscSolverGlobalBlockMatrix::destroyMatrixData()");

    for (unsigned int i = 0; i < nestMat.size(); i++)
      if (nestMat[i] != PETSC_NULL)
	MatDestroy(&(nestMat[i]));

    MatDestroy(&petscMatrix);
    KSPDestroy(&solver);
  }


  void PetscSolverGlobalBlockMatrix::destroyVectorData()
  {
    FUNCNAME("PetscSolverGlobalBlockMatrix::destroyVectorData()");

    VecDestroy(&petscRhsVec);
    for (int i = 0; i < nComponents; i++)
      VecDestroy(&(nestVec[i]));

    VecDestroy(&petscSolVec);
  }


  void PetscSolverGlobalBlockMatrix::setDofMatrix(Mat& petscMat, 
						  DOFMatrix* mat,
						  int dispRowBlock, 
						  int dispColBlock)
  {
    FUNCNAME("PetscSolverGlobalBlockMatrix::setDofMatrix()");

    TEST_EXIT(mat)("No DOFMatrix!\n");
    TEST_EXIT(petscMat)("No PETSc matrix!\n");

    const FiniteElemSpace *feSpace = meshDistributor->getFeSpace(0);

    using mtl::tag::row; using mtl::tag::nz; using mtl::begin; using mtl::end;
    namespace traits = mtl::traits;
    typedef DOFMatrix::base_matrix_type Matrix;

    traits::col<Matrix>::type col(mat->getBaseMatrix());
    traits::const_value<Matrix>::type value(mat->getBaseMatrix());

    typedef traits::range_generator<row, Matrix>::type cursor_type;
    typedef traits::range_generator<nz, cursor_type>::type icursor_type;

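    // Offset of this component inside its block: components sharing a block
    // are shifted by multiples of the number of rank-owned DOFs.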
    int dispRowIndex = meshDistributor->getNumberRankDofs(feSpace) * dispRowBlock;
    int dispColIndex = meshDistributor->getNumberRankDofs(feSpace) * dispColBlock;

    vector<int> cols;
    vector<double> values;
    cols.reserve(300);
    values.reserve(300);
    
    // === Traverse all rows of the DOF matrix and insert the values row-wise ===
    // === into the PETSc matrix.                                             ===

    for (cursor_type cursor = begin<row>(mat->getBaseMatrix()), 
	   cend = end<row>(mat->getBaseMatrix()); cursor != cend; ++cursor) {

      // Global index of the current row DOF.
      int rowIndex = 
	meshDistributor->mapDofToGlobal(feSpace, *cursor) + dispRowIndex;

      cols.clear();
      values.clear();
      
      for (icursor_type icursor = begin<nz>(cursor), icend = end<nz>(cursor); 
	   icursor != icend; ++icursor) {	
	// Global index of the current column DOF.
	int colIndex = 
	  meshDistributor->mapDofToGlobal(feSpace, col(*icursor)) + dispColIndex;
	
	// Ignore all zero entries, except for diagonal entries.
	if (value(*icursor) == 0.0 && rowIndex != colIndex)
	  continue;
	
	// Store the column index and value for the following MatSetValues call.
	cols.push_back(colIndex);
	values.push_back(value(*icursor));
      }

      MatSetValues(petscMat, 1, &rowIndex, cols.size(),
		   &(cols[0]), &(values[0]), ADD_VALUES);
    }
  }
  
  
  void PetscSolverGlobalBlockMatrix::setDofVector(Vec& petscVec, 
						  DOFVector<double>* vec)
  {
    FUNCNAME("PetscSolverGlobalBlockMatrix::setDofVector()");

    const FiniteElemSpace *feSpace = meshDistributor->getFeSpace(0);

    // Traverse all used DOFs in the dof vector.
    DOFVector<double>::Iterator dofIt(vec, USED_DOFS);
    for (dofIt.reset(); !dofIt.end(); ++dofIt) {
      int index = meshDistributor->mapDofToGlobal(feSpace, dofIt.getDOFIndex());
      double value = *dofIt;

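      // With ADD_VALUES, contributions to the same global index from
      // different ranks are summed during vector assembly.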
      VecSetValues(petscVec, 1, &index, &value, ADD_VALUES);
    }
  }

}