/******************************************************************************
 *
 * AMDiS - Adaptive multidimensional simulations
 *
 * Copyright (C) 2013 Dresden University of Technology. All Rights Reserved.
 * Web: https://fusionforge.zih.tu-dresden.de/projects/amdis
 *
 * Authors: 
 * Simon Vey, Thomas Witkowski, Andreas Naumann, Simon Praetorius, et al.
 *
 * This file is provided AS IS with NO WARRANTY OF ANY KIND, INCLUDING THE
 * WARRANTY OF DESIGN, MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE.
 *
 *
 * This file is part of AMDiS
 *
 * See also license.opensource.txt in the distribution.
 * 
 ******************************************************************************/


#include <parallel/ParallelProblemStat.h>
#include <parallel/ParallelSolver.h>
#include <parallel/MeshDistributor.h>
#include <parallel/MpiHelper.h>

#include <parallel/ParallelMapper.h>
#include <solver/LinearSolverInterface.h>

#ifdef HAVE_PARALLEL_MTL4
    #include <parallel/PITL_Solver.h>
    #include <solver/KrylovPreconditioner.h>
#elif defined HAVE_PARALLEL_PETSC
    #include <parallel/PetscSolverGlobalMatrix.h>
    #include <parallel/PetscSolverGlobalBlockMatrix.h>
    #if defined HAVE_PARALLEL_SOLVERS
        #include <parallel/PetscSolverFeti.h>
        #include <parallel/PetscSolverSchur.h>
        #include <parallel/PetscSolverNavierStokes.h>
        #include <parallel/PetscSolverNSCH.h>
        #include <parallel/PetscSolverCahnHilliard.h>
        #include <parallel/PetscSolverCahnHilliard2.h>
    #endif
#endif

#ifdef HAVE_BDDC_ML
    // Assumed header location for the BddcMlSolver::Creator used below.
    #include <parallel/BddcMlSolver.h>
#endif

#include <Global.h>


#if defined HAVE_PARALLEL_PETSC || defined HAVE_SEQ_PETSC
  #include <solver/PetscTypes.h>
50
51
52
53
54
55
56
57
58
59
60
#endif

namespace AMDiS { namespace Parallel {

  double ParallelProblemStat::initTimeStamp = 0.0;
  bool ParallelProblemStat::initialized = false;


  ParallelProblemStat::ParallelProblemStat(std::string nameStr,
					   ProblemIterationInterface *problemIteration)
    : ProblemStatSeq(nameStr, problemIteration),
      meshDistributor(NULL)
  {
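    // Synchronize the construction time stamp: take the minimum over all MPI
    // ranks, so the timing reported in initialize() refers to the earliest
    // process.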
    initTimeStamp = MPI::Wtime();
    mpi::globalMin(initTimeStamp);
    addSolvers();
  }


  void ParallelProblemStat::initialize(Flag initFlag,
					   ProblemStatSeq *adoptProblem,
					   Flag adoptFlag)
  {
    FUNCNAME("ParallelProblemStat::initialize()");
    MSG("ParallelProblemStat::initialize()\n");
    
    MSG("Initialization phase 0 needed %.5f seconds\n", 
	MPI::Wtime() - initTimeStamp);

    ProblemStatSeq::initialize(initFlag, adoptProblem, adoptFlag);

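    // Register this problem at the global mesh distributor and let the
    // solution vector take part in the data exchange during mesh
    // redistribution.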
    MeshDistributor::addProblemStatGlobal(this);
    meshDistributor = MeshDistributor::globalMeshDistributor;
    meshDistributor->addInterchangeVector(getSolution());
        
    ParallelSolver *parallelSolver = dynamic_cast<ParallelSolver*>(solver);
    TEST_EXIT(parallelSolver != NULL)
      ("ParallelProblem loaded, but no ParallelSolver selected! This does not fit together.\n");

    parallelSolver->setMeshDistributor(meshDistributor, 0);
    
    // Additional components with their own mesh and FE space do not have to be
    // put into the parallel system, so consider only the FE spaces of the
    // first getNumComponents() components and collect them without duplicates.
    std::vector<const FiniteElemSpace*> tmpFeSpaces;
    std::vector<const FiniteElemSpace*> tmpComponentSpaces(getComponentSpaces().begin(),
							   getComponentSpaces().begin() + getNumComponents());
    for (size_t i = 0; i < tmpComponentSpaces.size(); i++)
      if (std::find(tmpFeSpaces.begin(), tmpFeSpaces.end(), tmpComponentSpaces[i]) == tmpFeSpaces.end())
	tmpFeSpaces.push_back(tmpComponentSpaces[i]);
      
    if (meshDistributor->isInitialized() && !adoptFlag.isSet(INIT_DOFMAP)) {
      WARNING("Meshdistributor already initialized. Initialization of ParallelSolver will "
              "clear the internal dofmap. Use the adoptFlag INIT_DOFMAP to adopt the dofmap "
              "from the adoptProblem!");
    }
      
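    // Either create a new DOF mapping (INIT_DOFMAP set and not adopted from
    // another problem), or initialize the solver without creating one and, if
    // requested, adopt the mapping of the adopted problem's parallel solver.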
    if (initFlag.isSet(INIT_DOFMAP) && 
        (!adoptProblem || !adoptFlag.isSet(INIT_DOFMAP))) {
      parallelSolver->init(tmpComponentSpaces, tmpFeSpaces);
    } else {
      parallelSolver->init(tmpComponentSpaces, tmpFeSpaces, false);
      if (adoptProblem && adoptFlag.isSet(INIT_DOFMAP))
        parallelSolver->setDofMapping(dynamic_cast<ParallelSolver*>(adoptProblem->getSolver())->getDofMapping());
    }
  }


  void ParallelProblemStat::buildAfterCoarsen(AdaptInfo *adaptInfo, Flag flag,
						  bool assembleMatrix,
						  bool assembleVector)
  {
    FUNCNAME("ParallelProblemStat::buildAfterCoarsen()");

    TEST_EXIT(MeshDistributor::globalMeshDistributor != NULL)
      ("No Meshdistributor! Should not happen!\n");

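    // Update the parallel structures after possible mesh changes from
    // adaptation before the sequential assembly is executed.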
    MeshDistributor::globalMeshDistributor->checkMeshChange();
    ProblemStatSeq::buildAfterCoarsen(adaptInfo, flag, 
				      assembleMatrix, assembleVector);
  }


  void ParallelProblemStat::addPeriodicBC(BoundaryType type, int row, int col)
  {
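    // Periodic boundary conditions can only be set up before the mesh
    // distributor is initialized; later calls are silently ignored.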
    if (MeshDistributor::globalMeshDistributor->isInitialized())
      return;

    ProblemStatSeq::addPeriodicBC(type, row, col);
  }
  

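  // Registers creators for all available parallel solvers in the global
  // CreatorMap of LinearSolverInterface. The map key is the name under which
  // a solver can be selected at runtime, e.g. (assuming the usual AMDiS
  // init-file syntax) "<problem name>->solver: p_petsc_petsc-feti".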
  void ParallelProblemStat::addSolvers()
  {
    if (initialized)
      return;
    initialized = true;

    LinearSolverCreator *creator;

#if defined HAVE_PARALLEL_MTL4
    creator = new P_CGSolver::Creator;
    CreatorMap< LinearSolverInterface >::addCreator("p_mtl_cg", creator);

    creator = new P_CGSSolver::Creator;
    CreatorMap< LinearSolverInterface >::addCreator("p_mtl_cgs", creator);

//     creator = new P_BiCGSolver::Creator;
//     CreatorMap< LinearSolverInterface >::addCreator("p_mtl_bicg", creator);

    creator = new P_BiCGStabSolver::Creator;
    CreatorMap< LinearSolverInterface >::addCreator("p_mtl_bicgstab", creator);

    creator = new P_BiCGStab2Solver::Creator;
    CreatorMap< LinearSolverInterface >::addCreator("p_mtl_bicgstab2", creator);

    creator = new P_BiCGStabEllSolver::Creator;
    CreatorMap< LinearSolverInterface >::addCreator("p_mtl_bicgstab_ell", creator);

    creator = new P_QMRSolver::Creator;
    CreatorMap< LinearSolverInterface >::addCreator("p_mtl_qmr", creator);

    creator = new P_TFQMRSolver::Creator;
    CreatorMap< LinearSolverInterface >::addCreator("p_mtl_tfqmr", creator);

    creator = new P_GMResSolver::Creator;
    CreatorMap< LinearSolverInterface >::addCreator("p_mtl_gmres", creator);

    creator = new P_FGMResSolver::Creator;
    CreatorMap< LinearSolverInterface >::addCreator("p_mtl_fgmres", creator);

    creator = new P_IDRsSolver::Creator;
    CreatorMap< LinearSolverInterface >::addCreator("p_mtl_idr_s", creator);

    creator = new P_MinResSolver::Creator;
    CreatorMap< LinearSolverInterface >::addCreator("p_mtl_minres", creator);

    creator = new P_PreOnly::Creator;
    CreatorMap< LinearSolverInterface >::addCreator("p_mtl_preonly", creator);
    CreatorMap< LinearSolverInterface >::addCreator("p_mtl_richardson", creator);

#elif defined HAVE_PARALLEL_PETSC
    creator = new PetscSolverGlobalMatrix::Creator;
    CreatorMap< LinearSolverInterface >::addCreator("p_petsc_petsc", creator); // standard PETSc creator
    
    // Register the standard PETSc creator additionally under all solver names
    // listed in PetscParameters::solverMap.
    std::map<std::string, std::string>::iterator it;
    PetscParameters params;
    for (it = params.solverMap.begin(); it != params.solverMap.end(); it++)
      CreatorMap< LinearSolverInterface >::addCreator("p_petsc_" + it->first, creator);
    
    creator = new PetscSolverGlobalBlockMatrix::Creator;
    CreatorMap< LinearSolverInterface >::addCreator("p_petsc_petsc-block", creator);
    
#if defined HAVE_PARALLEL_SOLVERS
    creator = new PetscSolverSchur::Creator;
    CreatorMap< LinearSolverInterface >::addCreator("p_petsc_petsc-schur", creator);
    
    creator = new PetscSolverFeti::Creator;
    CreatorMap< LinearSolverInterface >::addCreator("p_petsc_petsc-feti", creator);
      
    creator = new PetscSolverNavierStokes::Creator;
    CreatorMap< LinearSolverInterface >::addCreator("p_petsc_petsc-navierstokes", creator);
    CreatorMap< LinearSolverInterface >::addCreator("petsc-navierstokes", creator);
    
    CreatorMap< LinearSolverInterface >::addCreator("p_petsc-ch", new PetscSolverCahnHilliard::Creator);
    CreatorMap< LinearSolverInterface >::addCreator("p_petsc-ch2", new PetscSolverCahnHilliard2::Creator);
    
    CreatorMap< LinearSolverInterface >::addCreator("p_petsc-nsch", new PetscSolverNSCH::Creator);
#endif
    
#elif defined HAVE_BDDC_ML
    creator = new BddcMlSolver::Creator;
    CreatorMap< LinearSolverInterface >::addCreator("bddcml", creator);
#endif
  }
  
} // end namespace Parallel

  
#ifdef HAVE_PARALLEL_MTL4
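  // Register the default creators for the parallel MTL4 preconditioners, so
  // that the common preconditioner names ("diag", "ilu", "ic", "no",
  // "krylov"/"solver") are also available in the parallel case.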
  template< > 
  void CreatorMap<Parallel::ParallelPreconditioner>::addDefaultCreators() 
  {
    Parallel::ParallelPreconditionCreator *creator;
    
    creator =  new Parallel::P_DiagonalPreconditioner::Creator;
    addCreator("diag", creator);
    
    creator = new Parallel::P_ILUPreconditioner::Creator;
    addCreator("ilu", creator);
    
    creator = new Parallel::P_ICPreconditioner::Creator;
    addCreator("ic", creator);
    
    creator =  new Parallel::P_IdentityPreconditioner::Creator;
    addCreator("no", creator);

    creator =  new KrylovPreconditionerParallel::Creator;
    addCreator("krylov", creator);
    addCreator("solver", creator);
  }
#endif
  
} // end namespace AMDiS