/******************************************************************************
 *
 * AMDiS - Adaptive multidimensional simulations
 *
 * Copyright (C) 2013 Dresden University of Technology. All Rights Reserved.
 * Web: https://fusionforge.zih.tu-dresden.de/projects/amdis
 *
 * Authors: 
 * Simon Vey, Thomas Witkowski, Andreas Naumann, Simon Praetorius, et al.
 *
 * This file is provided AS IS with NO WARRANTY OF ANY KIND, INCLUDING THE
 * WARRANTY OF DESIGN, MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE.
 *
 *
 * This file is part of AMDiS
 *
 * See also license.opensource.txt in the distribution.
 * 
 ******************************************************************************/


#include "parallel/ParallelProblemStat.h"
#include "parallel/ParallelSolver.h"
#include "parallel/MeshDistributor.h"
#include "parallel/MpiHelper.h"

#include "parallel/ParallelMapper.h"
#include "solver/LinearSolver.h"

#ifdef HAVE_PARALLEL_MTL4
  #include "parallel/PITL_Solver.h"
  #include "solver/KrylovPreconditioner.h"
#elif defined HAVE_PARALLEL_PETSC
  #include "parallel/PetscSolverFeti.h"
  #include "parallel/PetscSolverSchur.h"
  #include "parallel/PetscSolverGlobalBlockMatrix.h"
  #include "parallel/PetscSolverGlobalMatrix.h"
  #include "parallel/PetscSolverNavierStokes.h"
#elif defined HAVE_BDDC_ML
  // Header providing BddcMlSolver::Creator used in addSolvers() below
  // (assumed location inside the AMDiS parallel sources).
  #include "parallel/BddcMlSolver.h"
#endif
#include "Global.h"


#if defined HAVE_PARALLEL_PETSC || defined HAVE_PETSC
  #include "solver/PetscTypes.h"
#endif

namespace AMDiS { namespace Parallel {

  double ParallelProblemStat::initTimeStamp = 0.0;
  bool ParallelProblemStat::initialized = false;


  ParallelProblemStat::ParallelProblemStat(std::string nameStr,
					   ProblemIterationInterface *problemIteration)
    : ProblemStatSeq(nameStr, problemIteration),
      meshDistributor(NULL)
  {
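    // Use the earliest timestamp over all MPI ranks as the common
    // initialization start time.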
    initTimeStamp = MPI::Wtime();
    mpi::globalMin(initTimeStamp);
    addSolvers();
  }


  void ParallelProblemStat::initialize(Flag initFlag,
					   ProblemStatSeq *adoptProblem,
					   Flag adoptFlag)
  {
    FUNCNAME("ParallelProblemStat::initialize()");
    
    MSG("Initialization phase 0 needed %.5f seconds\n", 
	MPI::Wtime() - initTimeStamp);

    ProblemStatSeq::initialize(initFlag, adoptProblem, adoptFlag);

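    // Register this problem at the global mesh distributor and add the
    // solution vector as an interchange vector, so that its values are
    // transferred when the mesh is redistributed.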
    MeshDistributor::addProblemStatGlobal(this);
    meshDistributor = MeshDistributor::globalMeshDistributor;
    meshDistributor->addInterchangeVector(getSolution());
        
    ParallelSolver *parallelSolver = dynamic_cast<ParallelSolver*>(solver);
    TEST_EXIT(parallelSolver != NULL)
      ("ParallelProblem loaded, but no ParallelSolver selected! This does not fit together.\n");

    parallelSolver->setMeshDistributor(meshDistributor, 0);
    parallelSolver->init(getComponentSpaces(), getFeSpaces());
  }


  void ParallelProblemStat::buildAfterCoarsen(AdaptInfo *adaptInfo, Flag flag,
						  bool assembleMatrix,
						  bool assembleVector)
  {
    FUNCNAME("ParallelProblemStat::buildAfterCoarsen()");

    TEST_EXIT(MeshDistributor::globalMeshDistributor != NULL)
      ("No Meshdistributor! Should not happen!\n");

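    // Update the parallel mesh and DOF data if the mesh has changed since
    // the last assembly, before the sequential assembly is performed.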
    MeshDistributor::globalMeshDistributor->checkMeshChange();
    ProblemStatSeq::buildAfterCoarsen(adaptInfo, flag, 
				      assembleMatrix, assembleVector);
  }


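  // Periodic boundary conditions are only forwarded to the sequential base
  // class while the mesh distributor is not yet initialized; later calls
  // are ignored here.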
  void ParallelProblemStat::addPeriodicBC(BoundaryType type, int row, int col)
  {
    if (MeshDistributor::globalMeshDistributor->isInitialized())
      return;

    ProblemStatSeq::addPeriodicBC(type, row, col);
  }
  

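  // Registers the creators of all parallel linear solvers available in this
  // build (parallel MTL4, parallel PETSc, or BDDCML) under the solver names
  // used in the parameter file. The registration is performed only once.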
  void ParallelProblemStat::addSolvers()
  {
    if (initialized)
      return;
    initialized = true;
    LinearSolverCreator *creator;

#if defined HAVE_PARALLEL_MTL4
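    // Krylov solvers from parallel MTL4, registered under "p_mtl_*" names.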
    creator = new P_CGSolver::Creator;
    CreatorMap< LinearSolver >::addCreator("p_mtl_cg", creator);

    creator = new P_CGSSolver::Creator;
    CreatorMap< LinearSolver >::addCreator("p_mtl_cgs", creator);

//     creator = new P_BiCGSolver::Creator;
//     CreatorMap< LinearSolver >::addCreator("p_mtl_bicg", creator);

    creator = new P_BiCGStabSolver::Creator;
    CreatorMap< LinearSolver >::addCreator("p_mtl_bicgstab", creator);

    creator = new P_BiCGStab2Solver::Creator;
    CreatorMap< LinearSolver >::addCreator("p_mtl_bicgstab2", creator);

    creator = new P_BiCGStabEllSolver::Creator;
    CreatorMap< LinearSolver >::addCreator("p_mtl_bicgstab_ell", creator);

    creator = new P_QMRSolver::Creator;
    CreatorMap< LinearSolver >::addCreator("p_mtl_qmr", creator);

    creator = new P_TFQMRSolver::Creator;
    CreatorMap< LinearSolver >::addCreator("p_mtl_tfqmr", creator);

    creator = new P_GMResSolver::Creator;
    CreatorMap< LinearSolver >::addCreator("p_mtl_gmres", creator);

    creator = new P_FGMResSolver::Creator;
    CreatorMap< LinearSolver >::addCreator("p_mtl_fgmres", creator);

    creator = new P_IDRsSolver::Creator;
    CreatorMap< LinearSolver >::addCreator("p_mtl_idr_s", creator);

    creator = new P_MinResSolver::Creator;
    CreatorMap< LinearSolver >::addCreator("p_mtl_minres", creator);
    
    creator = new P_PreOnly::Creator;
    CreatorMap< LinearSolver >::addCreator("p_mtl_preonly", creator);
    CreatorMap< LinearSolver >::addCreator("p_mtl_richardson", creator);

#elif defined HAVE_PARALLEL_PETSC
    creator = new PetscSolverGlobalMatrix::Creator;
    CreatorMap< LinearSolver >::addCreator("p_petsc_petsc", creator); // standard PETSc creator
    
    std::map<std::string,std::string>::iterator it;
    PetscParameters params;
    for (it = params.solverMap.begin();
	 it!= params.solverMap.end();
	 it++) {
      CreatorMap< LinearSolver >::addCreator("p_petsc_" + it->first, creator);
    }  
    
    creator = new PetscSolverSchur::Creator;
    CreatorMap< LinearSolver >::addCreator("p_petsc_petsc-schur", creator);
    
    creator = new PetscSolverGlobalBlockMatrix::Creator;
    CreatorMap< LinearSolver >::addCreator("p_petsc_petsc-block", creator);
    
    creator = new PetscSolverFeti::Creator;
    CreatorMap< LinearSolver >::addCreator("p_petsc_petsc-feti", creator);
      
    creator = new PetscSolverNavierStokes::Creator;
    CreatorMap< LinearSolver >::addCreator("p_petsc_petsc-navierstokes", creator);
    CreatorMap< LinearSolver >::addCreator("petsc-navierstokes", creator);
    
#elif defined HAVE_BDDC_ML
    creator = new BddcMlSolver::Creator;
    CreatorMap< LinearSolver >::addCreator("bddcml", creator);
#endif
  }
  
} // end namespace Parallel

  
#ifdef HAVE_PARALLEL_MTL4
  template< > 
  void CreatorMap<Parallel::ParallelPreconditioner>::addDefaultCreators() 
  {
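    // Default preconditioner creators for the parallel MTL4 backend,
    // selectable by the names registered below.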
    Parallel::ParallelPreconditionCreator *creator;
    
    creator =  new Parallel::P_DiagonalPreconditioner::Creator;
    addCreator("diag", creator);
    
    creator = new Parallel::P_ILUPreconditioner::Creator;
    addCreator("ilu", creator);
    
    creator = new Parallel::P_ICPreconditioner::Creator;
    addCreator("ic", creator);
    
    creator =  new Parallel::P_IdentityPreconditioner::Creator;
    addCreator("no", creator);

    creator =  new KrylovPreconditionerParallel::Creator;
    addCreator("krylov", creator);
    addCreator("solver", creator);
  }
#endif
  
} // end namespace AMDiS