// ParallelProblemStat.cc
/******************************************************************************
 *
 * AMDiS - Adaptive multidimensional simulations
 *
 * Copyright (C) 2013 Dresden University of Technology. All Rights Reserved.
 * Web: https://fusionforge.zih.tu-dresden.de/projects/amdis
 *
 * Authors: 
 * Simon Vey, Thomas Witkowski, Andreas Naumann, Simon Praetorius, et al.
 *
 * This file is provided AS IS with NO WARRANTY OF ANY KIND, INCLUDING THE
 * WARRANTY OF DESIGN, MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE.
 *
 *
 * This file is part of AMDiS
 *
 * See also license.opensource.txt in the distribution.
 * 
 ******************************************************************************/


#include "parallel/ParallelProblemStat.h"
#include "parallel/ParallelSolver.h"
#include "parallel/MeshDistributor.h"
#include "parallel/MpiHelper.h"

#include "parallel/ParallelMapper.h"
#include "solver/LinearSolverInterface.h"

#ifdef HAVE_PARALLEL_MTL4
  #include "parallel/PITL_Solver.h"
  #include "solver/KrylovPreconditioner.h"
#elif defined HAVE_PARALLEL_PETSC
  #include "parallel/PetscSolverFeti.h"
  #include "parallel/PetscSolverSchur.h"
  #include "parallel/PetscSolverGlobalBlockMatrix.h"
  #include "parallel/PetscSolverGlobalMatrix.h"
  #include "parallel/PetscSolverNavierStokes.h"
#endif
#include "Global.h"


#if defined HAVE_PARALLEL_PETSC || defined HAVE_SEQ_PETSC
  #include "solver/PetscTypes.h"
#endif

namespace AMDiS { namespace Parallel {

  double ParallelProblemStat::initTimeStamp = 0.0;
  bool ParallelProblemStat::initialized = false;


  ParallelProblemStat::ParallelProblemStat(std::string nameStr,
					   ProblemIterationInterface *problemIteration)
    : ProblemStatSeq(nameStr, problemIteration),
56
      meshDistributor(NULL)
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
  {
    initTimeStamp = MPI::Wtime();
    mpi::globalMin(initTimeStamp);
    addSolvers();
  }


  void ParallelProblemStat::initialize(Flag initFlag,
					   ProblemStatSeq *adoptProblem,
					   Flag adoptFlag)
  {
    FUNCNAME("ParallelProblemStat::initialize()");
    
    MSG("Initialization phase 0 needed %.5f seconds\n", 
	MPI::Wtime() - initTimeStamp);

    ProblemStatSeq::initialize(initFlag, adoptProblem, adoptFlag);

    MeshDistributor::addProblemStatGlobal(this);
    meshDistributor = MeshDistributor::globalMeshDistributor;
    meshDistributor->addInterchangeVector(getSolution());
        
    ParallelSolver *parallelSolver = dynamic_cast<ParallelSolver*>(solver);
80
    TEST_EXIT(parallelSolver != NULL)
81
82
83
84
85
86
87
88
89
90
91
92
93
      ("ParallelProblem loaded, but no ParallelSolver selected! This does not fit together.\n");

    parallelSolver->setMeshDistributor(meshDistributor, 0);
    parallelSolver->init(getComponentSpaces(), getFeSpaces());
  }


  void ParallelProblemStat::buildAfterCoarsen(AdaptInfo *adaptInfo, Flag flag,
						  bool assembleMatrix,
						  bool assembleVector)
  {
    FUNCNAME("ParallelProblemStat::buildAfterCoarsen()");

94
    TEST_EXIT(MeshDistributor::globalMeshDistributor != NULL)
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
      ("No Meshdistributor! Should not happen!\n");

    MeshDistributor::globalMeshDistributor->checkMeshChange();
    ProblemStatSeq::buildAfterCoarsen(adaptInfo, flag, 
				      assembleMatrix, assembleVector);
  }


  void ParallelProblemStat::addPeriodicBC(BoundaryType type, int row, int col)
  {
    if (MeshDistributor::globalMeshDistributor->isInitialized())
      return;

    ProblemStatSeq::addPeriodicBC(type, row, col);
  }
  

  void ParallelProblemStat::addSolvers()
  {
    if (!initialized) {
    initialized = true;
    LinearSolverCreator *creator;

#if defined HAVE_PARALLEL_MTL4
    creator = new P_CGSolver::Creator;
120
    CreatorMap< LinearSolverInterface >::addCreator("p_mtl_cg", creator);
121
122

    creator = new P_CGSSolver::Creator;
123
    CreatorMap< LinearSolverInterface >::addCreator("p_mtl_cgs", creator);
124
125

//     creator = new P_BiCGSolver::Creator;
126
//     CreatorMap< LinearSolverInterface >::addCreator("p_mtl_bicg", creator);
127
128

    creator = new P_BiCGStabSolver::Creator;
129
    CreatorMap< LinearSolverInterface >::addCreator("p_mtl_bicgstab", creator);
130
131

    creator = new P_BiCGStab2Solver::Creator;
132
    CreatorMap< LinearSolverInterface >::addCreator("p_mtl_bicgstab2", creator);
133
134

    creator = new P_BiCGStabEllSolver::Creator;
135
    CreatorMap< LinearSolverInterface >::addCreator("p_mtl_bicgstab_ell", creator);
136
137

    creator = new P_QMRSolver::Creator;
138
    CreatorMap< LinearSolverInterface >::addCreator("p_mtl_qmr", creator);
139
140

    creator = new P_TFQMRSolver::Creator;
141
    CreatorMap< LinearSolverInterface >::addCreator("p_mtl_tfqmr", creator);
142
143

    creator = new P_GMResSolver::Creator;
144
    CreatorMap< LinearSolverInterface >::addCreator("p_mtl_gmres", creator);
145
146

    creator = new P_FGMResSolver::Creator;
147
    CreatorMap< LinearSolverInterface >::addCreator("p_mtl_fgmres", creator);
148
149

    creator = new P_IDRsSolver::Creator;
150
    CreatorMap< LinearSolverInterface >::addCreator("p_mtl_idr_s", creator);
151
152

    creator = new P_MinResSolver::Creator;
153
    CreatorMap< LinearSolverInterface >::addCreator("p_mtl_minres", creator);
154
155
    
    creator = new P_PreOnly::Creator;
156
157
    CreatorMap< LinearSolverInterface >::addCreator("p_mtl_preonly", creator);
    CreatorMap< LinearSolverInterface >::addCreator("p_mtl_richardson", creator);
158
159
160

#elif defined HAVE_PARALLEL_PETSC
    creator = new PetscSolverGlobalMatrix::Creator;
161
    CreatorMap< LinearSolverInterface >::addCreator("p_petsc_petsc", creator); // standard PETSc creator
162
163
164
165
166
167
    
    std::map<std::string,std::string>::iterator it;
    PetscParameters params;
    for (it = params.solverMap.begin();
	 it!= params.solverMap.end();
	 it++) {
168
      CreatorMap< LinearSolverInterface >::addCreator("p_petsc_" + it->first, creator);
169
170
171
    }  
    
    creator = new PetscSolverSchur::Creator;
172
    CreatorMap< LinearSolverInterface >::addCreator("p_petsc_petsc-schur", creator);
173
174
    
    creator = new PetscSolverGlobalBlockMatrix::Creator;
175
    CreatorMap< LinearSolverInterface >::addCreator("p_petsc_petsc-block", creator);
176
177
    
    creator = new PetscSolverFeti::Creator;
178
    CreatorMap< LinearSolverInterface >::addCreator("p_petsc_petsc-feti", creator);
179
180
      
    creator = new PetscSolverNavierStokes::Creator;
181
182
    CreatorMap< LinearSolverInterface >::addCreator("p_petsc_petsc-navierstokes", creator);
    CreatorMap< LinearSolverInterface >::addCreator("petsc-navierstokes", creator);
183
184
185
    
#elif defined  HAVE_BDDC_ML    
    creator = new BddcMlSolver::Creator;
186
    CreatorMap< LinearSolverInterface >::addCreator("bddcml", creator);
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
#endif
    }
  }
  
} // end namespace Parallel

  
#ifdef HAVE_PARALLEL_MTL4
  template< > 
  void CreatorMap<Parallel::ParallelPreconditioner>::addDefaultCreators() 
  {
    Parallel::ParallelPreconditionCreator *creator;
    
    creator =  new Parallel::P_DiagonalPreconditioner::Creator;
    addCreator("diag", creator);
    
    creator = new Parallel::P_ILUPreconditioner::Creator;
    addCreator("ilu", creator);
    
    creator = new Parallel::P_ICPreconditioner::Creator;
    addCreator("ic", creator);
    
    creator =  new Parallel::P_IdentityPreconditioner::Creator;
    addCreator("no", creator);

    creator =  new KrylovPreconditionerParallel::Creator;
    addCreator("krylov", creator);
    addCreator("solver", creator);
  }
#endif
  
} // end namespace AMDiS