//
// Software License for AMDiS
//
// Copyright (c) 2010 Dresden University of Technology 
// All rights reserved.
// Authors: Simon Vey, Thomas Witkowski et al.
//
// This file is part of AMDiS
//
// See also license.opensource.txt in the distribution.


#include <vector>
#include <set>

#include "parallel/PetscProblemStat.h"
#include "parallel/PetscSolver.h"
#include "parallel/MpiHelper.h"

namespace AMDiS {

  using namespace std;


  PetscProblemStat::PetscProblemStat(string nameStr,
				     ProblemIterationInterface *problemIteration)
    : ParallelProblemStatBase(nameStr, problemIteration)
  {
    FUNCNAME("PetscProblemStat::PetscProblemStat()");

    string name("");
    Parameters::get("parallel->solver", name);
    
    if (name == "petsc-schur") {
#ifdef HAVE_PETSC_DEV
      petscSolver = new PetscSolverSchur();
#else
      ERROR_EXIT("PETSc schur complement solver is only supported when petsc-dev is used!\n");
#endif
    } else if (name == "petsc-feti") {
#ifdef HAVE_PETSC_DEV
      petscSolver = new PetscSolverFeti();
#else
      ERROR_EXIT("PETSc FETI-DP solver is only supported when petsc-dev is used!\n");
#endif
    } else if (name == "petsc" || name == "") {
      petscSolver = new PetscSolverGlobalMatrix();
    } else {
      ERROR_EXIT("No parallel solver %s available!\n", name.c_str());
    }
  }


  void PetscProblemStat::initialize(Flag initFlag, 
				    ProblemStatSeq* adoptProblem,
				    Flag adoptFlag)
  {
    ParallelProblemStatBase::initialize(initFlag, adoptProblem, adoptFlag);

    // Tell the mesh distributor which boundary DOF data the selected PETSc
    // solver requires.
    meshDistributor->setBoundaryDofRequirement(petscSolver->getBoundaryDofRequirement());
  }


  void PetscProblemStat::solve(AdaptInfo *adaptInfo, bool fixedMatrix)
  {
    FUNCNAME("PetscProblemStat::solve()");

    TEST_EXIT(meshDistributor)("Should not happen!\n");

    double wtime = MPI::Wtime();
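    // The "#if 0" blocks below are disabled debug output; when enabled they
    // report per-rank and accumulated (mpi::globalAdd) memory usage at three
    // stages of the solve.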

#if 0
    double vm, rss;
    processMemUsage(vm, rss);
    MSG("STAGE 1\n");
    MSG("My memory usage is VM = %.1f MB    RSS = %.1f MB\n", vm, rss);

    mpi::globalAdd(vm);
    mpi::globalAdd(rss);
    MSG("Overall memory usage is VM = %.1f MB    RSS = %.1f MB\n", vm, rss);
#endif

    // Hand the mesh distributor to the solver and assemble the parallel PETSc
    // matrix and right-hand side from the distributed AMDiS system.
    petscSolver->setMeshDistributor(meshDistributor);
    petscSolver->fillPetscMatrix(systemMatrix, rhs);

#if 0
    processMemUsage(vm, rss);
    MSG("STAGE 2\n");
    MSG("My memory usage is VM = %.1f MB    RSS = %.1f MB\n", vm, rss);

    mpi::globalAdd(vm);
    mpi::globalAdd(rss);
    MSG("Overall memory usage is VM = %.1f MB    RSS = %.1f MB\n", vm, rss);
#endif

    // Solve the linear system; the result is written back into the AMDiS
    // solution vector.
    petscSolver->solvePetscMatrix(*solution, adaptInfo);

#if 0
    processMemUsage(vm, rss);
    MSG("STAGE 3\n");
    MSG("My memory usage is VM = %.1f MB    RSS = %.1f MB\n", vm, rss);

    mpi::globalAdd(vm);
    mpi::globalAdd(rss);
    MSG("Overall memory usage is VM = %.1f MB    RSS = %.1f MB\n", vm, rss);
#endif

    INFO(info, 8)("solution of discrete system needed %.5f seconds\n", 
		  MPI::Wtime() - wtime);
  }

}