//
// Software License for AMDiS
//
// Copyright (c) 2010 Dresden University of Technology 
// All rights reserved.
// Authors: Simon Vey, Thomas Witkowski et al.
//
// This file is part of AMDiS
//
// See also license.opensource.txt in the distribution.


#include <vector>
#include <set>

#include "parallel/PetscProblemStat.h"
#include "parallel/PetscSolver.h"
#include "parallel/MpiHelper.h"

namespace AMDiS {

  using namespace std;


  PetscProblemStat::PetscProblemStat(string nameStr,
				     ProblemIterationInterface *problemIteration)
    : ParallelProblemStatBase(nameStr, problemIteration)
  {
    FUNCNAME("PetscProblemStat::PetscProblemStat()");

    string name("");
    Parameters::get("parallel->solver", name);
    
    if (name == "petsc-schur") {
#ifdef HAVE_PETSC_DEV
      petscSolver = new PetscSolverSchur();
#else
      ERROR_EXIT("PETSc schur complement solver is only supported when petsc-dev is used!\n");
#endif
    } else if (name == "petsc-feti") {
#ifdef HAVE_PETSC_DEV
      petscSolver = new PetscSolverFeti();
#else
      ERROR_EXIT("PETSc FETI-DP solver is only supported when petsc-dev is used!\n");
#endif
    } else if (name == "petsc" || name == "") {
      petscSolver = new PetscSolverGlobalMatrix();
    } else {
      ERROR_EXIT("No parallel solver %s available!\n", name.c_str());
    }
  }
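
  // A sketch of how the solver is selected via the AMDiS init file: the
  // "parallel->solver" key is the one read above, the value names come from
  // the branches in the constructor, and the "key: value" line format is the
  // usual init file convention (exact prefixing may depend on the setup):
  //
  //   parallel->solver: petsc-feti
  //
  // An empty value, or "petsc", falls back to the global matrix solver.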


  void PetscProblemStat::initialize(Flag initFlag,
				    ProblemStatSeq* adoptProblem,
				    Flag adoptFlag)
  {
    ParallelProblemStatBase::initialize(initFlag, adoptProblem, adoptFlag);

    // Tell the mesh distributor which boundary DOF information the chosen
    // PETSc solver requires.
    meshDistributor->setBoundaryDofRequirement(petscSolver->getBoundaryDofRequirement());
  }


  void PetscProblemStat::solve(AdaptInfo *adaptInfo, bool fixedMatrix)
  {
    FUNCNAME("PetscProblemStat::solve()");

    TEST_EXIT(meshDistributor)("No mesh distributor set!\n");

    double wtime = MPI::Wtime();

    // Stage 1: memory usage before the parallel PETSc matrix is assembled.
    double vm, rss;
    processMemUsage(vm, rss);
    // Convert to MB for the log output.
    vm /= 1024.0;
    rss /= 1024.0;

    MSG("STAGE 1\n");
    MSG("My memory usage is VM = %.1f MB    RSS = %.1f MB\n", vm, rss);

    // Accumulate the per-rank values to get the overall usage.
    mpi::globalAdd(vm);
    mpi::globalAdd(rss);

    MSG("Overall memory usage is VM = %.1f MB    RSS = %.1f MB\n", vm, rss);

    // Hand the mesh distributor to the solver and assemble the distributed
    // PETSc matrix and right-hand side.
    petscSolver->setMeshDistributor(meshDistributor);
    petscSolver->fillPetscMatrix(systemMatrix, rhs);

    // Stage 2: memory usage after matrix assembly.
    processMemUsage(vm, rss);
    vm /= 1024.0;
    rss /= 1024.0;
    MSG("STAGE 2\n");
    MSG("My memory usage is VM = %.1f MB    RSS = %.1f MB\n", vm, rss);

    mpi::globalAdd(vm);
    mpi::globalAdd(rss);

    MSG("Overall memory usage is VM = %.1f MB    RSS = %.1f MB\n", vm, rss);

    // Solve the distributed system; the result is written to the solution
    // vector of this problem.
    petscSolver->solvePetscMatrix(*solution, adaptInfo);

    // Stage 3: memory usage after the solve.
    processMemUsage(vm, rss);
    vm /= 1024.0;
    rss /= 1024.0;
    MSG("STAGE 3\n");
    MSG("My memory usage is VM = %.1f MB    RSS = %.1f MB\n", vm, rss);

    mpi::globalAdd(vm);
    mpi::globalAdd(rss);

    MSG("Overall memory usage is VM = %.1f MB    RSS = %.1f MB\n", vm, rss);


    INFO(info, 8)("solution of discrete system needed %.5f seconds\n", 
		  MPI::Wtime() - wtime);
  }
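
  // A rough usage sketch (purely illustrative; the problem name "ellipt" and
  // the default arguments assumed here follow common AMDiS examples, not
  // anything defined in this file):
  //
  //   PetscProblemStat prob("ellipt");
  //   prob.initialize(INIT_ALL);
  //
  //   AdaptInfo adaptInfo("adapt", prob.getNumComponents());
  //   prob.solve(&adaptInfo);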

}