Commit 2529675c authored by Thomas Witkowski

Updates the estimator to work with the parallel version.

parent 5781e1c6
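
The core change follows one pattern: each rank estimates the error only on its own part of the mesh, and the local results are then combined across all ranks with MPI reductions (sum for the summed estimates, max for the maxima). A minimal, self-contained sketch of that pattern, using the plain C MPI API and made-up local values:

```cpp
#include <mpi.h>
#include <cmath>
#include <cstdio>

int main(int argc, char** argv)
{
  MPI_Init(&argc, &argv);

  // Hypothetical per-rank values; in the estimator these are the local
  // sum and maximum of the element-wise error indicators.
  double localSum = 0.25;
  double localMax = 0.1;

  // Summed estimates are added over all ranks, maxima are maximized.
  double globalSum = 0.0, globalMax = 0.0;
  MPI_Allreduce(&localSum, &globalSum, 1, MPI_DOUBLE, MPI_SUM, MPI_COMM_WORLD);
  MPI_Allreduce(&localMax, &globalMax, 1, MPI_DOUBLE, MPI_MAX, MPI_COMM_WORLD);

  int rank = 0;
  MPI_Comm_rank(MPI_COMM_WORLD, &rank);
  if (rank == 0)
    std::printf("est_sum = %e  est_max = %e\n", std::sqrt(globalSum), globalMax);

  MPI_Finalize();
  return 0;
}
```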
@@ -171,15 +171,15 @@ namespace AMDiS {
/// Sum of all error estimates
double est_sum;
/// Maximal error estimate
double est_max;
/// Sum of all time error estimates
double est_t_sum;
/// Max of all time error estimates
double est_t_max;
/// Maximal error estimate
double est_max;
/** \brief
* Vector of DOFMatrix pointers. There can be more than one
* DOFMatrix, if the Estimator is part of a vector valued
......
@@ -23,7 +23,7 @@ namespace AMDiS {
PetscErrorCode myKSPMonitor(KSP ksp, PetscInt iter, PetscReal rnorm, void *)
{
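// Print the current residual norm only for every 100th iteration and only
// on rank 0, so that the log stays readable in large parallel runs.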
// if (iter % 100 == 0 && MPI::COMM_WORLD.Get_rank() == 0)
if (iter % 100 == 0 && MPI::COMM_WORLD.Get_rank() == 0)
std::cout << " Petsc-Iteration " << iter << ": " << rnorm << std::endl;
return 0;
@@ -154,6 +154,10 @@ namespace AMDiS {
VecCreate(PETSC_COMM_WORLD, &petscSolVec);
VecSetSizes(petscSolVec, nRankRows, nOverallRows);
VecSetType(petscSolVec, VECMPI);
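// A second vector with the same parallel layout; used after the solve to
// compute the residual norm (see solvePetscMatrix()).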
VecCreate(PETSC_COMM_WORLD, &petscTmpVec);
VecSetSizes(petscTmpVec, nRankRows, nOverallRows);
VecSetType(petscTmpVec, VECMPI);
}
@@ -161,6 +165,7 @@ namespace AMDiS {
{
VecDestroy(petscRhsVec);
VecDestroy(petscSolVec);
VecDestroy(petscTmpVec);
}
@@ -441,7 +446,6 @@ namespace AMDiS {
FUNCNAME("ParallelDomainBase::solvePetscMatrix()");
KSP solver;
PC pc;
KSPCreate(PETSC_COMM_WORLD, &solver);
KSPSetOperators(solver, petscMatrix, petscMatrix, SAME_NONZERO_PATTERN);
@@ -449,11 +453,8 @@ namespace AMDiS {
KSPSetTolerances(solver, 1.e-8, PETSC_DEFAULT, PETSC_DEFAULT, PETSC_DEFAULT);
KSPSetType(solver, KSPBCGS);
KSPMonitorSet(solver, myKSPMonitor, PETSC_NULL, 0);
KSPSetFromOptions(solver);
KSPGetPC(solver, &pc);
PCSetType(pc, PCKSP);
KSPSetUp(solver);
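// Solve the assembled system; the result is written to petscSolVec.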
KSPSolve(solver, petscRhsVec, petscSolVec);
PetscScalar *vecPointer;
@@ -522,8 +523,17 @@ namespace AMDiS {
for (int i = 0; i < static_cast<int>(sendBuffers.size()); i++)
delete [] sendBuffers[i];
int iterations = 0;
KSPGetIterationNumber(solver, &iterations);
MSG(" Number of iterations: %d\n", iterations);
double norm = 0.0;
MatMult(petscMatrix, petscSolVec, petscTmpVec);
VecAXPY(petscTmpVec, -1.0, petscRhsVec);
VecNorm(petscTmpVec, NORM_2, &norm);
MSG(" Residual norm: %e\n", norm);
MatDestroy(petscMatrix);
PCDestroy(pc);
KSPDestroy(solver);
}
......
@@ -359,6 +359,8 @@ namespace AMDiS {
Vec petscSolVec;
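/// Temporary PETSc vector, used to compute the residual norm of the solution.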
Vec petscTmpVec;
/// Number of DOFs in the rank mesh.
int nRankDOFs;
......
@@ -6,6 +6,10 @@
#include "Traverse.h"
#include "Parameters.h"
#ifdef HAVE_PARALLEL_DOMAIN_AMDIS
#include "mpi.h"
#endif
namespace AMDiS {
ResidualEstimator::ResidualEstimator(std::string name, int r)
@@ -126,6 +130,18 @@ namespace AMDiS {
{
FUNCNAME("ResidualEstimator::exit()");
#ifdef HAVE_PARALLEL_DOMAIN_AMDIS
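// Every rank has estimated only its own part of the mesh, so combine the
// local results: the summed estimates are added over all ranks, the
// maximal estimates are maximized.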
double send_est_sum = est_sum;
double send_est_max = est_max;
double send_est_t_sum = est_t_sum;
double send_est_t_max = est_t_max;
MPI::COMM_WORLD.Allreduce(&send_est_sum, &est_sum, 1, MPI_DOUBLE, MPI_SUM);
MPI::COMM_WORLD.Allreduce(&send_est_max, &est_max, 1, MPI_DOUBLE, MPI_MAX);
MPI::COMM_WORLD.Allreduce(&send_est_t_sum, &est_t_sum, 1, MPI_DOUBLE, MPI_SUM);
MPI::COMM_WORLD.Allreduce(&send_est_t_max, &est_t_max, 1, MPI_DOUBLE, MPI_MAX);
#endif
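// The summed estimates accumulate squared element indicators, so the square
// root is taken only after the (now global) sums are complete.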
est_sum = sqrt(est_sum);
est_t_sum = sqrt(est_t_sum);
......