Commit 2529675c authored by Thomas Witkowski

Updates estimator to work with the parallel version.

parent 5781e1c6
@@ -171,15 +171,15 @@ namespace AMDiS {
     /// Sum of all error estimates
     double est_sum;

-    /// Maximal error estimate
-    double est_max;
-
     /// Sum of all time error estimates
     double est_t_sum;

     /// Max of all time error estimates
     double est_t_max;

+    /// Maximal error estimate
+    double est_max;
+
     /** \brief
      * Vector of DOFMatrix pointers. There can be more than one
      * DOFMatrix, if the Estimator is part of a vector valued
...
@@ -23,7 +23,7 @@ namespace AMDiS {
   PetscErrorCode myKSPMonitor(KSP ksp, PetscInt iter, PetscReal rnorm, void *)
   {
-    // if (iter % 100 == 0 && MPI::COMM_WORLD.Get_rank() == 0)
+    if (iter % 100 == 0 && MPI::COMM_WORLD.Get_rank() == 0)
       std::cout << "  Petsc-Iteration " << iter << ": " << rnorm << std::endl;

     return 0;
@@ -154,6 +154,10 @@ namespace AMDiS {
     VecCreate(PETSC_COMM_WORLD, &petscSolVec);
     VecSetSizes(petscSolVec, nRankRows, nOverallRows);
     VecSetType(petscSolVec, VECMPI);
+
+    VecCreate(PETSC_COMM_WORLD, &petscTmpVec);
+    VecSetSizes(petscTmpVec, nRankRows, nOverallRows);
+    VecSetType(petscTmpVec, VECMPI);
   }
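For readers unfamiliar with the two-size convention in this hunk: VecSetSizes takes the local (per-rank) row count first and the global count second, and PETSc expects the local sizes to sum to the global size across the ranks of PETSC_COMM_WORLD. A minimal sketch of the create/use/destroy pattern for such a distributed vector, using the same old-style PETSc calls as this diff (newer PETSc releases pass a Vec* to VecDestroy):

    // Sketch: distributed PETSc vector with explicit local/global sizes.
    // nRankRows and nOverallRows are the same quantities used in the diff.
    Vec v;
    VecCreate(PETSC_COMM_WORLD, &v);          // collective on all ranks
    VecSetSizes(v, nRankRows, nOverallRows);  // local rows, then global rows
    VecSetType(v, VECMPI);                    // distributed (MPI) vector
    // ... fill and use v ...
    VecDestroy(v);                            // old-style call, as in this diff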
@@ -161,6 +165,7 @@ namespace AMDiS {
   {
     VecDestroy(petscRhsVec);
     VecDestroy(petscSolVec);
+    VecDestroy(petscTmpVec);
   }
@@ -441,7 +446,6 @@ namespace AMDiS {
     FUNCNAME("ParallelDomainBase::solvePetscMatrix()");

     KSP solver;
-    PC pc;

     KSPCreate(PETSC_COMM_WORLD, &solver);
     KSPSetOperators(solver, petscMatrix, petscMatrix, SAME_NONZERO_PATTERN);
@@ -449,11 +453,8 @@ namespace AMDiS {
     KSPSetTolerances(solver, 1.e-8, PETSC_DEFAULT, PETSC_DEFAULT, PETSC_DEFAULT);
     KSPSetType(solver, KSPBCGS);
     KSPMonitorSet(solver, myKSPMonitor, PETSC_NULL, 0);
-    KSPSetFromOptions(solver);
-    KSPGetPC(solver, &pc);
-    PCSetType(pc, PCKSP);
-    KSPSetUp(solver);
     KSPSolve(solver, petscRhsVec, petscSolVec);

     PetscScalar *vecPointer;
@@ -522,8 +523,17 @@ namespace AMDiS {
     for (int i = 0; i < static_cast<int>(sendBuffers.size()); i++)
       delete [] sendBuffers[i];

+    int iterations = 0;
+    KSPGetIterationNumber(solver, &iterations);
+    MSG("  Number of iterations: %d\n", iterations);
+
+    double norm = 0.0;
+    MatMult(petscMatrix, petscSolVec, petscTmpVec);
+    VecAXPY(petscTmpVec, -1.0, petscRhsVec);
+    VecNorm(petscTmpVec, NORM_2, &norm);
+    MSG("  Residual norm: %e\n", norm);
+
     MatDestroy(petscMatrix);
-    PCDestroy(pc);
     KSPDestroy(solver);
   }
...
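The block added to solvePetscMatrix() above checks the solve a posteriori: it reports the Krylov iteration count and recomputes the true residual norm with the new temporary vector. A condensed sketch of that check, reusing the diff's own variable names and old-style PETSc calls:

    // Report how many iterations the Krylov solver (BiCGStab here) needed.
    int iterations = 0;
    KSPGetIterationNumber(solver, &iterations);

    // True residual: petscTmpVec = A*x, then subtract b, then take the 2-norm.
    // VecAXPY(y, a, x) computes y = y + a*x, so this yields A*x - b;
    // its 2-norm equals ||b - A*x||_2.
    double norm = 0.0;
    MatMult(petscMatrix, petscSolVec, petscTmpVec);
    VecAXPY(petscTmpVec, -1.0, petscRhsVec);
    VecNorm(petscTmpVec, NORM_2, &norm);

Recomputing the residual from the original matrix this way is independent of whatever (possibly preconditioned) norm the KSP convergence test itself monitored.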
@@ -359,6 +359,8 @@ namespace AMDiS {
     Vec petscSolVec;
+
+    Vec petscTmpVec;

     /// Number of DOFs in the rank mesh.
     int nRankDOFs;
...
@@ -6,6 +6,10 @@
 #include "Traverse.h"
 #include "Parameters.h"

+#ifdef HAVE_PARALLEL_DOMAIN_AMDIS
+#include "mpi.h"
+#endif
+
 namespace AMDiS {

   ResidualEstimator::ResidualEstimator(std::string name, int r)
@@ -126,6 +130,18 @@ namespace AMDiS {
   {
     FUNCNAME("ResidualEstimator::exit()");

+#ifdef HAVE_PARALLEL_DOMAIN_AMDIS
+    double send_est_sum = est_sum;
+    double send_est_max = est_max;
+    double send_est_t_sum = est_t_sum;
+    double send_est_t_max = est_t_max;
+
+    MPI::COMM_WORLD.Allreduce(&send_est_sum, &est_sum, 1, MPI_DOUBLE, MPI_SUM);
+    MPI::COMM_WORLD.Allreduce(&send_est_max, &est_max, 1, MPI_DOUBLE, MPI_MAX);
+    MPI::COMM_WORLD.Allreduce(&send_est_t_sum, &est_t_sum, 1, MPI_DOUBLE, MPI_SUM);
+    MPI::COMM_WORLD.Allreduce(&send_est_t_max, &est_t_max, 1, MPI_DOUBLE, MPI_MAX);
+#endif
+
     est_sum = sqrt(est_sum);
     est_t_sum = sqrt(est_t_sum);
...
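The reduction added to ResidualEstimator::exit() combines per-rank estimates before the global square roots are taken: the squared sums combine with MPI_SUM, the maxima with MPI_MAX, and every rank needs the result (hence Allreduce rather than Reduce). A minimal sketch of the same pattern with the plain C MPI API, since the MPI C++ bindings used in the diff were deprecated in MPI-2.2 and removed in MPI-3; the function and parameter names here are illustrative, not from AMDiS:

    #include <cmath>
    #include <mpi.h>

    // localSumSq: this rank's sum of squared element indicators;
    // localMax:   this rank's largest single element indicator.
    // Writes the global estimate sqrt(sum over all ranks) to *globalEst
    // and the global maximum indicator to *globalMax.
    void reduceEstimates(double localSumSq, double localMax,
                         double *globalEst, double *globalMax)
    {
      double sumSq = 0.0;
      MPI_Allreduce(&localSumSq, &sumSq, 1, MPI_DOUBLE, MPI_SUM, MPI_COMM_WORLD);
      // sqrt only after summing: the estimate is one global l2-type norm
      // over all elements, not a sum of per-rank norms.
      *globalEst = std::sqrt(sumSq);
      MPI_Allreduce(&localMax, globalMax, 1, MPI_DOUBLE, MPI_MAX, MPI_COMM_WORLD);
    }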