Commit e98887be authored by Naumann, Andreas

petsc error messages and debugging, petscSolver with more consistent interface compared to sequential solvers
parent 14519454
@@ -20,7 +20,9 @@
#include "Line.h"
#include "Triangle.h"
#include "Tetrahedron.h"
#ifdef HAVE_PARALLEL_DOMAIN_AMDIS
#include "petsc.h"
#endif
namespace AMDiS {
const char *funcName = NULL;
@@ -176,8 +178,11 @@ namespace AMDiS {
vsprintf(buff, format, arg);
PRINT_LINE((*error), buff);
va_end(arg);
#ifdef HAVE_PARALLEL_DOMAIN_AMDIS
PetscError(MPI_COMM_WORLD, __LINE__, "Msg::print_error_exit", "Global.cc", "src/", 1, PETSC_ERROR_INITIAL, buff);
#else
throw std::runtime_error(buff);
#endif
exit(1);
}
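A minimal sketch (hypothetical failing_operation(), sequential build without HAVE_PARALLEL_DOMAIN_AMDIS) of what the new #else branch enables: the error text is raised as a std::runtime_error that calling code can catch, instead of the process only printing and exiting.

#include <stdexcept>
#include <iostream>

// Hypothetical stand-in for a routine that ends up in Msg::print_error_exit().
void failing_operation()
{
  throw std::runtime_error("matrix assembly failed");
}

int main()
{
  try {
    failing_operation();
  } catch (const std::runtime_error& e) {
    // With the throw in the sequential branch, the message is recoverable here.
    std::cerr << "caught AMDiS error: " << e.what() << std::endl;
  }
  return 0;
}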
@@ -102,7 +102,7 @@ namespace AMDiS {
return -1;
}
inline int solveSystem(const SolverMatrix<Matrix<DOFMatrix*> >& A,
virtual int solveSystem(const SolverMatrix<Matrix<DOFMatrix*> >& A,
SystemVector& x,
SystemVector& b,
bool createMatrixData,
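The change from inline to virtual matters because the PETSc solver is used through the common solver interface. A minimal sketch with hypothetical class names, showing that only a virtual solveSystem() dispatches to the derived implementation through a base-class pointer:

#include <iostream>

struct SolverBase {
  // Without 'virtual', a call through a SolverBase* would always bind here.
  virtual int solveSystem() { std::cout << "sequential solve\n"; return 0; }
  virtual ~SolverBase() {}
};

struct ParallelPetscSolver : SolverBase {
  int solveSystem() { std::cout << "PETSc-based parallel solve\n"; return 0; }
};

int main()
{
  SolverBase* solver = new ParallelPetscSolver();
  solver->solveSystem();   // prints "PETSc-based parallel solve"
  delete solver;
  return 0;
}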
@@ -28,7 +28,8 @@ namespace AMDiS {
PetscProblemStat::PetscProblemStat(string nameStr,
ProblemIterationInterface *problemIteration)
: ParallelProblemStatBase(nameStr, problemIteration),
deleteSolver(true)
deleteSolver(true),
petscSolver(NULL)
{
FUNCNAME("PetscProblemStat::PetscProblemStat()");
@@ -48,11 +49,11 @@
Flag adoptFlag)
{
ParallelProblemStatBase::initialize(initFlag, adoptProblem, adoptFlag);
TEST_EXIT(petscSolver != NULL)("petsc parallelization does not work without petsc solver\n");
string tmp("");
Parameters::get(getName() + "->solver->petsc prefix", tmp);
petscSolver->setKspPrefix(tmp);
meshDistributor->setBoundaryDofRequirement(petscSolver->getBoundaryDofRequirement());
petscSolver->setMeshDistributor(meshDistributor, 0);
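A minimal sketch (assuming PETSc is initialized by the program; the prefix name is illustrative) of what the "solver->petsc prefix" parameter feeds into: a KSP options prefix, so PETSc command-line options for this solver can be selected as -myprefix_ksp_type, -myprefix_pc_type, and so on.

#include <petscksp.h>

int main(int argc, char** argv)
{
  PetscInitialize(&argc, &argv, NULL, NULL);

  KSP ksp;
  KSPCreate(PETSC_COMM_WORLD, &ksp);
  KSPSetOptionsPrefix(ksp, "myprefix_");   // same role as petscSolver->setKspPrefix(tmp)
  KSPSetFromOptions(ksp);                  // picks up -myprefix_* runtime options

  KSPDestroy(&ksp);
  PetscFinalize();
  return 0;
}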
@@ -89,6 +89,32 @@ namespace AMDiS {
}
}
int PetscSolver::solveSystem(const SolverMatrix<Matrix<DOFMatrix*> >& A,
SystemVector& x,
SystemVector& b,
bool createMatrixData,
bool storeMatrixData)
{
MPI::COMM_WORLD.Barrier();
double wtime = MPI::Wtime();
if (createMatrixData)
fillPetscMatrix(const_cast< Matrix< DOFMatrix* >* >(A.getOriginalMat()));
fillPetscRhs(&b);
INFO(info, 8)("creation of parallel data structures needed %.5f seconds\n",
MPI::Wtime() - wtime);
wtime = MPI::Wtime();
solvePetscMatrix(x, NULL);
INFO(info, 8)("solution of discrete system needed %.5f seconds\n",
MPI::Wtime() - wtime);
destroyVectorData();
return 0;
}
void PetscSolver::fillPetscMatrix(DOFMatrix* mat)
{
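The new solveSystem() wraps each phase in the same timing pattern; a minimal, self-contained sketch of that pattern (using the MPI C++ bindings, as the surrounding code does):

#include <mpi.h>
#include <cstdio>

int main(int argc, char** argv)
{
  MPI::Init(argc, argv);

  MPI::COMM_WORLD.Barrier();        // synchronize ranks so the timing is comparable
  double wtime = MPI::Wtime();

  // ... phase to be timed, e.g. filling the PETSc matrix and right-hand side ...

  std::printf("phase needed %.5f seconds\n", MPI::Wtime() - wtime);

  MPI::Finalize();
  return 0;
}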
@@ -58,6 +58,13 @@ namespace AMDiS {
vector<const FiniteElemSpace*> &feSpaces,
bool createGlobalMapping = true);
/// Consistent interface to sequential solvers
int solveSystem(const SolverMatrix<Matrix<DOFMatrix*> >& A,
SystemVector& x,
SystemVector& b,
bool createMatrixData,
bool storeMatrixData);
/** \brief
* Create a PETSc matrix. The given DOF matrices are used to create the nnz
* structure of the PETSc matrix and the values are transferred to it.
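A minimal sketch (plain PETSc API, with illustrative sizes and nonzero counts) of what "create the nnz structure" refers to: the PETSc matrix is preallocated with per-row nonzero counts before the DOF matrix values are transferred into it.

#include <petscmat.h>

int main(int argc, char** argv)
{
  PetscInitialize(&argc, &argv, NULL, NULL);

  Mat A;
  MatCreate(PETSC_COMM_WORLD, &A);
  MatSetSizes(A, PETSC_DECIDE, PETSC_DECIDE, 100, 100);
  MatSetType(A, MATMPIAIJ);

  // Preallocation from a (here hypothetical) nnz structure: at most 5 nonzeros
  // per row in the diagonal block, at most 2 in the off-process block.
  MatMPIAIJSetPreallocation(A, 5, NULL, 2, NULL);

  // ... MatSetValues(...) would transfer the DOF matrix entries here ...

  MatAssemblyBegin(A, MAT_FINAL_ASSEMBLY);
  MatAssemblyEnd(A, MAT_FINAL_ASSEMBLY);

  MatDestroy(&A);
  PetscFinalize();
  return 0;
}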