Commit 1ded5c55 authored by Praetorius, Simon
parents 653bcf79 288da754
......@@ -304,16 +304,12 @@ namespace AMDiS
PCSetFromOptions(pc);
#ifndef NDEBUG
MSG("PetscOptionsView:\n");
PetscViewer viewer;
PetscViewerCreate(PETSC_COMM_WORLD, &viewer);
PetscViewerSetType(viewer, PETSCVIEWERASCII);
#if (PETSC_VERSION_MINOR >= 7)
PetscOptionsView(PETSC_NULL, viewer);
#else
PetscOptionsView(viewer);
#endif
PetscViewerDestroy(&viewer);
MSG("PetscOptionsView:\n");
PetscViewer viewer;
PetscViewerCreate(PETSC_COMM_WORLD, &viewer);
PetscViewerSetType(viewer, PETSCVIEWERASCII);
petsc::options_view(viewer);
PetscViewerDestroy(&viewer);
#endif
}
......@@ -334,19 +330,15 @@ namespace AMDiS
void createSolver(MPI::Intracomm comm, KSP &ksp, Mat m, std::string kspPrefix, int info)
{
KSPCreate(comm, &ksp);
#if (PETSC_VERSION_MINOR >= 5)
KSPSetOperators(ksp, m, m);
#else
KSPSetOperators(ksp, m, m, SAME_NONZERO_PATTERN);
#endif
petsc::ksp_set_operators(ksp, m, m);
KSPSetTolerances(ksp, 0.0, 1e-8, PETSC_DEFAULT, PETSC_DEFAULT);
KSPSetType(ksp, KSPBCGS);
KSPSetOptionsPrefix(ksp, kspPrefix.c_str());
if (info >= 10)
KSPMonitorSet(ksp, PETSC_MONITOR_CAST(KSPMonitorDefault), PETSC_NULL, PETSC_NULL);
petsc::ksp_monitor_set(ksp, KSPMonitorDefault);
else if (info >= 20)
KSPMonitorSet(ksp, PETSC_MONITOR_CAST(KSPMonitorTrueResidualNorm), PETSC_NULL, PETSC_NULL);
petsc::ksp_monitor_set(ksp, KSPMonitorTrueResidualNorm);
}
} // end namespace petsc_helper
......
......@@ -31,13 +31,6 @@
#include <petsc.h>
#include "AMDiS_fwd.h"
#if (PETSC_VERSION_MINOR >= 7)
#define PETSC_MONITOR_CAST(...) \
(PetscErrorCode (*)(KSP,PetscInt,PetscReal,void*))(__VA_ARGS__)
#else
#define PETSC_MONITOR_CAST(...) __VA_ARGS__
#endif
namespace AMDiS
{
namespace Parallel
......@@ -126,7 +119,82 @@ namespace AMDiS
void createSolver(MPI::Intracomm comm, KSP &ksp, Mat m, std::string kspPrefix = "", int info = 0);
} // end namespace petsc_helper
} // end namespace Parallel
// compatibility wrappers for PETSc API changes between minor versions
namespace petsc
{
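// Since PETSc 3.7 the options routines take an options database as their first argument; PETSC_NULL selects the global database.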
inline PetscErrorCode options_view(PetscViewer viewer)
{
#if (PETSC_VERSION_MINOR >= 7)
return PetscOptionsView(PETSC_NULL, viewer);
#else
return PetscOptionsView(viewer);
#endif
}
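// PetscOptionsInsertString: same change in PETSc 3.7, the options database is passed as the first argument.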
inline PetscErrorCode options_insert_string(const char in_str[])
{
#if (PETSC_VERSION_MINOR >= 7)
return PetscOptionsInsertString(PETSC_NULL, in_str);
#else
return PetscOptionsInsertString(in_str);
#endif
}
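// Since PETSc 3.5 KSPSetOperators no longer takes a MatStructure flag.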
inline PetscErrorCode ksp_set_operators(KSP ksp, Mat Amat, Mat Pmat)
{
#if (PETSC_VERSION_MINOR >= 5)
return KSPSetOperators(ksp, Amat, Pmat);
#else
return KSPSetOperators(ksp, Amat, Pmat, SAME_NONZERO_PATTERN);
#endif
}
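// KSPGetOperators likewise lost its MatStructure output argument in PETSc 3.5.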
inline PetscErrorCode ksp_get_operators(KSP ksp, Mat *Amat, Mat *Pmat)
{
#if (PETSC_VERSION_MINOR >= 5)
return KSPGetOperators(ksp, Amat, Pmat);
#else
return KSPGetOperators(ksp, Amat, Pmat, PETSC_NULL); // the pre-3.5 overload returns the MatStructure flag through this pointer; it is not needed here
#endif
}
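// Since PETSc 3.7 KSP monitors receive a PetscViewerAndFormat context; the KSP destroys it via the destroy routine passed to KSPMonitorSet.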
template <class Monitor>
inline PetscErrorCode ksp_monitor_set(KSP ksp, Monitor monitor)
{
#if (PETSC_VERSION_MINOR >= 7)
PetscViewerAndFormat *vf;
PetscErrorCode ierr;
ierr = PetscViewerAndFormatCreate(PETSC_VIEWER_STDOUT_WORLD, PETSC_VIEWER_DEFAULT, &vf); CHKERRQ(ierr);
ierr = KSPMonitorSet(ksp, (PetscErrorCode (*)(KSP,PetscInt,PetscReal,void*)) monitor, vf,
                     (PetscErrorCode (*)(void**)) PetscViewerAndFormatDestroy); CHKERRQ(ierr);
return ierr;
#else
return KSPMonitorSet(ksp, monitor, PETSC_NULL, PETSC_NULL);
#endif
}
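// MatGetVecs was renamed to MatCreateVecs in PETSc 3.6.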
inline PetscErrorCode mat_create_vecs(Mat mat, Vec *right, Vec *left)
{
#if (PETSC_VERSION_MINOR >= 6)
return MatCreateVecs(mat, right, left);
#else
return MatGetVecs(mat, right, left);
#endif
}
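// MatNullSpaceRemove dropped its output-vector argument in PETSc 3.5; the vector is modified in place.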
inline PetscErrorCode mat_nullspace_remove(MatNullSpace sp, Vec vec)
{
#if (PETSC_VERSION_MINOR >= 5)
return MatNullSpaceRemove(sp, vec);
#else
return MatNullSpaceRemove(sp, vec, PETSC_NULL);
#endif
}
} // end namespace petsc
} // end namespace AMDiS
#endif
......@@ -26,6 +26,7 @@
#include "parallel/MeshDistributor.h"
#include "parallel/MpiHelper.h"
#include "parallel/ParallelDofMapping.h"
#include "parallel/PetscHelper.h"
#include "parallel/PetscSolver.h"
#include "parallel/StdMpi.h"
......@@ -49,7 +50,7 @@ namespace AMDiS { namespace Parallel {
Parameters::get(name + "->ksp", kspStr);
if (kspStr != "")
petsc_options_insert_string(kspStr.c_str());
petsc::options_insert_string(kspStr.c_str());
Parameters::get(name + "->remove rhs null space", removeRhsNullspace);
Parameters::get(name + "->has constant null space", hasConstantNullspace);
......
......@@ -85,15 +85,11 @@ namespace AMDiS { namespace Parallel {
{
// Create FGMRES based outer solver
KSPCreate(meshDistributor->getMpiComm(0), &ksp);
#if (PETSC_VERSION_MINOR >= 5)
KSPSetOperators(ksp, getMatInterior(), getMatInterior());
#else
KSPSetOperators(ksp, getMatInterior(), getMatInterior(), SAME_NONZERO_PATTERN);
#endif
petsc::ksp_set_operators(ksp, getMatInterior(), getMatInterior());
if (getInfo() >= 10)
KSPMonitorSet(ksp, PETSC_MONITOR_CAST(KSPMonitorDefault), PETSC_NULL, PETSC_NULL);
petsc::ksp_monitor_set(ksp, KSPMonitorDefault);
else if (getInfo() >= 20)
KSPMonitorSet(ksp, PETSC_MONITOR_CAST(KSPMonitorTrueResidualNorm), PETSC_NULL, PETSC_NULL);
petsc::ksp_monitor_set(ksp, KSPMonitorTrueResidualNorm);
petsc_helper::setSolver(ksp, "ch_", KSPFGMRES, PCNONE, getRelative(), getTolerance(), getMaxIterations());
KSPSetFromOptions(ksp);
......
......@@ -67,11 +67,7 @@ namespace AMDiS { namespace Parallel {
/// create new solver for S
KSP kspS;
KSPCreate(*data->mpiCommGlobal, &kspS);
#if (PETSC_VERSION_MINOR >= 5)
KSPSetOperators(kspS, S, S);
#else
KSPSetOperators(kspS, S, S, SAME_NONZERO_PATTERN);
#endif
petsc::ksp_set_operators(kspS, S, S);
petsc_helper::setSolver(kspS, "S_", KSPFGMRES, PCSHELL, 1e-6, 1e-8, 1);
{
PC pc;
......@@ -117,15 +113,11 @@ namespace AMDiS { namespace Parallel {
MSG("CREATE POS 1: %p\n", &ksp);
KSPCreate(domainComm, &ksp);
#if (PETSC_VERSION_MINOR >= 5)
KSPSetOperators(ksp, getMatInterior(), getMatInterior());
#else
KSPSetOperators(ksp, getMatInterior(), getMatInterior(), SAME_NONZERO_PATTERN);
#endif
petsc::ksp_set_operators(ksp, getMatInterior(), getMatInterior());
if (getInfo() >= 10)
KSPMonitorSet(ksp, PETSC_MONITOR_CAST(KSPMonitorDefault), PETSC_NULL, PETSC_NULL);
petsc::ksp_monitor_set(ksp, KSPMonitorDefault);
else if (getInfo() >= 20)
KSPMonitorSet(ksp, PETSC_MONITOR_CAST(KSPMonitorTrueResidualNorm), PETSC_NULL, PETSC_NULL);
petsc::ksp_monitor_set(ksp, KSPMonitorTrueResidualNorm);
petsc_helper::setSolver(ksp, "ch_", KSPFGMRES, PCNONE, getRelative(), getTolerance(), getMaxIterations());
if (useOldInitialGuess)
......
......@@ -1051,11 +1051,7 @@ namespace AMDiS { namespace Parallel {
}
KSPCreate(domainComm, &ksp_schur_primal);
#if (PETSC_VERSION_MINOR >= 5)
KSPSetOperators(ksp_schur_primal, mat_schur_primal, mat_schur_primal);
#else
KSPSetOperators(ksp_schur_primal, mat_schur_primal, mat_schur_primal, SAME_NONZERO_PATTERN);
#endif
petsc::ksp_set_operators(ksp_schur_primal, mat_schur_primal, mat_schur_primal);
KSPSetOptionsPrefix(ksp_schur_primal, "schur_primal_");
KSPSetType(ksp_schur_primal, KSPGMRES);
KSPSetFromOptions(ksp_schur_primal);
......@@ -1075,11 +1071,7 @@ namespace AMDiS { namespace Parallel {
// === Create KSP solver object and set appropriate solver options. ===
KSPCreate(domainComm, &ksp_schur_primal);
#if (PETSC_VERSION_MINOR >= 5)
KSPSetOperators(ksp_schur_primal, mat_schur_primal, mat_schur_primal);
#else
KSPSetOperators(ksp_schur_primal, mat_schur_primal, mat_schur_primal, SAME_NONZERO_PATTERN);
#endif
petsc::ksp_set_operators(ksp_schur_primal, mat_schur_primal, mat_schur_primal);
KSPSetOptionsPrefix(ksp_schur_primal, "schur_primal_");
KSPSetType(ksp_schur_primal, KSPPREONLY);
PC pc_schur_primal;
......@@ -1365,11 +1357,7 @@ namespace AMDiS { namespace Parallel {
}
KSPCreate(domainComm, &ksp_feti);
#if (PETSC_VERSION_MINOR >= 5)
KSPSetOperators(ksp_feti, mat_feti, mat_feti);
#else
KSPSetOperators(ksp_feti, mat_feti, mat_feti, SAME_NONZERO_PATTERN);
#endif
petsc::ksp_set_operators(ksp_feti, mat_feti, mat_feti);
KSPSetOptionsPrefix(ksp_feti, "feti_");
KSPSetType(ksp_feti, KSPGMRES);
KSPSetTolerances(ksp_feti, 0, 1e-8, 1e+3, 1000);
......@@ -1382,9 +1370,9 @@ namespace AMDiS { namespace Parallel {
Parameters::get(name + "->feti->monitor", monitor);
if (monitor) {
if (stokesMode)
KSPMonitorSet(ksp_feti, PETSC_MONITOR_CAST(KSPMonitorFetiStokes), &fetiKspData, PETSC_NULL);
petsc::ksp_monitor_set(ksp_feti, KSPMonitorFetiStokes);
else
KSPMonitorSet(ksp_feti, PETSC_MONITOR_CAST(KSPMonitorTrueResidualNorm), PETSC_NULL, PETSC_NULL);
petsc::ksp_monitor_set(ksp_feti, KSPMonitorTrueResidualNorm);
}
......@@ -1443,11 +1431,7 @@ namespace AMDiS { namespace Parallel {
(void(*)(void))petscMultMatFetiInexact);
KSPCreate(domainComm, &ksp_feti);
#if (PETSC_VERSION_MINOR >= 5)
KSPSetOperators(ksp_feti, mat_feti, mat_feti);
#else
KSPSetOperators(ksp_feti, mat_feti, mat_feti, SAME_NONZERO_PATTERN);
#endif
petsc::ksp_set_operators(ksp_feti, mat_feti, mat_feti);
KSPSetOptionsPrefix(ksp_feti, "feti_");
KSPSetType(ksp_feti, KSPGMRES);
KSPSetTolerances(ksp_feti, 0, 1e-8, 1e+3, 1000);
......@@ -1464,11 +1448,7 @@ namespace AMDiS { namespace Parallel {
createVec(localDofMap, fetiInexactPreconData.tmp_vec_b0);
KSPCreate(domainComm, &(fetiInexactPreconData.ksp_pc_feti));
#if (PETSC_VERSION_MINOR >= 5)
KSPSetOperators(fetiInexactPreconData.ksp_pc_feti, mat_lagrange, mat_lagrange);
#else
KSPSetOperators(fetiInexactPreconData.ksp_pc_feti, mat_lagrange, mat_lagrange, SAME_NONZERO_PATTERN);
#endif
petsc::ksp_set_operators(fetiInexactPreconData.ksp_pc_feti, mat_lagrange, mat_lagrange);
KSPGetPC(fetiInexactPreconData.ksp_pc_feti,
&(fetiInexactPreconData.pc_feti));
createFetiPreconLumped(fetiInexactPreconData.pc_feti);
......@@ -1505,17 +1485,11 @@ namespace AMDiS { namespace Parallel {
VecCreateMPI(meshDistributor->getMeshLevelData().getMpiComm(meshLevel),
localDofMap.getRankDofs(),
nGlobalOverallInterior, &(lumpedData->tmp_vec_b0));
#if (PETSC_VERSION_MINOR >= 6)
MatCreateVecs(mat_duals_duals, PETSC_NULL,
&(lumpedData->tmp_vec_duals0));
MatCreateVecs(mat_duals_duals, PETSC_NULL,
&(lumpedData->tmp_vec_duals1));
#else
MatGetVecs(mat_duals_duals, PETSC_NULL,
&(lumpedData->tmp_vec_duals0));
MatGetVecs(mat_duals_duals, PETSC_NULL,
&(lumpedData->tmp_vec_duals1));
#endif
petsc::mat_create_vecs(mat_duals_duals, PETSC_NULL,
&(lumpedData->tmp_vec_duals0));
petsc::mat_create_vecs(mat_duals_duals, PETSC_NULL,
&(lumpedData->tmp_vec_duals1));
for (int component = 0; component < static_cast<int>(componentSpaces.size());
component++) {
......@@ -1607,11 +1581,7 @@ namespace AMDiS { namespace Parallel {
("Stokes mode does not yet support the Dirichlet precondition!\n");
KSPCreate(PETSC_COMM_SELF, &ksp_interior);
#if (PETSC_VERSION_MINOR >= 5)
KSPSetOperators(ksp_interior, mat_interior_interior, mat_interior_interior);
#else
KSPSetOperators(ksp_interior, mat_interior_interior, mat_interior_interior, SAME_NONZERO_PATTERN);
#endif
petsc::ksp_set_operators(ksp_interior, mat_interior_interior, mat_interior_interior);
KSPSetOptionsPrefix(ksp_interior, "precon_interior_");
KSPSetType(ksp_interior, KSPPREONLY);
PC pc_interior;
......@@ -1635,21 +1605,13 @@ namespace AMDiS { namespace Parallel {
VecCreateMPI(meshDistributor->getMeshLevelData().getMpiComm(meshLevel),
localDofMap.getRankDofs(),
nGlobalOverallInterior, &(fetiDirichletPreconData.tmp_vec_b));
#if (PETSC_VERSION_MINOR >= 6)
MatCreateVecs(mat_duals_duals, PETSC_NULL,
&(fetiDirichletPreconData.tmp_vec_duals0));
MatCreateVecs(mat_duals_duals, PETSC_NULL,
&(fetiDirichletPreconData.tmp_vec_duals1));
MatCreateVecs(mat_interior_interior, PETSC_NULL,
&(fetiDirichletPreconData.tmp_vec_interior));
#else
MatGetVecs(mat_duals_duals, PETSC_NULL,
petsc::mat_create_vecs(mat_duals_duals, PETSC_NULL,
&(fetiDirichletPreconData.tmp_vec_duals0));
MatGetVecs(mat_duals_duals, PETSC_NULL,
petsc::mat_create_vecs(mat_duals_duals, PETSC_NULL,
&(fetiDirichletPreconData.tmp_vec_duals1));
MatGetVecs(mat_interior_interior, PETSC_NULL,
petsc::mat_create_vecs(mat_interior_interior, PETSC_NULL,
&(fetiDirichletPreconData.tmp_vec_interior));
#endif
TEST_EXIT_DBG(subDomainIsLocal)
("Should not happen, check usage of localDofMap!\n");
......@@ -2509,13 +2471,8 @@ namespace AMDiS { namespace Parallel {
Vec vecRhs, vecSol;
Vec vecRhsLagrange, vecSolLagrange;
#if (PETSC_VERSION_MINOR >= 6)
MatCreateVecs(mat_lagrange, PETSC_NULL, &vecRhsLagrange);
MatCreateVecs(mat_lagrange, PETSC_NULL, &vecSolLagrange);
#else
MatGetVecs(mat_lagrange, PETSC_NULL, &vecRhsLagrange);
MatGetVecs(mat_lagrange, PETSC_NULL, &vecSolLagrange);
#endif
petsc::mat_create_vecs(mat_lagrange, PETSC_NULL, &vecRhsLagrange);
petsc::mat_create_vecs(mat_lagrange, PETSC_NULL, &vecSolLagrange);
vecRhs = vecRhsLagrange;
vecSol = vecSolLagrange;
......
......@@ -20,6 +20,7 @@
#include "parallel/PetscSolverGlobalBlockMatrix.h"
#include "parallel/PetscHelper.h"
#include "parallel/StdMpi.h"
#include "parallel/MpiHelper.h"
......@@ -150,11 +151,7 @@ namespace AMDiS { namespace Parallel {
FUNCNAME("PetscSolverGlobalBlockMatrix::initSolver()");
KSPCreate(domainComm, &ksp);
#if (PETSC_VERSION_MINOR >= 5)
KSPSetOperators(ksp, getMatInterior(), getMatInterior());
#else
KSPSetOperators(ksp, getMatInterior(), getMatInterior(), SAME_NONZERO_PATTERN);
#endif
petsc::ksp_set_operators(ksp, getMatInterior(), getMatInterior());
KSPSetOptionsPrefix(ksp, kspPrefix.c_str());
KSPSetFromOptions(ksp);
}
......
......@@ -22,6 +22,7 @@
// #include "DirichletBC.h"
#include "DOFVector.h"
#include "parallel/PetscSolverGlobalMatrix.h"
#include "parallel/PetscHelper.h"
#include "parallel/StdMpi.h"
#include "parallel/MpiHelper.h"
#include "solver/PetscTypes.h"
......@@ -50,15 +51,15 @@ namespace AMDiS { namespace Parallel {
if (params.matSolverPackage.find(kspSolver) != params.matSolverPackage.end()) {
// direct solvers
petsc_options_insert_string(("-" + kspPrefix + "ksp_type preonly").c_str());
petsc_options_insert_string(("-" + kspPrefix + "pc_type lu").c_str());
petsc_options_insert_string(("-" + kspPrefix + "pc_factor_mat_solver_package " + kspSolver).c_str());
petsc::options_insert_string(("-" + kspPrefix + "ksp_type preonly").c_str());
petsc::options_insert_string(("-" + kspPrefix + "pc_type lu").c_str());
petsc::options_insert_string(("-" + kspPrefix + "pc_factor_mat_solver_package " + kspSolver).c_str());
setMaxIterations(1);
zeroStartVector = true;
matSolverPackage = true;
} else if (params.emptyParam.find(kspSolver) == params.emptyParam.end() && solverName != "petsc") {
// other solvers
petsc_options_insert_string(("-" + kspPrefix + "ksp_type " + kspSolver).c_str());
petsc::options_insert_string(("-" + kspPrefix + "ksp_type " + kspSolver).c_str());
}
// set the preconditioner
......@@ -70,17 +71,17 @@ namespace AMDiS { namespace Parallel {
Parameters::get(name + "->right precon", precon);
if (!matSolverPackage && params.emptyParam.find(precon) == params.emptyParam.end()) {
precon = (params.preconMap.find(precon) != params.preconMap.end() ? params.preconMap[precon] : precon);
petsc_options_insert_string(("-" + kspPrefix + "pc_type " + precon).c_str());
petsc::options_insert_string(("-" + kspPrefix + "pc_type " + precon).c_str());
}
petsc_options_insert_string(("-" + kspPrefix + "ksp_max_it " + boost::lexical_cast<std::string>(getMaxIterations())).c_str());
petsc_options_insert_string(("-" + kspPrefix + "ksp_rtol " + boost::lexical_cast<std::string>(getRelative())).c_str());
petsc_options_insert_string(("-" + kspPrefix + "ksp_atol " + boost::lexical_cast<std::string>(getTolerance())).c_str());
petsc::options_insert_string(("-" + kspPrefix + "ksp_max_it " + boost::lexical_cast<std::string>(getMaxIterations())).c_str());
petsc::options_insert_string(("-" + kspPrefix + "ksp_rtol " + boost::lexical_cast<std::string>(getRelative())).c_str());
petsc::options_insert_string(("-" + kspPrefix + "ksp_atol " + boost::lexical_cast<std::string>(getTolerance())).c_str());
if (getInfo() >= 20)
petsc_options_insert_string(("-" + kspPrefix + "ksp_monitor_true_residual").c_str());
petsc::options_insert_string(("-" + kspPrefix + "ksp_monitor_true_residual").c_str());
else if (getInfo() >= 10)
petsc_options_insert_string(("-" + kspPrefix + "ksp_monitor").c_str());
petsc::options_insert_string(("-" + kspPrefix + "ksp_monitor").c_str());
}
if (!matSolverPackage) {
Parameters::get(name + "->use zero start vector", zeroStartVector);
......@@ -95,11 +96,7 @@ namespace AMDiS { namespace Parallel {
PetscViewer viewer;
PetscViewerCreate(PETSC_COMM_WORLD, &viewer);
PetscViewerSetType(viewer, PETSCVIEWERASCII);
#if (PETSC_VERSION_MINOR >= 7)
PetscOptionsView(PETSC_NULL, viewer);
#else
PetscOptionsView(viewer);
#endif
petsc::options_view(viewer);
PetscViewerDestroy(&viewer);
}
......@@ -333,11 +330,7 @@ namespace AMDiS { namespace Parallel {
// === Create solver for the non primal (thus local) variables. ===
KSPCreate(domainComm, &kspInterior);
#if (PETSC_VERSION_MINOR >= 5)
KSPSetOperators(kspInterior, getMatInterior(), getMatInterior());
#else
KSPSetOperators(kspInterior, getMatInterior(), getMatInterior(), SAME_NONZERO_PATTERN);
#endif
petsc::ksp_set_operators(kspInterior, getMatInterior(), getMatInterior());
KSPSetOptionsPrefix(kspInterior, "interior_");
KSPSetType(kspInterior, KSPPREONLY);
KSPGetPC(kspInterior, &pcInterior);
......@@ -460,11 +453,7 @@ namespace AMDiS { namespace Parallel {
TEST_EXIT_DBG(coarseSpaceMap.empty())("Not supported!\n");
MSG("Remove nullspace from rhs vector.\n");
#if (PETSC_VERSION_MINOR >= 5)
MatNullSpaceRemove(matNullspace, getVecRhsInterior());
#else
MatNullSpaceRemove(matNullspace, getVecRhsInterior(), PETSC_NULL);
#endif
petsc::mat_nullspace_remove(matNullspace, getVecRhsInterior());
}
} else {
TEST_EXIT(removeRhsNullspace == false)
......@@ -617,11 +606,7 @@ namespace AMDiS { namespace Parallel {
void PetscSolverGlobalMatrix::initSolver(KSP &ksp)
{
KSPCreate(domainComm, &ksp);
#if (PETSC_VERSION_MINOR >= 5)
KSPSetOperators(ksp, getMatInterior(), getMatInterior());
#else
KSPSetOperators(ksp, getMatInterior(), getMatInterior(), SAME_NONZERO_PATTERN);
#endif
petsc::ksp_set_operators(ksp, getMatInterior(), getMatInterior());
KSPSetTolerances(ksp, 0.0, 1e-8, PETSC_DEFAULT, PETSC_DEFAULT);
KSPSetType(ksp, KSPBCGS);
KSPSetOptionsPrefix(ksp, kspPrefix.c_str());
......@@ -897,11 +882,7 @@ namespace AMDiS { namespace Parallel {
if (test) {
Vec tmp;
#if (PETSC_VERSION_MINOR >= 6)
MatCreateVecs(getMatInterior(), &tmp, PETSC_NULL);
#else
MatGetVecs(getMatInterior(), &tmp, PETSC_NULL);
#endif
petsc::mat_create_vecs(getMatInterior(), &tmp, PETSC_NULL);
MatMult(getMatInterior(), nullSpaceBasis, tmp);
PetscReal n;
VecNorm(tmp, NORM_2, &n);
......@@ -914,11 +895,7 @@ namespace AMDiS { namespace Parallel {
MatNullSpaceCreate(domainComm, PETSC_FALSE, 1, &nullSpaceBasis, &matNullSpace);
Mat amat, pmat;
#if (PETSC_VERSION_MINOR >= 5)
KSPGetOperators(ksp, &amat, &pmat);
#else
KSPGetOperators(ksp, &amat, &pmat, PETSC_NULL);
#endif
petsc::ksp_get_operators(ksp, &amat, &pmat);
MatSetNullSpace(amat, matNullSpace);
MatNullSpaceDestroy(&matNullSpace);
......@@ -931,11 +908,7 @@ namespace AMDiS { namespace Parallel {
MatNullSpace matNullSpace;
MatNullSpaceCreate(domainComm, PETSC_TRUE, 0, PETSC_NULL, &matNullSpace);
Mat amat, pmat;
#if (PETSC_VERSION_MINOR >= 5)
KSPGetOperators(ksp, &amat, &pmat);
#else
KSPGetOperators(ksp, &amat, &pmat, PETSC_NULL);
#endif
petsc::ksp_get_operators(ksp, &amat, &pmat);
MatSetNullSpace(amat, matNullSpace);
MatNullSpaceDestroy(&matNullSpace);
}
......
......@@ -184,12 +184,8 @@ namespace AMDiS { namespace Parallel {
// Create FGMRES based outer solver
MSG("CREATE POS 1: %p\n", &ksp);
KSPCreate(domainComm, &ksp);
#if (PETSC_VERSION_MINOR >= 5)
KSPSetOperators(ksp, getMatInterior(), getMatInterior());
#else
KSPSetOperators(ksp, getMatInterior(), getMatInterior(), SAME_NONZERO_PATTERN);
#endif
KSPMonitorSet(ksp, PETSC_MONITOR_CAST(KSPMonitorTrueResidualNorm), PETSC_NULL, PETSC_NULL);
petsc::ksp_set_operators(ksp, getMatInterior(), getMatInterior());
petsc::ksp_monitor_set(ksp, KSPMonitorTrueResidualNorm);
petsc_helper::setSolver(ksp, "ch_", KSPFGMRES, PCSHELL, getRelative(), getTolerance(), getMaxIterations());
setConstantNullSpace(ksp, componentSpaces[0]->getMesh()->getDim() , true);
}
......@@ -337,12 +333,7 @@ namespace AMDiS { namespace Parallel {
///erstelle kspVelocity
KSPCreate((meshDistributor->getMpiComm(0)), &(matShellContext.kspVelocity));
#if (PETSC_VERSION_MINOR >= 5)
KSPSetOperators(matShellContext.kspVelocity, matShellContext.velocityMat, matShellContext.velocityMat);
#else
KSPSetOperators(matShellContext.kspVelocity, matShellContext.velocityMat, matShellContext.velocityMat, SAME_NONZERO_PATTERN);
#endif
petsc::ksp_set_operators(matShellContext.kspVelocity, matShellContext.velocityMat, matShellContext.velocityMat);
///regularisiere LaplaceMatrix
if (regularizeLaplace)
......@@ -351,11 +342,7 @@ namespace AMDiS { namespace Parallel {
rows[0]=0;
MatZeroRows(laplaceMatrixSolver->getMatInterior(), 1, rows, 0, PETSC_NULL, PETSC_NULL);
KSPCreate((meshDistributor->getMpiComm(0)), &(matShellContext.kspLaplace));
#if (PETSC_VERSION_MINOR >= 5)
KSPSetOperators(matShellContext.kspLaplace, laplaceMatrixSolver->getMatInterior(), laplaceMatrixSolver->getMatInterior());
#else
KSPSetOperators(matShellContext.kspLaplace, laplaceMatrixSolver->getMatInterior(), laplaceMatrixSolver->getMatInterior(), SAME_NONZERO_PATTERN);
#endif
petsc::ksp_set_operators(matShellContext.kspLaplace, laplaceMatrixSolver->getMatInterior(), laplaceMatrixSolver->getMatInterior());
}
else
{ matShellContext.kspLaplace=laplaceMatrixSolver->getSolver();
......
......@@ -117,15 +117,11 @@ namespace AMDiS { namespace Parallel {
MSG("CREATE POS 1: %p\n", &ksp);
KSPCreate(domainComm, &ksp);
#if (PETSC_VERSION_MINOR >= 5)
KSPSetOperators(ksp, getMatInterior(), getMatInterior());
#else
KSPSetOperators(ksp, getMatInterior(), getMatInterior(), SAME_NONZERO_PATTERN);
#endif
petsc::ksp_set_operators(ksp, getMatInterior(), getMatInterior());
if (getInfo() >= 10)
KSPMonitorSet(ksp, PETSC_MONITOR_CAST(KSPMonitorDefault), PETSC_NULL, PETSC_NULL);
petsc::ksp_monitor_set(ksp, KSPMonitorDefault);
else if (getInfo() >= 20)
KSPMonitorSet(ksp, PETSC_MONITOR_CAST(KSPMonitorTrueResidualNorm), PETSC_NULL, PETSC_NULL);
petsc::ksp_monitor_set(ksp, KSPMonitorTrueResidualNorm);
petsc_helper::setSolver(ksp, "ns_", KSPFGMRES, PCNONE, getRelative(), getTolerance(), getMaxIterations());
// Create null space information.
......
......@@ -20,6 +20,7 @@
#include "parallel/PetscSolverSchur.h"
#include "parallel/PetscHelper.h"
#include "parallel/StdMpi.h"
#include "parallel/MpiHelper.h"
......@@ -293,11 +294,7 @@ namespace AMDiS { namespace Parallel {
KSPCreate(domainComm, &kspInterior);
#if (PETSC_VERSION_MINOR >= 5)
KSPSetOperators(kspInterior, getMatInterior(), getMatInterior());
#else
KSPSetOperators(kspInterior, getMatInterior(), getMatInterior(), SAME_NONZERO_PATTERN);
#endif
petsc::ksp_set_operators(kspInterior, getMatInterior(), getMatInterior());
KSPSetTolerances(kspInterior, 0.0, 1e-8, PETSC_DEFAULT, PETSC_DEFAULT);
KSPSetFromOptions(kspInterior);
......
......@@ -47,30 +47,30 @@ namespace AMDiS {
if (params.matSolverPackage[kspSolver]) {
// direct solvers
petsc_options_insert_string(("-" + kspPrefix + "ksp_type preonly").c_str());
petsc_options_insert_string(("-" + kspPrefix + "pc_type lu").c_str());
petsc_options_insert_string(("-" + kspPrefix + "pc_factor_mat_solver_package " + (kspSolver != "direct" ? kspSolver : "umfpack")).c_str());
petsc::options_insert_string(("-" + kspPrefix + "ksp_type preonly").c_str());
petsc::options_insert_string(("-" + kspPrefix + "pc_type lu").c_str());
petsc::options_insert_string(("-" + kspPrefix + "pc_factor_mat_solver_package " + (kspSolver != "direct" ? kspSolver : "umfpack")).c_str());
oem.setMaxIterations(1);
zeroStartVector = true;
matSolverPackage = true;
} else if (!params.emptyParam[kspSolver]) {
// other solvers
petsc_options_insert_string(("-" + kspPrefix + "ksp_type " + kspSolver).c_str());
petsc::options_insert_string(("-" + kspPrefix + "ksp_type " + kspSolver).c_str());
}
// set the preconditioner
setPrecon();
if (oem.getInfo() >= 20)
petsc_options_insert_string(("-" + kspPrefix + "ksp_monitor_true_residual").c_str());
petsc::options_insert_string(("-" + kspPrefix + "ksp_monitor_true_residual").c_str());
else if (oem.getInfo() >= 10)
petsc_options_insert_string(("-" + kspPrefix + "ksp_monitor").c_str());
petsc::options_insert_string(("-" + kspPrefix + "ksp_monitor").c_str());
// command line string
std::string kspString = "";
Parameters::get(oem.getName() + "->ksp", kspString);
if (kspString != "")
petsc_options_insert_string(kspString.c_str());
petsc::options_insert_string(kspString.c_str());
}
......@@ -128,11 +128,7 @@ namespace AMDiS {
void PetscRunner<M,V>::createSubSolver(KSP &ksp_, Mat m, std::string kspPrefix_)
{
KSPCreate(PETSC_COMM_SELF, &ksp_);
#if (PETSC_VERSION_MINOR >= 5)
KSPSetOperators(ksp_, m, m);
#else
KSPSetOperators(ksp_, m, m, SAME_NONZERO_PATTERN);
#endif
petsc::ksp_set_operators(ksp_, m, m);
KSPSetOptionsPrefix(ksp_, kspPrefix_.c_str());
KSPSetFromOptions(ksp_);
}
......
......@@ -144,16 +144,6 @@ namespace AMDiS {
void operator>>(const PetscVectorNested& dest, VecMap<SystemVector, Mapper>& rhs);
inline PetscErrorCode petsc_options_insert_string(const char in_str[])
{
#if (PETSC_VERSION_MINOR >= 7)
return PetscOptionsInsertString(NULL, in_str);
#else
return PetscOptionsInsertString(in_str);
#endif
}
} // end namespace AMDiS
#include "solver/PetscTypes.hh"
......
......@@ -68,11 +68,7 @@ namespace AMDiS { namespace Parallel {
/// create new solver for S
KSP kspS;
KSPCreate(*data->mpiCommGlobal, &kspS);
#if (PETSC_VERSION_MINOR >= 5)
KSPSetOperators(kspS, S, S);
#else
KSPSetOperators(kspS, S, S, SAME_NONZERO_PATTERN);
#endif
petsc::ksp_set_operators(kspS, S, S);
petsc_helper::setSolver(kspS, "S_", KSPFGMRES, PCSHELL, 1e-6, 1e-8, 5);
{
PC pc;
......@@ -118,15 +114,11 @@ namespace AMDiS { namespace Parallel {
MSG("CREATE POS 1: %p\n", &ksp);
KSPCreate(domainComm, &ksp);
#if (PETSC_VERSION_MINOR >= 5)
KSPSetOperators(ksp, getMatInterior(), getMatInterior());
#else
KSPSetOperators(ksp, getMatInterior(), getMatInterior(), SAME_NONZERO_PATTERN);
#endif
petsc::ksp_set_operators(ksp, getMatInterior(), getMatInterior());
if (getInfo() >= 10)
KSPMonitorSet(ksp, KSPMonitorDefault, PETSC_NULL, PETSC_NULL);