
Commit f024b3cb authored by Praetorius, Simon

petsc api changes put into separate namespace

parent 8c4548a8
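The gist of the commit: PETSc calls whose signatures changed between releases were previously guarded with #if (PETSC_VERSION_MINOR >= ...) at every call site; this commit moves each guard into one inline wrapper in a new petsc namespace (added in the header hunk below), so the solver classes make a single version-agnostic call. A minimal before/after sketch of the pattern, reusing the ksp and m names from the createSolver hunk below:

// Before: every call site carries its own PETSc version guard.
#if (PETSC_VERSION_MINOR >= 5)
  KSPSetOperators(ksp, m, m);
#else
  KSPSetOperators(ksp, m, m, SAME_NONZERO_PATTERN);
#endif

// After: the guard lives once inside the petsc namespace; the call site is version-agnostic.
petsc::ksp_set_operators(ksp, m, m);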
@@ -304,16 +304,12 @@ namespace AMDiS
     PCSetFromOptions(pc);
 #ifndef NDEBUG
     MSG("PetscOptionsView:\n");
     PetscViewer viewer;
     PetscViewerCreate(PETSC_COMM_WORLD, &viewer);
     PetscViewerSetType(viewer, PETSCVIEWERASCII);
-#if (PETSC_VERSION_MINOR >= 7)
-    PetscOptionsView(PETSC_NULL, viewer);
-#else
-    PetscOptionsView(viewer);
-#endif
+    petsc::options_view(viewer);
     PetscViewerDestroy(&viewer);
 #endif
   }
@@ -334,19 +330,15 @@ namespace AMDiS
   void createSolver(MPI::Intracomm comm, KSP &ksp, Mat m, std::string kspPrefix, int info)
   {
     KSPCreate(comm, &ksp);
-#if (PETSC_VERSION_MINOR >= 5)
-    KSPSetOperators(ksp, m, m);
-#else
-    KSPSetOperators(ksp, m, m, SAME_NONZERO_PATTERN);
-#endif
+    petsc::ksp_set_operators(ksp, m, m);
     KSPSetTolerances(ksp, 0.0, 1e-8, PETSC_DEFAULT, PETSC_DEFAULT);
     KSPSetType(ksp, KSPBCGS);
     KSPSetOptionsPrefix(ksp, kspPrefix.c_str());
     if (info >= 10)
-      KSPMonitorSet(ksp, PETSC_MONITOR_CAST(KSPMonitorDefault), PETSC_NULL, PETSC_NULL);
+      petsc::ksp_monitor_set(ksp, KSPMonitorDefault);
     else if (info >= 20)
-      KSPMonitorSet(ksp, PETSC_MONITOR_CAST(KSPMonitorTrueResidualNorm), PETSC_NULL, PETSC_NULL);
+      petsc::ksp_monitor_set(ksp, KSPMonitorTrueResidualNorm);
   }
 } // end namespace petsc_helper
...
@@ -31,13 +31,6 @@
 #include <petsc.h>
 #include "AMDiS_fwd.h"
-
-#if (PETSC_VERSION_MINOR >= 7)
-  #define PETSC_MONITOR_CAST(...) \
-    (PetscErrorCode (*)(KSP,PetscInt,PetscReal,void*))(__VA_ARGS__)
-#else
-  #define PETSC_MONITOR_CAST(...) __VA_ARGS__
-#endif
 namespace AMDiS
 {
   namespace Parallel
@@ -126,7 +119,82 @@ namespace AMDiS
     void createSolver(MPI::Intracomm comm, KSP &ksp, Mat m, std::string kspPrefix = "", int info = 0);
   } // end namespace petsc_helper
 } // end namespace Parallel
+
+// functions for PETSc API changes
+namespace petsc
+{
+  inline PetscErrorCode options_view(PetscViewer viewer)
+  {
+#if (PETSC_VERSION_MINOR >= 7)
+    return PetscOptionsView(PETSC_NULL, viewer);
+#else
+    return PetscOptionsView(viewer);
+#endif
+  }
+
+  inline PetscErrorCode options_insert_string(const char in_str[])
+  {
+#if (PETSC_VERSION_MINOR >= 7)
+    return PetscOptionsInsertString(PETSC_NULL, in_str);
+#else
+    return PetscOptionsInsertString(in_str);
+#endif
+  }
+
+  inline PetscErrorCode ksp_set_operators(KSP ksp, Mat Amat, Mat Pmat)
+  {
+#if (PETSC_VERSION_MINOR >= 5)
+    return KSPSetOperators(ksp, Amat, Pmat);
+#else
+    return KSPSetOperators(ksp, Amat, Pmat, SAME_NONZERO_PATTERN);
+#endif
+  }
+
+  inline PetscErrorCode ksp_get_operators(KSP ksp, Mat *Amat, Mat *Pmat)
+  {
+#if (PETSC_VERSION_MINOR >= 5)
+    return KSPGetOperators(ksp, Amat, Pmat);
+#else
+    return KSPGetOperators(ksp, Amat, Pmat, PETSC_NULL);
+#endif
+  }
+
+  template <class Monitor>
+  inline PetscErrorCode ksp_monitor_set(KSP ksp, Monitor monitor)
+  {
+#if (PETSC_VERSION_MINOR >= 7)
+    PetscViewerAndFormat *vf;
+    PetscErrorCode ierr;
+    ierr = PetscViewerAndFormatCreate(PETSC_VIEWER_STDOUT_WORLD, PETSC_VIEWER_DEFAULT, &vf); CHKERRQ(ierr);
+    ierr = KSPMonitorSet(ksp, (PetscErrorCode (*)(KSP,PetscInt,PetscReal,void*))monitor, vf, (PetscErrorCode (*)(void**))PetscViewerAndFormatDestroy); CHKERRQ(ierr);
+    return ierr;
+#else
+    return KSPMonitorSet(ksp, monitor, PETSC_NULL, PETSC_NULL);
+#endif
+  }
+
+  inline PetscErrorCode mat_create_vecs(Mat mat, Vec *right, Vec *left)
+  {
+#if (PETSC_VERSION_MINOR >= 6)
+    return MatCreateVecs(mat, right, left);
+#else
+    return MatGetVecs(mat, right, left);
+#endif
+  }
+
+  inline PetscErrorCode mat_nullspace_remove(MatNullSpace sp, Vec vec)
+  {
+#if (PETSC_VERSION_MINOR >= 5)
+    return MatNullSpaceRemove(sp, vec);
+#else
+    return MatNullSpaceRemove(sp, vec, PETSC_NULL);
+#endif
+  }
+} // end namespace petsc
 } // end namespace AMDiS
 #endif
@@ -49,7 +49,7 @@ namespace AMDiS { namespace Parallel {
     Parameters::get(name + "->ksp", kspStr);
     if (kspStr != "")
-      petsc_options_insert_string(kspStr.c_str());
+      petsc::options_insert_string(kspStr.c_str());
     Parameters::get(name + "->remove rhs null space", removeRhsNullspace);
     Parameters::get(name + "->has constant null space", hasConstantNullspace);
...
@@ -85,15 +85,11 @@ namespace AMDiS { namespace Parallel {
   {
     // Create FGMRES based outer solver
     KSPCreate(meshDistributor->getMpiComm(0), &ksp);
-#if (PETSC_VERSION_MINOR >= 5)
-    KSPSetOperators(ksp, getMatInterior(), getMatInterior());
-#else
-    KSPSetOperators(ksp, getMatInterior(), getMatInterior(), SAME_NONZERO_PATTERN);
-#endif
+    petsc::ksp_set_operators(ksp, getMatInterior(), getMatInterior());
     if (getInfo() >= 10)
-      KSPMonitorSet(ksp, PETSC_MONITOR_CAST(KSPMonitorDefault), PETSC_NULL, PETSC_NULL);
+      petsc::ksp_monitor_set(ksp, KSPMonitorDefault);
     else if (getInfo() >= 20)
-      KSPMonitorSet(ksp, PETSC_MONITOR_CAST(KSPMonitorTrueResidualNorm), PETSC_NULL, PETSC_NULL);
+      petsc::ksp_monitor_set(ksp, KSPMonitorTrueResidualNorm);
     petsc_helper::setSolver(ksp, "ch_", KSPFGMRES, PCNONE, getRelative(), getTolerance(), getMaxIterations());
     KSPSetFromOptions(ksp);
...
@@ -67,11 +67,7 @@ namespace AMDiS { namespace Parallel {
     /// create new solver for S
     KSP kspS;
     KSPCreate(*data->mpiCommGlobal, &kspS);
-#if (PETSC_VERSION_MINOR >= 5)
-    KSPSetOperators(kspS, S, S);
-#else
-    KSPSetOperators(kspS, S, S, SAME_NONZERO_PATTERN);
-#endif
+    petsc::ksp_set_operators(kspS, S, S);
     petsc_helper::setSolver(kspS, "S_", KSPFGMRES, PCSHELL, 1e-6, 1e-8, 1);
     {
       PC pc;
@@ -117,15 +113,11 @@ namespace AMDiS { namespace Parallel {
     MSG("CREATE POS 1: %p\n", &ksp);
     KSPCreate(domainComm, &ksp);
-#if (PETSC_VERSION_MINOR >= 5)
-    KSPSetOperators(ksp, getMatInterior(), getMatInterior());
-#else
-    KSPSetOperators(ksp, getMatInterior(), getMatInterior(), SAME_NONZERO_PATTERN);
-#endif
+    petsc::ksp_set_operators(ksp, getMatInterior(), getMatInterior());
     if (getInfo() >= 10)
-      KSPMonitorSet(ksp, PETSC_MONITOR_CAST(KSPMonitorDefault), PETSC_NULL, PETSC_NULL);
+      petsc::ksp_monitor_set(ksp, KSPMonitorDefault);
     else if (getInfo() >= 20)
-      KSPMonitorSet(ksp, PETSC_MONITOR_CAST(KSPMonitorTrueResidualNorm), PETSC_NULL, PETSC_NULL);
+      petsc::ksp_monitor_set(ksp, KSPMonitorTrueResidualNorm);
     petsc_helper::setSolver(ksp, "ch_", KSPFGMRES, PCNONE, getRelative(), getTolerance(), getMaxIterations());
     if (useOldInitialGuess)
...
@@ -1051,11 +1051,7 @@ namespace AMDiS { namespace Parallel {
     }
     KSPCreate(domainComm, &ksp_schur_primal);
-#if (PETSC_VERSION_MINOR >= 5)
-    KSPSetOperators(ksp_schur_primal, mat_schur_primal, mat_schur_primal);
-#else
-    KSPSetOperators(ksp_schur_primal, mat_schur_primal, mat_schur_primal, SAME_NONZERO_PATTERN);
-#endif
+    petsc::ksp_set_operators(ksp_schur_primal, mat_schur_primal, mat_schur_primal);
     KSPSetOptionsPrefix(ksp_schur_primal, "schur_primal_");
     KSPSetType(ksp_schur_primal, KSPGMRES);
     KSPSetFromOptions(ksp_schur_primal);
@@ -1075,11 +1071,7 @@ namespace AMDiS { namespace Parallel {
     // === Create KSP solver object and set appropriate solver options. ===
     KSPCreate(domainComm, &ksp_schur_primal);
-#if (PETSC_VERSION_MINOR >= 5)
-    KSPSetOperators(ksp_schur_primal, mat_schur_primal, mat_schur_primal);
-#else
-    KSPSetOperators(ksp_schur_primal, mat_schur_primal, mat_schur_primal, SAME_NONZERO_PATTERN);
-#endif
+    petsc::ksp_set_operators(ksp_schur_primal, mat_schur_primal, mat_schur_primal);
     KSPSetOptionsPrefix(ksp_schur_primal, "schur_primal_");
     KSPSetType(ksp_schur_primal, KSPPREONLY);
     PC pc_schur_primal;
@@ -1365,11 +1357,7 @@ namespace AMDiS { namespace Parallel {
     }
     KSPCreate(domainComm, &ksp_feti);
-#if (PETSC_VERSION_MINOR >= 5)
-    KSPSetOperators(ksp_feti, mat_feti, mat_feti);
-#else
-    KSPSetOperators(ksp_feti, mat_feti, mat_feti, SAME_NONZERO_PATTERN);
-#endif
+    petsc::ksp_set_operators(ksp_feti, mat_feti, mat_feti);
     KSPSetOptionsPrefix(ksp_feti, "feti_");
     KSPSetType(ksp_feti, KSPGMRES);
     KSPSetTolerances(ksp_feti, 0, 1e-8, 1e+3, 1000);
@@ -1382,9 +1370,9 @@ namespace AMDiS { namespace Parallel {
     Parameters::get(name + "->feti->monitor", monitor);
     if (monitor) {
       if (stokesMode)
-        KSPMonitorSet(ksp_feti, PETSC_MONITOR_CAST(KSPMonitorFetiStokes), &fetiKspData, PETSC_NULL);
+        petsc::ksp_monitor_set(ksp_feti, KSPMonitorFetiStokes);
       else
-        KSPMonitorSet(ksp_feti, PETSC_MONITOR_CAST(KSPMonitorTrueResidualNorm), PETSC_NULL, PETSC_NULL);
+        petsc::ksp_monitor_set(ksp_feti, KSPMonitorTrueResidualNorm);
     }
@@ -1443,11 +1431,7 @@ namespace AMDiS { namespace Parallel {
                     (void(*)(void))petscMultMatFetiInexact);
     KSPCreate(domainComm, &ksp_feti);
-#if (PETSC_VERSION_MINOR >= 5)
-    KSPSetOperators(ksp_feti, mat_feti, mat_feti);
-#else
-    KSPSetOperators(ksp_feti, mat_feti, mat_feti, SAME_NONZERO_PATTERN);
-#endif
+    petsc::ksp_set_operators(ksp_feti, mat_feti, mat_feti);
     KSPSetOptionsPrefix(ksp_feti, "feti_");
     KSPSetType(ksp_feti, KSPGMRES);
     KSPSetTolerances(ksp_feti, 0, 1e-8, 1e+3, 1000);
@@ -1464,11 +1448,7 @@ namespace AMDiS { namespace Parallel {
     createVec(localDofMap, fetiInexactPreconData.tmp_vec_b0);
     KSPCreate(domainComm, &(fetiInexactPreconData.ksp_pc_feti));
-#if (PETSC_VERSION_MINOR >= 5)
-    KSPSetOperators(fetiInexactPreconData.ksp_pc_feti, mat_lagrange, mat_lagrange);
-#else
-    KSPSetOperators(fetiInexactPreconData.ksp_pc_feti, mat_lagrange, mat_lagrange, SAME_NONZERO_PATTERN);
-#endif
+    petsc::ksp_set_operators(fetiInexactPreconData.ksp_pc_feti, mat_lagrange, mat_lagrange);
     KSPGetPC(fetiInexactPreconData.ksp_pc_feti,
              &(fetiInexactPreconData.pc_feti));
     createFetiPreconLumped(fetiInexactPreconData.pc_feti);
@@ -1505,17 +1485,11 @@ namespace AMDiS { namespace Parallel {
     VecCreateMPI(meshDistributor->getMeshLevelData().getMpiComm(meshLevel),
                  localDofMap.getRankDofs(),
                  nGlobalOverallInterior, &(lumpedData->tmp_vec_b0));
-#if (PETSC_VERSION_MINOR >= 6)
-    MatCreateVecs(mat_duals_duals, PETSC_NULL,
-                  &(lumpedData->tmp_vec_duals0));
-    MatCreateVecs(mat_duals_duals, PETSC_NULL,
-                  &(lumpedData->tmp_vec_duals1));
-#else
-    MatGetVecs(mat_duals_duals, PETSC_NULL,
-               &(lumpedData->tmp_vec_duals0));
-    MatGetVecs(mat_duals_duals, PETSC_NULL,
-               &(lumpedData->tmp_vec_duals1));
-#endif
+    petsc::mat_create_vecs(mat_duals_duals, PETSC_NULL,
+                           &(lumpedData->tmp_vec_duals0));
+    petsc::mat_create_vecs(mat_duals_duals, PETSC_NULL,
+                           &(lumpedData->tmp_vec_duals1));
     for (int component = 0; component < static_cast<int>(componentSpaces.size());
          component++) {
@@ -1607,11 +1581,7 @@ namespace AMDiS { namespace Parallel {
       ("Stokes mode does not yet support the Dirichlet precondition!\n");
     KSPCreate(PETSC_COMM_SELF, &ksp_interior);
-#if (PETSC_VERSION_MINOR >= 5)
-    KSPSetOperators(ksp_interior, mat_interior_interior, mat_interior_interior);
-#else
-    KSPSetOperators(ksp_interior, mat_interior_interior, mat_interior_interior, SAME_NONZERO_PATTERN);
-#endif
+    petsc::ksp_set_operators(ksp_interior, mat_interior_interior, mat_interior_interior);
     KSPSetOptionsPrefix(ksp_interior, "precon_interior_");
     KSPSetType(ksp_interior, KSPPREONLY);
     PC pc_interior;
@@ -1635,21 +1605,13 @@ namespace AMDiS { namespace Parallel {
     VecCreateMPI(meshDistributor->getMeshLevelData().getMpiComm(meshLevel),
                  localDofMap.getRankDofs(),
                  nGlobalOverallInterior, &(fetiDirichletPreconData.tmp_vec_b));
-#if (PETSC_VERSION_MINOR >= 6)
-    MatCreateVecs(mat_duals_duals, PETSC_NULL,
-                  &(fetiDirichletPreconData.tmp_vec_duals0));
-    MatCreateVecs(mat_duals_duals, PETSC_NULL,
-                  &(fetiDirichletPreconData.tmp_vec_duals1));
-    MatCreateVecs(mat_interior_interior, PETSC_NULL,
-                  &(fetiDirichletPreconData.tmp_vec_interior));
-#else
-    MatGetVecs(mat_duals_duals, PETSC_NULL,
-               &(fetiDirichletPreconData.tmp_vec_duals0));
-    MatGetVecs(mat_duals_duals, PETSC_NULL,
-               &(fetiDirichletPreconData.tmp_vec_duals1));
-    MatGetVecs(mat_interior_interior, PETSC_NULL,
-               &(fetiDirichletPreconData.tmp_vec_interior));
-#endif
+    petsc::mat_create_vecs(mat_duals_duals, PETSC_NULL,
+                           &(fetiDirichletPreconData.tmp_vec_duals0));
+    petsc::mat_create_vecs(mat_duals_duals, PETSC_NULL,
+                           &(fetiDirichletPreconData.tmp_vec_duals1));
+    petsc::mat_create_vecs(mat_interior_interior, PETSC_NULL,
+                           &(fetiDirichletPreconData.tmp_vec_interior));
     TEST_EXIT_DBG(subDomainIsLocal)
       ("Should not happen, check usage of localDofMap!\n");
@@ -2509,13 +2471,8 @@ namespace AMDiS { namespace Parallel {
     Vec vecRhs, vecSol;
     Vec vecRhsLagrange, vecSolLagrange;
-#if (PETSC_VERSION_MINOR >= 6)
-    MatCreateVecs(mat_lagrange, PETSC_NULL, &vecRhsLagrange);
-    MatCreateVecs(mat_lagrange, PETSC_NULL, &vecSolLagrange);
-#else
-    MatGetVecs(mat_lagrange, PETSC_NULL, &vecRhsLagrange);
-    MatGetVecs(mat_lagrange, PETSC_NULL, &vecSolLagrange);
-#endif
+    petsc::mat_create_vecs(mat_lagrange, PETSC_NULL, &vecRhsLagrange);
+    petsc::mat_create_vecs(mat_lagrange, PETSC_NULL, &vecSolLagrange);
     vecRhs = vecRhsLagrange;
     vecSol = vecSolLagrange;
...
@@ -150,11 +150,7 @@ namespace AMDiS { namespace Parallel {
     FUNCNAME("PetscSolverGlobalBlockMatrix::initSolver()");
     KSPCreate(domainComm, &ksp);
-#if (PETSC_VERSION_MINOR >= 5)
-    KSPSetOperators(ksp, getMatInterior(), getMatInterior());
-#else
-    KSPSetOperators(ksp, getMatInterior(), getMatInterior(), SAME_NONZERO_PATTERN);
-#endif
+    petsc::ksp_set_operators(ksp, getMatInterior(), getMatInterior());
     KSPSetOptionsPrefix(ksp, kspPrefix.c_str());
     KSPSetFromOptions(ksp);
   }
...
@@ -50,15 +50,15 @@ namespace AMDiS { namespace Parallel {
     if (params.matSolverPackage.find(kspSolver) != params.matSolverPackage.end()) {
       // direct solvers
-      petsc_options_insert_string(("-" + kspPrefix + "ksp_type preonly").c_str());
-      petsc_options_insert_string(("-" + kspPrefix + "pc_type lu").c_str());
-      petsc_options_insert_string(("-" + kspPrefix + "pc_factor_mat_solver_package " + kspSolver).c_str());
+      petsc::options_insert_string(("-" + kspPrefix + "ksp_type preonly").c_str());
+      petsc::options_insert_string(("-" + kspPrefix + "pc_type lu").c_str());
+      petsc::options_insert_string(("-" + kspPrefix + "pc_factor_mat_solver_package " + kspSolver).c_str());
       setMaxIterations(1);
       zeroStartVector = true;
       matSolverPackage = true;
     } else if (params.emptyParam.find(kspSolver) == params.emptyParam.end() && solverName != "petsc") {
       // other solvers
-      petsc_options_insert_string(("-" + kspPrefix + "ksp_type " + kspSolver).c_str());
+      petsc::options_insert_string(("-" + kspPrefix + "ksp_type " + kspSolver).c_str());
     }
     // set the preconditioner
@@ -70,17 +70,17 @@ namespace AMDiS { namespace Parallel {
     Parameters::get(name + "->right precon", precon);
     if (!matSolverPackage && params.emptyParam.find(precon) == params.emptyParam.end()) {
       precon = (params.preconMap.find(precon) != params.preconMap.end() ? params.preconMap[precon] : precon);
-      petsc_options_insert_string(("-" + kspPrefix + "pc_type " + precon).c_str());
+      petsc::options_insert_string(("-" + kspPrefix + "pc_type " + precon).c_str());
     }
-    petsc_options_insert_string(("-" + kspPrefix + "ksp_max_it " + boost::lexical_cast<std::string>(getMaxIterations())).c_str());
-    petsc_options_insert_string(("-" + kspPrefix + "ksp_rtol " + boost::lexical_cast<std::string>(getRelative())).c_str());
-    petsc_options_insert_string(("-" + kspPrefix + "ksp_atol " + boost::lexical_cast<std::string>(getTolerance())).c_str());
+    petsc::options_insert_string(("-" + kspPrefix + "ksp_max_it " + boost::lexical_cast<std::string>(getMaxIterations())).c_str());
+    petsc::options_insert_string(("-" + kspPrefix + "ksp_rtol " + boost::lexical_cast<std::string>(getRelative())).c_str());
+    petsc::options_insert_string(("-" + kspPrefix + "ksp_atol " + boost::lexical_cast<std::string>(getTolerance())).c_str());
     if (getInfo() >= 20)
-      petsc_options_insert_string(("-" + kspPrefix + "ksp_monitor_true_residual").c_str());
+      petsc::options_insert_string(("-" + kspPrefix + "ksp_monitor_true_residual").c_str());
     else if (getInfo() >= 10)
-      petsc_options_insert_string(("-" + kspPrefix + "ksp_monitor").c_str());
+      petsc::options_insert_string(("-" + kspPrefix + "ksp_monitor").c_str());
   }
   if (!matSolverPackage) {
     Parameters::get(name + "->use zero start vector", zeroStartVector);
...
@@ -95,11 +95,7 @@ namespace AMDiS { namespace Parallel {
     PetscViewer viewer;
     PetscViewerCreate(PETSC_COMM_WORLD, &viewer);
     PetscViewerSetType(viewer, PETSCVIEWERASCII);
-#if (PETSC_VERSION_MINOR >= 7)
-    PetscOptionsView(PETSC_NULL, viewer);
-#else
-    PetscOptionsView(viewer);
-#endif
+    petsc::options_view(viewer);
     PetscViewerDestroy(&viewer);
   }
@@ -333,11 +329,7 @@ namespace AMDiS { namespace Parallel {
     // === Create solver for the non primal (thus local) variables. ===
     KSPCreate(domainComm, &kspInterior);
-#if (PETSC_VERSION_MINOR >= 5)
-    KSPSetOperators(kspInterior, getMatInterior(), getMatInterior());
-#else
-    KSPSetOperators(kspInterior, getMatInterior(), getMatInterior(), SAME_NONZERO_PATTERN);
-#endif
+    petsc::ksp_set_operators(kspInterior, getMatInterior(), getMatInterior());
     KSPSetOptionsPrefix(kspInterior, "interior_");
     KSPSetType(kspInterior, KSPPREONLY);
     KSPGetPC(kspInterior, &pcInterior);
@@ -460,11 +452,7 @@ namespace AMDiS { namespace Parallel {
       TEST_EXIT_DBG(coarseSpaceMap.empty())("Not supported!\n");
       MSG("Remove nullspace from rhs vector.\n");
-#if (PETSC_VERSION_MINOR >= 5)
-      MatNullSpaceRemove(matNullspace, getVecRhsInterior());
-#else
-      MatNullSpaceRemove(matNullspace, getVecRhsInterior(), PETSC_NULL);
-#endif
+      petsc::mat_nullspace_remove(matNullspace, getVecRhsInterior());
     }
   } else {
     TEST_EXIT(removeRhsNullspace == false)
@@ -617,11 +605,7 @@ namespace AMDiS { namespace Parallel {
   void PetscSolverGlobalMatrix::initSolver(KSP &ksp)
   {
     KSPCreate(domainComm, &ksp);
-#if (PETSC_VERSION_MINOR >= 5)
-    KSPSetOperators(ksp, getMatInterior(), getMatInterior());
+    petsc::ksp_set_operators(ksp, getMatInterior(), getMatInterior());