Liebe Gitlab-Nutzerin, lieber Gitlab-Nutzer, es ist nun möglich, sich mittels des ZIH-Logins/LDAP an unserem Dienst anzumelden. Eine Anmeldung auf diesem Weg erzeugt ein neues Konto. Das alte Konto ist über den Reiter "Standard" erreichbar. Die Administratoren

Dear Gitlab user, it is now possible to log in to our service using the ZIH login/LDAP. Logging in via this will create a new account. The old account can be accessed via the "Standard" tab. The administrators

Commit 35eda406 authored by Thomas Witkowski's avatar Thomas Witkowski
Browse files

Added framework for a PETSc based navier stokes solver. Still must, or...

Added framework for a PETSc based Navier-Stokes solver. The specific algorithm still has to be implemented, either by me or by Sebastian.
parent 06c5b791
...@@ -267,6 +267,7 @@ if(ENABLE_PARALLEL_DOMAIN) ...@@ -267,6 +267,7 @@ if(ENABLE_PARALLEL_DOMAIN)
${SOURCE_DIR}/parallel/PetscSolverFetiTimings.cc ${SOURCE_DIR}/parallel/PetscSolverFetiTimings.cc
${SOURCE_DIR}/parallel/PetscSolverGlobalMatrix.cc ${SOURCE_DIR}/parallel/PetscSolverGlobalMatrix.cc
${SOURCE_DIR}/parallel/PetscSolverGlobalBlockMatrix.cc ${SOURCE_DIR}/parallel/PetscSolverGlobalBlockMatrix.cc
${SOURCE_DIR}/parallel/PetscSolverNavierStokes.cc
${SOURCE_DIR}/parallel/PetscSolverSchur.cc) ${SOURCE_DIR}/parallel/PetscSolverSchur.cc)
elseif(ENABLE_PARALLEL_DOMAIN STREQUAL "PMTL") elseif(ENABLE_PARALLEL_DOMAIN STREQUAL "PMTL")
set(MTL_INCLUDE_DIR "") set(MTL_INCLUDE_DIR "")
......
...@@ -18,6 +18,7 @@ ...@@ -18,6 +18,7 @@
#include "parallel/PetscSolver.h" #include "parallel/PetscSolver.h"
#include "parallel/MpiHelper.h" #include "parallel/MpiHelper.h"
#include "parallel/BddcMlSolver.h" #include "parallel/BddcMlSolver.h"
#include "parallel/PetscSolverNavierStokes.h"
namespace AMDiS { namespace AMDiS {
...@@ -48,6 +49,8 @@ namespace AMDiS { ...@@ -48,6 +49,8 @@ namespace AMDiS {
#else #else
ERROR_EXIT("AMDiS was compiled without BDDC-ML support!\n"); ERROR_EXIT("AMDiS was compiled without BDDC-ML support!\n");
#endif #endif
} else if (tmp == "petsc-stokes") {
petscSolver = new PetscSolverNavierStokes();
} else { } else {
ERROR_EXIT("No parallel solver %s available!\n", tmp.c_str()); ERROR_EXIT("No parallel solver %s available!\n", tmp.c_str());
} }
......
...@@ -53,6 +53,8 @@ namespace AMDiS { ...@@ -53,6 +53,8 @@ namespace AMDiS {
#endif #endif
matAssembly(); matAssembly();
removeDirichletRows(seqMat, dofMap, getMatInterior());
if (printMatInfo) { if (printMatInfo) {
MatInfo matInfo; MatInfo matInfo;
...@@ -65,24 +67,13 @@ namespace AMDiS { ...@@ -65,24 +67,13 @@ namespace AMDiS {
MSG(" nz unneeded: %d\n", static_cast<int>(matInfo.nz_unneeded)); MSG(" nz unneeded: %d\n", static_cast<int>(matInfo.nz_unneeded));
} }
// === Init PETSc solver. ===
KSPCreate(mpiCommGlobal, &kspInterior); // === Init PETSc solver and preconditioner objects. ===
KSPGetPC(kspInterior, &pcInterior);
KSPSetOperators(kspInterior, getMatInterior(), getMatInterior(),
SAME_NONZERO_PATTERN);
KSPSetTolerances(kspInterior, 0.0, 1e-8, PETSC_DEFAULT, PETSC_DEFAULT);
KSPSetType(kspInterior, KSPBCGS);
KSPSetOptionsPrefix(kspInterior, kspPrefix.c_str());
KSPSetFromOptions(kspInterior);
initSolver(kspInterior);
KSPGetPC(kspInterior, &pcInterior);
initPreconditioner(pcInterior); initPreconditioner(pcInterior);
// Do not delete the solution vector, use it for the initial guess.
if (!zeroStartVector)
KSPSetInitialGuessNonzero(kspInterior, PETSC_TRUE);
removeDirichletRows(seqMat, dofMap, getMatInterior());
#if (DEBUG != 0) #if (DEBUG != 0)
MSG("Fill petsc matrix 3 needed %.5f seconds\n", MPI::Wtime() - wtime); MSG("Fill petsc matrix 3 needed %.5f seconds\n", MPI::Wtime() - wtime);
...@@ -450,11 +441,11 @@ namespace AMDiS { ...@@ -450,11 +441,11 @@ namespace AMDiS {
{ {
FUNCNAME("PetscSolverGlobalMatrix::destroyMatrixData()"); FUNCNAME("PetscSolverGlobalMatrix::destroyMatrixData()");
exitPreconditioner(pcInterior);
matDestroy(); matDestroy();
KSPDestroy(&kspInterior); exitPreconditioner(pcInterior);
exitSolver(kspInterior);
} }
...@@ -596,14 +587,50 @@ namespace AMDiS { ...@@ -596,14 +587,50 @@ namespace AMDiS {
isNames[i].c_str()); isNames[i].c_str());
} }
IS is; createFieldSplit(pc, isNames[i], blockComponents);
interiorMap->createIndexSet(is, blockComponents[0], nComponents);
PCFieldSplitSetIS(pc, isNames[i].c_str(), is);
ISDestroy(&is);
} }
} }
/** \brief
 * Creates a new field split for a PCFIELDSPLIT preconditioner object.
 *
 * \param[in] pc         PETSc preconditioner object, must be of type
 *                       PCFIELDSPLIT.
 * \param[in] splitName  Name of the field split; can be used on the
 *                       command line to set further PCFIELDSPLIT options.
 * \param[in] components System component numbers of the field split. At
 *                       the moment only continuous splits are allowed,
 *                       so only components[0] and the count are used.
 */
void PetscSolverGlobalMatrix::createFieldSplit(PC pc,
					       string splitName,
					       vector<int> &components)
{
  FUNCNAME("PetscSolverGlobalMatrix::createFieldSplit()");

  // Nothing to split on an empty component list; avoids reading
  // components[0] below.
  if (components.empty())
    return;

  IS is;
  interiorMap->createIndexSet(is, components[0], components.size());
  // BUGFIX: the original code passed isNames[i] here, which is not in
  // scope inside this function (it belonged to the calling loop). The
  // split must be registered under the name given by the caller.
  PCFieldSplitSetIS(pc, splitName.c_str(), is);
  ISDestroy(&is);
}
/// Creates and configures the interior PETSc Krylov solver (BiCGStab
/// with absolute tolerance 1e-8); command line options with the
/// configured prefix may override these settings.
///
/// NOTE(review): the KSP handle is passed BY VALUE, so KSPCreate() below
/// initializes only a local copy -- the caller's handle (e.g.
/// kspInterior) is apparently never updated. This looks like it should
/// be a reference parameter (KSP&); TODO confirm against the call sites.
void PetscSolverGlobalMatrix::initSolver(KSP ksp)
{
  FUNCNAME("PetscSolverGlobalMatrix::initSolver()");

  KSPCreate(mpiCommGlobal, &ksp);
  // The interior matrix serves both as operator and as the matrix from
  // which the preconditioner is built.
  KSPSetOperators(ksp, getMatInterior(), getMatInterior(),
		  SAME_NONZERO_PATTERN);
  // rtol = 0.0, abstol = 1e-8, remaining tolerances left at PETSc
  // defaults.
  KSPSetTolerances(ksp, 0.0, 1e-8, PETSC_DEFAULT, PETSC_DEFAULT);
  KSPSetType(ksp, KSPBCGS);
  // The prefix must be set before KSPSetFromOptions() so prefixed
  // command line options are picked up.
  KSPSetOptionsPrefix(ksp, kspPrefix.c_str());
  KSPSetFromOptions(ksp);

  // Do not delete the solution vector, use it for the initial guess.
  if (!zeroStartVector)
    KSPSetInitialGuessNonzero(ksp, PETSC_TRUE);
}
/// Destroys the PETSc solver object created by initSolver().
///
/// NOTE(review): ksp is passed by value; KSPDestroy() frees the
/// underlying object but nulls only the local copy, leaving the caller's
/// handle dangling. A reference parameter (KSP&) would be safer -- TODO
/// confirm intended signature.
void PetscSolverGlobalMatrix::exitSolver(KSP ksp)
{
  FUNCNAME("PetscSolverGlobalMatrix::exitSolver()");

  KSPDestroy(&ksp);
}
void PetscSolverGlobalMatrix::initPreconditioner(PC pc) void PetscSolverGlobalMatrix::initPreconditioner(PC pc)
{ {
FUNCNAME("PetscSolverGlobalMatrix::initPreconditioner()"); FUNCNAME("PetscSolverGlobalMatrix::initPreconditioner()");
......
...@@ -71,8 +71,27 @@ namespace AMDiS { ...@@ -71,8 +71,27 @@ namespace AMDiS {
ParallelDofMapping &dofMap, ParallelDofMapping &dofMap,
Vec mpiVec); Vec mpiVec);
/// Reads field split information and creats a splitting based on
/// component numbers.
void createFieldSplit(PC pc); void createFieldSplit(PC pc);
/** \brief
* Creates a new field split for a preconditioner object.
*
* \param[in] pc PETSc preconditioner object, must be of
* type PCFIELDSPLIT
* \param[in] splitName Name of the field split, can be used to set other
* parameters of the PCFIELDSPLIT preconditioner on
* the command line.
* \param[in] components System component numbers of the field split. At
* the moment only continuous splits are allowed.
*/
void createFieldSplit(PC pc, string splitName, vector<int> &components);
virtual void initSolver(KSP ksp);
virtual void exitSolver(KSP ksp);
virtual void initPreconditioner(PC pc); virtual void initPreconditioner(PC pc);
virtual void exitPreconditioner(PC pc); virtual void exitPreconditioner(PC pc);
......
//
// Software License for AMDiS
//
// Copyright (c) 2010 Dresden University of Technology
// All rights reserved.
// Authors: Simon Vey, Thomas Witkowski et al.
//
// This file is part of AMDiS
//
// See also license.opensource.txt in the distribution.
#include "parallel/PetscSolverNavierStokes.h"
namespace AMDiS {
using namespace std;
/// Navier-Stokes specific solver setup: GMRES with the "ns_" options
/// prefix and a default residual monitor, overriding the base class
/// BiCGStab configuration.
///
/// NOTE(review): as in the base class, the KSP handle is passed by
/// value, so the object created here apparently never reaches the
/// caller's handle -- TODO confirm intended signature (KSP&).
void PetscSolverNavierStokes::initSolver(KSP ksp)
{
  FUNCNAME("PetscSolverNavierStokes::initSolver()");

  // Debug trace to verify the override is actually dispatched.
  MSG("RUN NAVIER STOKES SOLVER INIT!\n");

  KSPCreate(mpiCommGlobal, &ksp);
  // Interior matrix is used both as operator and as preconditioner
  // matrix.
  KSPSetOperators(ksp, getMatInterior(), getMatInterior(),
		  SAME_NONZERO_PATTERN);
  // rtol = 0.0, abstol = 1e-8, remaining tolerances at PETSc defaults.
  KSPSetTolerances(ksp, 0.0, 1e-8, PETSC_DEFAULT, PETSC_DEFAULT);
  KSPSetType(ksp, KSPGMRES);
  // All solver options for this solver are prefixed with "ns_" on the
  // command line; prefix must be set before KSPSetFromOptions().
  KSPSetOptionsPrefix(ksp, "ns_");
  KSPSetFromOptions(ksp);
  // Print the residual norm at every iteration.
  KSPMonitorSet(ksp, KSPMonitorDefault, PETSC_NULL, PETSC_NULL);
}
/// Navier-Stokes specific preconditioner setup: a PCFIELDSPLIT with a
/// full Schur factorization, split into a velocity block (components 0
/// and 1) and a pressure block (component 2).
void PetscSolverNavierStokes::initPreconditioner(PC pc)
{
  FUNCNAME("PetscSolverNavierStokes::initPreconditioner()");

  // Debug trace to verify the override is actually dispatched.
  MSG("RUN NAVIER STOKES PRECONDITIONER INIT!\n");

  // Components 0 and 1 form the velocity block.
  vector<int> velocityBlock(2);
  velocityBlock[0] = 0;
  velocityBlock[1] = 1;

  // Component 2 is the pressure block.
  vector<int> pressureBlock(1, 2);

  PCSetType(pc, PCFIELDSPLIT);
  PCFieldSplitSetSchurFactType(pc, PC_FIELDSPLIT_SCHUR_FACT_FULL);
  createFieldSplit(pc, "velocity", velocityBlock);
  createFieldSplit(pc, "pressure", pressureBlock);
}
}
// ============================================================================
// == ==
// == AMDiS - Adaptive multidimensional simulations ==
// == ==
// == http://www.amdis-fem.org ==
// == ==
// ============================================================================
//
// Software License for AMDiS
//
// Copyright (c) 2010 Dresden University of Technology
// All rights reserved.
// Authors: Simon Vey, Thomas Witkowski et al.
//
// This file is part of AMDiS
//
// See also license.opensource.txt in the distribution.
/** \file PetscSolverNavierStokes.h */
#ifndef AMDIS_PETSC_SOLVER_NAVIER_STOKES_H
#define AMDIS_PETSC_SOLVER_NAVIER_STOKES_H
#include "parallel/PetscSolverGlobalMatrix.h"
namespace AMDiS {
using namespace std;
  /** \brief
   * Framework for a PETSc based Navier-Stokes solver. The specific
   * solution algorithm is not implemented yet; this class only overrides
   * the solver and preconditioner setup of \ref PetscSolverGlobalMatrix.
   */
  class PetscSolverNavierStokes : public PetscSolverGlobalMatrix
  {
  public:
    PetscSolverNavierStokes()
      : PetscSolverGlobalMatrix()
    {}

  protected:
    /// Overrides the base class Krylov solver setup (implicitly virtual,
    /// declared virtual in \ref PetscSolverGlobalMatrix).
    void initSolver(KSP ksp);

    /// Overrides the base class preconditioner setup with a field split
    /// preconditioner (implicitly virtual).
    void initPreconditioner(PC pc);
  };
}
#endif
Markdown is supported
0% or .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment