Commit 0ca6954f authored by Thomas Witkowski's avatar Thomas Witkowski

Work on this totally unfamiliar Navier-Stokes solver...

Work on this totally unfamiliar Navier-Stokes solver I have never heard about; I will go home and eat one or two cakes.
parent 9cd0a12c
......@@ -30,7 +30,7 @@ namespace AMDiS {
SerUtil::deserialize(in, size);
vectors.resize(size);
for (int i = oldSize; i < size; i++)
vectors[i] = new DOFVector<double>(feSpace[i], "");
vectors[i] = new DOFVector<double>(componentSpaces[i], "");
for (int i = 0; i < size; i++)
vectors[i]->deserialize(in);
}
......
......@@ -37,16 +37,21 @@ namespace AMDiS {
/// Constructor. Stores the component FE spaces and allocates the vector of
/// DOFVector pointers. If \p createVec is true, one DOFVector per component
/// is created immediately; otherwise the entries stay unset and must be
/// assigned by the caller (e.g. during deserialization).
SystemVector(std::string name_,
	     std::vector<const FiniteElemSpace*> feSpace_,
	     int size,
	     bool createVec = false)
  : name(name_),
    componentSpaces(feSpace_),
    vectors(size)
{
  if (createVec)
    for (int i = 0; i < size; i++)
      vectors[i] = new DOFVector<double>(componentSpaces[i], "tmp");
}
/// Copy Constructor.
SystemVector(const SystemVector& rhs)
: name(rhs.getName()),
feSpace(rhs.getFeSpaces()),
componentSpaces(rhs.getFeSpaces()),
vectors(rhs.getNumVectors())
{
for (unsigned int i = 0; i < vectors.size(); i++)
......@@ -111,13 +116,13 @@ namespace AMDiS {
/// Returns the fe space for a given component.
inline const FiniteElemSpace *getFeSpace(int i) const
{
  return componentSpaces[i];
}
/// Returns the fe spaces for all components (by value; the vector holds
/// non-owning pointers).
inline std::vector<const FiniteElemSpace*> getFeSpaces() const
{
  return componentSpaces;
}
/// Here the system vector is interpreted as one large vector. The given
......@@ -228,7 +233,7 @@ namespace AMDiS {
std::string name;
/// Finite element space.
std::vector<const FiniteElemSpace*> feSpace;
std::vector<const FiniteElemSpace*> componentSpaces;
/// Local dof vectors.
std::vector<DOFVector<double>*> vectors;
......
......@@ -35,8 +35,8 @@ namespace AMDiS {
class BddcMlSolver : public PetscSolver
{
public:
BddcMlSolver()
: PetscSolver(),
BddcMlSolver(string name)
: PetscSolver(name),
rhsVec(NULL),
mat(NULL)
{}
......
......@@ -37,21 +37,21 @@ namespace AMDiS {
Parameters::get(initFileStr, tmp);
if (tmp == "petsc-schur") {
petscSolver = new PetscSolverSchur();
petscSolver = new PetscSolverSchur(initFileStr);
} else if (tmp == "petsc-feti") {
petscSolver = new PetscSolverFeti(initFileStr);
} else if (tmp == "petsc-block") {
petscSolver = new PetscSolverGlobalBlockMatrix();
petscSolver = new PetscSolverGlobalBlockMatrix(initFileStr);
} else if (tmp == "petsc") {
petscSolver = new PetscSolverGlobalMatrix();
petscSolver = new PetscSolverGlobalMatrix(initFileStr);
} else if (tmp == "bddcml") {
#ifdef HAVE_BDDC_ML
petscSolver = new BddcMlSolver();
petscSolver = new BddcMlSolver(initFileStr);
#else
ERROR_EXIT("AMDiS was compiled without BDDC-ML support!\n");
#endif
} else if (tmp == "petsc-stokes") {
petscSolver = new PetscSolverNavierStokes();
petscSolver = new PetscSolverNavierStokes(initFileStr);
} else {
ERROR_EXIT("No parallel solver %s available!\n", tmp.c_str());
}
......
......@@ -20,8 +20,9 @@ namespace AMDiS {
using namespace std;
PetscSolver::PetscSolver()
PetscSolver::PetscSolver(string name)
: ParallelCoarseSpaceMatVec(),
initFileStr(name),
dofMap(FESPACE_WISE, true),
dofMapSd(FESPACE_WISE, true),
kspPrefix(""),
......
......@@ -50,7 +50,7 @@ namespace AMDiS {
class PetscSolver : public ParallelCoarseSpaceMatVec
{
public:
PetscSolver();
PetscSolver(string name);
virtual ~PetscSolver() {}
......@@ -145,6 +145,11 @@ namespace AMDiS {
handleDirichletRows = b;
}
ParallelDofMapping& getDofMap()
{
return dofMap;
}
protected:
/** \brief
* Copies between to PETSc vectors by using different index sets for the
......@@ -163,6 +168,9 @@ namespace AMDiS {
/// Run test, if matrix is symmetric.
bool testMatrixSymmetric(Mat mat, bool advancedTest = false);
protected:
/// Prefix string for parameters in init file.
string initFileStr;
/// FE spaces of all components for the stationary problem the specific
/// solver object is registered to.
vector<const FiniteElemSpace*> componentSpaces;
......
......@@ -29,8 +29,7 @@ namespace AMDiS {
using namespace std;
PetscSolverFeti::PetscSolverFeti(string name)
: PetscSolver(),
initFileStr(name),
: PetscSolver(name),
primalDofMap(COMPONENT_WISE),
dualDofMap(COMPONENT_WISE),
interfaceDofMap(COMPONENT_WISE),
......@@ -108,7 +107,7 @@ namespace AMDiS {
MeshLevelData& levelData = meshDistributor->getMeshLevelData();
if (subdomain == NULL) {
subdomain = new PetscSolverGlobalMatrix();
subdomain = new PetscSolverGlobalMatrix("");
subdomain->setSymmetric(isSymmetric);
subdomain->setHandleDirichletRows(false);
......@@ -1302,7 +1301,7 @@ namespace AMDiS {
massMatrix.assembleOperator(op);
if (!massMatrixSolver) {
massMatrixSolver = new PetscSolverGlobalMatrix;
massMatrixSolver = new PetscSolverGlobalMatrix("");
massMatrixSolver->setKspPrefix("mass_");
massMatrixSolver->setMeshDistributor(meshDistributor,
mpiCommGlobal,
......
......@@ -230,9 +230,6 @@ namespace AMDiS {
}
protected:
/// Prefix string for parameters in init file.
string initFileStr;
/// Mapping from primal DOF indices to a global index of primals.
ParallelDofMapping primalDofMap;
......
......@@ -34,8 +34,8 @@ namespace AMDiS {
class PetscSolverGlobalBlockMatrix : public PetscSolver
{
public:
PetscSolverGlobalBlockMatrix()
: PetscSolver(),
PetscSolverGlobalBlockMatrix(string name)
: PetscSolver(name),
nComponents(0),
nBlocks(-1)
{}
......
......@@ -321,8 +321,7 @@ namespace AMDiS {
if (nullspace.size() > 0) {
VecDuplicate(getVecSolInterior(), &nullspaceBasis);
for (int i = 0; i < nComponents; i++)
setDofVector(nullspaceBasis, nullspace[0]->getDOFVector(i), i, true);
setDofVector(nullspaceBasis, *(nullspace[0]), true);
VecAssemblyBegin(nullspaceBasis);
VecAssemblyEnd(nullspaceBasis);
......@@ -882,4 +881,27 @@ namespace AMDiS {
}
}
/// Creates a scalar sub solver that works on exactly one component of this
/// solver. The sub solver shares the mesh distributor and communicators and
/// reuses this solver's DOF mapping for the chosen component.
PetscSolver* PetscSolverGlobalMatrix::createSubSolver(int component,
						      string kspPrefix)
{
  FUNCNAME("PetscSolverGlobalMatrix::createSubSolver()");

  // The sub solver is built on the FE space of the given component only.
  vector<const FiniteElemSpace*> feSpaces(1, componentSpaces[component]);

  PetscSolver *sub = new PetscSolverGlobalMatrix("");
  sub->setKspPrefix(kspPrefix.c_str());
  sub->setMeshDistributor(meshDistributor, mpiCommGlobal, mpiCommLocal);
  sub->init(feSpaces, feSpaces);

  // Wire the component's parallel DOF mapping into the sub solver's first
  // (and only) component, then rebuild the mapping's global data.
  ParallelDofMapping &subDofMap = sub->getDofMap();
  subDofMap[0] = dofMap[component];
  subDofMap.update();

  return sub;
}
}
......@@ -39,8 +39,8 @@ namespace AMDiS {
class PetscSolverGlobalMatrix : public PetscSolver
{
public:
PetscSolverGlobalMatrix()
: PetscSolver(),
PetscSolverGlobalMatrix(string name)
: PetscSolver(name),
zeroStartVector(false),
printMatInfo(false)
{
......@@ -88,6 +88,14 @@ namespace AMDiS {
*/
void createFieldSplit(PC pc, string splitName, vector<int> &components);
/// Wrapper to create field split from only one component.
void createFieldSplit(PC pc, string splitName, int component)
{
vector<int> components;
components.push_back(component);
createFieldSplit(pc, splitName, components);
}
virtual void initSolver(KSP &ksp);
virtual void exitSolver(KSP ksp);
......@@ -112,6 +120,15 @@ namespace AMDiS {
setDofVector(vecInterior, PETSC_NULL, vec, nRowVec, rankOnly);
}
inline void setDofVector(Vec vecInterior,
SystemVector &vec,
bool rankOnly = false)
{
for (int i = 0; i < vec.getSize(); i++)
setDofVector(vecInterior, PETSC_NULL, vec.getDOFVector(i), i, rankOnly);
}
PetscSolver* createSubSolver(int component, string kspPrefix);
protected:
bool zeroStartVector;
......
......@@ -17,13 +17,70 @@ namespace AMDiS {
using namespace std;
/// Shell-matrix multiplication callback for the Navier-Stokes Schur
/// complement preconditioner: computes y = S * x, where the meaning of S
/// depends on the context's solverMode.
///
/// Mode 0: exact Schur complement  y = A11 x - A10 A00^{-1} A01 x
/// Mode 1: pressure convection-diffusion (PCD) approximation
///         y = M_p^{-1} F_p L_p^{-1} x
///
/// Returns 0 on success (PETSc callback convention: PetscErrorCode).
int petscMultNavierStokesSchur(Mat mat, Vec x, Vec y)
{
  void *ctx;
  MatShellGetContext(mat, &ctx);
  MatShellNavierStokesSchur* data = static_cast<MatShellNavierStokesSchur*>(ctx);

  switch (data->solverMode) {
  case 0:
    {
      Vec vec0, vec1;
      VecDuplicate(x, &vec0);
      MatGetVecs(data->A00, &vec1, PETSC_NULL);

      // vec0 = A11 x
      // BUGFIX: the original wrote A11 x into y, which was then clobbered
      // by the MatMult(A10, ...) below, leaving vec0 with undefined values
      // for the final VecAYPX. Store the product in vec0 instead.
      MatMult(data->A11, x, vec0);
      // vec1 = A00^{-1} A01 x
      MatMult(data->A01, x, vec1);
      KSPSolve(data->kspVelocity, vec1, vec1);
      // y = A10 vec1, then y = vec0 - y
      MatMult(data->A10, vec1, y);
      VecAYPX(y, -1.0, vec0);

      VecDestroy(&vec0);
      VecDestroy(&vec1);
    }
    break;
  case 1:
    {
      Vec vec0, vec1;
      VecDuplicate(x, &vec0);
      VecDuplicate(x, &vec1);

      // y = M^{-1} F L^{-1} x  (mass / convection-diffusion / laplace)
      KSPSolve(data->kspLaplace, x, vec0);
      MatMult(data->matConDif, vec0, vec1);
      KSPSolve(data->kspMass, vec1, y);

      VecDestroy(&vec0);
      VecDestroy(&vec1);
    }
    break;
  default:
    ERROR_EXIT("Wrong solver mode %d\n", data->solverMode);
  }

  // BUGFIX: function is declared int (PetscErrorCode) but had no return,
  // which is undefined behavior in C++.
  return 0;
}
/// Constructor. Reads the pressure component from the init file
/// ("<name>->stokes->pressure component") and aborts if it is missing.
/// The implementation is currently hard-wired to pressure component 2
/// (2D velocity in components 0/1).
PetscSolverNavierStokes::PetscSolverNavierStokes(string name)
  : PetscSolverGlobalMatrix(name),
    pressureComponent(-1),
    massMatrixSolver(NULL),
    laplaceMatrixSolver(NULL),
    conDifMatrixSolver(NULL)  // BUGFIX: was left uninitialized
{
  Parameters::get(initFileStr + "->stokes->pressure component",
		  pressureComponent);

  TEST_EXIT(pressureComponent >= 0)
    ("For using PETSc stokes solver you must define a pressure component!\n");
  TEST_EXIT(pressureComponent == 2)("Fixed for pressure component = 2\n");
}
void PetscSolverNavierStokes::initSolver(KSP &ksp)
{
FUNCNAME("PetscSolverNavierStokes::initSolver()");
MSG("RUN NAVIER STOKES SOLVER INIT!\n");
KSPCreate(mpiCommGlobal, &ksp);
KSPSetOperators(ksp, getMatInterior(), getMatInterior(),
SAME_NONZERO_PATTERN);
......@@ -31,7 +88,25 @@ namespace AMDiS {
KSPSetType(ksp, KSPGMRES);
KSPSetOptionsPrefix(ksp, "ns_");
KSPSetFromOptions(ksp);
KSPMonitorSet(ksp, KSPMonitorDefault, PETSC_NULL, PETSC_NULL);
KSPMonitorSet(ksp, KSPMonitorTrueResidualNorm, PETSC_NULL, PETSC_NULL);
// === Create null space information. ===
Vec nullSpaceBasis;
VecDuplicate(getVecSolInterior(), &nullSpaceBasis);
SystemVector basisVec("tmp", componentSpaces, componentSpaces.size(), true);
basisVec.set(0.0);
basisVec.getDOFVector(pressureComponent)->set(1.0);
setDofVector(nullSpaceBasis, basisVec, true);
VecAssemblyBegin(nullSpaceBasis);
VecAssemblyEnd(nullSpaceBasis);
VecNormalize(nullSpaceBasis, PETSC_NULL);
MatNullSpace matNullSpace;
MatNullSpaceCreate(mpiCommGlobal, PETSC_FALSE, 1, &nullSpaceBasis, &matNullSpace);
}
......@@ -39,13 +114,9 @@ namespace AMDiS {
{
FUNCNAME("PetscSolverNavierStokes::initPreconditioner()");
MSG("RUN NAVIER STOKES PRECONDITIONER INIT!\n");
vector<int> velocityComponents;
velocityComponents.push_back(0);
velocityComponents.push_back(1);
vector<int> pressureComponent;
pressureComponent.push_back(2);
PCSetType(pc, PCFIELDSPLIT);
PCFieldSplitSetType(pc, PC_COMPOSITE_SCHUR);
......@@ -59,42 +130,126 @@ namespace AMDiS {
int nSubKsp;
PCFieldSplitGetSubKSP(pc, &nSubKsp, &subKsp);
TEST_EXIT(nSubKsp == 2)("Wrong numer of KSPs inside of the fieldsplit preconditioner!\n");
TEST_EXIT(nSubKsp == 2)
("Wrong numer of KSPs inside of the fieldsplit preconditioner!\n");
KSP velocityKsp = subKsp[0];
KSP schurKsp = subKsp[1];
KSP kspVelocity = subKsp[0];
KSP kspSchur = subKsp[1];
PetscFree(subKsp);
Mat A00, A01, A10, A11;
PCFieldSplitGetSchurBlocks(pc, &A00, &A01, &A10, &A11);
matShellContext.A00 = A00;
matShellContext.A01 = A01;
matShellContext.A10 = A10;
matShellContext.A11 = A11;
matShellContext.kspVelocity = kspVelocity;
Mat matShell;
MatDuplicate(A11, MAT_DO_NOT_COPY_VALUES, &matShell);
MatSetType(matShell, MATSHELL);
MatShellSetContext(matShell, &matShellContext);
MatShellSetOperation(matShell, MATOP_MULT, (void(*)(void))petscMultNavierStokesSchur);
MatSetUp(matShell);
KSPSetType(kspVelocity, KSPPREONLY);
PC pcSub;
KSPGetPC(kspVelocity, &pcSub);
PCSetType(pcSub, PCLU);
PCFactorSetMatSolverPackage(pcSub, MATSOLVERMUMPS);
MatInfo minfo;
PetscInt nRow, nCol;
MatGetSize(A00, &nRow, &nCol);
MatGetInfo(A00, MAT_GLOBAL_SUM, &minfo);
MSG("A00: %d x %d with %d nnz\n", nRow, nCol, static_cast<int>(minfo.nz_used));
MatGetSize(A01, &nRow, &nCol);
MatGetInfo(A01, MAT_GLOBAL_SUM, &minfo);
MSG("A01: %d x %d with %d nnz\n", nRow, nCol, static_cast<int>(minfo.nz_used));
KSPSetType(kspSchur, KSPGMRES);
KSPSetTolerances(kspSchur, 0, 1e-14, 1e+3, 1000);
KSPSetOperators(kspSchur, matShell, matShell, SAME_NONZERO_PATTERN);
KSPGetPC(kspSchur, &pcSub);
PCSetType(pcSub, PCNONE);
MatGetSize(A10, &nRow, &nCol);
MatGetInfo(A10, MAT_GLOBAL_SUM, &minfo);
MSG("A10: %d x %d with %d nnz\n", nRow, nCol, static_cast<int>(minfo.nz_used));
MatGetSize(A11, &nRow, &nCol);
MatGetInfo(A11, MAT_GLOBAL_SUM, &minfo);
MSG("A11: %d x %d with %d nnz\n", nRow, nCol, static_cast<int>(minfo.nz_used));
// === Mass matrix solver ===
KSPSetType(velocityKsp, KSPPREONLY);
PC pcSub;
KSPGetPC(velocityKsp, &pcSub);
MSG("Initialize mass matrix solver ...\n");
const FiniteElemSpace *pressureFeSpace = componentSpaces[pressureComponent];
DOFMatrix massMatrix(pressureFeSpace, pressureFeSpace);
Operator massOp(pressureFeSpace, pressureFeSpace);
Simple_ZOT zot;
massOp.addTerm(&zot);
massMatrix.assembleOperator(massOp);
massMatrixSolver = createSubSolver(pressureComponent, "mass_");
massMatrixSolver->fillPetscMatrix(&massMatrix);
MSG("... OK!\n");
// === Laplace matrix solver ===
MSG("Initialize laplace matrix solver ...\n");
DOFMatrix laplaceMatrix(pressureFeSpace, pressureFeSpace);
Operator laplaceOp(pressureFeSpace, pressureFeSpace);
Simple_SOT sot;
laplaceOp.addTerm(&sot);
laplaceMatrix.assembleOperator(laplaceOp);
laplaceMatrixSolver = createSubSolver(pressureComponent, "laplace_");
laplaceMatrixSolver->fillPetscMatrix(&laplaceMatrix);
MSG("... OK!\n");
// === Create convection-diffusion operator ===
MSG("Initialize pressure convection-diffusion operator ...\n");
double timestep = 1.0;
Parameters::get("navierstokes->adapt->timestep", timestep);
timestep = 1.0 / timestep;
double nu = 0.01;
Parameters::get("nu", nu);
DOFMatrix conDifMatrix(pressureFeSpace, pressureFeSpace);
Operator conDifOp(pressureFeSpace, pressureFeSpace);
Simple_ZOT conDif0(timestep);
conDifOp.addTerm(&conDif0);
Simple_SOT conDif1(nu);
conDifOp.addTerm(&conDif1);
Vector_FOT conDif2(0);
// conDifOp.addTerm(&conDif2, GRD_PHI);
Vector_FOT conDif3(1);
// conDifOp.addTerm(&conDif3, GRD_PHI);
conDifMatrix.assembleOperator(conDifOp);
conDifMatrixSolver = createSubSolver(pressureComponent, "condif_");
conDifMatrixSolver->fillPetscMatrix(&conDifMatrix);
MSG("... OK!\n");
matShellContext.kspMass = massMatrixSolver->getSolver();
matShellContext.kspLaplace = laplaceMatrixSolver->getSolver();
matShellContext.matConDif = conDifMatrixSolver->getMatInterior();
KSPSetType(matShellContext.kspMass, KSPRICHARDSON);
KSPSetTolerances(matShellContext.kspMass, 0, 1e-13, 1e+3, 1);
KSPGetPC(matShellContext.kspMass, &pcSub);
PCSetType(pcSub, PCLU);
PCFactorSetMatSolverPackage(pcSub, MATSOLVERMUMPS);
KSPSetFromOptions(matShellContext.kspMass);
KSPSetType(matShellContext.kspLaplace, KSPCG);
KSPSetTolerances(matShellContext.kspLaplace, 0, 1e-13, 1e+3, 10000);
KSPGetPC(matShellContext.kspLaplace, &pcSub);
PCSetType(pcSub, PCJACOBI);
KSPSetFromOptions(matShellContext.kspLaplace);
matShellContext.solverMode = 1;
KSPView(velocityKsp, PETSC_VIEWER_STDOUT_WORLD);
KSPView(schurKsp, PETSC_VIEWER_STDOUT_WORLD);
KSPSetFromOptions(kspVelocity);
KSPSetFromOptions(kspSchur);
}
}
......@@ -29,17 +29,39 @@ namespace AMDiS {
using namespace std;
/// Context object handed to the Schur-complement shell matrix; read by the
/// petscMultNavierStokesSchur() callback. Which members are valid depends
/// on solverMode.
struct MatShellNavierStokesSchur {
// Selects the Schur operator: 0 = exact Schur complement,
// 1 = pressure convection-diffusion approximation.
int solverMode;
// === Data for mode 0 (solve the schur complement directly) ===
// Blocks of the saddle-point matrix; A00 is the velocity-velocity block.
Mat A00, A01, A10, A11;
// Inner solver applied to A00 inside the Schur product.
KSP kspVelocity;
// === Data for mode 1 ===
// Pressure-space mass and laplace solvers.
KSP kspMass, kspLaplace;
// Pressure-space convection-diffusion operator.
Mat matConDif;
};
/// PETSc solver for Navier-Stokes systems using a fieldsplit Schur
/// complement preconditioner on the pressure component.
class PetscSolverNavierStokes : public PetscSolverGlobalMatrix
{
public:
  // BUGFIX: removed the stale default constructor left over from the diff;
  // it delegated to PetscSolverGlobalMatrix(), which no longer exists now
  // that all solvers take the init-file prefix string.
  PetscSolverNavierStokes(string name);

protected:
  void initSolver(KSP &ksp);

  void initPreconditioner(PC pc);

private:
  // Index of the pressure component in the system (read from the init file).
  int pressureComponent;

  // Sub solvers for the pressure-space mass, laplace and
  // convection-diffusion operators; created in initPreconditioner().
  PetscSolver *massMatrixSolver, *laplaceMatrixSolver, *conDifMatrixSolver;

  // Context shared with the Schur shell matrix callback.
  MatShellNavierStokesSchur matShellContext;
};
}
......
......@@ -33,8 +33,8 @@ namespace AMDiS {
class PetscSolverSchur : public PetscSolver
{
public:
PetscSolverSchur()
: PetscSolver()
PetscSolverSchur(string name)
: PetscSolver(name)
{}
void fillPetscMatrix(Matrix<DOFMatrix*> *mat);
......
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment