Commit 255c2ef3 authored by Thomas Witkowski
Fixed several problems in Stokes FETI-DP
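
This commit adds DOFMatrix::assembleOperator() for assembling a matrix from a single operator, a PetscSolver::fillPetscMatrix(DOFMatrix*) convenience overload, and an optional pressure mass-matrix solve in the lumped FETI-DP interface preconditioner (parameter "lp->mass matrix"). The FETI KSP monitor now reports the full and component-wise true residual norms, the exponent of the h2vec scaling is configurable ("lp->scale power"), and the global-matrix assembly skips DOFs that are not present in the interior DOF map.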

parent eb4ef7a6
@@ -243,6 +243,65 @@ namespace AMDiS {
   }
 
+  void DOFMatrix::assembleOperator(Operator &op)
+  {
+    FUNCNAME("DOFMatrix::assembleOperator()");
+
+    TEST_EXIT(rowFeSpace->getMesh() == colFeSpace->getMesh())
+      ("This function does not support the multi mesh procedure!\n");
+    TEST_EXIT(op.getRowFeSpace() == rowFeSpace)
+      ("Row FE spaces do not fit together!\n");
+    TEST_EXIT(op.getColFeSpace() == colFeSpace)
+      ("Column FE spaces do not fit together!\n");
+
+    clearOperators();
+    addOperator(&op);
+
+    matrix.change_dim(rowFeSpace->getAdmin()->getUsedSize(),
+                      colFeSpace->getAdmin()->getUsedSize());
+
+    Mesh *mesh = rowFeSpace->getMesh();
+    mesh->dofCompress();
+    const BasisFunction *basisFcts = rowFeSpace->getBasisFcts();
+
+    Flag assembleFlag = getAssembleFlag() |
+      Mesh::CALL_LEAF_EL |
+      Mesh::FILL_COORDS |
+      Mesh::FILL_DET |
+      Mesh::FILL_GRD_LAMBDA |
+      Mesh::FILL_NEIGH |
+      Mesh::FILL_BOUND;
+
+    BoundaryType *bound = new BoundaryType[basisFcts->getNumber()];
+
+    calculateNnz();
+    if (getBoundaryManager())
+      getBoundaryManager()->initMatrix(this);
+    startInsertion(getNnz());
+
+    TraverseStack stack;
+    ElInfo *elInfo = stack.traverseFirst(mesh, -1, assembleFlag);
+    while (elInfo) {
+      basisFcts->getBound(elInfo, bound);
+      assemble(1.0, elInfo, bound);
+      if (getBoundaryManager())
+        getBoundaryManager()->fillBoundaryConditions(elInfo, this);
+      elInfo = stack.traverseNext(elInfo);
+    }
+
+    clearDirichletRows();
+    finishAssembling();
+    finishInsertion();
+    getBoundaryManager()->exitMatrix(this);
+    delete [] bound;
+  }
+
   double DOFMatrix::logAcc(DegreeOfFreedom a, DegreeOfFreedom b) const
   {
     return matrix[a][b];
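
The new assembleOperator() assembles a DOFMatrix from a single operator without going through the usual ProblemStat assembly path. A minimal usage sketch, taken from the mass-matrix setup later in this commit (pressureFeSpace is assumed to be a valid FiniteElemSpace*):

  // Assemble a mass matrix from one zero-order term. assembleOperator()
  // clears previously registered operators, traverses the leaf elements,
  // and inserts the element contributions into the matrix.
  DOFMatrix massMatrix(pressureFeSpace, pressureFeSpace);
  Operator op(pressureFeSpace, pressureFeSpace);
  Simple_ZOT zot;
  op.addTerm(&zot);
  massMatrix.assembleOperator(op);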
@@ -396,13 +455,13 @@ namespace AMDiS {
 
   void DOFMatrix::axpy(double a, const DOFMatrix& x, const DOFMatrix& y)
   {
-    matrix+= a * x.matrix + y.matrix;
+    matrix += a * x.matrix + y.matrix;
   }
 
   void DOFMatrix::scal(double b)
   {
-    matrix*= b;
+    matrix *= b;
   }
@@ -414,6 +473,14 @@ namespace AMDiS {
   }
 
+  void DOFMatrix::clearOperators()
+  {
+    operators.clear();
+    operatorFactor.clear();
+    operatorEstFactor.clear();
+  }
+
   void DOFMatrix::serialize(std::ostream &out)
   {
     using namespace mtl;
......
@@ -172,6 +172,9 @@ namespace AMDiS {
                    double* factor = NULL,
                    double* estFactor = NULL);
 
+    ///
+    void clearOperators();
+
     inline vector<double*>::iterator getOperatorFactorBegin()
     {
       return operatorFactor.begin();
@@ -250,6 +253,9 @@ namespace AMDiS {
                   ElInfo* rowElInfo,
                   ElInfo* colElInfo);
 
+    ///
+    void assembleOperator(Operator &op);
+
     /// This function must be called after the matrix assembling has been
     /// finished. This makes it possible to start some cleanup or matrix
     /// data compressing procedures.
......
@@ -263,7 +263,8 @@ namespace AMDiS {
                DOFVector<double> *solution,
                Mesh *mesh,
                Flag assembleFlag);
 
     /** \name getting methods
      * \{
      */
......
@@ -311,7 +311,7 @@ namespace AMDiS {
   // matrices, the problem may arise that the result is larger than the
   // number of elements in a row. This is fixed in the following.
-  if (nRankRows < 100)
+  if (nRankRows < 100 || nRankCols < 100)
     for (int i = 0; i < nRankRows; i++)
       dnnz[i] = std::min(dnnz[i], nRankCols);
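
For context: PETSc aborts during preallocation if a per-row diagonal-block estimate in dnnz exceeds the number of rank-local columns, so the clamp now also fires when the column block is small. A hedged sketch of where these estimates end up; all names except dnnz, nRankRows, and nRankCols are assumptions for illustration:

  // The clamped estimates feed PETSc's AIJ preallocation:
  // MatCreateAIJ(comm, local rows, local cols, global rows, global cols,
  //              diag nz/row, d_nnz, offdiag nz/row, o_nnz, &mat)
  Mat mat;
  MatCreateAIJ(mpiCommGlobal,
               nRankRows, nRankCols,         // rank-local sizes
               nOverallRows, nOverallCols,   // global sizes (assumed names)
               0, dnnz, 0, onnz, &mat);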
......
@@ -197,7 +197,7 @@ namespace AMDiS {
     int nColsRankMat = (j == 0 ? nRankInteriorRows : uniqueCoarseMap[j - 1]->getRankDofs());
     int nColsOverallMat = (j == 0 ? nOverallInteriorRows : uniqueCoarseMap[j - 1]->getOverallDofs());
 
     MatCreateAIJ(mpiCommGlobal,
                  nRowsRankMat, nColsRankMat,
                  nRowsOverallMat, nColsOverallMat,
......
@@ -38,6 +38,14 @@ namespace AMDiS {
   }
 
+  void PetscSolver::fillPetscMatrix(DOFMatrix* mat)
+  {
+    Matrix<DOFMatrix*> sysMat(1, 1);
+    sysMat[0][0] = mat;
+    fillPetscMatrix(&sysMat);
+  }
+
   void PetscSolver::solve(Vec &rhs, Vec &sol)
   {
     FUNCNAME("PetscSolver::solve()");
......
@@ -62,6 +62,9 @@ namespace AMDiS {
      */
     virtual void fillPetscMatrix(Matrix<DOFMatrix*> *mat) = 0;
 
+    ///
+    void fillPetscMatrix(DOFMatrix* mat);
+
     /** \brief
      * Creates a PETSc vector and fills it with the rhs values of the system.
      *
......
@@ -25,28 +25,28 @@ namespace AMDiS {
   using namespace std;
 
-  PetscErrorCode KSPMonitorFeti(KSP ksp, PetscInt n, PetscReal rnorm, void *dummy)
+  PetscErrorCode KSPMonitorFeti(KSP ksp, PetscInt n, PetscReal rnorm, void *data)
   {
-    Vec Br,v,w;
-    Mat A;
+    Vec Br,v,w;
+    VecDuplicate(static_cast<FetiKspData*>(data)->draft, &v);
+    VecDuplicate(static_cast<FetiKspData*>(data)->draft, &w);
 
-    KSPGetOperators(ksp, &A, PETSC_NULL, PETSC_NULL);
-    MatGetVecs(A, &w, &v);
     KSPBuildResidual(ksp, v, w, &Br);
 
     Vec nest0, nest1;
     VecNestGetSubVec(Br, 0, &nest0);
     VecNestGetSubVec(Br, 1, &nest1);
 
-    PetscScalar norm0, norm1;
+    PetscScalar norm, norm0, norm1;
+    VecNorm(Br, NORM_2, &norm);
     VecNorm(nest0, NORM_2, &norm0);
     VecNorm(nest1, NORM_2, &norm1);
 
     VecDestroy(&v);
     VecDestroy(&w);
 
-    PetscPrintf(PETSC_COMM_WORLD, "%3D KSP Component residual norm [%1.12e %1.12e] and relative [%1.12e]\n",
-                norm0, norm1, rnorm);
+    PetscPrintf(PETSC_COMM_WORLD, "%3D KSP residual norm %1.12e [%1.12e %1.12e] and preconditioned norm [%1.12e]\n",
+                n, norm, norm0, norm1, rnorm);
 
     return 0;
   }
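
The reworked monitor builds the true residual with KSPBuildResidual and prints its global norm together with the norms of the two nest components (the interface and Lagrange-multiplier blocks) and the preconditioned norm rnorm supplied by PETSc. The work vectors are now duplicated from the draft vector carried in FetiKspData instead of being created from the operator, presumably because the old MatGetVecs path does not yield vectors with the required nest layout here. The monitor is registered later in this commit:

  if (stokesMode)
    KSPMonitorSet(ksp_feti, KSPMonitorFeti, &fetiKspData, PETSC_NULL);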
@@ -56,6 +56,7 @@ namespace AMDiS {
       schurPrimalSolver(0),
       multiLevelTest(false),
       subdomain(NULL),
+      massMatrixSolver(NULL),
       meshLevel(0),
       rStartInterior(0),
       nGlobalOverallInterior(0),
@@ -867,7 +868,8 @@ namespace AMDiS {
     KSPSetType(ksp_feti, KSPGMRES);
     KSPSetTolerances(ksp_feti, 0, 1e-8, 1e+3, 1000);
     KSPSetFromOptions(ksp_feti);
 
+    if (stokesMode)
       KSPMonitorSet(ksp_feti, KSPMonitorFeti, &fetiKspData, PETSC_NULL);
@@ -979,6 +981,8 @@ namespace AMDiS {
     }
 
     if (stokesMode) {
       // === Create H2 vec ===
+      DOFVector<double> tmpvec(pressureFeSpace, "tmpvec");
+      createH2vec(tmpvec);
       interfaceDofMap.createVec(fetiInterfaceLumpedPreconData.h2vec);
@@ -994,19 +998,44 @@ namespace AMDiS {
       VecAssemblyBegin(fetiInterfaceLumpedPreconData.h2vec);
       VecAssemblyEnd(fetiInterfaceLumpedPreconData.h2vec);
 
+      // === Create mass matrix solver ===
+
+      if (!massMatrixSolver) {
+        MSG("START CREATE MASS MATRIX!\n");
+        DOFMatrix massMatrix(pressureFeSpace, pressureFeSpace);
+        Operator op(pressureFeSpace, pressureFeSpace);
+        Simple_ZOT zot;
+        op.addTerm(&zot);
+        massMatrix.assembleOperator(op);
+
+        ParallelDofMapping massMatMapping = interfaceDofMap;
+        massMatrixSolver = new PetscSolverGlobalMatrix;
+        massMatrixSolver->setKspPrefix("mass_");
+        massMatrixSolver->setMeshDistributor(meshDistributor,
+                                             mpiCommGlobal,
+                                             mpiCommLocal);
+        massMatrixSolver->setDofMapping(&massMatMapping);
+        massMatrixSolver->fillPetscMatrix(&massMatrix);
+
+        int r, c;
+        MatGetSize(massMatrixSolver->getMatInterior(), &r, &c);
+        MatInfo info;
+        MatGetInfo(massMatrixSolver->getMatInterior(), MAT_GLOBAL_SUM, &info);
+        MSG("MASS MAT INFO: size = %d x %d nnz = %d\n",
+            r, c,
+            static_cast<int>(info.nz_used));
+        MSG("END CREATE MASS MATRIX!\n");
+
+        fetiInterfaceLumpedPreconData.ksp_mass = massMatrixSolver->getSolver();
+      }
+
       // === Create tmp vectors ===
 
       localDofMap.createVec(fetiInterfaceLumpedPreconData.tmp_vec_b1);
       primalDofMap.createVec(fetiInterfaceLumpedPreconData.tmp_primal);
       fetiInterfaceLumpedPreconData.subSolver = subdomain;
 
       KSPCreate(PETSC_COMM_WORLD, &fetiInterfaceLumpedPreconData.ksp_primal);
       KSPSetOperators(fetiInterfaceLumpedPreconData.ksp_primal,
                       subdomain->getMatCoarse(0, 0),
                       subdomain->getMatCoarse(0, 0),
                       SAME_NONZERO_PATTERN);
       KSPSetOptionsPrefix(fetiInterfaceLumpedPreconData.ksp_primal, "primal_");
       KSPSetFromOptions(fetiInterfaceLumpedPreconData.ksp_primal);
     }
 
     KSPGetPC(ksp_feti, &precon_feti);
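
Background on the mass-matrix branch: for Stokes-type saddle point problems the pressure mass matrix is the standard spectrally equivalent preconditioner for the pressure Schur complement, so offering a mass-matrix solve on the interface pressure space is a natural alternative to the purely diagonal h2vec scaling. The matrix is assembled once from a single Simple_ZOT term via the new DOFMatrix::assembleOperator(), wrapped in a PetscSolverGlobalMatrix with KSP prefix "mass_", and its KSP handle is handed to the preconditioner through ksp_mass.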
@@ -1455,7 +1484,7 @@ namespace AMDiS {
   void PetscSolverFeti::fillPetscMatrix(Matrix<DOFMatrix*> *mat)
   {
     FUNCNAME("PetscSolverFeti::fillPetscMatrix()");
 
     // === Create all sets and indices. ===
 
     vector<const FiniteElemSpace*> feSpaces = AMDiS::getComponentFeSpaces(*mat);
@@ -1790,6 +1819,7 @@ namespace AMDiS {
       VecAssemblyEnd(vecSol);
     }
 
+    VecDuplicate(vecSol, &fetiKspData.draft);
 
     // === Create reduced RHS ===
@@ -2028,10 +2058,12 @@ namespace AMDiS {
     meshDistributor->synchAddVector(vec);
 
+    double scalePower = 2.0;
+    Parameters::get("lp->scale power", scalePower);
+
     DOFIterator<double> dofIt(&vec, USED_DOFS);
     for (dofIt.reset(); !dofIt.end(); ++dofIt) {
       DegreeOfFreedom d = dofIt.getDOFIndex();
-      vec[d] = 1.0 / pow(vec[d], 2.0);
+      vec[d] = 1.0 / pow(vec[d], scalePower);
     }
   }
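
The previously hard-coded inverse-square scaling of the h2 vector is now an exponent read from the parameter file, with 2.0 kept as the default, so each entry becomes 1 / vec[d]^p. A hedged init-file sketch (the key comes from the Parameters::get call above; the file syntax is assumed):

  lp->scale power: 3.0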
......
@@ -313,6 +313,8 @@ namespace AMDiS {
     FetiInterfaceLumpedPreconData fetiInterfaceLumpedPreconData;
 
+    FetiKspData fetiKspData;
+
     /// Matrices for Dirichlet preconditioner.
     Mat mat_interior_interior, mat_duals_duals, mat_interior_duals, mat_duals_interior;
@@ -322,6 +324,8 @@ namespace AMDiS {
     PetscSolver *subdomain;
 
+    PetscSolver *massMatrixSolver;
+
     int meshLevel;
 
     int rStartInterior;
......
@@ -301,15 +301,21 @@ namespace AMDiS {
     VecNestGetSubVec(yvec, 0, &y_interface);
     VecNestGetSubVec(yvec, 1, &y_lagrange);
 
-    VecCopy(x_interface, y_interface);
-    double scaleFactor = 1.0;
-    Parameters::get("lp->scal", scaleFactor);
-    bool mult = false;
-    Parameters::get("lp->mult", mult);
-    if (mult)
-      VecPointwiseMult(y_interface, x_interface, data->h2vec);
-    if (scaleFactor != 0.0)
-      VecScale(y_interface, scaleFactor);
+    bool useMassMatrix = false;
+    Parameters::get("lp->mass matrix", useMassMatrix);
+    if (useMassMatrix) {
+      KSPSolve(data->ksp_mass, x_interface, y_interface);
+    } else {
+      VecCopy(x_interface, y_interface);
+      double scaleFactor = 1.0;
+      Parameters::get("lp->scal", scaleFactor);
+      bool mult = false;
+      Parameters::get("lp->mult", mult);
+      if (mult)
+        VecPointwiseMult(y_interface, x_interface, data->h2vec);
+      if (scaleFactor != 0.0)
+        VecScale(y_interface, scaleFactor);
+    }
 
     MatMultTranspose(*(data->mat_lagrange_scaled), x_lagrange, data->tmp_vec_b0);
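
In the preconditioner apply, the interface block is now produced in one of two ways: when "lp->mass matrix" is set, y_interface is obtained from a KSPSolve with the pressure mass matrix; otherwise the previous path runs (copy, optional pointwise h2vec multiply via "lp->mult", optional constant scaling via "lp->scal"). Note that the mult and scal options combine, while the mass-matrix branch bypasses both.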
......
@@ -122,16 +122,16 @@ namespace AMDiS {
     Vec h2vec;
 
     KSP ksp_primal;
 
     Vec tmp_primal;
 
-    KSP ksp_feti_ex;
-
-    Mat mat_feti_ex;
+    KSP ksp_mass;
   };
 
+  struct FetiKspData {
+    Vec draft;
+  };
+
   typedef enum {
     FETI_NONE = 0,
     FETI_DIRICHLET = 1,
......
@@ -524,8 +524,6 @@ namespace AMDiS {
     const FiniteElemSpace *rowFe = seqMat->getRowFeSpace();
     const FiniteElemSpace *colFe = seqMat->getColFeSpace();
 
-    DofMap& rowMap = (*interiorMap)[rowFe].getMap();
-    DofMap& colMap = (*interiorMap)[colFe].getMap();
 
     // === Traverse all rows of the DOF matrix and insert row wise the values ===
     // === to the PETSc matrix.                                               ===
@@ -533,7 +531,11 @@ namespace AMDiS {
     for (cursor_type cursor = begin<row>(seqMat->getBaseMatrix()),
            cend = end<row>(seqMat->getBaseMatrix()); cursor != cend; ++cursor) {
       // Global index of the current row DOF.
-      int globalRowDof = rowMap[*cursor].global;
+      MultiIndex rowMultiIndex;
+      if ((*interiorMap)[rowFe].find(*cursor, rowMultiIndex) == false)
+        continue;
+      int globalRowDof = rowMultiIndex.global;
 
       // Test if the current row DOF is a periodic DOF.
       bool periodicRow = perMap.isPeriodic(rowFe, globalRowDof);
@@ -551,7 +553,11 @@ namespace AMDiS {
            icursor != icend; ++icursor) {
         // Global index of the current column DOF.
-        int globalColDof = colMap[col(*icursor)].global;
+        MultiIndex colMultiIndex;
+        if ((*interiorMap)[colFe].find(col(*icursor), colMultiIndex) == false)
+          continue;
+        int globalColDof = colMultiIndex.global;
 
         // Test if the current column DOF is a periodic DOF.
         bool periodicCol = perMap.isPeriodic(colFe, globalColDof);
 
         // Get PETSc's mat col index.
......
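
The last two hunks replace direct map access with find(): with the mixed FE spaces of the Stokes system, a row or column DOF of the sequential matrix need not be present in the interior DOF map (it may, for instance, be handled by a coarse-space map instead), so rowMap[*cursor] would touch a missing entry. Skipping such DOFs with continue is the fix. The lookup pattern in isolation, using the types from the hunks above:

  // Tolerant lookup of the global index of a local DOF; DOFs the
  // interior map does not carry are skipped instead of dereferenced.
  MultiIndex rowMultiIndex;
  if ((*interiorMap)[rowFe].find(*cursor, rowMultiIndex) == false)
    continue;
  int globalRowDof = rowMultiIndex.global;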