Commit ac22232d authored by Thomas Witkowski

Added PETSc global matrix block solver.

parent b50e1a5e
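
For context, the new PetscSolverGlobalBlockMatrix assembles one MPIAIJ matrix per component pair, combines the blocks into a single nested operator with MatCreateNest, and hands that operator to a KSP. The following standalone sketch illustrates the same pattern for a hypothetical 2x2 block system; the block sizes, the 1D Laplacian fill and the explicit PCNONE preconditioner are illustrative assumptions, not taken from the commit, and the calls use the same petsc-dev era API as the code below (MatCreateMPIAIJ, four-argument KSPSetOperators).

// Hypothetical standalone sketch (not part of this commit): assemble a
// 2x2 block operator with MatCreateNest and solve it with BiCGStab,
// mirroring the pattern used by PetscSolverGlobalBlockMatrix.
#include <petscksp.h>

int main(int argc, char **argv)
{
  PetscInitialize(&argc, &argv, PETSC_NULL, PETSC_NULL);

  const PetscInt nBlocks = 2;   // number of components (assumed)
  const PetscInt n = 100;       // global rows per block (assumed)

  // One MPIAIJ matrix per component pair; off-diagonal blocks stay empty here.
  Mat blocks[nBlocks * nBlocks];
  for (PetscInt i = 0; i < nBlocks; i++) {
    for (PetscInt j = 0; j < nBlocks; j++) {
      if (i == j) {
        Mat &B = blocks[i * nBlocks + j];
        MatCreateMPIAIJ(PETSC_COMM_WORLD, PETSC_DECIDE, PETSC_DECIDE, n, n,
                        3, PETSC_NULL, 3, PETSC_NULL, &B);
        // Fill the diagonal block with a simple 1D Laplacian stencil.
        PetscInt start, end;
        MatGetOwnershipRange(B, &start, &end);
        for (PetscInt row = start; row < end; row++) {
          MatSetValue(B, row, row, 2.0, INSERT_VALUES);
          if (row > 0)     MatSetValue(B, row, row - 1, -1.0, INSERT_VALUES);
          if (row < n - 1) MatSetValue(B, row, row + 1, -1.0, INSERT_VALUES);
        }
        MatAssemblyBegin(B, MAT_FINAL_ASSEMBLY);
        MatAssemblyEnd(B, MAT_FINAL_ASSEMBLY);
      } else {
        blocks[i * nBlocks + j] = PETSC_NULL;  // empty coupling block
      }
    }
  }

  // Combine the blocks into one row-major nested operator.
  Mat A;
  MatCreateNest(PETSC_COMM_WORLD, nBlocks, PETSC_NULL, nBlocks, PETSC_NULL,
                blocks, &A);

  // Right-hand side and solution vectors with the matching nested layout.
  Vec x, b;
  MatGetVecs(A, &x, &b);
  VecSet(b, 1.0);

  // Solve with BiCGStab, as the new solver does; no preconditioner is used
  // here because the default factorizations do not handle MATNEST.
  KSP ksp;
  PC pc;
  KSPCreate(PETSC_COMM_WORLD, &ksp);
  KSPGetPC(ksp, &pc);
  PCSetType(pc, PCNONE);
  KSPSetOperators(ksp, A, A, SAME_NONZERO_PATTERN);
  KSPSetType(ksp, KSPBCGS);
  KSPSetFromOptions(ksp);
  KSPSolve(ksp, b, x);

  // Clean up all PETSc objects.
  KSPDestroy(&ksp);
  MatDestroy(&A);
  for (PetscInt k = 0; k < nBlocks * nBlocks; k++)
    if (blocks[k] != PETSC_NULL)
      MatDestroy(&blocks[k]);
  VecDestroy(&x);
  VecDestroy(&b);
  PetscFinalize();
  return 0;
}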
@@ -242,6 +242,7 @@ if(ENABLE_PARALLEL_DOMAIN)
${SOURCE_DIR}/parallel/PetscProblemStat.cc
${SOURCE_DIR}/parallel/PetscSolverFeti.cc
${SOURCE_DIR}/parallel/PetscSolverGlobalMatrix.cc
${SOURCE_DIR}/parallel/PetscSolverGlobalBlockMatrix.cc
${SOURCE_DIR}/parallel/PetscSolverSchur.cc)
elseif(ENABLE_PARALLEL_DOMAIN STREQUAL "PMTL")
set(MTL_INCLUDE_DIR "")
@@ -44,6 +44,8 @@ namespace AMDiS {
#else
ERROR_EXIT("PETSc FETI-DP solver is only supported when petsc-dev is used!\n");
#endif
} else if (name == "petsc-block") {
petscSolver = new PetscSolverGlobalBlockMatrix();
} else if (name == "petsc" || name == "") {
petscSolver = new PetscSolverGlobalMatrix();
} else {
@@ -29,6 +29,7 @@
#include "parallel/PetscSolver.h"
#include "parallel/PetscSolverFeti.h"
#include "parallel/PetscSolverGlobalMatrix.h"
#include "parallel/PetscSolverGlobalBlockMatrix.h"
#include "parallel/PetscSolverSchur.h"
namespace AMDiS {
@@ -20,14 +20,12 @@ namespace AMDiS {
using namespace std;
#ifdef HAVE_PETSC_DEV
FetiStatisticsData PetscSolverFeti::fetiStatistics;
// y = mat * x
int petscMultMatSchurPrimal(Mat mat, Vec x, Vec y)
{
// S_PiPi = K_PiPi - K_PiB inv(K_BB) K_BPi
double first = MPI::Wtime();
void *ctx;
MatShellGetContext(mat, &ctx);
SchurPrimalData* data = static_cast<SchurPrimalData*>(ctx);
@@ -38,9 +36,6 @@ namespace AMDiS {
MatMult(*(data->mat_primal_primal), x, y);
VecAXPBY(y, -1.0, 1.0, data->tmp_vec_primal);
PetscSolverFeti::fetiStatistics.nSchurPrimalApply++;
PetscSolverFeti::fetiStatistics.timeSchurPrimalApply += MPI::Wtime() - first;
return 0;
}
@@ -67,8 +62,6 @@ namespace AMDiS {
VecAXPBY(y, 1.0, 1.0, data->tmp_vec_lagrange);
PetscSolverFeti::fetiStatistics.nFetiApply++;
return 0;
}
@@ -1613,11 +1606,7 @@ namespace AMDiS {
VecAXPBY(tmp_primal0, 1.0, -1.0, f_primal);
// tmp_primal0 = inv(S_PiPi) (f_Pi - M_PiB * inv(K_BB) * f_B)
double first = MPI::Wtime();
KSPSolve(ksp_schur_primal, tmp_primal0, tmp_primal0);
PetscSolverFeti::fetiStatistics.nSchurPrimalSolve++;
PetscSolverFeti::fetiStatistics.timeSchurPrimalSolve +=
MPI::Wtime() - first;
//
MatMult(mat_b_primal, tmp_primal0, tmp_b0);
@@ -1629,9 +1618,7 @@ namespace AMDiS {
// === Solve with FETI-DP operator. ===
first = MPI::Wtime();
KSPSolve(ksp_feti, vec_rhs, vec_rhs);
fetiStatistics.timeFetiApply = MPI::Wtime() -first;
// === Solve for u_primals. ===
@@ -1644,11 +1631,7 @@ namespace AMDiS {
MatMult(mat_primal_b, tmp_b0, tmp_primal1);
VecAXPBY(tmp_primal0, 1.0, 1.0, tmp_primal1);
first = MPI::Wtime();
KSPSolve(ksp_schur_primal, tmp_primal0, tmp_primal0);
PetscSolverFeti::fetiStatistics.nSchurPrimalSolve++;
PetscSolverFeti::fetiStatistics.timeSchurPrimalSolve +=
MPI::Wtime() - first;
// === Solve for u_b. ===
@@ -1719,43 +1702,12 @@ namespace AMDiS {
if (debug) {
solveFetiMatrix(vec);
} else {
// resetStatistics();
solveReducedFetiMatrix(vec);
// printStatistics();
}
MeshDistributor::globalMeshDistributor->synchVector(vec);
}
void PetscSolverFeti::resetStatistics()
{
fetiStatistics.nFetiApply = 0;
fetiStatistics.timeFetiApply = 0.0;
fetiStatistics.nSchurPrimalApply = 0;
fetiStatistics.timeSchurPrimalApply = 0.0;
fetiStatistics.nSchurPrimalSolve = 0;
fetiStatistics.timeSchurPrimalSolve = 0.0;
fetiStatistics.nLocalSolve = 0;
fetiStatistics.timeLocalSolve = 0.0;
}
void PetscSolverFeti::printStatistics()
{
FUNCNAME("PetscSolverFeti::printStatistics()");
if (MPI::COMM_WORLD.Get_rank() == 0)
MSG("FETI-STATISTICS: %d %f %d %f %d %f %d %f\n",
fetiStatistics.nFetiApply,
fetiStatistics.timeFetiApply,
fetiStatistics.nSchurPrimalApply,
fetiStatistics.timeSchurPrimalApply,
fetiStatistics.nSchurPrimalSolve,
fetiStatistics.timeSchurPrimalSolve,
fetiStatistics.nLocalSolve,
fetiStatistics.timeLocalSolve);
}
#endif
}
@@ -125,35 +125,7 @@ namespace AMDiS {
} FetiPreconditioner;
struct FetiStatisticsData
{
/// Number of applications of the FETI-DP operator.
int nFetiApply;
/// Time for solving the reduced FETI system.
double timeFetiApply;
/// Number of applications of the Schur primal operator.
int nSchurPrimalApply;
/// Time for applying the Schur primal operator.
double timeSchurPrimalApply;
/// Number of solutions of the Schur primal system.
int nSchurPrimalSolve;
/// Time for solving the Schur primal system.
double timeSchurPrimalSolve;
/// Number of solutions of the local subdomain problems.
int nLocalSolve;
/// Time for solving the local subdomain problems.
double timeLocalSolve;
};
/** \brief
* FETI-DP implementation based on PETSc.
*/
class PetscSolverFeti : public PetscSolver
@@ -365,9 +337,6 @@ namespace AMDiS {
// Number of local nodes that are duals.
int nLocalDuals;
public:
static FetiStatisticsData fetiStatistics;
};
#endif
//
// Software License for AMDiS
//
// Copyright (c) 2010 Dresden University of Technology
// All rights reserved.
// Authors: Simon Vey, Thomas Witkowski et al.
//
// This file is part of AMDiS
//
// See also license.opensource.txt in the distribution.
#include "parallel/PetscSolverGlobalBlockMatrix.h"
#include "parallel/StdMpi.h"
#include "parallel/MpiHelper.h"
namespace AMDiS {
void PetscSolverGlobalBlockMatrix::fillPetscMatrix(Matrix<DOFMatrix*> *mat)
{
FUNCNAME("PetscSolverGlobalBlockMatrix::fillPetscMatrix()");
TEST_EXIT_DBG(meshDistributor)("No mesh distributor object defined!\n");
TEST_EXIT_DBG(mat)("No DOF matrix defined!\n");
double wtime = MPI::Wtime();
nComponents = mat->getNumRows();
int nRankRows = meshDistributor->getNumberRankDofs() * nComponents;
int nOverallRows = meshDistributor->getNumberOverallDofs() * nComponents;
// === Create PETSc vector (solution and a temporary vector). ===
VecCreateMPI(PETSC_COMM_WORLD, nRankRows, nOverallRows, &petscSolVec);
VecCreateMPI(PETSC_COMM_WORLD, nRankRows, nOverallRows, &petscTmpVec);
#if (DEBUG != 0)
MSG("Fill petsc matrix 1 needed %.5f seconds\n", MPI::Wtime() - wtime);
#endif
// MatCreateNest expects one contiguous, row-major array of blocks, so
// allocate all nComponents x nComponents matrices in a single chunk.
nestMat = new Mat*[nComponents];
nestMat[0] = new Mat[nComponents * nComponents];
for (int i = 1; i < nComponents; i++)
nestMat[i] = nestMat[0] + i * nComponents;
// === Transfer values from DOF matrices to the PETSc matrix. ===
for (int i = 0; i < nComponents; i++)
for (int j = 0; j < nComponents; j++)
if ((*mat)[i][j]) {
MatCreateMPIAIJ(PETSC_COMM_WORLD,
nRankRows, nRankRows,
nOverallRows, nOverallRows,
30, PETSC_NULL, 30, PETSC_NULL,
&(nestMat[i][j]));
setDofMatrix(nestMat[i][j], (*mat)[i][j]);
} else {
nestMat[i][j] = PETSC_NULL;
}
MatCreateNest(PETSC_COMM_WORLD,
nComponents, PETSC_NULL, nComponents, PETSC_NULL,
&(nestMat[0][0]), &petscMatrix);
#if (DEBUG != 0)
MSG("Fill petsc matrix 2 needed %.5f seconds\n", MPI::Wtime() - wtime);
#endif
MatAssemblyBegin(petscMatrix, MAT_FINAL_ASSEMBLY);
MatAssemblyEnd(petscMatrix, MAT_FINAL_ASSEMBLY);
// === Init PETSc solver. ===
KSPCreate(PETSC_COMM_WORLD, &solver);
KSPGetPC(solver, &pc);
KSPSetOperators(solver, petscMatrix, petscMatrix, SAME_NONZERO_PATTERN);
KSPSetTolerances(solver, 0.0, 1e-8, PETSC_DEFAULT, PETSC_DEFAULT);
KSPSetType(solver, KSPBCGS);
KSPSetFromOptions(solver);
PCSetFromOptions(pc);
MSG("Fill petsc matrix needed %.5f seconds\n", MPI::Wtime() - wtime);
}
void PetscSolverGlobalBlockMatrix::fillPetscRhs(SystemVector *vec)
{
FUNCNAME("PetscSolverGlobalBlockMatrix::fillPetscRhs()");
TEST_EXIT_DBG(vec)("No DOF vector defined!\n");
nComponents = vec->getSize();
int nRankRows = meshDistributor->getNumberRankDofs() * nComponents;
int nOverallRows = meshDistributor->getNumberOverallDofs() * nComponents;
VecCreateMPI(PETSC_COMM_WORLD, nRankRows, nOverallRows, &petscRhsVec);
// === Transfer values from DOF vector to the PETSc vector. ===
for (int i = 0; i < nComponents; i++)
setDofVector(petscRhsVec, vec->getDOFVector(i), nComponents, i);
VecAssemblyBegin(petscRhsVec);
VecAssemblyEnd(petscRhsVec);
}
void PetscSolverGlobalBlockMatrix::solvePetscMatrix(SystemVector &vec,
AdaptInfo *adaptInfo)
{
FUNCNAME("PetscSolverGlobalBlockMatrix::solvePetscMatrix()");
// Solve the linear system with PETSc.
KSPSolve(solver, petscRhsVec, petscSolVec);
// === Transfer values from PETSc's solution vectors to the DOF vectors. ===
int nRankDofs = meshDistributor->getNumberRankDofs();
PetscScalar *vecPointer;
VecGetArray(petscSolVec, &vecPointer);
for (int i = 0; i < nComponents; i++) {
DOFVector<double> &dofvec = *(vec.getDOFVector(i));
for (int j = 0; j < nRankDofs; j++)
dofvec[meshDistributor->mapLocalToDofIndex(j)] =
vecPointer[i * nComponents + j];
}
VecRestoreArray(petscSolVec, &vecPointer);
// === Synchronize DOFs at common DOFs, i.e., DOFs that correspond to ===
// === more than one partition. ===
meshDistributor->synchVector(vec);
// Print iteration counter and residual norm of the solution.
printSolutionInfo(adaptInfo);
// === Destroy PETSc's variables. ===
VecDestroy(&petscRhsVec);
}
void PetscSolverGlobalBlockMatrix::destroyMatrixData()
{
FUNCNAME("PetscSolverGlobalBlockMatrix::destroyMatrixData()");
MatDestroy(&petscMatrix);
KSPDestroy(&solver);
VecDestroy(&petscSolVec);
VecDestroy(&petscTmpVec);
for (int i = 0; i < nComponents; i++) {
for (int j = 0; j < nComponents; j++) {
if (nestMat[i][j] != PETSC_NULL)
MatDestroy(&(nestMat[i][j]));
}
}
// The blocks were allocated as one contiguous array in fillPetscMatrix().
delete[] nestMat[0];
delete[] nestMat;
}
void PetscSolverGlobalBlockMatrix::setDofMatrix(Mat& petscMat, DOFMatrix* mat)
{
FUNCNAME("PetscSolverGlobalBlockMatrix::setDofMatrix()");
TEST_EXIT(mat)("No DOFMatrix!\n");
TEST_EXIT(petscMat)("No PETSc matrix!\n");
using mtl::tag::row; using mtl::tag::nz; using mtl::begin; using mtl::end;
namespace traits = mtl::traits;
typedef DOFMatrix::base_matrix_type Matrix;
traits::col<Matrix>::type col(mat->getBaseMatrix());
traits::const_value<Matrix>::type value(mat->getBaseMatrix());
typedef traits::range_generator<row, Matrix>::type cursor_type;
typedef traits::range_generator<nz, cursor_type>::type icursor_type;
vector<int> cols;
vector<double> values;
cols.reserve(300);
values.reserve(300);
// === Traverse all rows of the DOF matrix and insert the values row-wise ===
// === into the PETSc matrix. ===
for (cursor_type cursor = begin<row>(mat->getBaseMatrix()),
cend = end<row>(mat->getBaseMatrix()); cursor != cend; ++cursor) {
// Global index of the current row DOF.
int rowIndex = meshDistributor->mapLocalToGlobal(*cursor);
cols.clear();
values.clear();
for (icursor_type icursor = begin<nz>(cursor), icend = end<nz>(cursor);
icursor != icend; ++icursor) {
// Global index of the current column DOF.
int colIndex = meshDistributor->mapLocalToGlobal(col(*icursor));
// Ignore all zero entries, except if it is a diagonal entry.
if (value(*icursor) == 0.0 && rowIndex != colIndex)
continue;
// Collect the column index and value of this entry for the current row.
cols.push_back(colIndex);
values.push_back(value(*icursor));
}
// Insert the row into the block matrix passed to this function.
MatSetValues(petscMat, 1, &rowIndex, cols.size(),
&(cols[0]), &(values[0]), ADD_VALUES);
}
}
void PetscSolverGlobalBlockMatrix::setDofVector(Vec& petscVec,
DOFVector<double>* vec,
int nComponents,
int row,
bool rankOnly)
{
FUNCNAME("PetscSolverGlobalBlockMatrix::setDofVector()");
// Traverse all used DOFs in the dof vector.
DOFVector<double>::Iterator dofIt(vec, USED_DOFS);
for (dofIt.reset(); !dofIt.end(); ++dofIt) {
if (rankOnly && !meshDistributor->getIsRankDof(dofIt.getDOFIndex()))
continue;
// Calculate global row index of the DOF.
DegreeOfFreedom globalRowDof =
meshDistributor->mapLocalToGlobal(dofIt.getDOFIndex());
// Calculate PETSc index of the row DOF.
int index = nComponents * row + globalRowDof;
double value = *dofIt;
VecSetValues(petscVec, 1, &index, &value, ADD_VALUES);
}
}
}
// ============================================================================
// == ==
// == AMDiS - Adaptive multidimensional simulations ==
// == ==
// == http://www.amdis-fem.org ==
// == ==
// ============================================================================
//
// Software License for AMDiS
//
// Copyright (c) 2010 Dresden University of Technology
// All rights reserved.
// Authors: Simon Vey, Thomas Witkowski et al.
//
// This file is part of AMDiS
//
// See also license.opensource.txt in the distribution.
/** \file PetscSolverGlobalBlockMatrix.h */
#ifndef AMDIS_PETSC_SOLVER_GLOBAL_BLOCK_MATRIX_H
#define AMDIS_PETSC_SOLVER_GLOBAL_BLOCK_MATRIX_H
#include "AMDiS_fwd.h"
#include "parallel/PetscSolver.h"
namespace AMDiS {
using namespace std;
class PetscSolverGlobalBlockMatrix : public PetscSolver
{
public:
PetscSolverGlobalBlockMatrix()
: PetscSolver(),
nComponents(0)
{}
void fillPetscMatrix(Matrix<DOFMatrix*> *mat);
void fillPetscRhs(SystemVector *vec);
void solvePetscMatrix(SystemVector &vec, AdaptInfo *adaptInfo);
void destroyMatrixData();
protected:
/// Takes a DOF matrix and sends the values to the global PETSc matrix.
void setDofMatrix(Mat& petscMat, DOFMatrix* mat);
/// Takes a DOF vector and sends its values to a given PETSc vector.
void setDofVector(Vec& petscVec, DOFVector<double>* vec,
int nComponents, int row, bool rankOnly = false);
protected:
Mat **nestMat;
int nComponents;
};
}
#endif
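
For orientation, the interface declared above suggests the solver is driven in the usual PetscSolver order: assemble the block matrix, assemble the right-hand side, solve, and release the PETSc objects. The fragment below is a hypothetical sketch of that call sequence; the function name, the argument objects and the prior attachment of the mesh distributor through the PetscSolver base class are assumptions and not part of this commit.

// Hypothetical usage sketch (not part of this commit). The objects
// systemMatrix, rhs, solution and adaptInfo are assumed to come from the
// surrounding AMDiS problem setup, and the mesh distributor is assumed to
// have been attached to the solver beforehand via the PetscSolver base class.
#include "parallel/PetscSolverGlobalBlockMatrix.h"

void solveWithBlockSolver(AMDiS::Matrix<AMDiS::DOFMatrix*> *systemMatrix,
                          AMDiS::SystemVector *rhs,
                          AMDiS::SystemVector &solution,
                          AMDiS::AdaptInfo *adaptInfo,
                          AMDiS::PetscSolverGlobalBlockMatrix &solver)
{
  solver.fillPetscMatrix(systemMatrix);          // build the nested block matrix
  solver.fillPetscRhs(rhs);                      // copy the DOF vectors into PETSc
  solver.solvePetscMatrix(solution, adaptInfo);  // KSP solve, copy solution back
  solver.destroyMatrixData();                    // release matrices, KSP and vectors
}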
@@ -16,15 +16,6 @@
namespace AMDiS {
PetscErrorCode myKSPMonitor(KSP ksp, PetscInt iter, PetscReal rnorm, void *)
{
if (iter % 100 == 0 && MPI::COMM_WORLD.Get_rank() == 0)
cout << "[0] Petsc-Iteration " << iter << ": " << rnorm << endl;
return 0;
}
void PetscSolverGlobalMatrix::fillPetscMatrix(Matrix<DOFMatrix*> *mat)
{
FUNCNAME("PetscSolverGlobalMatrix::fillPetscMatrix()");
@@ -102,7 +93,6 @@ namespace AMDiS {
KSPSetOperators(solver, petscMatrix, petscMatrix, SAME_NONZERO_PATTERN);
KSPSetTolerances(solver, 0.0, 1e-8, PETSC_DEFAULT, PETSC_DEFAULT);
KSPSetType(solver, KSPBCGS);
KSPMonitorSet(solver, myKSPMonitor, PETSC_NULL, 0);
KSPSetFromOptions(solver);
PCSetFromOptions(pc);