Commit 6ef1e048 authored by Thomas Witkowski

Some work on the nested Schur complement approach.

parent b640da9e
......@@ -136,7 +136,7 @@
#if HAVE_PARALLEL_MTL4
#include "parallel/Mtl4Solver.h"
#else
#include "parallel/PetscSolver.h"
#include "parallel/PetscProblemStat.h"
#endif
#endif
......
......@@ -1545,6 +1545,11 @@ namespace AMDiS {
BoundaryType *bound =
useGetBound ? new BoundaryType[basisFcts->getNumber()] : NULL;
if (matrix)
matrix->startInsertion(matrix->getNnz());
if (vector)
vector->set(0.0);
// == Traverse mesh and assemble. ==
......
......@@ -23,6 +23,10 @@
#ifndef AMDIS_SERIALIZER_H
#define AMDIS_SERIALIZER_H
#if HAVE_PARALLEL_DOMAIN_AMDIS
#include <mpi.h>
#endif
#include <map>
#include "boost/lexical_cast.hpp"
......
......@@ -1702,35 +1702,26 @@ namespace AMDiS {
}
// === Get starting position for global rank DOF ordering. ====
// Get displacement for global rank DOF ordering and global DOF number.
nRankDofs = rankDofs.size();
mpiComm.Scan(&nRankDofs, &rstart, 1, MPI_INT, MPI_SUM);
rstart -= nRankDofs;
mpi::getDofNumbering(mpiComm, nRankDofs, rstart, nOverallDofs);
// === Stores for all rank owned DOFs a new global index. ===
// Stores for all rank owned DOFs a new global index.
DofIndexMap rankDofsNewGlobalIndex;
for (int i = 0; i < nRankDofs; i++)
rankDofsNewGlobalIndex[rankDofs[i]] = i + rstart;
// === Calculate number of overall DOFs of all partitions. ===
nOverallDofs = 0;
mpiComm.Allreduce(&nRankDofs, &nOverallDofs, 1, MPI_INT, MPI_SUM);
// === Send and receive new DOF indices. ===
#if (DEBUG != 0)
ParallelDebug::testDofContainerCommunication(*this, sendDofs, recvDofs);
#endif
int i = 0;
StdMpi<vector<DegreeOfFreedom> > stdMpi(mpiComm, false);
for (RankToDofContainer::iterator sendIt = sendDofs.begin();
sendIt != sendDofs.end(); ++sendIt, i++) {
sendIt != sendDofs.end(); ++sendIt) {
stdMpi.getSendData(sendIt->first).resize(0);
stdMpi.getSendData(sendIt->first).reserve(sendIt->second.size());
for (DofContainer::iterator dofIt = sendIt->second.begin();
......@@ -1751,10 +1742,10 @@ namespace AMDiS {
for (RankToDofContainer::iterator recvIt = recvDofs.begin();
recvIt != recvDofs.end(); ++recvIt) {
int j = 0;
int i = 0;
for (DofContainer::iterator dofIt = recvIt->second.begin();
dofIt != recvIt->second.end(); ++dofIt) {
rankDofsNewGlobalIndex[*dofIt] = stdMpi.getRecvData(recvIt->first)[j++];
rankDofsNewGlobalIndex[*dofIt] = stdMpi.getRecvData(recvIt->first)[i++];
isRankDof[**dofIt] = false;
}
}
......
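As a plain-MPI illustration of the send/receive step in this hunk (the commit itself uses the StdMpi helper shown above), the owning rank sends the new global indices of shared boundary DOFs to a neighbour rank, which then stores them for its non-owned copies and marks them as not rank-owned. All names in this sketch are illustrative only; it is not code from this commit.

```cpp
// Minimal sketch of the index exchange with one neighbour rank.
#include <mpi.h>
#include <vector>

void exchangeNewGlobalIndices(MPI::Intracomm& mpiComm,
                              int neighbourRank,
                              const std::vector<int>& sendIndices,
                              std::vector<int>& recvIndices)
{
  // Combined send/receive with one neighbour avoids the deadlock that two
  // blocking Send/Recv pairs issued in the same order on both ranks could cause.
  mpiComm.Sendrecv(&sendIndices[0], sendIndices.size(), MPI::INT,
                   neighbourRank, 0,
                   &recvIndices[0], recvIndices.size(), MPI::INT,
                   neighbourRank, 0);
}
```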
......@@ -25,6 +25,8 @@
#include <map>
#include <set>
#include <mpi.h>
#include "AMDiS_fwd.h"
#include "Mesh.h"
......
......@@ -48,6 +48,31 @@ namespace AMDiS {
{
srand(time(NULL) * (MPI::COMM_WORLD.Get_rank() + 1));
}
/** \brief
* In many situations each rank computes a number of local DOFs. All ranks
* then need to know the global number of DOFs and the displacement at
* which their own part of the DOF numbering starts.
*
* \param[in] mpiComm The MPI communicator.
* \param[in] nRankDofs The number of local DOFs.
* \param[out] rStartDofs Displacement of the DOF numbering. On rank n
* this is the sum of all local DOF numbers in
* ranks 0 to n - 1.
* \param[out] nOverallDofs Global sum of nRankDofs; it is the same on all
* ranks.
*/
inline void getDofNumbering(MPI::Intracomm& mpiComm,
int nRankDofs,
int& rStartDofs,
int& nOverallDofs)
{
rStartDofs = 0;
nOverallDofs = 0;
mpiComm.Scan(&nRankDofs, &rStartDofs, 1, MPI_INT, MPI_SUM);
rStartDofs -= nRankDofs;
mpiComm.Allreduce(&nRankDofs, &nOverallDofs, 1, MPI_INT, MPI_SUM);
}
}
}
......
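A small usage sketch of the helper above, under the assumption that it is reachable as mpi::getDofNumbering inside namespace AMDiS (as the call in the MeshDistributor hunk suggests) and that the shown header path exists. With three ranks owning 4, 3 and 5 DOFs, rStartDofs becomes 0, 4 and 7 (an exclusive prefix sum via MPI_Scan), and nOverallDofs is 12 on every rank.

```cpp
#include <mpi.h>
#include "parallel/MpiHelper.h"  // assumed location of the helper shown above

namespace AMDiS {

  void numberRankDofs(MPI::Intracomm& mpiComm, int nRankDofs)
  {
    int rStartDofs = 0;
    int nOverallDofs = 0;
    mpi::getDofNumbering(mpiComm, nRankDofs, rStartDofs, nOverallDofs);

    // Rank-owned DOFs then receive the contiguous global indices
    // rStartDofs, rStartDofs + 1, ..., rStartDofs + nRankDofs - 1.
  }
}
```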
......@@ -29,6 +29,7 @@ namespace AMDiS {
double wtime = MPI::Wtime();
petscSolver->setMeshDistributor(meshDistributor);
petscSolver->fillPetscMatrix(systemMatrix, rhs);
petscSolver->solvePetscMatrix(*solution, adaptInfo);
......
......@@ -42,12 +42,12 @@ namespace AMDiS {
if (name == "petsc-schur") {
#ifdef HAVE_PETSC_DEV
petscSolver = new PetscSolverSchur(meshDistributor);
petscSolver = new PetscSolverSchur();
#else
ERROR_EXIT("Petsc schur complement solver is only supported when petsc-dev is used!\n");
#endif
} else if (name == "petsc" || name == "") {
petscSolver = new PetscSolverGlobalMatrix(meshDistributor);
petscSolver = new PetscSolverGlobalMatrix();
} else {
ERROR_EXIT("No parallel solver %s available!\n", name.c_str());
}
......
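The two hunks above change the solver setup to a two-phase scheme: the factory only default-constructs the concrete PetscSolver subclass, and the MeshDistributor is attached afterwards via setMeshDistributor(), directly before the PETSc matrix is filled and solved. The following is a hedged sketch of that flow with a hypothetical calling function and assumed include paths, not code from this commit.

```cpp
#include <string>
#include "parallel/PetscSolver.h"       // assumed header locations
#include "parallel/MeshDistributor.h"

namespace AMDiS {

  // Hypothetical helper illustrating the new two-phase setup
  // (the #ifdef HAVE_PETSC_DEV guard from the diff is omitted for brevity).
  PetscSolver* createAndPrepareSolver(const std::string& name,
                                      MeshDistributor* meshDistributor,
                                      Matrix<DOFMatrix*>* systemMatrix,
                                      SystemVector* rhs)
  {
    PetscSolver* petscSolver = NULL;

    // Phase 1: the factory only chooses the concrete solver class.
    if (name == "petsc-schur")
      petscSolver = new PetscSolverSchur();
    else
      petscSolver = new PetscSolverGlobalMatrix();

    // Phase 2: the mesh distributor is attached later, right before the
    // PETSc matrix is assembled and the system is solved.
    petscSolver->setMeshDistributor(meshDistributor);
    petscSolver->fillPetscMatrix(systemMatrix, rhs);

    return petscSolver;
  }
}
```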
(The diff of one file is collapsed and not shown here.)
......@@ -23,15 +23,19 @@
#ifndef AMDIS_PETSC_SOLVER_H
#define AMDIS_PETSC_SOLVER_H
#include <set>
#include <map>
#include <mpi.h>
#include "AMDiS_fwd.h"
#include "Global.h"
#include "Parameters.h"
#include "DOFMatrix.h"
#include "parallel/MeshDistributor.h"
#include "petsc.h"
#include "petscsys.h"
#include "petscao.h"
#include <petsc.h>
#include <petscsys.h>
#include <petscao.h>
namespace AMDiS {
......@@ -40,12 +44,17 @@ namespace AMDiS {
class PetscSolver
{
public:
PetscSolver(MeshDistributor *m)
: meshDistributor(m)
PetscSolver()
: meshDistributor(NULL)
{}
virtual ~PetscSolver() {}
void setMeshDistributor(MeshDistributor *m)
{
meshDistributor = m;
}
/** \brief
* Create a PETSc matrix and PETSc vectors. The given DOF matrices are used to
* create the nnz structure of the PETSc matrix and the values are transferred to it.
......@@ -67,8 +76,8 @@ namespace AMDiS {
class PetscSolverGlobalMatrix : public PetscSolver
{
public:
PetscSolverGlobalMatrix(MeshDistributor *m)
: PetscSolver(m),
PetscSolverGlobalMatrix()
: PetscSolver(),
d_nnz(NULL),
o_nnz(NULL),
lastMeshNnz(0),
......@@ -82,14 +91,14 @@ namespace AMDiS {
void solvePetscMatrix(SystemVector &vec, AdaptInfo *adaptInfo);
protected:
/// Creates a new non zero pattern structure for the Petsc matrix.
/// Creates a new non zero pattern structure for the PETSc matrix.
void createPetscNnzStructure(Matrix<DOFMatrix*> *mat);
/// Takes a dof matrix and sends the values to the global petsc matrix.
/// Takes a DOF matrix and sends the values to the global PETSc matrix.
void setDofMatrix(DOFMatrix* mat, int dispMult = 1,
int dispAddRow = 0, int dispAddCol = 0);
/// Takes a dof vector and sends its values to a given petsc vector.
/// Takes a DOF vector and sends its values to a given PETSc vector.
void setDofVector(Vec& petscVec, DOFVector<double>* vec,
int disMult = 1, int dispAdd = 0, bool rankOnly = false);
......@@ -98,7 +107,7 @@ namespace AMDiS {
Mat petscMatrix;
/** \brief
* Petsc's vector structures for the rhs vector, the solution vector and a
* PETSc's vector structures for the rhs vector, the solution vector and a
* temporary vector for calculating the final residuum.
*/
Vec petscRhsVec, petscSolVec, petscTmpVec;
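The comment above mentions a temporary vector for the final residual. A minimal sketch of such a check with standard PETSc calls, using the member names declared above (this is an illustration, not code from this commit):

```cpp
#include <petsc.h>

// Computes ||A*x - b||_2 after the solve, reusing the temporary vector.
PetscReal computeResidualNorm(Mat petscMatrix, Vec petscRhsVec,
                              Vec petscSolVec, Vec petscTmpVec)
{
  PetscReal norm = 0.0;
  MatMult(petscMatrix, petscSolVec, petscTmpVec);   // tmp = A * x
  VecAXPY(petscTmpVec, -1.0, petscRhsVec);          // tmp = A * x - b
  VecNorm(petscTmpVec, NORM_2, &norm);              // ||A * x - b||_2
  return norm;
}
```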
......@@ -122,8 +131,8 @@ namespace AMDiS {
class PetscSolverSchur : public PetscSolver
{
public:
PetscSolverSchur(MeshDistributor *m)
: PetscSolver(m)
PetscSolverSchur()
: PetscSolver()
{}
void fillPetscMatrix(Matrix<DOFMatrix*> *mat, SystemVector *vec);
......@@ -131,9 +140,46 @@ namespace AMDiS {
void solvePetscMatrix(SystemVector &vec, AdaptInfo *adaptInfo);
protected:
vector<DegreeOfFreedom> boundaryDofs;
void updateDofData();
/// Takes a DOF matrix and sends the values to the global PETSc matrix.
void setDofMatrix(DOFMatrix* mat, int dispMult = 1,
int dispAddRow = 0, int dispAddCol = 0);
/// Takes a DOF vector and sends its values to a given PETSc vector.
void setDofVector(Vec& petscVec, DOFVector<double>* vec,
int disMult = 1, int dispAdd = 0, bool rankOnly = false);
protected:
int nBoundaryDofs;
int rStartBoundaryDofs;
int nOverallBoundaryDofs;
std::set<DegreeOfFreedom> boundaryDofs;
map<DegreeOfFreedom, DegreeOfFreedom> mapGlobalBoundaryDof;
int nInteriorDofs;
int rStartInteriorDofs;
int nOverallInteriorDofs;
std::set<DegreeOfFreedom> interiorDofs;
map<DegreeOfFreedom, DegreeOfFreedom> mapGlobalInteriorDof;
Mat matA11, matA12, matA21, matA22;
Mat petscMatrix;
Vec petscRhsVec1, petscRhsVec2;
Vec petscSolVec1, petscSolVec2;
vector<DegreeOfFreedom> interiorDofs;
Vec petscRhsVec, petscSolVec;
};
#endif
......
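The new members above split the unknowns into interior and boundary DOFs and hold the four coupling blocks matA11 ... matA22. How the commit wires them together is not visible here, since the corresponding implementation diff is collapsed above; the following is only a hedged sketch of one standard way to build a nested 2x2 operator and an implicit Schur complement with PETSc. The index sets isInterior/isBoundary and the assumption that matA11 carries the interior-interior coupling are illustrative only.

```cpp
#include <petsc.h>

// Sketch: nested block matrix [A11 A12; A21 A22] plus an implicit Schur
// complement S = A22 - A21 * inv(A11) * A12 for the boundary unknowns.
PetscErrorCode buildNestedSchurOperator(Mat matA11, Mat matA12,
                                        Mat matA21, Mat matA22,
                                        IS isInterior, IS isBoundary,
                                        Mat *petscMatrix, Mat *schurMat)
{
  PetscErrorCode ierr;

  // Nested 2x2 block matrix over the interior/boundary split.
  Mat blocks[4] = { matA11, matA12, matA21, matA22 };
  IS  isRows[2] = { isInterior, isBoundary };
  ierr = MatCreateNest(PETSC_COMM_WORLD, 2, isRows, 2, isRows, blocks,
                       petscMatrix); CHKERRQ(ierr);

  // Implicit Schur complement: inv(A11) is applied through an inner KSP,
  // so S is never assembled explicitly.
  ierr = MatCreateSchurComplement(matA11, matA11, matA12, matA21, matA22,
                                  schurMat); CHKERRQ(ierr);

  return 0;
}
```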