Commit 0e951559 authored by Thomas Witkowski

Removed the SubDomainSolver files (their functionality was merged into PetscSolver).

parent 14e8e4f8
CMakeLists.txt:
@@ -252,8 +252,7 @@ if(ENABLE_PARALLEL_DOMAIN)
      ${SOURCE_DIR}/parallel/PetscSolverFeti.cc
      ${SOURCE_DIR}/parallel/PetscSolverGlobalMatrix.cc
      ${SOURCE_DIR}/parallel/PetscSolverGlobalBlockMatrix.cc
-     ${SOURCE_DIR}/parallel/PetscSolverSchur.cc
-     ${SOURCE_DIR}/parallel/SubDomainSolver.cc)
+     ${SOURCE_DIR}/parallel/PetscSolverSchur.cc)
elseif(ENABLE_PARALLEL_DOMAIN STREQUAL "PMTL")
  set(MTL_INCLUDE_DIR "")
  find_package(MTL REQUIRED)
...

parallel/PetscSolver.h:
@@ -124,7 +124,7 @@ namespace AMDiS {
    inline bool isCoarseSpace(const FiniteElemSpace *feSpace,
                              DegreeOfFreedom dof)
    {
-     FUNCNAME("SubDomainSolver::isCoarseSpace()");
+     FUNCNAME("PetscSolver::isCoarseSpace()");
      TEST_EXIT_DBG(coarseSpaceMap)
        ("Subdomain solver does not contain a coarse space!\n");
@@ -133,7 +133,7 @@ namespace AMDiS {
    inline Vec& getRhsCoarseSpace()
    {
-     FUNCNAME("SubDomainSolver::getRhsCoarseSpace()");
+     FUNCNAME("PetscSolver::getRhsCoarseSpace()");
      TEST_EXIT_DBG(coarseSpaceMap)
        ("Subdomain solver does not contain a coarse space!\n");
@@ -152,7 +152,7 @@ namespace AMDiS {
    inline Mat& getMatCoarseCoarse()
    {
-     FUNCNAME("SubDomainSolver::getMatCoarseCoarse()");
+     FUNCNAME("PetscSolver::getMatCoarseCoarse()");
      TEST_EXIT_DBG(coarseSpaceMap)
        ("Subdomain solver does not contain a coarse space!\n");
@@ -161,7 +161,7 @@ namespace AMDiS {
    inline Mat& getMatIntCoarse()
    {
-     FUNCNAME("SubDomainSolver::getMatIntCoarse()");
+     FUNCNAME("PetscSolver::getMatIntCoarse()");
      TEST_EXIT_DBG(coarseSpaceMap)
        ("Subdomain solver does not contain a coarse space!\n");
@@ -170,7 +170,7 @@ namespace AMDiS {
    inline Mat& getMatCoarseInt()
    {
-     FUNCNAME("SubDomainSolver::getMatCoarseInt()");
+     FUNCNAME("PetscSolver::getMatCoarseInt()");
      TEST_EXIT_DBG(coarseSpaceMap)
        ("Subdomain solver does not contain a coarse space!\n");
...

parallel/PetscSolverFeti.cc:
@@ -15,7 +15,6 @@
#include "parallel/PetscSolverFetiStructs.h"
#include "parallel/StdMpi.h"
#include "parallel/MpiHelper.h"
#include "parallel/SubDomainSolver.h"
#include "parallel/PetscSolverGlobalMatrix.h"
#include "io/VtkWriter.h"
...

parallel/PetscSolverFeti.h:
@@ -26,7 +26,6 @@
#include "parallel/PetscSolverFetiStructs.h"
#include "parallel/ParallelDofMapping.h"
#include "parallel/ParallelTypes.h"
#include "parallel/SubDomainSolver.h"
#ifndef AMDIS_PETSC_SOLVER_FETI_H
#define AMDIS_PETSC_SOLVER_FETI_H
...

deleted file: parallel/SubDomainSolver.cc
//
// Software License for AMDiS
//
// Copyright (c) 2010 Dresden University of Technology
// All rights reserved.
// Authors: Simon Vey, Thomas Witkowski et al.
//
// This file is part of AMDiS
//
// See also license.opensource.txt in the distribution.
#include "parallel/SubDomainSolver.h"
#include "parallel/ParallelDebug.h"
#include "SystemVector.h"
#include "AMDiS.h"
namespace AMDiS {

  using namespace std;
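  // Stores the interior and the (optional) coarse space DOF mapping. With
  // only one rank in the local communicator the interior row offset is
  // trivial; otherwise rank 0 of the local group obtains the group's global
  // row offset (rStartInterior) and distributes it to all group members via
  // an MPI_SUM Allreduce to which the other ranks contribute 0.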
  void SubDomainSolver::setDofMapping(ParallelDofMapping *interiorDofs,
                                      ParallelDofMapping *coarseDofs)
  {
    interiorMap = interiorDofs;
    coarseSpaceMap = coarseDofs;

    if (mpiCommLocal.Get_size() == 1) {
      rStartInterior = 0;
      nGlobalOverallInterior = interiorMap->getOverallDofs();
    } else {
      int groupRowsInterior = 0;
      if (mpiCommLocal.Get_rank() == 0)
        groupRowsInterior = interiorMap->getOverallDofs();

      mpi::getDofNumbering(mpiCommGlobal, groupRowsInterior,
                           rStartInterior, nGlobalOverallInterior);

      int tmp = 0;
      if (mpiCommLocal.Get_rank() == 0)
        tmp = rStartInterior;

      mpiCommLocal.Allreduce(&tmp, &rStartInterior, 1, MPI_INT, MPI_SUM);
    }
  }
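  // Assembles the PETSc matrices from the sequentially created DOFMatrix
  // objects. If a coarse space is set, the system is stored as a 2x2 block
  // system: matIntInt couples interior with interior unknowns,
  // matCoarseCoarse coarse with coarse, and matCoarseInt/matIntCoarse are
  // the mixed coupling blocks.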
  void SubDomainSolver::fillPetscMatrix(Matrix<DOFMatrix*> *mat)
  {
    FUNCNAME("SubDomainSolver::fillPetscMatrix()");

    vector<const FiniteElemSpace*> feSpaces = getFeSpaces(mat);

    int nRowsRankInterior = interiorMap->getRankDofs();
    int nRowsOverallInterior = interiorMap->getOverallDofs();

    if (subdomainLevel == 0) {
      nGlobalOverallInterior = nRowsOverallInterior;
      MatCreateSeqAIJ(mpiCommLocal, nRowsRankInterior, nRowsRankInterior,
                      60, PETSC_NULL, &matIntInt);
    } else {
      MatCreateMPIAIJ(mpiCommLocal,
                      nRowsRankInterior, nRowsRankInterior,
                      nRowsOverallInterior, nRowsOverallInterior,
                      60, PETSC_NULL, 60, PETSC_NULL, &matIntInt);
    }
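    // The coarse space blocks are distributed over the global communicator;
    // 60 is a rough preallocation estimate for the number of nonzeros per
    // row in both the diagonal and the off-diagonal parts.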
    if (coarseSpaceMap) {
      int nRowsRankCoarse = coarseSpaceMap->getRankDofs();
      int nRowsOverallCoarse = coarseSpaceMap->getOverallDofs();

      MatCreateMPIAIJ(mpiCommGlobal,
                      nRowsRankCoarse, nRowsRankCoarse,
                      nRowsOverallCoarse, nRowsOverallCoarse,
                      60, PETSC_NULL, 60, PETSC_NULL, &matCoarseCoarse);
      MatCreateMPIAIJ(mpiCommGlobal,
                      nRowsRankCoarse, nRowsRankInterior,
                      nRowsOverallCoarse, nGlobalOverallInterior,
                      60, PETSC_NULL, 60, PETSC_NULL, &matCoarseInt);
      MatCreateMPIAIJ(mpiCommGlobal,
                      nRowsRankInterior, nRowsRankCoarse,
                      nGlobalOverallInterior, nRowsOverallCoarse,
                      60, PETSC_NULL, 60, PETSC_NULL, &matIntCoarse);
    }
    // === Prepare traverse of sequentially created matrices. ===

    using mtl::tag::row; using mtl::tag::nz; using mtl::begin; using mtl::end;
    namespace traits = mtl::traits;
    typedef DOFMatrix::base_matrix_type Matrix;
    typedef traits::range_generator<row, Matrix>::type cursor_type;
    typedef traits::range_generator<nz, cursor_type>::type icursor_type;

    vector<int> cols, colsOther;
    vector<double> values, valuesOther;
    cols.reserve(300);
    colsOther.reserve(300);
    values.reserve(300);
    valuesOther.reserve(300);
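    // For every matrix row, the column entries are split into two groups:
    // cols/values collect entries of the diagonal blocks (coarse-coarse or
    // interior-interior), while colsOther/valuesOther collect entries of
    // the corresponding off-diagonal coupling block.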
    // === Traverse all sequentially created matrices and add the values to ===
    // === the global PETSc matrices.                                       ===

    int nComponents = mat->getSize();
    for (int i = 0; i < nComponents; i++) {
      for (int j = 0; j < nComponents; j++) {
        if (!(*mat)[i][j])
          continue;

        traits::col<Matrix>::type col((*mat)[i][j]->getBaseMatrix());
        traits::const_value<Matrix>::type value((*mat)[i][j]->getBaseMatrix());

        // Traverse all rows.
        for (cursor_type cursor = begin<row>((*mat)[i][j]->getBaseMatrix()),
               cend = end<row>((*mat)[i][j]->getBaseMatrix()); cursor != cend; ++cursor) {
          bool rowPrimal = isCoarseSpace(feSpaces[i], *cursor);

          cols.clear();
          colsOther.clear();
          values.clear();
          valuesOther.clear();

          // Traverse all columns.
          for (icursor_type icursor = begin<nz>(cursor), icend = end<nz>(cursor);
               icursor != icend; ++icursor) {
            bool colPrimal = isCoarseSpace(feSpaces[j], col(*icursor));

            if (colPrimal) {
              if (rowPrimal) {
                cols.push_back(col(*icursor));
                values.push_back(value(*icursor));
              } else {
                colsOther.push_back(col(*icursor));
                valuesOther.push_back(value(*icursor));
              }
            } else {
              if (rowPrimal) {
                colsOther.push_back(col(*icursor));
                valuesOther.push_back(value(*icursor));
              } else {
                cols.push_back(col(*icursor));
                values.push_back(value(*icursor));
              }
            }
          } // for each nnz in row

          // === Set matrix values. ===

          if (rowPrimal) {
            int rowIndex = coarseSpaceMap->getMatIndex(i, *cursor);
            for (unsigned int k = 0; k < cols.size(); k++)
              cols[k] = coarseSpaceMap->getMatIndex(j, cols[k]);

            MatSetValues(matCoarseCoarse, 1, &rowIndex, cols.size(),
                         &(cols[0]), &(values[0]), ADD_VALUES);

            if (colsOther.size()) {
              if (subdomainLevel == 0) {
                for (unsigned int k = 0; k < colsOther.size(); k++)
                  colsOther[k] = interiorMap->getMatIndex(j, colsOther[k]);
              } else {
                for (unsigned int k = 0; k < colsOther.size(); k++)
                  colsOther[k] =
                    interiorMap->getMatIndex(j, colsOther[k]) + rStartInterior;
              }

              MatSetValues(matCoarseInt, 1, &rowIndex, colsOther.size(),
                           &(colsOther[0]), &(valuesOther[0]), ADD_VALUES);
            }
          } else {
            int localRowIndex =
              (subdomainLevel == 0 ? interiorMap->getLocalMatIndex(i, *cursor) :
               interiorMap->getMatIndex(i, *cursor));
            for (unsigned int k = 0; k < cols.size(); k++) {
              if (subdomainLevel == 0)
                cols[k] = interiorMap->getLocalMatIndex(j, cols[k]);
              else
                cols[k] = interiorMap->getMatIndex(j, cols[k]);
            }

            MatSetValues(matIntInt, 1, &localRowIndex, cols.size(),
                         &(cols[0]), &(values[0]), ADD_VALUES);

            if (colsOther.size()) {
              int globalRowIndex = interiorMap->getMatIndex(i, *cursor);
              if (subdomainLevel != 0)
                globalRowIndex += rStartInterior;

              for (unsigned int k = 0; k < colsOther.size(); k++)
                colsOther[k] = coarseSpaceMap->getMatIndex(j, colsOther[k]);

              MatSetValues(matIntCoarse, 1, &globalRowIndex, colsOther.size(),
                           &(colsOther[0]), &(valuesOther[0]), ADD_VALUES);
            }
          }
        }
      }
    }
    // === Start global assembly procedure. ===

    MatAssemblyBegin(matIntInt, MAT_FINAL_ASSEMBLY);
    MatAssemblyEnd(matIntInt, MAT_FINAL_ASSEMBLY);

    if (coarseSpaceMap) {
      MatAssemblyBegin(matCoarseCoarse, MAT_FINAL_ASSEMBLY);
      MatAssemblyEnd(matCoarseCoarse, MAT_FINAL_ASSEMBLY);
      MatAssemblyBegin(matIntCoarse, MAT_FINAL_ASSEMBLY);
      MatAssemblyEnd(matIntCoarse, MAT_FINAL_ASSEMBLY);
      MatAssemblyBegin(matCoarseInt, MAT_FINAL_ASSEMBLY);
      MatAssemblyEnd(matCoarseInt, MAT_FINAL_ASSEMBLY);
    }
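    // The interior problem is solved with a direct LU factorization:
    // UMFPACK on level 0, where the interior matrix is sequential, and the
    // parallel direct solver MUMPS on higher levels.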
    // === Create solver for the non-primal (thus local) variables. ===

    KSPCreate(mpiCommLocal, &kspInterior);
    KSPSetOperators(kspInterior, matIntInt, matIntInt, SAME_NONZERO_PATTERN);
    KSPSetOptionsPrefix(kspInterior, "interior_");
    KSPSetType(kspInterior, KSPPREONLY);
    PC pcInterior;
    KSPGetPC(kspInterior, &pcInterior);
    PCSetType(pcInterior, PCLU);
    if (subdomainLevel == 0)
      PCFactorSetMatSolverPackage(pcInterior, MATSOLVERUMFPACK);
    else
      PCFactorSetMatSolverPackage(pcInterior, MATSOLVERMUMPS);
    KSPSetFromOptions(kspInterior);
  }
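  // Assembles the right-hand side: entries belonging to coarse space DOFs
  // are added to rhsCoarseSpace, all other entries to rhsInterior (shifted
  // by rStartInterior, which is zero in the single-level case).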
  void SubDomainSolver::fillPetscRhs(SystemVector *vec)
  {
    FUNCNAME("SubDomainSolver::fillPetscRhs()");

    VecCreateMPI(mpiCommGlobal,
                 interiorMap->getRankDofs(),
                 nGlobalOverallInterior,
                 &rhsInterior);

    if (coarseSpaceMap)
      VecCreateMPI(mpiCommGlobal,
                   coarseSpaceMap->getRankDofs(),
                   coarseSpaceMap->getOverallDofs(),
                   &rhsCoarseSpace);

    for (int i = 0; i < vec->getSize(); i++) {
      const FiniteElemSpace *feSpace = vec->getDOFVector(i)->getFeSpace();
      DOFVector<double>::Iterator dofIt(vec->getDOFVector(i), USED_DOFS);
      for (dofIt.reset(); !dofIt.end(); ++dofIt) {
        int index = dofIt.getDOFIndex();
        if (isCoarseSpace(feSpace, index)) {
          index = coarseSpaceMap->getMatIndex(i, index);
          VecSetValue(rhsCoarseSpace, index, *dofIt, ADD_VALUES);
        } else {
          index = interiorMap->getMatIndex(i, index) + rStartInterior;
          VecSetValue(rhsInterior, index, *dofIt, ADD_VALUES);
        }
      }
    }

    VecAssemblyBegin(rhsInterior);
    VecAssemblyEnd(rhsInterior);

    if (coarseSpaceMap) {
      VecAssemblyBegin(rhsCoarseSpace);
      VecAssemblyEnd(rhsCoarseSpace);
    }
  }
  void SubDomainSolver::solvePetscMatrix(SystemVector &vec, AdaptInfo *adaptInfo)
  {
    FUNCNAME("SubDomainSolver::solvePetscMatrix()");
  }


  void SubDomainSolver::destroyVectorData()
  {
    FUNCNAME("SubDomainSolver::destroyVectorData()");

    VecDestroy(&rhsCoarseSpace);
    VecDestroy(&rhsInterior);
  }


  void SubDomainSolver::destroyMatrixData()
  {
    FUNCNAME("SubDomainSolver::destroyMatrixData()");

    MatDestroy(&matIntInt);
    MatDestroy(&matCoarseCoarse);
    MatDestroy(&matCoarseInt);
    MatDestroy(&matIntCoarse);
    KSPDestroy(&kspInterior);
  }
  void SubDomainSolver::solve(Vec &rhs, Vec &sol)
  {
    FUNCNAME("SubDomainSolver::solve()");

    PetscErrorCode solverError = KSPSolve(kspInterior, rhs, sol);
    if (solverError != 0) {
      AMDiS::finalize();
      exit(-1);
    }
  }
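  // Solves the interior problem for a globally distributed right-hand side:
  // the rank-local part of rhs is copied into a vector distributed over the
  // local communicator, the interior KSP is applied in place, and the
  // result is copied back into sol.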
  void SubDomainSolver::solveGlobal(Vec &rhs, Vec &sol)
  {
    FUNCNAME("SubDomainSolver::solveGlobal()");

    Vec tmp;
    if (mpiCommLocal.Get_size() == 1)
      VecCreateSeq(mpiCommLocal, interiorMap->getRankDofs(), &tmp);
    else
      VecCreateMPI(mpiCommLocal,
                   interiorMap->getRankDofs(),
                   interiorMap->getOverallDofs(),
                   &tmp);

    PetscScalar *tmpValues, *rhsValues;
    VecGetArray(tmp, &tmpValues);
    VecGetArray(rhs, &rhsValues);

    for (int i = 0; i < interiorMap->getRankDofs(); i++)
      tmpValues[i] = rhsValues[i];

    VecRestoreArray(rhs, &rhsValues);
    VecRestoreArray(tmp, &tmpValues);

    KSPSolve(kspInterior, tmp, tmp);

    VecGetArray(tmp, &tmpValues);
    VecGetArray(sol, &rhsValues);

    for (int i = 0; i < interiorMap->getRankDofs(); i++)
      rhsValues[i] = tmpValues[i];

    VecRestoreArray(sol, &rhsValues);
    VecRestoreArray(tmp, &tmpValues);

    VecDestroy(&tmp);
  }
  vector<const FiniteElemSpace*> SubDomainSolver::getFeSpaces(Matrix<DOFMatrix*> *mat)
  {
    FUNCNAME("SubDomainSolver::getFeSpaces()");

    int nComponents = mat->getNumRows();
    vector<const FiniteElemSpace*> result(nComponents);

    for (int i = 0; i < nComponents; i++)
      for (int j = 0; j < nComponents; j++)
        if ((*mat)[i][j]) {
          result[i] = (*mat)[i][j]->getRowFeSpace();
          break;
        }

    return result;
  }


  vector<const FiniteElemSpace*> SubDomainSolver::getFeSpaces(SystemVector *vec)
  {
    FUNCNAME("SubDomainSolver::getFeSpaces()");

    int nComponents = vec->getSize();
    vector<const FiniteElemSpace*> result(nComponents);

    for (int i = 0; i < nComponents; i++)
      result[i] = vec->getFeSpace(i);

    return result;
  }
}
deleted file: parallel/SubDomainSolver.h

// ============================================================================
// == ==
// == AMDiS - Adaptive multidimensional simulations ==
// == ==
// == http://www.amdis-fem.org ==
// == ==
// ============================================================================
//
// Software License for AMDiS
//
// Copyright (c) 2010 Dresden University of Technology
// All rights reserved.
// Authors: Simon Vey, Thomas Witkowski et al.
//
// This file is part of AMDiS
//
// See also license.opensource.txt in the distribution.
/** \file SubDomainSolver.h */
#ifndef AMDIS_SUBDOMAIN_SOLVER_H
#define AMDIS_SUBDOMAIN_SOLVER_H
#include <map>
#include <set>
#include <mpi.h>
#include <petsc.h>
#include "parallel/MeshDistributor.h"
#include "parallel/ParallelDofMapping.h"
namespace AMDiS {

  using namespace std;
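  /** \brief
   * Encapsulates the PETSc data of one subdomain in a (possibly multi-level)
   * domain decomposition: the interior matrix and its KSP solver, the
   * optional coarse space blocks, and the corresponding right-hand side
   * vectors.
   */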
  class SubDomainSolver {
  public:
    SubDomainSolver()
      : meshDistributor(NULL),
        subdomainLevel(0),
        interiorMap(NULL),
        coarseSpaceMap(NULL)
    {}

    void setMeshDistributor(MeshDistributor *m,
                            MPI::Intracomm mpiComm0,
                            MPI::Intracomm mpiComm1)
    {
      meshDistributor = m;
      mpiCommGlobal = mpiComm0;
      mpiCommLocal = mpiComm1;
    }

    void setLevel(int l)
    {
      subdomainLevel = l;
    }

    void setDofMapping(ParallelDofMapping *interiorDofs,
                       ParallelDofMapping *coarseDofs = NULL);

    void fillPetscMatrix(Matrix<DOFMatrix*> *mat);

    void fillPetscRhs(SystemVector *vec);

    void solvePetscMatrix(SystemVector &vec, AdaptInfo *adaptInfo);

    void destroyVectorData();

    void destroyMatrixData();

    void solve(Vec &rhs, Vec &sol);

    void solveGlobal(Vec &rhs, Vec &sol);

    inline bool isCoarseSpace(const FiniteElemSpace *feSpace,
                              DegreeOfFreedom dof)
    {
      FUNCNAME("SubDomainSolver::isCoarseSpace()");
      TEST_EXIT_DBG(coarseSpaceMap)
        ("Subdomain solver does not contain a coarse space!\n");

      return (*coarseSpaceMap)[feSpace].isSet(dof);
    }

    inline Vec& getRhsCoarseSpace()
    {
      FUNCNAME("SubDomainSolver::getRhsCoarseSpace()");
      TEST_EXIT_DBG(coarseSpaceMap)
        ("Subdomain solver does not contain a coarse space!\n");

      return rhsCoarseSpace;
    }

    inline Vec& getRhsInterior()
    {
      return rhsInterior;
    }

    inline Mat& getMatIntInt()
    {
      return matIntInt;
    }

    inline Mat& getMatCoarseCoarse()
    {
      FUNCNAME("SubDomainSolver::getMatCoarseCoarse()");
      TEST_EXIT_DBG(coarseSpaceMap)
        ("Subdomain solver does not contain a coarse space!\n");

      return matCoarseCoarse;
    }

    inline Mat& getMatIntCoarse()
    {
      FUNCNAME("SubDomainSolver::getMatIntCoarse()");
      TEST_EXIT_DBG(coarseSpaceMap)
        ("Subdomain solver does not contain a coarse space!\n");

      return matIntCoarse;
    }

    inline Mat& getMatCoarseInt()
    {
      FUNCNAME("SubDomainSolver::getMatCoarseInt()");
      TEST_EXIT_DBG(coarseSpaceMap)
        ("Subdomain solver does not contain a coarse space!\n");

      return matCoarseInt;
    }

  protected:
    /// Returns a vector with the FE spaces of each row in the matrix. Thus,
    /// the resulting vector may contain the same FE space multiple times.
    vector<const FiniteElemSpace*> getFeSpaces(Matrix<DOFMatrix*> *mat);

    /// Returns a vector with the FE spaces of all components of a
    /// system vector.
    vector<const FiniteElemSpace*> getFeSpaces(SystemVector *vec);

  protected:
    MeshDistributor *meshDistributor;

    int subdomainLevel;