Commit 6ef1e048 authored by Thomas Witkowski

Some work on the nested Schur complement approach.

parent b640da9e
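
Background sketch (the block-to-index pairing is inferred from the code below, not stated in the commit): PetscSolverSchur splits the DOFs into interior and boundary sets and assembles the four PETSc blocks matA11, matA12, matA21 and matA22, which are combined into one matrix via MatCreateNest. Writing the interior unknowns first and the boundary unknowns second, the system and its Schur complement are

$$\begin{pmatrix} A_{II} & A_{IB} \\ A_{BI} & A_{BB} \end{pmatrix}\begin{pmatrix} u_I \\ u_B \end{pmatrix} = \begin{pmatrix} f_I \\ f_B \end{pmatrix}, \qquad S = A_{BB} - A_{BI}\,A_{II}^{-1} A_{IB},$$

so the boundary unknowns satisfy $S\,u_B = f_B - A_{BI} A_{II}^{-1} f_I$ and the interior unknowns follow by back substitution; the field split preconditioner set up in solvePetscMatrix acts on exactly this 2x2 block structure.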
@@ -136,7 +136,7 @@
#if HAVE_PARALLEL_MTL4
#include "parallel/Mtl4Solver.h"
#else
#include "parallel/PetscSolver.h"
#include "parallel/PetscProblemStat.h"
#endif
#endif
@@ -1546,6 +1546,11 @@ namespace AMDiS {
BoundaryType *bound =
useGetBound ? new BoundaryType[basisFcts->getNumber()] : NULL;
if (matrix)
matrix->startInsertion(matrix->getNnz());
if (vector)
vector->set(0.0);
// == Traverse mesh and assemble. ==
ElInfo *elInfo = stack.traverseFirst(mesh, -1, assembleFlag);
@@ -23,6 +23,10 @@
#ifndef AMDIS_SERIALIZER_H
#define AMDIS_SERIALIZER_H
#if HAVE_PARALLEL_DOMAIN_AMDIS
#include <mpi.h>
#endif
#include <map>
#include "boost/lexical_cast.hpp"
@@ -1702,35 +1702,26 @@ namespace AMDiS {
}
// === Get starting position for global rank DOF ordering. ====
// Get displacement for global rank DOF ordering and global DOF number.
nRankDofs = rankDofs.size();
mpiComm.Scan(&nRankDofs, &rstart, 1, MPI_INT, MPI_SUM);
rstart -= nRankDofs;
mpi::getDofNumbering(mpiComm, nRankDofs, rstart, nOverallDofs);
// === Stores for all rank owned DOFs a new global index. ===
// Stores for all rank owned DOFs a new global index.
DofIndexMap rankDofsNewGlobalIndex;
for (int i = 0; i < nRankDofs; i++)
rankDofsNewGlobalIndex[rankDofs[i]] = i + rstart;
// === Calculate number of overall DOFs of all partitions. ===
nOverallDofs = 0;
mpiComm.Allreduce(&nRankDofs, &nOverallDofs, 1, MPI_INT, MPI_SUM);
// === Send and receive new DOF indices. ===
#if (DEBUG != 0)
ParallelDebug::testDofContainerCommunication(*this, sendDofs, recvDofs);
#endif
int i = 0;
StdMpi<vector<DegreeOfFreedom> > stdMpi(mpiComm, false);
for (RankToDofContainer::iterator sendIt = sendDofs.begin();
sendIt != sendDofs.end(); ++sendIt, i++) {
sendIt != sendDofs.end(); ++sendIt) {
stdMpi.getSendData(sendIt->first).resize(0);
stdMpi.getSendData(sendIt->first).reserve(sendIt->second.size());
for (DofContainer::iterator dofIt = sendIt->second.begin();
@@ -1751,10 +1742,10 @@ namespace AMDiS {
for (RankToDofContainer::iterator recvIt = recvDofs.begin();
recvIt != recvDofs.end(); ++recvIt) {
int j = 0;
int i = 0;
for (DofContainer::iterator dofIt = recvIt->second.begin();
dofIt != recvIt->second.end(); ++dofIt) {
rankDofsNewGlobalIndex[*dofIt] = stdMpi.getRecvData(recvIt->first)[j++];
rankDofsNewGlobalIndex[*dofIt] = stdMpi.getRecvData(recvIt->first)[i++];
isRankDof[**dofIt] = false;
}
}
@@ -25,6 +25,8 @@
#include <map>
#include <set>
#include <mpi.h>
#include "AMDiS_fwd.h"
#include "Mesh.h"
@@ -48,6 +48,31 @@ namespace AMDiS {
{
srand(time(NULL) * (MPI::COMM_WORLD.Get_rank() + 1));
}
/** \brief
* In many situations each rank computes a number of local DOFs. All
* ranks then need to know the global number of DOFs and the
* displacement of the DOF numbering in each rank.
*
* \param[in] mpiComm The MPI communicator.
* \param[in] nRankDofs The number of local DOFs.
* \param[out] rStartDofs Displacement of the DOF numbering. On rank n
* this is the sum of the local DOF numbers of
* ranks 0 to n - 1.
* \param[out] nOverallDofs Global sum of nRankDofs. Equal on all
* ranks.
*/
inline void getDofNumbering(MPI::Intracomm& mpiComm,
int nRankDofs,
int& rStartDofs,
int& nOverallDofs)
{
rStartDofs = 0;
nOverallDofs = 0;
mpiComm.Scan(&nRankDofs, &rStartDofs, 1, MPI_INT, MPI_SUM);
rStartDofs -= nRankDofs;
mpiComm.Allreduce(&nRankDofs, &nOverallDofs, 1, MPI_INT, MPI_SUM);
}
}
}
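A minimal usage sketch of the helper above (variable names hypothetical; the call pattern mirrors the uses of mpi::getDofNumbering elsewhere in this commit):

int nRankDofs = rankDofs.size();        // hypothetical container of rank-owned DOFs
int rStartDofs = 0, nOverallDofs = 0;
mpi::getDofNumbering(MPI::COMM_WORLD, nRankDofs, rStartDofs, nOverallDofs);
// The i-th rank-owned DOF gets the global index rStartDofs + i;
// nOverallDofs holds the same global total on every rank.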
@@ -29,6 +29,7 @@ namespace AMDiS {
double wtime = MPI::Wtime();
petscSolver->setMeshDistributor(meshDistributor);
petscSolver->fillPetscMatrix(systemMatrix, rhs);
petscSolver->solvePetscMatrix(*solution, adaptInfo);
@@ -42,12 +42,12 @@ namespace AMDiS {
if (name == "petsc-schur") {
#ifdef HAVE_PETSC_DEV
petscSolver = new PetscSolverSchur(meshDistributor);
petscSolver = new PetscSolverSchur();
#else
ERROR_EXIT("Petsc schur complement solver is only supported when petsc-dev is used!\n");
#endif
} else if (name == "petsc" || name == "") {
petscSolver = new PetscSolverGlobalMatrix(meshDistributor);
petscSolver = new PetscSolverGlobalMatrix();
} else {
ERROR_EXIT("No parallel solver %s available!\n", name.c_str());
}
@@ -12,6 +12,7 @@
#include "parallel/PetscSolver.h"
#include "parallel/StdMpi.h"
#include "parallel/MpiHelper.h"
namespace AMDiS {
@@ -30,6 +31,10 @@ namespace AMDiS {
{
FUNCNAME("PetscSolverGlobalMatrix::fillPetscMatrix()");
TEST_EXIT_DBG(meshDistributor)("No mesh distributor object defined!\n");
TEST_EXIT_DBG(mat)("No DOF matrix defined!\n");
TEST_EXIT_DBG(vec)("No DOF vector defined!\n");
double wtime = MPI::Wtime();
int nComponents = mat->getNumRows();
int nRankRows = meshDistributor->getNumberRankDofs() * nComponents;
@@ -233,9 +238,9 @@ namespace AMDiS {
for (cursor_type cursor = begin<row>(mat->getBaseMatrix()),
cend = end<row>(mat->getBaseMatrix()); cursor != cend; ++cursor) {
// Global index of the current row dof.
// Global index of the current row DOF.
int globalRowDof = meshDistributor->mapLocalToGlobal(*cursor);
// Test if the current row dof is a periodic dof.
// Test if the current row DOF is a periodic DOF.
bool periodicRow = meshDistributor->isPeriodicDof(globalRowDof);
if (!periodicRow) {
@@ -264,7 +269,7 @@ namespace AMDiS {
if (!periodicCol) {
// Calculate the exact position of the column index in the PETSc matrix.
cols.push_back(globalColDof * dispMult + dispAddCol);
cols.push_back(colIndex);
values.push_back(value(*icursor));
} else {
// === Row index is not periodic, but column index is. ===
@@ -434,10 +439,10 @@ namespace AMDiS {
if (rankOnly && !meshDistributor->getIsRankDof(dofIt.getDOFIndex()))
continue;
// Calculate global row index of the dof.
// Calculate global row index of the DOF.
DegreeOfFreedom globalRowDof =
meshDistributor->mapLocalToGlobal(dofIt.getDOFIndex());
// Calculate PETSc index of the row dof.
// Calculate PETSc index of the row DOF.
int index = globalRowDof * dispMult + dispAdd;
if (meshDistributor->isPeriodicDof(globalRowDof)) {
@@ -451,7 +456,7 @@ namespace AMDiS {
VecSetValues(petscVec, 1, &mappedIndex, &value, ADD_VALUES);
}
} else {
// The dof index is not periodic.
// The DOF index is not periodic.
double value = *dofIt;
VecSetValues(petscVec, 1, &index, &value, ADD_VALUES);
}
@@ -653,31 +658,40 @@ namespace AMDiS {
#ifdef HAVE_PETSC_DEV
void PetscSolverSchur::fillPetscMatrix(Matrix<DOFMatrix*> *mat, SystemVector *vec)
void PetscSolverSchur::updateDofData()
{
FUNCNAME("PetscSolverSchur::fillPetscMatrix()");
FUNCNAME("PetscSolverSchur::updateDofData()");
int nComponents = mat->getNumRows();
TEST_EXIT_DBG(meshDistributor)("No mesh distributor object defined!\n");
MPI::Intracomm& mpiComm = meshDistributor->getMpiComm();
typedef map<int, DofContainer> RankToDofContainer;
typedef map<DegreeOfFreedom, bool> DofIndexToBool;
std::set<DegreeOfFreedom> boundaryDofsSet;
boundaryDofs.clear();
std::set<DegreeOfFreedom> boundaryLocalDofs;
RankToDofContainer& sendDofs = meshDistributor->getSendDofs();
for (RankToDofContainer::iterator rankIt = sendDofs.begin();
rankIt != sendDofs.end(); ++rankIt)
rankIt != sendDofs.end(); ++rankIt) {
for (DofContainer::iterator dofIt = rankIt->second.begin();
dofIt != rankIt->second.end(); ++dofIt) {
boundaryLocalDofs.insert(**dofIt);
for (int i = 0; i < nComponents; i++)
boundaryDofsSet.insert(meshDistributor->mapLocalToGlobal(**dofIt) * nComponents + i);
boundaryDofs.insert(meshDistributor->mapLocalToGlobal(**dofIt));
}
}
boundaryDofs.clear();
boundaryDofs.insert(boundaryDofs.begin(),
boundaryDofsSet.begin(), boundaryDofsSet.end());
nBoundaryDofs = boundaryDofs.size();
mpi::getDofNumbering(mpiComm, nBoundaryDofs,
rStartBoundaryDofs, nOverallBoundaryDofs);
int counter = rStartBoundaryDofs;
for (std::set<DegreeOfFreedom>::iterator it = boundaryDofs.begin();
it != boundaryDofs.end(); ++it)
mapGlobalBoundaryDof[*it] = counter++;
std::set<DegreeOfFreedom> otherBoundaryLocalDofs;
RankToDofContainer& recvDofs = meshDistributor->getRecvDofs();
@@ -687,19 +701,153 @@ namespace AMDiS {
dofIt != rankIt->second.end(); ++dofIt)
otherBoundaryLocalDofs.insert(**dofIt);
std::set<DegreeOfFreedom> interiorDofsSet;
interiorDofs.clear();
DofIndexToBool& isRankDof = meshDistributor->getIsRankDof();
for (DofIndexToBool::iterator dofIt = isRankDof.begin();
dofIt != isRankDof.end(); ++dofIt)
dofIt != isRankDof.end(); ++dofIt) {
if (dofIt->second &&
boundaryLocalDofs.count(dofIt->first) == 0 &&
otherBoundaryLocalDofs.count(dofIt->first) == 0)
interiorDofs.insert(meshDistributor->mapLocalToGlobal(dofIt->first));
}
nInteriorDofs = interiorDofs.size();
mpi::getDofNumbering(mpiComm, nInteriorDofs,
rStartInteriorDofs, nOverallInteriorDofs);
counter = rStartInteriorDofs;
for (std::set<DegreeOfFreedom>::iterator it = interiorDofs.begin();
it != interiorDofs.end(); ++it)
mapGlobalInteriorDof[*it] = counter++;
TEST_EXIT_DBG(nInteriorDofs > 0)("Should not happen!\n");
StdMpi<vector<DegreeOfFreedom> > stdMpi(mpiComm, false);
for (RankToDofContainer::iterator sendIt = meshDistributor->getSendDofs().begin();
sendIt != meshDistributor->getSendDofs().end(); ++sendIt) {
stdMpi.getSendData(sendIt->first).resize(0);
stdMpi.getSendData(sendIt->first).reserve(sendIt->second.size());
for (DofContainer::iterator dofIt = sendIt->second.begin();
dofIt != sendIt->second.end(); ++dofIt) {
int globalSendDof = meshDistributor->mapLocalToGlobal(**dofIt);
TEST_EXIT_DBG(mapGlobalBoundaryDof.count(globalSendDof))
("No mapping for boundary DOF %d!\n", globalSendDof);
stdMpi.getSendData(sendIt->first).push_back(mapGlobalBoundaryDof[globalSendDof]);
}
}
stdMpi.updateSendDataSize();
stdMpi.recv(meshDistributor->getRecvDofs());
stdMpi.startCommunication();
for (RankToDofContainer::iterator recvIt = meshDistributor->getRecvDofs().begin();
recvIt != meshDistributor->getRecvDofs().end(); ++recvIt) {
int i = 0;
for (DofContainer::iterator dofIt = recvIt->second.begin();
dofIt != recvIt->second.end(); ++dofIt) {
int globalRecvDof = meshDistributor->mapLocalToGlobal(**dofIt);
mapGlobalBoundaryDof[globalRecvDof] = stdMpi.getRecvData(recvIt->first)[i++];
}
}
}
void PetscSolverSchur::fillPetscMatrix(Matrix<DOFMatrix*> *mat, SystemVector *vec)
{
FUNCNAME("PetscSolverSchur::fillPetscMatrix()");
updateDofData();
int nComponents = mat->getNumRows();
int nInteriorRows = nInteriorDofs * nComponents;
int nOverallInteriorRows = nOverallInteriorDofs * nComponents;
int nBoundaryRows = nBoundaryDofs * nComponents;
int nOverallBoundaryRows = nOverallBoundaryDofs * nComponents;
MSG("INTERIOR LOCAL MAT: %d x %d\n", nInteriorRows, nInteriorRows);
MSG("INTERIOR GLOBAL MAT: %d x %d\n", nOverallInteriorRows, nOverallInteriorRows);
MSG("BOUNDARY LOCAL MAT: %d x %d\n", nBoundaryRows, nBoundaryRows);
MSG("BOUNDARY GLOBAL MAT: %d x %d\n", nOverallBoundaryRows, nOverallBoundaryRows);
MatCreateMPIAIJ(PETSC_COMM_WORLD,
nInteriorRows, nInteriorRows,
nOverallInteriorRows, nOverallInteriorRows,
100, PETSC_NULL, 100, PETSC_NULL, &matA11);
MatCreateMPIAIJ(PETSC_COMM_WORLD,
nBoundaryRows, nBoundaryRows,
nOverallBoundaryRows, nOverallBoundaryRows,
100, PETSC_NULL, 100, PETSC_NULL, &matA22);
MatCreateMPIAIJ(PETSC_COMM_WORLD,
nInteriorRows, nBoundaryRows,
nOverallInteriorRows, nOverallBoundaryRows,
100, PETSC_NULL, 100, PETSC_NULL, &matA12);
MatCreateMPIAIJ(PETSC_COMM_WORLD,
nBoundaryRows, nInteriorRows,
nOverallBoundaryRows, nOverallInteriorRows,
100, PETSC_NULL, 100, PETSC_NULL, &matA21);
for (int i = 0; i < nComponents; i++)
interiorDofsSet.insert(meshDistributor->mapLocalToGlobal(dofIt->first) * nComponents + i);
for (int j = 0; j < nComponents; j++)
if ((*mat)[i][j])
setDofMatrix((*mat)[i][j], nComponents, i, j);
interiorDofs.clear();
interiorDofs.insert(interiorDofs.begin(),
interiorDofsSet.begin(), interiorDofsSet.end());
MatAssemblyBegin(matA11, MAT_FINAL_ASSEMBLY);
MatAssemblyEnd(matA11, MAT_FINAL_ASSEMBLY);
MatAssemblyBegin(matA12, MAT_FINAL_ASSEMBLY);
MatAssemblyEnd(matA12, MAT_FINAL_ASSEMBLY);
MatAssemblyBegin(matA21, MAT_FINAL_ASSEMBLY);
MatAssemblyEnd(matA21, MAT_FINAL_ASSEMBLY);
MatAssemblyBegin(matA22, MAT_FINAL_ASSEMBLY);
MatAssemblyEnd(matA22, MAT_FINAL_ASSEMBLY);
Mat tmpMat[2][2];
tmpMat[0][0] = matA11;
tmpMat[0][1] = matA12;
tmpMat[1][0] = matA21;
tmpMat[1][1] = matA22;
MatCreateNest(PETSC_COMM_WORLD, 2, PETSC_NULL, 2, PETSC_NULL,
&tmpMat[0][0], &petscMatrix);
MatNestSetVecType(petscMatrix, VECNEST);
MatAssemblyBegin(petscMatrix, MAT_FINAL_ASSEMBLY);
MatAssemblyEnd(petscMatrix, MAT_FINAL_ASSEMBLY);
MatGetVecs(matA12, &petscRhsVec2, &petscRhsVec1);
MatGetVecs(matA12, &petscSolVec2, &petscSolVec1);
for (int i = 0; i < nComponents; i++)
setDofVector(petscRhsVec, vec->getDOFVector(i), nComponents, i);
Vec tmpVec[2];
tmpVec[0] = petscRhsVec1;
tmpVec[1] = petscRhsVec2;
VecCreateNest(PETSC_COMM_WORLD, 2, PETSC_NULL, tmpVec, &petscRhsVec);
tmpVec[0] = petscSolVec1;
tmpVec[1] = petscSolVec2;
VecCreateNest(PETSC_COMM_WORLD, 2, PETSC_NULL, tmpVec, &petscSolVec);
VecAssemblyBegin(petscRhsVec1);
VecAssemblyEnd(petscRhsVec1);
VecAssemblyBegin(petscRhsVec2);
VecAssemblyEnd(petscRhsVec2);
VecAssemblyBegin(petscRhsVec);
VecAssemblyEnd(petscRhsVec);
}
@@ -711,23 +859,138 @@ namespace AMDiS {
PC pc;
KSPCreate(PETSC_COMM_WORLD, &solver);
KSPGetPC(solver, &pc);
PCSetType(pc, PCFIELDSPLIT);
IS interiorIs;
ISCreateGeneral(PETSC_COMM_WORLD, interiorDofs.size(), &(interiorDofs[0]),
PETSC_COPY_VALUES, &interiorIs);
PCFieldSplitSetIS(pc, "interior", interiorIs);
IS boundaryIs;
ISCreateGeneral(PETSC_COMM_WORLD, boundaryDofs.size(), &(boundaryDofs[0]),
PETSC_COPY_VALUES, &boundaryIs);
PCFieldSplitSetIS(pc, "boundary", boundaryIs);
// PCSetType(pc, PCFIELDSPLIT);
const PetscInt interiorField[1] = {0};
const PetscInt boundaryField[1] = {1};
PCFieldSplitSetFields(pc, "interior", 1, interiorField);
PCFieldSplitSetFields(pc, "boundary", 1, boundaryField);
// KSPSetOperators(solver, petscMatrix, petscMatrix, SAME_NONZERO_PATTERN);
KSPSetOperators(solver, petscMatrix, petscMatrix, SAME_NONZERO_PATTERN);
KSPSetTolerances(solver, 0.0, 1e-8, PETSC_DEFAULT, PETSC_DEFAULT);
KSPSetFromOptions(solver);
PCSetFromOptions(pc);
KSPSolve(solver, petscRhsVec, petscSolVec);
KSPDestroy(solver);
VecDestroy(petscRhsVec1);
VecDestroy(petscRhsVec2);
VecDestroy(petscRhsVec);
VecDestroy(petscSolVec1);
VecDestroy(petscSolVec2);
VecDestroy(petscSolVec);
MatDestroy(matA11);
MatDestroy(matA12);
MatDestroy(matA21);
MatDestroy(matA22);
MatDestroy(petscMatrix);
MSG("SOLVED IT!\n");
exit(0);
}
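Since PCSetType(pc, PCFIELDSPLIT) is left commented out above, the split preconditioner is presumably selected at runtime through KSPSetFromOptions/PCSetFromOptions. A plausible set of generic PETSc runtime options for a Schur complement field split over the "interior" and "boundary" splits defined above (standard PETSc options, not taken from this commit) is:

-pc_type fieldsplit
-pc_fieldsplit_type schur
-fieldsplit_interior_ksp_type preonly
-fieldsplit_interior_pc_type lu
-fieldsplit_boundary_ksp_type gmres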
void PetscSolverSchur::setDofMatrix(DOFMatrix* mat, int dispMult,
int dispAddRow, int dispAddCol)
{
FUNCNAME("PetscSolverSchur::setDofMatrix()");
TEST_EXIT(mat)("No DOFMatrix!\n");
using mtl::tag::row; using mtl::tag::nz; using mtl::begin; using mtl::end;
namespace traits= mtl::traits;
typedef DOFMatrix::base_matrix_type Matrix;
traits::col<Matrix>::type col(mat->getBaseMatrix());
traits::const_value<Matrix>::type value(mat->getBaseMatrix());
typedef traits::range_generator<row, Matrix>::type cursor_type;
typedef traits::range_generator<nz, cursor_type>::type icursor_type;
vector<int> colsBoundary, colsInterior;
vector<double> valuesBoundary, valuesInterior;
colsBoundary.reserve(300);
colsInterior.reserve(300);
valuesBoundary.reserve(300);
valuesInterior.reserve(300);
for (cursor_type cursor = begin<row>(mat->getBaseMatrix()),
cend = end<row>(mat->getBaseMatrix()); cursor != cend; ++cursor) {
// Global index of the current row DOF.
int globalRowDof = meshDistributor->mapLocalToGlobal(*cursor);
colsBoundary.clear();
colsInterior.clear();
valuesBoundary.clear();
valuesInterior.clear();
for (icursor_type icursor = begin<nz>(cursor), icend = end<nz>(cursor);
icursor != icend; ++icursor) {
int globalColDof = meshDistributor->mapLocalToGlobal(col(*icursor));
if (boundaryDofs.count(globalColDof)) {
int colIndex =
mapGlobalBoundaryDof[globalColDof] * dispMult + dispAddCol;
colsBoundary.push_back(colIndex);
valuesBoundary.push_back(value(*icursor));
} else {
int colIndex =
mapGlobalInteriorDof[globalColDof] * dispMult + dispAddCol;
colsInterior.push_back(colIndex);
valuesInterior.push_back(value(*icursor));
}
}
if (boundaryDofs.count(globalRowDof)) {
int rowIndex =
mapGlobalBoundaryDof[globalRowDof] * dispMult + dispAddRow;
MatSetValues(matA22, 1, &rowIndex, colsBoundary.size(),
&(colsBoundary[0]), &(valuesBoundary[0]), ADD_VALUES);
MatSetValues(matA21, 1, &rowIndex, colsInterior.size(),
&(colsInterior[0]), &(valuesInterior[0]), ADD_VALUES);
} else {
int rowIndex =
mapGlobalInteriorDof[globalRowDof] * dispMult + dispAddRow;
MatSetValues(matA11, 1, &rowIndex, colsInterior.size(),
&(colsInterior[0]), &(valuesInterior[0]), ADD_VALUES);
MatSetValues(matA12, 1, &rowIndex, colsBoundary.size(),
&(colsBoundary[0]), &(valuesBoundary[0]), ADD_VALUES);
}
}
}
void PetscSolverSchur::setDofVector(Vec& petscVec, DOFVector<double>* vec,
int dispMult, int dispAdd, bool rankOnly)
{
DOFVector<double>::Iterator dofIt(vec, USED_DOFS);
for (dofIt.reset(); !dofIt.end(); ++dofIt) {
if (rankOnly && !meshDistributor->getIsRankDof(dofIt.getDOFIndex()))
continue;
// Calculate global row index of the dof.
DegreeOfFreedom globalRowDof =
meshDistributor->mapLocalToGlobal(dofIt.getDOFIndex());
double value = *dofIt;
if (boundaryDofs.count(globalRowDof)) {
int index = mapGlobalBoundaryDof[globalRowDof] * dispMult + dispAdd;
VecSetValues(petscRhsVec2, 1, &index, &value, ADD_VALUES);
} else {
int index = mapGlobalInteriorDof[globalRowDof] * dispMult + dispAdd;
VecSetValues(petscRhsVec1, 1, &index, &value, ADD_VALUES);
}
}
}
#endif
@@ -23,15 +23,19 @@
#ifndef AMDIS_PETSC_SOLVER_H
#define AMDIS_PETSC_SOLVER_H
#include <set>
#include <map>
#include <mpi.h>
#include "AMDiS_fwd.h"
#include "Global.h"
#include "Parameters.h"
#include "DOFMatrix.h"
#include "parallel/MeshDistributor.h"
#include "petsc.h"
#include "petscsys.h"
#include "petscao.h"
#include <petsc.h>
#include <petscsys.h>
#include <petscao.h>
namespace AMDiS {
@@ -40,12 +44,17 @@ namespace AMDiS {