Commit bb1e643b authored by Thomas Witkowski

SubDomainSolver interface for sequential sub solvers in the FETI-DP method is fixed and should work now.
parent bc02aae5
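
For orientation: the hunks below replace the FETI-DP solver's internal coupling matrices (mat_b_b, mat_primal_primal, mat_b_primal, mat_primal_b) and its solveLocalProblem() with calls into the new SubDomainSolver interface. A minimal sketch of that interface, reconstructed only from the calls visible in this diff (the actual declarations in parallel/SubDomainSolver.h may differ), could look like this:

// Sketch inferred from usage in this commit; not the verbatim header.
class SubDomainSolver {
public:
  void fillPetscMatrix(Matrix<DOFMatrix*> *mat);  // assemble interior and coarse-space blocks
  void fillPetscRhs(SystemVector *vec);           // assemble f_B and f_Pi
  void solveGlobal(Vec &rhs, Vec &sol);           // apply inv(K_BB) to an interior vector
  void solvePetscMatrix(SystemVector &vec, AdaptInfo *adaptInfo);
  void destroyMatrixData();
  void destroyVectorData();

  Mat& getMatIntCoarse();     // K_BPi:  interior-to-coarse coupling
  Mat& getMatCoarseInt();     // K_PiB:  coarse-to-interior coupling
  Mat& getMatCoarseCoarse();  // K_PiPi: coarse-space block
  Vec& getRhsInterior();      // f_B
  Vec& getRhsCoarseSpace();   // f_Pi
};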
......@@ -95,6 +95,8 @@ namespace AMDiS {
petscSolver->solvePetscMatrix(*solution, adaptInfo);
petscSolver->destroyVectorData();
if (!storeMatrixData)
petscSolver->destroyMatrixData();
......
......@@ -79,6 +79,9 @@ namespace AMDiS {
/// Destroys all matrix data structures.
virtual void destroyMatrixData() = 0;
/// Destroys all vector data structures.
virtual void destroyVectorData() = 0;
virtual Flag getBoundaryDofRequirement()
{
return 0;
......
......@@ -30,10 +30,10 @@ namespace AMDiS {
MatShellGetContext(mat, &ctx);
SchurPrimalData* data = static_cast<SchurPrimalData*>(ctx);
MatMult(*(data->mat_b_primal), x, data->tmp_vec_b);
data->fetiSolver->solveLocalProblem(data->tmp_vec_b, data->tmp_vec_b);
MatMult(*(data->mat_primal_b), data->tmp_vec_b, data->tmp_vec_primal);
MatMult(*(data->mat_primal_primal), x, y);
MatMult(data->subSolver->getMatIntCoarse(), x, data->tmp_vec_b);
data->subSolver->solveGlobal(data->tmp_vec_b, data->tmp_vec_b);
MatMult(data->subSolver->getMatCoarseInt(), data->tmp_vec_b, data->tmp_vec_primal);
MatMult(data->subSolver->getMatCoarseCoarse(), x, y);
VecAXPBY(y, -1.0, 1.0, data->tmp_vec_primal);
return 0;
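
The rewritten shell multiplication above applies the primal Schur complement matrix-free. Assuming the usual FETI-DP block naming (K_BB interior block, K_PiPi coarse block, K_BPi and K_PiB the couplings), the temporaries realize

    y = S_{\Pi\Pi}\,x = \bigl(K_{\Pi\Pi} - K_{\Pi B} K_{BB}^{-1} K_{B\Pi}\bigr)\,x,

with subSolver->solveGlobal() supplying the action of K_{BB}^{-1}.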
......@@ -51,13 +51,13 @@ namespace AMDiS {
FetiData* data = static_cast<FetiData*>(ctx);
MatMultTranspose(*(data->mat_lagrange), x, data->tmp_vec_b);
data->fetiSolver->solveLocalProblem(data->tmp_vec_b, data->tmp_vec_b);
data->subSolver->solveGlobal(data->tmp_vec_b, data->tmp_vec_b);
MatMult(*(data->mat_lagrange), data->tmp_vec_b, data->tmp_vec_lagrange);
MatMult(*(data->mat_primal_b), data->tmp_vec_b, data->tmp_vec_primal);
MatMult(data->subSolver->getMatCoarseInt(), data->tmp_vec_b, data->tmp_vec_primal);
KSPSolve(*(data->ksp_schur_primal), data->tmp_vec_primal, data->tmp_vec_primal);
MatMult(*(data->mat_b_primal), data->tmp_vec_primal, data->tmp_vec_b);
data->fetiSolver->solveLocalProblem(data->tmp_vec_b, data->tmp_vec_b);
MatMult(data->subSolver->getMatIntCoarse(), data->tmp_vec_primal, data->tmp_vec_b);
data->subSolver->solveGlobal(data->tmp_vec_b, data->tmp_vec_b);
MatMult(*(data->mat_lagrange), data->tmp_vec_b, y);
VecAXPBY(y, 1.0, 1.0, data->tmp_vec_lagrange);
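
Likewise, with L denoting the Lagrange constraint matrix mat_lagrange and S_{\Pi\Pi} the primal Schur complement inverted by ksp_schur_primal, the FETI-DP operator applied here is

    F\,\lambda = L K_{BB}^{-1} L^T \lambda
               + L K_{BB}^{-1} K_{B\Pi}\, S_{\Pi\Pi}^{-1}\, K_{\Pi B} K_{BB}^{-1} L^T \lambda,

which matches the sequence MatMultTranspose, solveGlobal, KSPSolve, solveGlobal, MatMult above.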
......@@ -487,10 +487,7 @@ namespace AMDiS {
if (schurPrimalSolver == 0) {
MSG("Create iterative schur primal solver!\n");
schurPrimalData.mat_primal_primal = &(subDomainSolver->getMatCoarseCoarse());
schurPrimalData.mat_primal_b = &(subDomainSolver->getMatCoarseInt());
schurPrimalData.mat_b_primal = &(subDomainSolver->getMatIntCoarse());
schurPrimalData.fetiSolver = this;
schurPrimalData.subSolver = subDomainSolver;
VecCreateMPI(mpiComm,
localDofMap.getRankDofs(), localDofMap.getOverallDofs(),
......@@ -579,20 +576,20 @@ namespace AMDiS {
MatAssemblyBegin(matBPi, MAT_FINAL_ASSEMBLY);
MatAssemblyEnd(matBPi, MAT_FINAL_ASSEMBLY);
MatDuplicate(subDomainSolver->getMatCoarseCoarse(), MAT_COPY_VALUES, &mat_schur_primal);
MatMatMult(subDomainSolver->getMatCoarseInt(), matBPi, MAT_INITIAL_MATRIX, PETSC_DEFAULT, &matPrimal);
MatAXPY(subDomainSolver->getMatCoarseCoarse(), -1.0, matPrimal, DIFFERENT_NONZERO_PATTERN);
MatAXPY(mat_schur_primal, -1.0, matPrimal, DIFFERENT_NONZERO_PATTERN);
MatDestroy(&matPrimal);
MatDestroy(&matBPi);
MatInfo minfo;
MatGetInfo(subDomainSolver->getMatCoarseCoarse(), MAT_GLOBAL_SUM, &minfo);
MatGetInfo(mat_schur_primal, MAT_GLOBAL_SUM, &minfo);
MSG("Schur primal matrix nnz = %f\n", minfo.nz_used);
KSPCreate(mpiComm, &ksp_schur_primal);
KSPSetOperators(ksp_schur_primal,
subDomainSolver->getMatCoarseCoarse(),
subDomainSolver->getMatCoarseCoarse(),
KSPSetOperators(ksp_schur_primal, mat_schur_primal, mat_schur_primal,
SAME_NONZERO_PATTERN);
KSPSetOptionsPrefix(ksp_schur_primal, "schur_primal_");
KSPSetType(ksp_schur_primal, KSPPREONLY);
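
This branch (schurPrimalSolver == 1) assembles the same Schur complement explicitly instead of applying it matrix-free. Assuming matBPi holds K_{BB}^{-1} K_{B\Pi} (built in the elided lines above), mat_schur_primal ends up as

    S_{\Pi\Pi} = K_{\Pi\Pi} - K_{\Pi B}\,\bigl(K_{BB}^{-1} K_{B\Pi}\bigr),

and is then passed to the PREONLY ksp_schur_primal (previously the unmodified coarse matrix was used as the operator).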
......@@ -613,19 +610,14 @@ namespace AMDiS {
FUNCNAME("PetscSolverFeti::destroySchurPrimal()");
if (schurPrimalSolver == 0) {
schurPrimalData.mat_primal_primal = PETSC_NULL;
schurPrimalData.mat_primal_b = PETSC_NULL;
schurPrimalData.mat_b_primal = PETSC_NULL;
schurPrimalData.fetiSolver = NULL;
schurPrimalData.subSolver = NULL;
VecDestroy(&schurPrimalData.tmp_vec_b);
VecDestroy(&schurPrimalData.tmp_vec_primal);
}
MatDestroy(&mat_schur_primal);
KSPDestroy(&ksp_schur_primal);
} else {
KSPDestroy(&ksp_schur_primal);
}
}
......@@ -635,10 +627,8 @@ namespace AMDiS {
// === Create FETI-DP solver object. ===
fetiData.mat_primal_b = &(subDomainSolver->getMatCoarseInt());
fetiData.mat_b_primal = &(subDomainSolver->getMatIntCoarse());
fetiData.mat_lagrange = &mat_lagrange;
fetiData.fetiSolver = this;
fetiData.subSolver = subDomainSolver;
fetiData.ksp_schur_primal = &ksp_schur_primal;
VecCreateMPI(mpiComm,
......@@ -765,10 +755,8 @@ namespace AMDiS {
// === Destroy FETI-DP solver object. ===
fetiData.mat_primal_b = PETSC_NULL;
fetiData.mat_b_primal = PETSC_NULL;
fetiData.mat_lagrange = PETSC_NULL;
fetiData.fetiSolver = NULL;
fetiData.subSolver = NULL;
fetiData.ksp_schur_primal = PETSC_NULL;
VecDestroy(&fetiData.tmp_vec_b);
......@@ -933,26 +921,6 @@ namespace AMDiS {
int nRowsRankPrimal = primalDofMap.getRankDofs();
int nRowsOverallPrimal = primalDofMap.getOverallDofs();
#if 0
MatCreateSeqAIJ(PETSC_COMM_SELF, nRowsRankB, nRowsRankB, 60, PETSC_NULL,
&mat_b_b);
MatCreateMPIAIJ(mpiComm,
nRowsRankPrimal, nRowsRankPrimal,
nRowsOverallPrimal, nRowsOverallPrimal,
60, PETSC_NULL, 60, PETSC_NULL, &mat_primal_primal);
MatCreateMPIAIJ(mpiComm,
nRowsRankB, nRowsRankPrimal,
nRowsOverallB, nRowsOverallPrimal,
60, PETSC_NULL, 60, PETSC_NULL, &mat_b_primal);
MatCreateMPIAIJ(mpiComm,
nRowsRankPrimal, nRowsRankB,
nRowsOverallPrimal, nRowsOverallB,
30, PETSC_NULL, 30, PETSC_NULL, &mat_primal_b);
#endif
subDomainSolver->fillPetscMatrix(mat);
......@@ -983,6 +951,8 @@ namespace AMDiS {
}
if (fetiPreconditioner != FETI_NONE) {
// === Prepare traversal of sequentially created matrices. ===
using mtl::tag::row; using mtl::tag::nz; using mtl::begin; using mtl::end;
......@@ -992,13 +962,6 @@ namespace AMDiS {
typedef traits::range_generator<row, Matrix>::type cursor_type;
typedef traits::range_generator<nz, cursor_type>::type icursor_type;
vector<int> cols, colsOther;
vector<double> values, valuesOther;
cols.reserve(300);
colsOther.reserve(300);
values.reserve(300);
valuesOther.reserve(300);
vector<int> colsLocal, colsLocalOther;
vector<double> valuesLocal, valuesLocalOther;
colsLocal.reserve(300);
......@@ -1025,11 +988,6 @@ namespace AMDiS {
bool rowPrimal = isPrimal(feSpaces[i], *cursor);
cols.clear();
colsOther.clear();
values.clear();
valuesOther.clear();
colsLocal.clear();
colsLocalOther.clear();
valuesLocal.clear();
......@@ -1042,28 +1000,6 @@ namespace AMDiS {
bool colPrimal = isPrimal(feSpaces[j], col(*icursor));
if (colPrimal) {
if (rowPrimal) {
cols.push_back(col(*icursor));
values.push_back(value(*icursor));
} else {
colsOther.push_back(col(*icursor));
valuesOther.push_back(value(*icursor));
}
} else {
if (rowPrimal) {
colsOther.push_back(col(*icursor));
valuesOther.push_back(value(*icursor));
} else {
cols.push_back(col(*icursor));
values.push_back(value(*icursor));
}
}
// === For preconditioner ===
if (fetiPreconditioner != FETI_NONE) {
if (!rowPrimal && !colPrimal) {
if (!isDual(feSpaces[i], *cursor)) {
if (!isDual(feSpaces[j], col(*icursor))) {
......@@ -1087,57 +1023,9 @@ namespace AMDiS {
}
}
}
}
} // for each nnz in row
// === Set matrix values. ===
if (rowPrimal) {
int rowIndex = primalDofMap.getMatIndex(i, *cursor);
for (unsigned int k = 0; k < cols.size(); k++)
cols[k] = primalDofMap.getMatIndex(j, cols[k]);
#if 0
MatSetValues(mat_primal_primal, 1, &rowIndex, cols.size(),
&(cols[0]), &(values[0]), ADD_VALUES);
#endif
if (colsOther.size()) {
for (unsigned int k = 0; k < colsOther.size(); k++)
colsOther[k] = localDofMap.getMatIndex(j, colsOther[k]);
#if 0
MatSetValues(mat_primal_b, 1, &rowIndex, colsOther.size(),
&(colsOther[0]), &(valuesOther[0]), ADD_VALUES);
#endif
}
} else {
int localRowIndex = localDofMap.getLocalMatIndex(i, *cursor);
for (unsigned int k = 0; k < cols.size(); k++)
cols[k] = localDofMap.getLocalMatIndex(j, cols[k]);
#if 0
MatSetValues(mat_b_b, 1, &localRowIndex, cols.size(),
&(cols[0]), &(values[0]), ADD_VALUES);
#endif
if (colsOther.size()) {
int globalRowIndex = localDofMap.getMatIndex(i, *cursor);
for (unsigned int k = 0; k < colsOther.size(); k++)
colsOther[k] = primalDofMap.getMatIndex(j, colsOther[k]);
#if 0
MatSetValues(mat_b_primal, 1, &globalRowIndex, colsOther.size(),
&(colsOther[0]), &(valuesOther[0]), ADD_VALUES);
#endif
}
}
// === Set matrix values for preconditioner ===
if (!rowPrimal) {
......@@ -1175,22 +1063,6 @@ namespace AMDiS {
}
}
#if 0
// === Start global assembly procedure. ===
MatAssemblyBegin(mat_b_b, MAT_FINAL_ASSEMBLY);
MatAssemblyEnd(mat_b_b, MAT_FINAL_ASSEMBLY);
MatAssemblyBegin(mat_primal_primal, MAT_FINAL_ASSEMBLY);
MatAssemblyEnd(mat_primal_primal, MAT_FINAL_ASSEMBLY);
MatAssemblyBegin(mat_b_primal, MAT_FINAL_ASSEMBLY);
MatAssemblyEnd(mat_b_primal, MAT_FINAL_ASSEMBLY);
MatAssemblyBegin(mat_primal_b, MAT_FINAL_ASSEMBLY);
MatAssemblyEnd(mat_primal_b, MAT_FINAL_ASSEMBLY);
#endif
// === Start global assembly procedure for preconditioner matrices. ===
if (fetiPreconditioner != FETI_NONE) {
......@@ -1208,7 +1080,7 @@ namespace AMDiS {
MatAssemblyBegin(mat_duals_interior, MAT_FINAL_ASSEMBLY);
MatAssemblyEnd(mat_duals_interior, MAT_FINAL_ASSEMBLY);
}
}
// === Create and fill PETSc matrix for Lagrange constraints. ===
......@@ -1230,69 +1102,7 @@ namespace AMDiS {
{
FUNCNAME("PetscSolverFeti::fillPetscRhs()");
#if 0
vector<const FiniteElemSpace*> feSpaces = getFeSpaces(vec);
VecCreateMPI(mpiComm,
localDofMap.getRankDofs(), localDofMap.getOverallDofs(), &f_b);
VecCreateMPI(mpiComm,
primalDofMap.getRankDofs(), primalDofMap.getOverallDofs(),
&f_primal);
for (unsigned int i = 0; i < feSpaces.size(); i++) {
DOFVector<double>::Iterator dofIt(vec->getDOFVector(i), USED_DOFS);
for (dofIt.reset(); !dofIt.end(); ++dofIt) {
int index = dofIt.getDOFIndex();
if (isPrimal(feSpaces[i], index)) {
index = primalDofMap.getMatIndex(i, index);
VecSetValue(f_primal, index, *dofIt, ADD_VALUES);
} else {
index = localDofMap.getMatIndex(i, index);
VecSetValue(f_b, index, *dofIt, INSERT_VALUES);
}
}
}
VecAssemblyBegin(f_b);
VecAssemblyEnd(f_b);
VecAssemblyBegin(f_primal);
VecAssemblyEnd(f_primal);
#else
subDomainSolver->fillPetscRhs(vec);
#endif
}
void PetscSolverFeti::solveLocalProblem(Vec &rhs, Vec &sol)
{
FUNCNAME("PetscSolverFeti::solveLocalProblem()");
Vec tmp;
VecCreateSeq(PETSC_COMM_SELF, localDofMap.getRankDofs(), &tmp);
PetscScalar *tmpValues, *rhsValues;
VecGetArray(tmp, &tmpValues);
VecGetArray(rhs, &rhsValues);
for (int i = 0; i < localDofMap.getRankDofs(); i++)
tmpValues[i] = rhsValues[i];
VecRestoreArray(rhs, &rhsValues);
VecRestoreArray(tmp, &tmpValues);
subDomainSolver->solve(tmp, tmp);
VecGetArray(tmp, &tmpValues);
VecGetArray(sol, &rhsValues);
for (int i = 0; i < localDofMap.getRankDofs(); i++)
rhsValues[i] = tmpValues[i];
VecRestoreArray(sol, &rhsValues);
VecRestoreArray(tmp, &tmpValues);
VecDestroy(&tmp);
}
......@@ -1320,18 +1130,19 @@ namespace AMDiS {
localDofMap.getRankDofs(), localDofMap.getOverallDofs(), &tmp_b0);
VecCreateMPI(mpiComm,
localDofMap.getRankDofs(), localDofMap.getOverallDofs(), &tmp_b1);
VecCreateMPI(mpiComm,
primalDofMap.getRankDofs(), primalDofMap.getOverallDofs(), &tmp_primal0);
VecCreateMPI(mpiComm,
primalDofMap.getRankDofs(), primalDofMap.getOverallDofs(), &tmp_primal1);
MatGetVecs(mat_lagrange, PETSC_NULL, &tmp_lagrange0);
MatGetVecs(mat_lagrange, PETSC_NULL, &vec_rhs);
MatGetVecs(subDomainSolver->getMatCoarseCoarse(), PETSC_NULL, &tmp_primal0);
MatGetVecs(subDomainSolver->getMatCoarseCoarse(), PETSC_NULL, &tmp_primal1);
// === Create new rhs ===
// d = L inv(K_BB) f_B - L inv(K_BB) K_BPi inv(S_PiPi) [f_Pi - K_PiB inv(K_BB) f_B]
// vec_rhs = L * inv(K_BB) * f_B
solveLocalProblem(subDomainSolver->getRhsInterior(), tmp_b0);
subDomainSolver->solveGlobal(subDomainSolver->getRhsInterior(), tmp_b0);
MatMult(mat_lagrange, tmp_b0, vec_rhs);
// tmp_primal0 = M_PiB * inv(K_BB) * f_B
......@@ -1345,7 +1156,7 @@ namespace AMDiS {
//
MatMult(subDomainSolver->getMatIntCoarse(), tmp_primal0, tmp_b0);
solveLocalProblem(tmp_b0, tmp_b0);
subDomainSolver->solveGlobal(tmp_b0, tmp_b0);
MatMult(mat_lagrange, tmp_b0, tmp_lagrange0);
//
......@@ -1358,11 +1169,11 @@ namespace AMDiS {
// === Solve for u_primals. ===
VecCopy(subDomainSolver->getRhsCoarseSpace(), tmp_primal0);
solveLocalProblem(subDomainSolver->getRhsInterior(), tmp_b0);
subDomainSolver->solveGlobal(subDomainSolver->getRhsInterior(), tmp_b0);
MatMult(subDomainSolver->getMatCoarseInt(), tmp_b0, tmp_primal1);
VecAXPBY(tmp_primal0, -1.0, 1.0, tmp_primal1);
MatMultTranspose(mat_lagrange, vec_rhs, tmp_b0);
solveLocalProblem(tmp_b0, tmp_b0);
subDomainSolver->solveGlobal(tmp_b0, tmp_b0);
MatMult(subDomainSolver->getMatCoarseInt(), tmp_b0, tmp_primal1);
VecAXPBY(tmp_primal0, 1.0, 1.0, tmp_primal1);
......@@ -1377,28 +1188,18 @@ namespace AMDiS {
MatMult(subDomainSolver->getMatIntCoarse(), tmp_primal0, tmp_b1);
VecAXPBY(tmp_b0, -1.0, 1.0, tmp_b1);
solveLocalProblem(tmp_b0, tmp_b0);
subDomainSolver->solveGlobal(tmp_b0, tmp_b0);
// === And recover AMDiS solution vectors. ===
recoverSolution(tmp_b0, tmp_primal0, vec);
// === Destroy all data structures. ===
VecDestroy(&vec_rhs);
VecDestroy(&tmp_b0);
VecDestroy(&tmp_b1);
VecDestroy(&tmp_lagrange0);
VecDestroy(&tmp_primal0);
VecDestroy(&tmp_primal1);
#if 0
VecDestroy(&f_b);
VecDestroy(&f_primal);
#else
subDomainSolver->solvePetscMatrix(vec, NULL);
#endif
}
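
For reference, the back substitution implemented in the solve routine above follows the standard FETI-DP recovery. Assuming the elided hunks perform the Lagrange solve (producing \lambda in vec_rhs) and the coarse solve (producing u_\Pi in tmp_primal0), the steps amount to

    u_\Pi = S_{\Pi\Pi}^{-1}\,\bigl[f_\Pi - K_{\Pi B} K_{BB}^{-1}\,(f_B - L^T \lambda)\bigr],
    u_B   = K_{BB}^{-1}\,\bigl[f_B - L^T \lambda - K_{B\Pi}\, u_\Pi\bigr],

after which recoverSolution() copies u_B and u_\Pi back into the AMDiS solution vectors.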
......@@ -1406,13 +1207,6 @@ namespace AMDiS {
{
FUNCNAME("PetscSolverFeti::destroyMatrixData()");
#if 0
MatDestroy(&mat_b_b);
MatDestroy(&mat_primal_primal);
MatDestroy(&mat_b_primal);
MatDestroy(&mat_primal_b);
#endif
MatDestroy(&mat_lagrange);
// === Destroy preconditioner data structures. ===
......@@ -1432,6 +1226,14 @@ namespace AMDiS {
}
void PetscSolverFeti::destroyVectorData()
{
FUNCNAME("PetscSolverFeti::destroyVectorData()");
subDomainSolver->destroyVectorData();
}
void PetscSolverFeti::solvePetscMatrix(SystemVector &vec, AdaptInfo *adaptInfo)
{
FUNCNAME("PetscSolverFeti::solvePetscMatrix()");
......
......@@ -56,6 +56,9 @@ namespace AMDiS {
/// Destroys all matrix data structures.
void destroyMatrixData();
/// Destroys all vector data structures.
void destroyVectorData();
/// Returns flags denoting which information about the boundary DOFs is
/// required by the FETI-DP solver.
Flag getBoundaryDofRequirement()
......@@ -66,8 +69,6 @@ namespace AMDiS {
MeshDistributor::BOUNDARY_FILL_INFO_RECV_DOFS;
}
void solveLocalProblem(Vec &rhs, Vec &sol);
protected:
/// After mesh changes, or if the solver is called the first time, this
/// function creates all matrix and vector objects with the appropriate
......@@ -179,32 +180,9 @@ namespace AMDiS {
/// ranks in which the DOF is contained.
map<const FiniteElemSpace*, DofIndexToPartitions> boundaryDofRanks;
#if 0
/// Global PETSc matrix of non primal variables.
Mat mat_b_b;
/// Global PETSc matrix of primal variables.
Mat mat_primal_primal;
/// Global PETSc matrices that connect the primal with the non
/// primal variables.
Mat mat_b_primal, mat_primal_b;
#endif
/// Global PETSc matrix of Lagrange variables.
Mat mat_lagrange;
#if 0
/// Right hand side PETSc vectors for primal and non primal variables.
Vec f_b, f_primal;
#endif
#if 0
/// PETSc solver object that inverts the matrix of non primal
/// variables, \ref mat_b_b
KSP ksp_b;
#endif
/// 0: Solve the Schur complement on the primal variables with an iterative solver.
/// 1: Create the Schur complement matrix explicitly and solve it with a
/// direct solver.
......
......@@ -24,6 +24,7 @@
#define AMDIS_PETSC_SOLVER_FETI_STRUCTS_H
#include <map>
#include "parallel/SubDomainSolver.h"
namespace AMDiS {
......@@ -36,19 +37,13 @@ namespace AMDiS {
* primal schur complement. \ref petscMultMatSchurPrimal
*/
struct SchurPrimalData {
/// Pointer to the matrix containing the primal variables.
Mat *mat_primal_primal;
/// Coupling matrices between the primal and the B variables.
Mat *mat_primal_b, *mat_b_primal;
/// Temporary vector on the B variables.
Vec tmp_vec_b;
/// Temporary vector on the primal variables.
Vec tmp_vec_primal;
PetscSolverFeti *fetiSolver;
SubDomainSolver* subSolver;
};
......@@ -58,9 +53,6 @@ namespace AMDiS {
* \ref petscMultMatFeti
*/
struct FetiData {
/// Coupling matrices between the primal and the B variables.
Mat *mat_primal_b, *mat_b_primal;
/// Matrix of Lagrange variables.
Mat *mat_lagrange;
......@@ -73,7 +65,7 @@ namespace AMDiS {
/// Temporary vector on the Lagrange variables.
Vec tmp_vec_lagrange;
PetscSolverFeti *fetiSolver;
SubDomainSolver* subSolver;
/// Pointer to the solver of the schur complement on the primal variables.
KSP *ksp_schur_primal;
......
......@@ -164,14 +164,6 @@ namespace AMDiS {
// === Synchronize DOFs at common DOFs, i.e., DOFs that correspond to ===
// === more than one partition. ===
meshDistributor->synchVector(vec);
// === Destroy PETSc's variables. ===
VecDestroy(&petscRhsVec);
for (int i = 0; i < nComponents; i++)
VecDestroy(&(nestVec[i]));
VecDestroy(&petscSolVec);
}
......@@ -188,6 +180,18 @@ namespace AMDiS {
}
void PetscSolverGlobalBlockMatrix::destroyVectorData()
{
FUNCNAME("PetscSolverGlobalBlockMatrix::destroyVectorData()");
VecDestroy(&petscRhsVec);
for (int i = 0; i < nComponents; i++)
VecDestroy(&(nestVec[i]));
VecDestroy(&petscSolVec);
}
void PetscSolverGlobalBlockMatrix::setDofMatrix(Mat& petscMat,
DOFMatrix* mat,
int dispRowBlock,
......
......@@ -44,10 +44,12 @@ namespace AMDiS {
void fillPetscRhs(SystemVector *vec);
virtual void solvePetscMatrix(SystemVector &vec, AdaptInfo *adaptInfo);
void solvePetscMatrix(SystemVector &vec, AdaptInfo *adaptInfo);
void destroyMatrixData();
void destroyVectorData();
protected:
/// Takes a DOF matrix and sends the values to the global PETSc matrix.
void setDofMatrix(Mat& petscMat, DOFMatrix* mat,
......
......@@ -211,10 +211,6 @@ namespace AMDiS {
// Print iteration counter and residual norm of the solution.
printSolutionInfo(adaptInfo);