Commit 3a67bd7b authored by Backofen, Rainer

bug fix for the serial PETSc solver and a workaround to get Rosenbrock working in parallel (correct but inefficient)
parent 5783168b
@@ -188,10 +188,10 @@ namespace AMDiS {
     vecSol.destroy();
     vecRhs.destroy();
-    if (!storeMatrixData)
+    if (!storeMatrixData){
       petscMat.destroy();
-    runner.exit();
+      runner.exit();
+    }
     return error;
   }
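
Note on this hunk: with the original dangling if, only petscMat.destroy() was guarded, so runner.exit() ran unconditionally and shut the runner down even when the matrix data was meant to be kept for reuse. The added braces make both teardown calls conditional. A minimal self-contained sketch of the corrected cleanup, with Mat, Runner, and solveAndCleanup as hypothetical stand-ins for the AMDiS types:

    #include <iostream>

    // Hypothetical stand-ins for the PETSc matrix wrapper and runner
    // referenced in the hunk above.
    struct Mat    { void destroy() { std::cout << "petscMat.destroy()\n"; } };
    struct Runner { void exit()    { std::cout << "runner.exit()\n"; } };

    int solveAndCleanup(bool storeMatrixData)
    {
      Mat petscMat;
      Runner runner;
      int error = 0;
      // ... solve the system ...
      // Fixed cleanup: both calls are skipped when the matrix data is
      // stored for reuse; before the fix, runner.exit() always ran.
      if (!storeMatrixData) {
        petscMat.destroy();
        runner.exit();
      }
      return error;
    }

    int main() { return solveAndCleanup(false); }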
@@ -73,7 +73,10 @@ namespace AMDiS {
     *newUn = *unVec;
     *lowSol = *unVec;
+    for (int i = 0; i < nComponents; i++) {
+      std::cout << "RRDDRX " << i << " " << lowSol->getDOFVector(i)->Int()
+                << " " << newUn->getDOFVector(i)->Int() << " " << *tauPtr << "\n";
+    }
     for (int i = 0; i < rm->getStages(); i++) {
       *stageSolution = *unVec;
       for (int j = 0; j < i; j++) {
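
The loop added in this hunk prints, for each solution component, the integrals of lowSol and newUn together with the current timestep *tauPtr (DOFVector::Int() integrates the finite element function over the domain), giving a quick per-timestep sanity check. A sketch that factors the same debug output into a helper, assuming the SystemVector/DOFVector API used above and an umbrella AMDiS header:

    #include <iostream>
    #include "AMDiS.h"  // assumed umbrella header; adjust to the local include layout

    // Sketch only: prints the same "RRDDRX" integral check as the loop above.
    void printIntegralCheck(AMDiS::SystemVector* lowSol,
                            AMDiS::SystemVector* newUn,
                            double tau, int nComponents)
    {
      for (int i = 0; i < nComponents; i++)
        std::cout << "RRDDRX " << i << " "
                  << lowSol->getDOFVector(i)->Int() << " "
                  << newUn->getDOFVector(i)->Int() << " "
                  << tau << "\n";
    }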
@@ -95,7 +98,14 @@ namespace AMDiS {
     }
     ProblemStat::buildAfterCoarsen(adaptInfo, flag, (i == 0), asmVector);
+#if defined HAVE_PARALLEL_PETSC
+    // TODO: matrix reuse is broken with parallel PETSc solvers, so the
+    //       Rosenbrock scheme is inefficient here, but it works. (Rainer)
+    ProblemStat::solve(adaptInfo, true, false);
+#else
     ProblemStat::solve(adaptInfo, i == 0, i + 1 < rm->getStages());
+#endif
     *(stageSolutions[i]) = *solution;
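
This hunk trades efficiency for correctness: with parallel PETSc, reusing the assembled matrix data across Rosenbrock stages currently fails, so every stage recreates it and nothing is stored; in the serial build the data is created only in the first stage and kept while further stages remain. A minimal sketch of the resulting call pattern per stage, where the boolean parameter roles (create and store matrix data) are inferred from the call sites and problem/nStages are placeholders:

    for (int i = 0; i < nStages; i++) {
    #if defined HAVE_PARALLEL_PETSC
      // Workaround: recreate the matrix data in every stage and never
      // store it, so nothing is reused between stages; correct but slow.
      problem.solve(adaptInfo, /* createMatrixData = */ true,
                               /* storeMatrixData  = */ false);
    #else
      // Serial path: create the matrix data only in the first stage and
      // keep it while further stages remain, so later stages reuse it.
      problem.solve(adaptInfo, i == 0, i + 1 < nStages);
    #endif
    }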