Aland, Sebastian / amdis

Commit 0e885e23, authored Nov 11, 2011 by Thomas Witkowski
parent 02638872
Changes: 18 files

    Rosenbrock should now also work in parallel; tested only with the FETI-DP solver.
AMDiS/src/ProblemInstat.h

@@ -64,7 +64,7 @@ namespace AMDiS {
     virtual void solve(AdaptInfo *adaptInfo) {}
 
-    virtual void solve(AdaptInfo *adaptInfo, bool fixedMatrix)
+    virtual void solve(AdaptInfo *adaptInfo, bool, bool)
     {
       solve(adaptInfo);
     }
AMDiS/src/ProblemStat.cc

@@ -540,7 +540,7 @@ namespace AMDiS {
   }
 
-  void ProblemStatSeq::solve(AdaptInfo *adaptInfo, bool fixedMatrix)
+  void ProblemStatSeq::solve(AdaptInfo *adaptInfo, bool, bool)
   {
     FUNCNAME("Problem::solve()");
AMDiS/src/ProblemStat.h

@@ -67,7 +67,7 @@ namespace AMDiS {
   public:
     /// Constructor
     ProblemStatSeq(string nameStr,
-                   ProblemIterationInterface *problemIteration = NULL)
+		   ProblemIterationInterface *problemIteration = NULL)
       : StandardProblemIteration(this),
	name(nameStr),
	nComponents(-1),

@@ -147,7 +147,9 @@ namespace AMDiS {
     /** \brief
      * Implementation of ProblemStatBase::solve(). Deligates the solving
      * of problems system to \ref solver.
      */
-    virtual void solve(AdaptInfo *adaptInfo, bool fixedMatrix = false);
+    void solve(AdaptInfo *adaptInfo,
+               bool createMatrixData = true,
+               bool storeMatrixData = false);
 
     /** \brief
      * Implementation of ProblemStatBase::estimate(). Deligates the estimation
AMDiS/src/ProblemStatBase.h

@@ -104,8 +104,27 @@ namespace AMDiS {
     /// Coarsening of the mesh.
     virtual Flag coarsenMesh(AdaptInfo *adaptInfo) = 0;
 
-    /// Solves the assembled system. The result is an approximative solution.
-    virtual void solve(AdaptInfo *adaptInfo, bool fixedMatrix) = 0;
+    /** \brief
+     * Solves the assembled system. The result is an approximative solution.
+     * The last two boolean arguments can be used to controll successive
+     * solutions of systems with the same matrix.
+     *
+     * \param adaptInfo        Pointer to an \ref AdaptInfo object.
+     * \param createMatrixData If false, the solver assumes that all of its
+     *                         internal data structures for the system
+     *                         matrix are already created. This is the case,
+     *                         if we solve different systems but with the
+     *                         same matrix. After the first call to this
+     *                         function (with this parameter set to true),
+     *                         all other calls may set it to false.
+     * \param storeMatrixData  If true, all internal data structures for the
+     *                         system matrix are not deleted such that they
+     *                         can be used for next solutions with the same
+     *                         system matrix.
+     */
+    virtual void solve(AdaptInfo *adaptInfo,
+                       bool createMatrixData = true,
+                       bool storeMatrixData = false) = 0;
 
     /** \brief
      * A posteriori error estimation of the calculated solution. Should store
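The new flag pair defines a small protocol for solving several systems that share one matrix, which is exactly what a Rosenbrock scheme needs: all stage systems use the same Jacobian. A minimal usage sketch, assuming a pointer prob to some ProblemStatBase implementation and three successive stage solves (the call pattern is inferred from the doc comment above, not taken from the commit):

    // Hypothetical call sequence for three solves with one shared matrix.
    prob->solve(adaptInfo, true,  true);   // first solve: create matrix data, keep it
    prob->solve(adaptInfo, false, true);   // middle solve: reuse existing matrix data
    prob->solve(adaptInfo, false, false);  // last solve: reuse, then allow cleanup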
AMDiS/src/StandardProblemIteration.cc

@@ -37,10 +37,10 @@ namespace AMDiS {
     Flag flag = buildAndAdapt(adaptInfo, toDo);
 
     if (toDo.isSet(SOLVE))
-      problem->solve(adaptInfo, false);
+      problem->solve(adaptInfo, true, false);
 
     if (toDo.isSet(SOLVE_RHS))
-      problem->solve(adaptInfo, true);
+      problem->solve(adaptInfo, true, false);
 
     if (toDo.isSet(ESTIMATE))
       problem->estimate(adaptInfo);
AMDiS/src/parallel/PetscProblemStat.cc

@@ -61,7 +61,9 @@ namespace AMDiS {
   }
 
-  void PetscProblemStat::solve(AdaptInfo *adaptInfo, bool fixedMatrix)
+  void PetscProblemStat::solve(AdaptInfo *adaptInfo,
+                               bool createMatrixData,
+                               bool storeMatrixData)
   {
     FUNCNAME("PetscProblemStat::solve()");

@@ -79,8 +81,13 @@ namespace AMDiS {
     MSG("Overall memory usage is VM = %.1f MB RSS = %.1f MB\n", vm, rss);
 #endif
 
-    petscSolver->setMeshDistributor(meshDistributor);
-    petscSolver->fillPetscMatrix(systemMatrix, rhs);
+    if (createMatrixData) {
+      petscSolver->setMeshDistributor(meshDistributor);
+      petscSolver->fillPetscMatrix(systemMatrix);
+    }
+
+    petscSolver->fillPetscRhs(rhs);
 
 #if 0
     processMemUsage(vm, rss);

@@ -93,6 +100,9 @@ namespace AMDiS {
     petscSolver->solvePetscMatrix(*solution, adaptInfo);
 
+    if (!storeMatrixData)
+      petscSolver->destroyMatrixData();
+
 #if 0
     processMemUsage(vm, rss);
     MSG("STAGE 3\n");

@@ -102,7 +112,6 @@ namespace AMDiS {
     MSG("Overall memory usage is VM = %.1f MB RSS = %.1f MB\n", vm, rss);
 #endif
 
     INFO(info, 8)("solution of discrete system needed %.5f seconds\n",
                   MPI::Wtime() - wtime);
   }
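Taken together, these hunks give PetscProblemStat::solve() a clear lifecycle around the two new flags. A condensed view of the new control flow (simplified from the diff above; timing, memory reporting and preprocessor blocks omitted):

    if (createMatrixData) {
      // First solve of a series: set up the distributor and assemble the matrix.
      petscSolver->setMeshDistributor(meshDistributor);
      petscSolver->fillPetscMatrix(systemMatrix);
    }

    petscSolver->fillPetscRhs(rhs);                   // rhs is rebuilt on every call
    petscSolver->solvePetscMatrix(*solution, adaptInfo);

    if (!storeMatrixData)
      petscSolver->destroyMatrixData();               // drop matrix data unless reused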
AMDiS/src/parallel/PetscProblemStat.h

@@ -48,7 +48,9 @@ namespace AMDiS {
                      ProblemStatSeq *adoptProblem = NULL,
                      Flag adoptFlag = INIT_NOTHING);
 
-    void solve(AdaptInfo *adaptInfo, bool fixedMatrix = false);
+    void solve(AdaptInfo *adaptInfo,
+               bool createMatrixData = true,
+               bool storeMatrixData = false);
 
   protected:
     PetscSolver *petscSolver;
AMDiS/src/parallel/PetscSolver.h

@@ -60,18 +60,26 @@ namespace AMDiS {
     }
 
     /** \brief
-     * Create a PETSc matrix and PETSc vectors. The given DOF matrices are used to
-     * create the nnz structure of the PETSc matrix and the values are transfered to it.
-     * The given DOF vectors are used to the the values of the PETSc rhs vector.
+     * Create a PETSc matrix. The given DOF matrices are used to create the nnz
+     * structure of the PETSc matrix and the values are transfered to it.
+     *
+     * \param[in] mat
      */
-    virtual void fillPetscMatrix(Matrix<DOFMatrix*> *mat, SystemVector *vec) = 0;
+    virtual void fillPetscMatrix(Matrix<DOFMatrix*> *mat) = 0;
+
+    /** \brief
+     * Create a PETSc vector and fills it with the rhs values of the system.
+     *
+     * \param[in] vec
+     */
+    virtual void fillPetscRhs(SystemVector *vec) = 0;
 
     /// Use PETSc to solve the linear system of equations
     virtual void solvePetscMatrix(SystemVector &vec, AdaptInfo *adaptInfo) = 0;
 
+    /// Destroys all matrix data structures.
+    virtual void destroyMatrixData() = 0;
+
     virtual Flag getBoundaryDofRequirement()
     {
       return 0;
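Every concrete solver now has to implement the split interface. A hypothetical skeleton of a subclass under the new contract (the class name MyPetscSolver is illustrative and not part of the commit; the concrete solvers changed below follow exactly this shape):

    class MyPetscSolver : public PetscSolver
    {
    public:
      // Assemble only the system matrix (nnz structure and values).
      void fillPetscMatrix(Matrix<DOFMatrix*> *mat);

      // Assemble only the rhs vector; may be called many times per matrix.
      void fillPetscRhs(SystemVector *vec);

      // Solve with the previously assembled matrix and rhs.
      void solvePetscMatrix(SystemVector &vec, AdaptInfo *adaptInfo);

      // Release all matrix-related data structures.
      void destroyMatrixData();
    };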
AMDiS/src/parallel/PetscSolverFeti.cc

@@ -595,8 +595,13 @@ namespace AMDiS {
     schurPrimalData.mat_b_primal = &mat_b_primal;
     schurPrimalData.ksp_b = &ksp_b;
 
-    VecDuplicate(f_b, &(schurPrimalData.tmp_vec_b));
-    VecDuplicate(f_primal, &(schurPrimalData.tmp_vec_primal));
+    VecCreateMPI(PETSC_COMM_WORLD,
+                 nRankB * nComponents, nOverallB * nComponents,
+                 &(schurPrimalData.tmp_vec_b));
+    VecCreateMPI(PETSC_COMM_WORLD,
+                 nRankPrimals * nComponents, nOverallPrimals * nComponents,
+                 &(schurPrimalData.tmp_vec_primal));
 
     MatCreateShell(PETSC_COMM_WORLD,
                    nRankPrimals * nComponents, nRankPrimals * nComponents,

@@ -643,9 +648,15 @@ namespace AMDiS {
     fetiData.ksp_b = &ksp_b;
     fetiData.ksp_schur_primal = &ksp_schur_primal;
 
-    VecDuplicate(f_b, &(fetiData.tmp_vec_b0));
-    VecDuplicate(f_b, &(fetiData.tmp_vec_b1));
-    VecDuplicate(f_primal, &(fetiData.tmp_vec_primal));
+    VecCreateMPI(PETSC_COMM_WORLD,
+                 nRankB * nComponents, nOverallB * nComponents,
+                 &(fetiData.tmp_vec_b0));
+    VecCreateMPI(PETSC_COMM_WORLD,
+                 nRankB * nComponents, nOverallB * nComponents,
+                 &(fetiData.tmp_vec_b1));
+    VecCreateMPI(PETSC_COMM_WORLD,
+                 nRankPrimals * nComponents, nOverallPrimals * nComponents,
+                 &(fetiData.tmp_vec_primal));
 
     MatCreateShell(PETSC_COMM_WORLD,
                    nRankLagrange * nComponents, nRankLagrange * nComponents,

@@ -681,7 +692,9 @@ namespace AMDiS {
       fetiDirichletPreconData.mat_duals_interior = &mat_duals_interior;
       fetiDirichletPreconData.ksp_interior = &ksp_interior;
 
-      VecDuplicate(f_b, &(fetiDirichletPreconData.tmp_vec_b));
+      VecCreateMPI(PETSC_COMM_WORLD,
+                   nRankB * nComponents, nOverallB * nComponents,
+                   &(fetiDirichletPreconData.tmp_vec_b));
       MatGetVecs(mat_duals_duals, PETSC_NULL, &(fetiDirichletPreconData.tmp_vec_duals0));
       MatGetVecs(mat_duals_duals, PETSC_NULL, &(fetiDirichletPreconData.tmp_vec_duals1));
       MatGetVecs(mat_interior_interior, PETSC_NULL, &(fetiDirichletPreconData.tmp_vec_interior));

@@ -697,7 +710,9 @@ namespace AMDiS {
       fetiLumpedPreconData.mat_lagrange_scaled = &mat_lagrange_scaled;
       fetiLumpedPreconData.mat_duals_duals = &mat_duals_duals;
 
-      VecDuplicate(f_b, &(fetiLumpedPreconData.tmp_vec_b));
+      VecCreateMPI(PETSC_COMM_WORLD,
+                   nRankB * nComponents, nOverallB * nComponents,
+                   &(fetiLumpedPreconData.tmp_vec_b));
       MatGetVecs(mat_duals_duals, PETSC_NULL, &(fetiLumpedPreconData.tmp_vec_duals0));
       MatGetVecs(mat_duals_duals, PETSC_NULL, &(fetiLumpedPreconData.tmp_vec_duals1));

@@ -851,12 +866,11 @@ namespace AMDiS {
   }
 
-  void PetscSolverFeti::fillPetscMatrix(Matrix<DOFMatrix*> *mat, SystemVector *vec)
+  void PetscSolverFeti::fillPetscMatrix(Matrix<DOFMatrix*> *mat)
   {
     FUNCNAME("PetscSolverFeti::fillPetscMatrix()");
 
-    nComponents = vec->getSize();
+    nComponents = mat->getSize();
 
     // === Create all sets and indices. ===

@@ -1156,16 +1170,40 @@ namespace AMDiS {
     }
 
-    // === Create and fill PETSc's right hand side vectors. ===
+    // === Create and fill PETSc matrix for Lagrange constraints. ===
+
+    createMatLagrange();
+
+    // === Create PETSc solver for the Schur complement on primal variables. ===
+
+    createSchurPrimalKsp();
+
+    // === Create PETSc solver for the FETI-DP operator. ===
+
+    createFetiKsp();
+
+    // === Create solver for the non primal (thus local) variables. ===
+
+    KSPCreate(PETSC_COMM_WORLD, &ksp_b);
+    KSPSetOperators(ksp_b, mat_b_b, mat_b_b, SAME_NONZERO_PATTERN);
+    KSPSetOptionsPrefix(ksp_b, "solver_b_");
+    KSPSetFromOptions(ksp_b);
+  }
+
+  void PetscSolverFeti::fillPetscRhs(SystemVector *vec)
+  {
+    FUNCNAME("PetscSolverFeti::fillPetscRhs()");
+
+    int nComponents = vec->getSize();
 
-    VecCreate(PETSC_COMM_WORLD, &f_b);
-    VecSetSizes(f_b, nRankB * nComponents, nOverallB * nComponents);
-    VecSetType(f_b, VECMPI);
-
-    VecCreate(PETSC_COMM_WORLD, &f_primal);
-    VecSetSizes(f_primal, nRankPrimals * nComponents, nOverallPrimals * nComponents);
-    VecSetType(f_primal, VECMPI);
+    VecCreateMPI(PETSC_COMM_WORLD,
+                 nRankB * nComponents, nOverallB * nComponents, &f_b);
+    VecCreateMPI(PETSC_COMM_WORLD,
+                 nRankPrimals * nComponents, nOverallPrimals * nComponents,
+                 &f_primal);
 
     for (int i = 0; i < nComponents; i++) {
       DOFVector<double>::Iterator dofIt(vec->getDOFVector(i), USED_DOFS);

@@ -1194,21 +1232,6 @@ namespace AMDiS {
     VecAssemblyBegin(f_primal);
     VecAssemblyEnd(f_primal);
-
-    // === Create and fill PETSc matrix for Lagrange constraints. ===
-
-    createMatLagrange();
-
-    // === Create PETSc solver for the Schur complement on primal variables. ===
-
-    createSchurPrimalKsp();
-
-    // === Create PETSc solver for the FETI-DP operator. ===
-
-    createFetiKsp();
   }

@@ -1216,11 +1239,9 @@ namespace AMDiS {
   {
     FUNCNAME("PetscSolverFeti::solveFetiMatrix()");
 
-    // Create transpose of Lagrange matrix.
-    Mat mat_lagrange_transpose;
+    Mat mat_lagrange_transpose;
     MatTranspose(mat_lagrange, MAT_INITIAL_MATRIX, &mat_lagrange_transpose);
 
     // === Create nested matrix which will contain the overall FETI system. ===
 
     Mat A;

@@ -1261,12 +1282,8 @@ namespace AMDiS {
     // === Create rhs and solution vectors for the overall FETI system. ===
 
-    Vec f;
-    VecCreate(PETSC_COMM_WORLD, &f);
-    VecSetSizes(f, nRankNest, nOverallNest);
-    VecSetType(f, VECMPI);
-
-    Vec b;
+    Vec f, b;
+    VecCreateMPI(PETSC_COMM_WORLD, nRankNest, nOverallNest, &f);
     VecDuplicate(f, &b);

@@ -1352,14 +1369,7 @@ namespace AMDiS {
   {
     FUNCNAME("PetscSolverFeti::solveReducedFetiMatrix()");
 
-    // === Create solver for the non primal (thus local) variables. ===
-
-    KSPCreate(PETSC_COMM_WORLD, &ksp_b);
-    KSPSetOperators(ksp_b, mat_b_b, mat_b_b, SAME_NONZERO_PATTERN);
-    KSPSetOptionsPrefix(ksp_b, "solver_b_");
-    KSPSetFromOptions(ksp_b);
-
-    // RHS and solution vector.
+    // RHS vector.
     Vec vec_rhs;
 
     // Some temporary vectors.

@@ -1444,8 +1454,14 @@ namespace AMDiS {
     VecDestroy(&tmp_primal0);
     VecDestroy(&tmp_primal1);
+
+    VecDestroy(&f_b);
+    VecDestroy(&f_primal);
+  }
 
-    KSPDestroy(&ksp_b);
-  }
+  void PetscSolverFeti::destroyMatrixData()
+  {
+    FUNCNAME("PetscSolverFeti::destroyMatrixData()");
+
+    MatDestroy(&mat_b_b);
+    MatDestroy(&mat_primal_primal);

@@ -1453,14 +1469,6 @@ namespace AMDiS {
     MatDestroy(&mat_primal_b);
     MatDestroy(&mat_lagrange);
 
-    VecDestroy(&f_b);
-    VecDestroy(&f_primal);
-
-    destroySchurPrimalKsp();
-
-    destroyFetiKsp();
-
     // === Destroy preconditioner data structures. ===
 
     if (fetiPreconditioner != FETI_NONE)

@@ -1471,8 +1479,13 @@ namespace AMDiS {
       MatDestroy(&mat_interior_duals);
       MatDestroy(&mat_duals_interior);
     }
+
+    destroySchurPrimalKsp();
+
+    destroyFetiKsp();
+
+    KSPDestroy(&ksp_b);
   }
 
   void PetscSolverFeti::solvePetscMatrix(SystemVector &vec, AdaptInfo *adaptInfo)
   {

@@ -1490,6 +1503,7 @@ namespace AMDiS {
       }
     }
 #endif
+  }
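The net effect of these hunks is that all expensive FETI-DP setup now lives in fillPetscMatrix(), while fillPetscRhs() only rebuilds the two rhs vectors, so a Rosenbrock stage solve with an unchanged Jacobian can skip the setup entirely. A schematic of the new division of labor (condensed from the hunks above, not verbatim code):

    void PetscSolverFeti::fillPetscMatrix(Matrix<DOFMatrix*> *mat)
    {
      // ... assemble mat_b_b, mat_primal_primal, ... from mat ...
      createMatLagrange();                  // Lagrange constraint matrix
      createSchurPrimalKsp();               // Schur complement on primal variables
      createFetiKsp();                      // FETI-DP operator
      KSPCreate(PETSC_COMM_WORLD, &ksp_b);  // solver for the local variables
    }

    void PetscSolverFeti::fillPetscRhs(SystemVector *vec)
    {
      // Cheap per-call work: (re)create and fill f_b and f_primal only.
      VecCreateMPI(PETSC_COMM_WORLD, nRankB * nComponents,
                   nOverallB * nComponents, &f_b);
      VecCreateMPI(PETSC_COMM_WORLD, nRankPrimals * nComponents,
                   nOverallPrimals * nComponents, &f_primal);
      // ... copy values from vec into f_b / f_primal ...
    }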
AMDiS/src/parallel/PetscSolverFeti.h

@@ -133,13 +133,19 @@ namespace AMDiS {
   public:
     PetscSolverFeti();
 
-    /// Assemble the sequentially created matrices and vectors to the
-    /// global matrices and vectors required by the FETI-DP method.
-    void fillPetscMatrix(Matrix<DOFMatrix*> *mat, SystemVector *vec);
+    /// Assemble the sequentially created matrices to the global matrices
+    /// required by the FETI-DP method.
+    void fillPetscMatrix(Matrix<DOFMatrix*> *mat);
+
+    /// Assembles the global rhs vectors from the sequentially created ones.
+    void fillPetscRhs(SystemVector *vec);
 
     /// Solve the system using FETI-DP method.
     void solvePetscMatrix(SystemVector &vec, AdaptInfo *adaptInfo);
 
+    /// Destroys all matrix data structures.
+    void destroyMatrixData();
+
     /// Returns flags to denote which information of the boundary DOFs are
     /// required by the FETI-DP solver.
     Flag getBoundaryDofRequirement()
AMDiS/src/parallel/PetscSolverGlobalMatrix.cc

@@ -25,36 +25,28 @@ namespace AMDiS {
   }
 
-  void PetscSolverGlobalMatrix::fillPetscMatrix(Matrix<DOFMatrix*> *mat, SystemVector *vec)
+  void PetscSolverGlobalMatrix::fillPetscMatrix(Matrix<DOFMatrix*> *mat)
   {
     FUNCNAME("PetscSolverGlobalMatrix::fillPetscMatrix()");
 
     TEST_EXIT_DBG(meshDistributor)("No mesh distributor object defined!\n");
     TEST_EXIT_DBG(mat)("No DOF matrix defined!\n");
-    TEST_EXIT_DBG(vec)("NO DOF vector defined!\n");
 
     double wtime = MPI::Wtime();
     int nComponents = mat->getNumRows();
     int nRankRows = meshDistributor->getNumberRankDofs() * nComponents;
     int nOverallRows = meshDistributor->getNumberOverallDofs() * nComponents;
 
-    // === Create PETSc vector (rhs, solution and a temporary vector). ===
-
-    VecCreate(PETSC_COMM_WORLD, &petscRhsVec);
-    VecSetSizes(petscRhsVec, nRankRows, nOverallRows);
-    VecSetType(petscRhsVec, VECMPI);
-
-    VecCreate(PETSC_COMM_WORLD, &petscSolVec);
-    VecSetSizes(petscSolVec, nRankRows, nOverallRows);
-    VecSetType(petscSolVec, VECMPI);
-
-    VecCreate(PETSC_COMM_WORLD, &petscTmpVec);
-    VecSetSizes(petscTmpVec, nRankRows, nOverallRows);
-    VecSetType(petscTmpVec, VECMPI);
+    // === Create PETSc vector (solution and a temporary vector). ===
+
+    VecCreateMPI(PETSC_COMM_WORLD, nRankRows, nOverallRows, &petscSolVec);
+    VecCreateMPI(PETSC_COMM_WORLD, nRankRows, nOverallRows, &petscTmpVec);
 
     int recvAllValues = 0;
-    int sendValue = static_cast<int>(meshDistributor->getLastMeshChangeIndex() != lastMeshNnz);
-    meshDistributor->getMpiComm().Allreduce(&sendValue, &recvAllValues, 1, MPI_INT, MPI_SUM);
+    int sendValue =
+      static_cast<int>(meshDistributor->getLastMeshChangeIndex() != lastMeshNnz);
+    meshDistributor->getMpiComm().Allreduce(&sendValue, &recvAllValues, 1,
+                                            MPI_INT, MPI_SUM);
 
     if (!d_nnz || recvAllValues != 0) {
       if (d_nnz) {

@@ -102,10 +94,21 @@ namespace AMDiS {
     MatAssemblyBegin(petscMatrix, MAT_FINAL_ASSEMBLY);
     MatAssemblyEnd(petscMatrix, MAT_FINAL_ASSEMBLY);
 
 #if (DEBUG != 0)
     MSG("Fill petsc matrix 3 needed %.5f seconds\n", TIME_USED(MPI::Wtime(), wtime));
 #endif
 
+    MSG("Fill petsc matrix needed %.5f seconds\n", MPI::Wtime() - wtime);
+  }
+
+  void PetscSolverGlobalMatrix::fillPetscRhs(SystemVector *vec)
+  {
+    FUNCNAME("PetscSolverGlobalMatrix::fillPetscRhs()");
+
+    TEST_EXIT_DBG(vec)("NO DOF vector defined!\n");
+
+    int nComponents = vec->getSize();
+    int nRankRows = meshDistributor->getNumberRankDofs() * nComponents;
+    int nOverallRows = meshDistributor->getNumberOverallDofs() * nComponents;
+
+    VecCreateMPI(PETSC_COMM_WORLD, nRankRows, nOverallRows, &petscRhsVec);
 
     // === Transfer values from DOF vector to the PETSc vector. ===

@@ -114,12 +117,11 @@ namespace AMDiS {
     VecAssemblyBegin(petscRhsVec);
     VecAssemblyEnd(petscRhsVec);
-
-    MSG("Fill petsc matrix needed %.5f seconds\n", MPI::Wtime() - wtime);
   }
 
-  void PetscSolverGlobalMatrix::solvePetscMatrix(SystemVector &vec, AdaptInfo *adaptInfo)
+  void PetscSolverGlobalMatrix::solvePetscMatrix(SystemVector &vec,
+                                                 AdaptInfo *adaptInfo)
   {
     FUNCNAME("PetscSolverGlobalMatrix::solvePetscMatrix()");
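A pattern that recurs throughout the commit is the replacement of the three-step vector setup by PETSc's convenience constructor. The two forms are equivalent; a minimal sketch with a generic vector v and local/global sizes n and N (the variable names are illustrative):

    Vec v;

    // Long form, as in the old code:
    VecCreate(PETSC_COMM_WORLD, &v);
    VecSetSizes(v, n, N);      // n local rows, N global rows
    VecSetType(v, VECMPI);

    // Short form, as in the new code; one call covers all three steps.
    VecCreateMPI(PETSC_COMM_WORLD, n, N, &v);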
AMDiS/src/parallel/PetscSolverGlobalMatrix.h

@@ -44,10 +44,15 @@ namespace AMDiS {
       Parameters::get("parallel->use zero start vector", zeroStartVector);
     }
 
-    void fillPetscMatrix(Matrix<DOFMatrix*> *mat, SystemVector *vec);
+    void fillPetscMatrix(Matrix<DOFMatrix*> *mat);
+
+    void fillPetscRhs(SystemVector *vec);
 
     void solvePetscMatrix(SystemVector &vec, AdaptInfo *adaptInfo);
 
+    void destroyMatrixData()
+    {}
+
   protected:
     /// Creates a new non zero pattern structure for the PETSc matrix.
     void createPetscNnzStructure(Matrix<DOFMatrix*> *mat);
AMDiS/src/parallel/PetscSolverSchur.cc

@@ -178,7 +178,7 @@ namespace AMDiS {
   }
 
-  void PetscSolverSchur::fillPetscMatrix(Matrix<DOFMatrix*> *mat, SystemVector *vec)
+  void PetscSolverSchur::fillPetscMatrix(Matrix<DOFMatrix*> *mat)
   {
     FUNCNAME("PetscSolverSchur::fillPetscMatrix()");

@@ -248,18 +248,20 @@ namespace AMDiS {
     int nRankRows = meshDistributor->getNumberRankDofs() * nComponents;
     int nOverallRows = meshDistributor->getNumberOverallDofs() * nComponents;
 
-    VecCreate(PETSC_COMM_WORLD, &petscRhsVec);
-    VecSetSizes(petscRhsVec, nRankRows, nOverallRows);
-    VecSetType(petscRhsVec, VECMPI);
-
-    VecCreate(PETSC_COMM_WORLD, &petscSolVec);
-    VecSetSizes(petscSolVec, nRankRows, nOverallRows);
-    VecSetType(petscSolVec, VECMPI);
-
-    VecCreate(PETSC_COMM_WORLD, &petscTmpVec);
-    VecSetSizes(petscTmpVec, nRankRows, nOverallRows);
-    VecSetType(petscTmpVec, VECMPI);
+    VecCreateMPI(PETSC_COMM_WORLD, nRankRows, nOverallRows, &petscSolVec);
+    VecCreateMPI(PETSC_COMM_WORLD, nRankRows, nOverallRows, &petscTmpVec);
+  }
+
+  void PetscSolverSchur::fillPetscRhs(SystemVector *vec)
+  {
+    FUNCNAME("PetscSolverSchur::fillPetscRhs()");
+
+    int nComponents = vec->getSize();
+    int nRankRows = meshDistributor->getNumberRankDofs() * nComponents;
+    int nOverallRows = meshDistributor->getNumberOverallDofs() * nComponents;
+
+    VecCreateMPI(PETSC_COMM_WORLD, nRankRows, nOverallRows, &petscRhsVec);
 
     for (int i = 0; i < nComponents; i++)
       setDofVector(petscRhsVec, vec->getDOFVector(i), nComponents, i);

@@ -269,7 +271,8 @@ namespace AMDiS {
   }
 
-  void PetscSolverSchur::solvePetscMatrix(SystemVector &vec, AdaptInfo *adaptInfo)
+  void PetscSolverSchur::solvePetscMatrix(SystemVector &vec,
+                                          AdaptInfo *adaptInfo)
   {
     FUNCNAME("PetscSolverSchur::solvePetscMatrix()");
AMDiS/src/parallel/PetscSolverSchur.h

@@ -38,10 +38,15 @@ namespace AMDiS {
       : PetscSolver()
     {}
 
-    void fillPetscMatrix(Matrix<DOFMatrix*> *mat, SystemVector *vec);
+    void fillPetscMatrix(Matrix<DOFMatrix*> *mat);
+
+    void fillPetscRhs(SystemVector *vec);
 
     void solvePetscMatrix(SystemVector &vec, AdaptInfo *adaptInfo);
 
+    void destroyMatrixData()
+    {}
+
     Flag getBoundaryDofRequirement()
     {
       return
AMDiS/src/time/RosenbrockAdaptInstationary.cc

@@ -53,15 +53,15 @@ namespace AMDiS {
     FUNCNAME("RosenbrockAdaptInstationary::RosenbrockAdaptInstationary()");
 
     initConstructor(&problemStat);
   }
 
   void RosenbrockAdaptInstationary::initConstructor(RosenbrockStationary *problemStat)