Commit 0e885e23
authored Nov 11, 2011 by Thomas Witkowski
Rosenbrock should now also work in parallel; tested only with the FETI-DP solver.
parent 02638872
Changes: 18 files
AMDiS/src/ProblemInstat.h
@@ -64,7 +64,7 @@ namespace AMDiS {
     virtual void solve(AdaptInfo *adaptInfo) {}
 
-    virtual void solve(AdaptInfo *adaptInfo, bool fixedMatrix)
+    virtual void solve(AdaptInfo *adaptInfo, bool, bool)
     {
       solve(adaptInfo);
     }
AMDiS/src/ProblemStat.cc
@@ -540,7 +540,7 @@ namespace AMDiS {
   }
 
-  void ProblemStatSeq::solve(AdaptInfo *adaptInfo, bool fixedMatrix)
+  void ProblemStatSeq::solve(AdaptInfo *adaptInfo, bool, bool)
   {
     FUNCNAME("Problem::solve()");
AMDiS/src/ProblemStat.h
@@ -67,7 +67,7 @@ namespace AMDiS {
   public:
     /// Constructor
     ProblemStatSeq(string nameStr,
                    ProblemIterationInterface *problemIteration = NULL)
       : StandardProblemIteration(this),
         name(nameStr),
         nComponents(-1),

@@ -147,7 +147,9 @@ namespace AMDiS {
     /** \brief
      * Implementation of ProblemStatBase::solve(). Deligates the solving
      * of problems system to \ref solver.
      */
-    virtual void solve(AdaptInfo *adaptInfo, bool fixedMatrix = false);
+    void solve(AdaptInfo *adaptInfo,
+               bool createMatrixData = true,
+               bool storeMatrixData = false);
 
     /** \brief
      * Implementation of ProblemStatBase::estimate(). Deligates the estimation
AMDiS/src/ProblemStatBase.h
@@ -104,8 +104,27 @@ namespace AMDiS {
     /// Coarsening of the mesh.
     virtual Flag coarsenMesh(AdaptInfo *adaptInfo) = 0;
 
-    /// Solves the assembled system. The result is an approximative solution.
-    virtual void solve(AdaptInfo *adaptInfo, bool fixedMatrix) = 0;
+    /** \brief
+     * Solves the assembled system. The result is an approximative solution.
+     * The last two boolean arguments can be used to controll successive
+     * solutions of systems with the same matrix.
+     *
+     * \param adaptInfo Pointer to an \ref AdaptInfo object.
+     * \param createMatrixData If false, the solver assumes that all of its
+     *                         internal data structures for the system
+     *                         matrix are already created. This is the case,
+     *                         if we solve different systems but with the
+     *                         same matrix. After the first call to this
+     *                         function (with this parameter set to true),
+     *                         all other calls may set it to false.
+     * \param storeMatrixData  If true, all internal data structures for the
+     *                         system matrix are not deleted such that they
+     *                         can be used for next solutions with the same
+     *                         system matrix.
+     */
+    virtual void solve(AdaptInfo *adaptInfo,
+                       bool createMatrixData = true,
+                       bool storeMatrixData = false) = 0;
 
     /** \brief
      * A posteriori error estimation of the calculated solution. Should store
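The two new flags suggest the following calling pattern. This is only an illustrative sketch, not code from this commit: solveWithSameMatrix is a hypothetical helper, and the loop stands in for any sequence of solves that reuses one assembled system matrix, e.g. the stages of a Rosenbrock step.

  // Sketch: solve nRhs systems that share the same system matrix.
  void solveWithSameMatrix(ProblemStatBase &prob, AdaptInfo *adaptInfo, int nRhs)
  {
    for (int i = 0; i < nRhs; i++) {
      bool firstSolve = (i == 0);          // assemble the matrix data only once
      bool moreSolves = (i < nRhs - 1);    // keep the matrix data while further solves follow
      prob.solve(adaptInfo, /* createMatrixData = */ firstSolve,
                 /* storeMatrixData = */ moreSolves);
    }
  }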
AMDiS/src/StandardProblemIteration.cc
@@ -37,10 +37,10 @@ namespace AMDiS {
     Flag flag = buildAndAdapt(adaptInfo, toDo);
 
     if (toDo.isSet(SOLVE))
-      problem->solve(adaptInfo, false);
+      problem->solve(adaptInfo, true, false);
 
     if (toDo.isSet(SOLVE_RHS))
-      problem->solve(adaptInfo, true);
+      problem->solve(adaptInfo, true, false);
 
     if (toDo.isSet(ESTIMATE))
       problem->estimate(adaptInfo);
AMDiS/src/parallel/PetscProblemStat.cc
@@ -61,7 +61,9 @@ namespace AMDiS {
   }
 
-  void PetscProblemStat::solve(AdaptInfo *adaptInfo, bool fixedMatrix)
+  void PetscProblemStat::solve(AdaptInfo *adaptInfo,
+                               bool createMatrixData,
+                               bool storeMatrixData)
   {
     FUNCNAME("PetscProblemStat::solve()");

@@ -79,8 +81,13 @@ namespace AMDiS {
     MSG("Overall memory usage is VM = %.1f MB RSS = %.1f MB\n", vm, rss);
 #endif
 
-    petscSolver->setMeshDistributor(meshDistributor);
-    petscSolver->fillPetscMatrix(systemMatrix, rhs);
+    if (createMatrixData) {
+      petscSolver->setMeshDistributor(meshDistributor);
+      petscSolver->fillPetscMatrix(systemMatrix);
+    }
+
+    petscSolver->fillPetscRhs(rhs);
 
 #if 0
     processMemUsage(vm, rss);

@@ -93,6 +100,9 @@ namespace AMDiS {
     petscSolver->solvePetscMatrix(*solution, adaptInfo);
 
+    if (!storeMatrixData)
+      petscSolver->destroyMatrixData();
+
 #if 0
     processMemUsage(vm, rss);
     MSG("STAGE 3\n");

@@ -102,7 +112,6 @@ namespace AMDiS {
     MSG("Overall memory usage is VM = %.1f MB RSS = %.1f MB\n", vm, rss);
 #endif
 
-
     INFO(info, 8)("solution of discrete system needed %.5f seconds\n",
                   MPI::Wtime() - wtime);
   }
AMDiS/src/parallel/PetscProblemStat.h
@@ -48,7 +48,9 @@ namespace AMDiS {
                     ProblemStatSeq *adoptProblem = NULL,
                     Flag adoptFlag = INIT_NOTHING);
 
-    void solve(AdaptInfo *adaptInfo, bool fixedMatrix = false);
+    void solve(AdaptInfo *adaptInfo,
+               bool createMatrixData = true,
+               bool storeMatrixData = false);
 
   protected:
     PetscSolver *petscSolver;
AMDiS/src/parallel/PetscSolver.h
@@ -60,18 +60,26 @@ namespace AMDiS {
     }
 
     /** \brief
-     * Create a PETSc matrix and PETSc vectors. The given DOF matrices are used to
-     * create the nnz structure of the PETSc matrix and the values are transfered to it.
-     * The given DOF vectors are used to the the values of the PETSc rhs vector.
+     * Create a PETSc matrix. The given DOF matrices are used to create the nnz
+     * structure of the PETSc matrix and the values are transfered to it.
      *
      * \param[in] mat
+     */
+    virtual void fillPetscMatrix(Matrix<DOFMatrix*> *mat) = 0;
+
+    /** \brief
+     * Create a PETSc vector and fills it with the rhs values of the system.
+     *
      * \param[in] vec
      */
-    virtual void fillPetscMatrix(Matrix<DOFMatrix*> *mat, SystemVector *vec) = 0;
+    virtual void fillPetscRhs(SystemVector *vec) = 0;
 
     /// Use PETSc to solve the linear system of equations
     virtual void solvePetscMatrix(SystemVector &vec, AdaptInfo *adaptInfo) = 0;
 
+    /// Destroys all matrix data structures.
+    virtual void destroyMatrixData() = 0;
+
     virtual Flag getBoundaryDofRequirement()
     {
       return 0;
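Taken together, the split interface implies the call sequence below. This sketch mirrors the new PetscProblemStat::solve() from this commit; the surrounding objects (petscSolver, meshDistributor, systemMatrix, rhs, solution, adaptInfo) and the two flags are assumed to exist in the caller.

  if (createMatrixData) {
    petscSolver->setMeshDistributor(meshDistributor);
    petscSolver->fillPetscMatrix(systemMatrix);     // matrix data, created once
  }
  petscSolver->fillPetscRhs(rhs);                   // rhs, assembled for every solve
  petscSolver->solvePetscMatrix(*solution, adaptInfo);
  if (!storeMatrixData)
    petscSolver->destroyMatrixData();               // release matrix data unless it will be reused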
AMDiS/src/parallel/PetscSolverFeti.cc
@@ -595,8 +595,13 @@ namespace AMDiS {
     schurPrimalData.mat_b_primal = &mat_b_primal;
     schurPrimalData.ksp_b = &ksp_b;
 
-    VecDuplicate(f_b, &(schurPrimalData.tmp_vec_b));
-    VecDuplicate(f_primal, &(schurPrimalData.tmp_vec_primal));
+    VecCreateMPI(PETSC_COMM_WORLD,
+                 nRankB * nComponents, nOverallB * nComponents,
+                 &(schurPrimalData.tmp_vec_b));
+    VecCreateMPI(PETSC_COMM_WORLD,
+                 nRankPrimals * nComponents, nOverallPrimals * nComponents,
+                 &(schurPrimalData.tmp_vec_primal));
 
     MatCreateShell(PETSC_COMM_WORLD,
                    nRankPrimals * nComponents, nRankPrimals * nComponents,
                    nRankPrimals * nComponents, nRankPrimals * nComponents,

@@ -643,9 +648,15 @@ namespace AMDiS {
     fetiData.ksp_b = &ksp_b;
     fetiData.ksp_schur_primal = &ksp_schur_primal;
 
-    VecDuplicate(f_b, &(fetiData.tmp_vec_b0));
-    VecDuplicate(f_b, &(fetiData.tmp_vec_b1));
-    VecDuplicate(f_primal, &(fetiData.tmp_vec_primal));
+    VecCreateMPI(PETSC_COMM_WORLD,
+                 nRankB * nComponents, nOverallB * nComponents,
+                 &(fetiData.tmp_vec_b0));
+    VecCreateMPI(PETSC_COMM_WORLD,
+                 nRankB * nComponents, nOverallB * nComponents,
+                 &(fetiData.tmp_vec_b1));
+    VecCreateMPI(PETSC_COMM_WORLD,
+                 nRankPrimals * nComponents, nOverallPrimals * nComponents,
+                 &(fetiData.tmp_vec_primal));
 
     MatCreateShell(PETSC_COMM_WORLD,
                    nRankLagrange * nComponents, nRankLagrange * nComponents,
                    nRankLagrange * nComponents, nRankLagrange * nComponents,

@@ -681,7 +692,9 @@ namespace AMDiS {
       fetiDirichletPreconData.mat_duals_interior = &mat_duals_interior;
       fetiDirichletPreconData.ksp_interior = &ksp_interior;
 
-      VecDuplicate(f_b, &(fetiDirichletPreconData.tmp_vec_b));
+      VecCreateMPI(PETSC_COMM_WORLD,
+                   nRankB * nComponents, nOverallB * nComponents,
+                   &(fetiDirichletPreconData.tmp_vec_b));
       MatGetVecs(mat_duals_duals, PETSC_NULL, &(fetiDirichletPreconData.tmp_vec_duals0));
       MatGetVecs(mat_duals_duals, PETSC_NULL, &(fetiDirichletPreconData.tmp_vec_duals1));
       MatGetVecs(mat_interior_interior, PETSC_NULL, &(fetiDirichletPreconData.tmp_vec_interior));

@@ -697,7 +710,9 @@ namespace AMDiS {
       fetiLumpedPreconData.mat_lagrange_scaled = &mat_lagrange_scaled;
       fetiLumpedPreconData.mat_duals_duals = &mat_duals_duals;
 
-      VecDuplicate(f_b, &(fetiLumpedPreconData.tmp_vec_b));
+      VecCreateMPI(PETSC_COMM_WORLD,
+                   nRankB * nComponents, nOverallB * nComponents,
+                   &(fetiLumpedPreconData.tmp_vec_b));
       MatGetVecs(mat_duals_duals, PETSC_NULL, &(fetiLumpedPreconData.tmp_vec_duals0));
       MatGetVecs(mat_duals_duals, PETSC_NULL, &(fetiLumpedPreconData.tmp_vec_duals1));

@@ -851,12 +866,11 @@ namespace AMDiS {
   }
 
-  void PetscSolverFeti::fillPetscMatrix(Matrix<DOFMatrix*> *mat,
-                                        SystemVector *vec)
+  void PetscSolverFeti::fillPetscMatrix(Matrix<DOFMatrix*> *mat)
   {
     FUNCNAME("PetscSolverFeti::fillPetscMatrix()");
 
-    nComponents = vec->getSize();
+    nComponents = mat->getSize();
 
     // === Create all sets and indices. ===

@@ -1156,16 +1170,40 @@ namespace AMDiS {
     }
 
-    // === Create and fill PETSc's right hand side vectors. ===
+    // === Create and fill PETSc matrix for Lagrange constraints. ===
 
-    VecCreate(PETSC_COMM_WORLD, &f_b);
-    VecSetSizes(f_b, nRankB * nComponents, nOverallB * nComponents);
-    VecSetType(f_b, VECMPI);
+    createMatLagrange();
 
-    VecCreate(PETSC_COMM_WORLD, &f_primal);
-    VecSetSizes(f_primal, nRankPrimals * nComponents,
-                nOverallPrimals * nComponents);
-    VecSetType(f_primal, VECMPI);
+    // === Create PETSc solver for the Schur complement on primal variables. ===
+
+    createSchurPrimalKsp();
+
+    // === Create PETSc solver for the FETI-DP operator. ===
+
+    createFetiKsp();
+
+    // === Create solver for the non primal (thus local) variables. ===
+
+    KSPCreate(PETSC_COMM_WORLD, &ksp_b);
+    KSPSetOperators(ksp_b, mat_b_b, mat_b_b, SAME_NONZERO_PATTERN);
+    KSPSetOptionsPrefix(ksp_b, "solver_b_");
+    KSPSetFromOptions(ksp_b);
+  }
+
+
+  void PetscSolverFeti::fillPetscRhs(SystemVector *vec)
+  {
+    FUNCNAME("PetscSolverFeti::fillPetscRhs()");
+
+    int nComponents = vec->getSize();
+
+    VecCreateMPI(PETSC_COMM_WORLD,
+                 nRankB * nComponents, nOverallB * nComponents, &f_b);
+    VecCreateMPI(PETSC_COMM_WORLD,
+                 nRankPrimals * nComponents, nOverallPrimals * nComponents,
+                 &f_primal);
 
     for (int i = 0; i < nComponents; i++) {
       DOFVector<double>::Iterator dofIt(vec->getDOFVector(i), USED_DOFS);

@@ -1194,21 +1232,6 @@ namespace AMDiS {
     VecAssemblyBegin(f_primal);
     VecAssemblyEnd(f_primal);
-
-    // === Create and fill PETSc matrix for Lagrange constraints. ===
-
-    createMatLagrange();
-
-    // === Create PETSc solver for the Schur complement on primal variables. ===
-
-    createSchurPrimalKsp();
-
-    // === Create PETSc solver for the FETI-DP operator. ===
-
-    createFetiKsp();
   }

@@ -1216,11 +1239,9 @@ namespace AMDiS {
   {
     FUNCNAME("PetscSolverFeti::solveFetiMatrix()");
 
-    // Create transpose of Lagrange matrix.
     Mat mat_lagrange_transpose;
     MatTranspose(mat_lagrange, MAT_INITIAL_MATRIX, &mat_lagrange_transpose);
-
 
     // === Create nested matrix which will contain the overall FETI system. ===
     Mat A;

@@ -1261,12 +1282,8 @@ namespace AMDiS {
     // === Create rhs and solution vectors for the overall FETI system. ===
 
-    Vec f;
-    VecCreate(PETSC_COMM_WORLD, &f);
-    VecSetSizes(f, nRankNest, nOverallNest);
-    VecSetType(f, VECMPI);
-
-    Vec b;
+    Vec f, b;
+    VecCreateMPI(PETSC_COMM_WORLD, nRankNest, nOverallNest, &f);
     VecDuplicate(f, &b);

@@ -1352,14 +1369,7 @@ namespace AMDiS {
   {
     FUNCNAME("PetscSolverFeti::solveReducedFetiMatrix()");
 
-    // === Create solver for the non primal (thus local) variables. ===
-
-    KSPCreate(PETSC_COMM_WORLD, &ksp_b);
-    KSPSetOperators(ksp_b, mat_b_b, mat_b_b, SAME_NONZERO_PATTERN);
-    KSPSetOptionsPrefix(ksp_b, "solver_b_");
-    KSPSetFromOptions(ksp_b);
-
-    // RHS vector.
+    // RHS and solution vector.
     Vec vec_rhs;
 
     // Some temporary vectors.

@@ -1444,8 +1454,14 @@ namespace AMDiS {
     VecDestroy(&tmp_primal0);
     VecDestroy(&tmp_primal1);
-
-    KSPDestroy(&ksp_b);
+    VecDestroy(&f_b);
+    VecDestroy(&f_primal);
+  }
+
+
+  void PetscSolverFeti::destroyMatrixData()
+  {
+    FUNCNAME("PetscSolverFeti::destroyMatrixData()");
 
     MatDestroy(&mat_b_b);
     MatDestroy(&mat_primal_primal);

@@ -1453,14 +1469,6 @@ namespace AMDiS {
     MatDestroy(&mat_primal_b);
     MatDestroy(&mat_lagrange);
 
-    VecDestroy(&f_b);
-    VecDestroy(&f_primal);
-
-    destroySchurPrimalKsp();
-
-    destroyFetiKsp();
-
     // === Destroy preconditioner data structures. ===
 
     if (fetiPreconditioner != FETI_NONE)

@@ -1471,8 +1479,13 @@ namespace AMDiS {
       MatDestroy(&mat_interior_duals);
       MatDestroy(&mat_duals_interior);
     }
+
+    destroySchurPrimalKsp();
+
+    destroyFetiKsp();
+
+    KSPDestroy(&ksp_b);
   }
 
   void PetscSolverFeti::solvePetscMatrix(SystemVector &vec, AdaptInfo *adaptInfo)
   {

@@ -1490,6 +1503,7 @@ namespace AMDiS {
       }
     }
 #endif
+
   }
AMDiS/src/parallel/PetscSolverFeti.h
@@ -133,13 +133,19 @@ namespace AMDiS {
   public:
     PetscSolverFeti();
 
-    /// Assemble the sequentially created matrices and vectors to the
-    /// global matrices and vectors required by the FETI-DP method.
-    void fillPetscMatrix(Matrix<DOFMatrix*> *mat, SystemVector *vec);
+    /// Assemble the sequentially created matrices to the global matrices
+    /// required by the FETI-DP method.
+    void fillPetscMatrix(Matrix<DOFMatrix*> *mat);
+
+    /// Assembles the global rhs vectors from the sequentially created ones.
+    void fillPetscRhs(SystemVector *vec);
 
     /// Solve the system using FETI-DP method.
     void solvePetscMatrix(SystemVector &vec, AdaptInfo *adaptInfo);
 
+    /// Destroys all matrix data structures.
+    void destroyMatrixData();
+
     /// Returns flags to denote which information of the boundary DOFs are
     /// required by the FETI-DP solver.
     Flag getBoundaryDofRequirement()
AMDiS/src/parallel/PetscSolverGlobalMatrix.cc
@@ -25,36 +25,28 @@ namespace AMDiS {
   }
 
-  void PetscSolverGlobalMatrix::fillPetscMatrix(Matrix<DOFMatrix*> *mat, SystemVector *vec)
+  void PetscSolverGlobalMatrix::fillPetscMatrix(Matrix<DOFMatrix*> *mat)
   {
     FUNCNAME("PetscSolverGlobalMatrix::fillPetscMatrix()");
 
     TEST_EXIT_DBG(meshDistributor)("No mesh distributor object defined!\n");
     TEST_EXIT_DBG(mat)("No DOF matrix defined!\n");
-    TEST_EXIT_DBG(vec)("NO DOF vector defined!\n");
 
     double wtime = MPI::Wtime();
     int nComponents = mat->getNumRows();
     int nRankRows = meshDistributor->getNumberRankDofs() * nComponents;
     int nOverallRows = meshDistributor->getNumberOverallDofs() * nComponents;
 
-    // === Create PETSc vector (rhs, solution and a temporary vector). ===
+    // === Create PETSc vector (solution and a temporary vector). ===
 
-    VecCreate(PETSC_COMM_WORLD, &petscRhsVec);
-    VecSetSizes(petscRhsVec, nRankRows, nOverallRows);
-    VecSetType(petscRhsVec, VECMPI);
-
-    VecCreate(PETSC_COMM_WORLD, &petscSolVec);
-    VecSetSizes(petscSolVec, nRankRows, nOverallRows);
-    VecSetType(petscSolVec, VECMPI);