Commit 8881aea2
authored Apr 07, 2011 by Thomas Witkowski
PetscSchur solver makes use of MatNest for simpler definition of preconditioners.
parent c8a15546
Changes: 2 files
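Note: the MatNest/VecNest pattern this commit adopts can be illustrated by the standalone sketch below. It is not AMDiS code: the block sizes and the identity fill-in are hypothetical, error checks are omitted, and it targets a current PETSc API (three-argument KSPSetOperators, destroy routines taking the object's address), whereas the diff below uses the 2011-era calling conventions.

// Standalone illustration (not AMDiS code): build a 2x2 block operator with
// MatCreateNest, matching nested vectors with VecCreateNest, and solve it
// with a fieldsplit preconditioner derived from the nest structure.
#include <petscksp.h>

int main(int argc, char **argv)
{
  PetscInitialize(&argc, &argv, NULL, NULL);

  const PetscInt nInterior = 4, nBoundary = 2;   // hypothetical block sizes

  // Two diagonal blocks, filled as identities so the solve below is well posed.
  Mat A11, A22;
  MatCreateAIJ(PETSC_COMM_WORLD, PETSC_DECIDE, PETSC_DECIDE,
               nInterior, nInterior, 1, NULL, 0, NULL, &A11);
  MatCreateAIJ(PETSC_COMM_WORLD, PETSC_DECIDE, PETSC_DECIDE,
               nBoundary, nBoundary, 1, NULL, 0, NULL, &A22);
  PetscInt rStart, rEnd;
  MatGetOwnershipRange(A11, &rStart, &rEnd);
  for (PetscInt i = rStart; i < rEnd; i++)
    MatSetValue(A11, i, i, 1.0, INSERT_VALUES);
  MatGetOwnershipRange(A22, &rStart, &rEnd);
  for (PetscInt i = rStart; i < rEnd; i++)
    MatSetValue(A22, i, i, 1.0, INSERT_VALUES);
  MatAssemblyBegin(A11, MAT_FINAL_ASSEMBLY);  MatAssemblyEnd(A11, MAT_FINAL_ASSEMBLY);
  MatAssemblyBegin(A22, MAT_FINAL_ASSEMBLY);  MatAssemblyEnd(A22, MAT_FINAL_ASSEMBLY);

  // Nest the blocks into one operator; NULL off-diagonal blocks are treated as zero.
  Mat blocks[4] = { A11, NULL, NULL, A22 };
  Mat A;
  MatCreateNest(PETSC_COMM_WORLD, 2, NULL, 2, NULL, blocks, &A);

  // Nested right-hand side and solution vectors built from per-block vectors.
  Vec b1, b2, x1, x2;
  MatCreateVecs(A11, &x1, &b1);
  MatCreateVecs(A22, &x2, &b2);
  VecSet(b1, 1.0);
  VecSet(b2, 1.0);
  Vec subRhs[2] = { b1, b2 }, subSol[2] = { x1, x2 };
  Vec b, x;
  VecCreateNest(PETSC_COMM_WORLD, 2, NULL, subRhs, &b);
  VecCreateNest(PETSC_COMM_WORLD, 2, NULL, subSol, &x);

  // With a MATNEST operator, PCFIELDSPLIT picks up one split per nested block.
  KSP ksp;
  PC pc;
  KSPCreate(PETSC_COMM_WORLD, &ksp);
  KSPSetOperators(ksp, A, A);
  KSPGetPC(ksp, &pc);
  PCSetType(pc, PCFIELDSPLIT);
  KSPSetFromOptions(ksp);
  KSPSolve(ksp, b, x);

  KSPDestroy(&ksp);
  VecDestroy(&b);  VecDestroy(&x);
  VecDestroy(&b1); VecDestroy(&b2);
  VecDestroy(&x1); VecDestroy(&x2);
  MatDestroy(&A);  MatDestroy(&A11); MatDestroy(&A22);
  PetscFinalize();
  return 0;
}

Because the operator is a MATNEST, PCFIELDSPLIT can derive one split per nested block without explicit index bookkeeping, which is the sense in which the preconditioner definition becomes simpler.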
AMDiS/src/parallel/PetscSolver.cc
...
@@ -27,6 +27,29 @@ namespace AMDiS {
  }


  void PetscSolver::printSolutionInfo(AdaptInfo *adaptInfo,
                                      bool iterationCounter,
                                      bool residual)
  {
    FUNCNAME("PetscSolver::printSolutionInfo()");

    if (iterationCounter) {
      int iterations = 0;
      KSPGetIterationNumber(solver, &iterations);
      MSG("  Number of iterations: %d\n", iterations);
      adaptInfo->setSolverIterations(iterations);
    }

    if (residual) {
      double norm = 0.0;
      MatMult(petscMatrix, petscSolVec, petscTmpVec);
      VecAXPY(petscTmpVec, -1.0, petscRhsVec);
      VecNorm(petscTmpVec, NORM_2, &norm);
      MSG("  Residual norm: %e\n", norm);
    }
  }


  void PetscSolverGlobalMatrix::fillPetscMatrix(Matrix<DOFMatrix*> *mat, SystemVector *vec)
  {
    FUNCNAME("PetscSolverGlobalMatrix::fillPetscMatrix()");
...
@@ -139,10 +162,6 @@ namespace AMDiS {
    }

    // === Init PETSc solver. ===

    KSP solver;
    PC pc;
    KSPCreate(PETSC_COMM_WORLD, &solver);
    KSPGetPC(solver, &pc);
    KSPSetOperators(solver, petscMatrix, petscMatrix, SAME_NONZERO_PATTERN);
...
@@ -157,17 +176,14 @@ namespace AMDiS {
    KSPSetInitialGuessNonzero(solver, PETSC_TRUE);

    // === Run PETSc. ===

    KSPSolve(solver, petscRhsVec, petscSolVec);

    // === Transfere values from PETSc's solution vectors to the dof vectors.
    // === Transfere values from PETSc's solution vectors to the DOF vectors. ===

    int nRankDofs = meshDistributor->getNumberRankDofs();

    PetscScalar *vecPointer;
    VecGetArray(petscSolVec, &vecPointer);

    int nRankDofs = meshDistributor->getNumberRankDofs();

    for (int i = 0; i < nComponents; i++) {
      DOFVector<double> &dofvec = *(vec.getDOFVector(i));
      for (int j = 0; j < nRankDofs; j++)
...
@@ -178,26 +194,16 @@ namespace AMDiS {
    VecRestoreArray(petscSolVec, &vecPointer);

    // === Synchronize DOFs at common DOFs, i.e., DOFs that correspond to more ===
    // === than one partition. ===
    // === Synchronize DOFs at common DOFs, i.e., DOFs that correspond to ===
    // === more than one partition. ===
    meshDistributor->synchVector(vec);

    // === Print information about solution process. ===
    int iterations = 0;
    KSPGetIterationNumber(solver, &iterations);
    MSG("  Number of iterations: %d\n", iterations);
    adaptInfo->setSolverIterations(iterations);

    // Print iteration counter and residual norm of the solution.
    printSolutionInfo(adaptInfo);

    double norm = 0.0;
    MatMult(petscMatrix, petscSolVec, petscTmpVec);
    VecAXPY(petscTmpVec, -1.0, petscRhsVec);
    VecNorm(petscTmpVec, NORM_2, &norm);
    MSG("  Residual norm: %e\n", norm);

    // === Destroy Petsc's variables. ===
    // === Destroy PETSc's variables. ===

    MatDestroy(petscMatrix);
    VecDestroy(petscRhsVec);
...
@@ -811,7 +817,6 @@ namespace AMDiS {
    MatAssemblyBegin(matA22, MAT_FINAL_ASSEMBLY);
    MatAssemblyEnd(matA22, MAT_FINAL_ASSEMBLY);

    Mat tmpMat[2][2];
    tmpMat[0][0] = matA11;
    tmpMat[0][1] = matA12;
...
@@ -825,26 +830,24 @@ namespace AMDiS {
    MatAssemblyEnd(petscMatrix, MAT_FINAL_ASSEMBLY);

    MatGetVecs(matA12, &petscRhsVec2, &petscRhsVec1);
    MatGetVecs(matA12, &petscSolVec2, &petscSolVec1);

    int nRankRows = meshDistributor->getNumberRankDofs() * nComponents;
    int nOverallRows = meshDistributor->getNumberOverallDofs() * nComponents;

    for (int i = 0; i < nComponents; i++)
      setDofVector(petscRhsVec, vec->getDOFVector(i), nComponents, i);

    VecCreate(PETSC_COMM_WORLD, &petscRhsVec);
    VecSetSizes(petscRhsVec, nRankRows, nOverallRows);
    VecSetType(petscRhsVec, VECMPI);

    Vec tmpVec[2];
    tmpVec[0] = petscRhsVec1;
    tmpVec[1] = petscRhsVec2;
    VecCreateNest(PETSC_COMM_WORLD, 2, PETSC_NULL, tmpVec, &petscRhsVec);

    VecCreate(PETSC_COMM_WORLD, &petscSolVec);
    VecSetSizes(petscSolVec, nRankRows, nOverallRows);
    VecSetType(petscSolVec, VECMPI);

    VecCreate(PETSC_COMM_WORLD, &petscTmpVec);
    VecSetSizes(petscTmpVec, nRankRows, nOverallRows);
    VecSetType(petscTmpVec, VECMPI);

    tmpVec[0] = petscSolVec1;
    tmpVec[1] = petscSolVec2;
    VecCreateNest(PETSC_COMM_WORLD, 2, PETSC_NULL, tmpVec, &petscSolVec);

    VecAssemblyBegin(petscRhsVec1);
    VecAssemblyEnd(petscRhsVec1);
    VecAssemblyBegin(petscRhsVec2);
    VecAssemblyEnd(petscRhsVec2);

    for (int i = 0; i < nComponents; i++)
      setDofVector(petscRhsVec, vec->getDOFVector(i), nComponents, i);

    VecAssemblyBegin(petscRhsVec);
    VecAssemblyEnd(petscRhsVec);
...
@@ -855,33 +858,78 @@ namespace AMDiS {
  {
    FUNCNAME("PetscSolverSchur::solvePetscMatrix()");

    KSP solver;
    PC pc;
    KSPCreate(PETSC_COMM_WORLD, &solver);
    KSPGetPC(solver, &pc);
    // PCSetType(pc, PCFIELDSPLIT);
    const PetscInt interiorField[1] = {0};
    const PetscInt boundaryField[1] = {1};
    PCFieldSplitSetFields(pc, "interior", 1, interiorField);
    PCFieldSplitSetFields(pc, "boundary", 1, boundaryField);

    int nComponents = vec.getSize();

    KSPCreate(PETSC_COMM_WORLD, &solver);
    KSPSetOperators(solver, petscMatrix, petscMatrix, SAME_NONZERO_PATTERN);
    KSPSetTolerances(solver, 0.0, 1e-8, PETSC_DEFAULT, PETSC_DEFAULT);
    KSPMonitorSet(solver, myKSPMonitor, PETSC_NULL, 0);
    KSPSetFromOptions(solver);

    KSPGetPC(solver, &pc);
    PCSetType(pc, PCFIELDSPLIT);

    IS interiorIs;
    ISCreateStride(PETSC_COMM_WORLD,
                   nInteriorDofs * nComponents,
                   (rStartInteriorDofs + rStartBoundaryDofs) * nComponents,
                   1, &interiorIs);
    PCFieldSplitSetIS(pc, "interior", interiorIs);

    IS boundaryIs;
    ISCreateStride(PETSC_COMM_WORLD,
                   nBoundaryDofs * nComponents,
                   (rStartInteriorDofs + rStartBoundaryDofs + nInteriorDofs) * nComponents,
                   1, &boundaryIs);
    PCFieldSplitSetIS(pc, "boundary", boundaryIs);

    PCSetFromOptions(pc);

    KSPSolve(solver, petscRhsVec, petscSolVec);
    KSPDestroy(solver);

    // === Transfere values from PETSc's solution vectors to AMDiS vectors. ===

    VecDestroy(petscRhsVec1);
    VecDestroy(petscRhsVec2);
    VecDestroy(petscRhsVec);

    PetscScalar *vecPointer;
    VecGetArray(petscSolVec, &vecPointer);

    VecDestroy(petscSolVec1);
    VecDestroy(petscSolVec2);

    for (int i = 0; i < nComponents; i++) {
      DOFVector<double>::Iterator dofIt(vec.getDOFVector(i), USED_DOFS);
      for (dofIt.reset(); !dofIt.end(); ++dofIt) {
        DegreeOfFreedom globalRowDof =
          meshDistributor->mapLocalToGlobal(dofIt.getDOFIndex());
        if (boundaryDofs.count(globalRowDof)) {
          int index =
            (mapGlobalBoundaryDof[globalRowDof] - rStartBoundaryDofs + nInteriorDofs) * (i + 1);
          *dofIt = vecPointer[index];
        } else {
          int index =
            (mapGlobalInteriorDof[globalRowDof] - rStartInteriorDofs) * (i + 1);
          *dofIt = vecPointer[index];
        }
      }
    }

    VecRestoreArray(petscSolVec, &vecPointer);

    // === Synchronize DOFs at common DOFs, i.e., DOFs that correspond to ===
    // === more than one partition. ===
    meshDistributor->synchVector(vec);

    // === Print iteration counter and residual norm of the solution. ===
    printSolutionInfo(adaptInfo);

    // === Destroy PETSC's variables. ===

    VecDestroy(petscRhsVec);
    VecDestroy(petscSolVec);
    VecDestroy(petscTmpVec);

    MatDestroy(matA11);
    MatDestroy(matA12);
...
@@ -889,8 +937,7 @@ namespace AMDiS {
    MatDestroy(matA22);
    MatDestroy(petscMatrix);

    MSG("SOLVED IT!\n");
    exit(0);

    KSPDestroy(solver);
  }
...
@@ -984,11 +1031,16 @@ namespace AMDiS {
      double value = *dofIt;

      if (boundaryDofs.count(globalRowDof)) {
        int index = mapGlobalBoundaryDof[globalRowDof] * dispMult + dispAdd;
        VecSetValues(petscRhsVec2, 1, &index, &value, ADD_VALUES);
        int index = (rStartInteriorDofs + nInteriorDofs + mapGlobalBoundaryDof[globalRowDof]) * dispMult + dispAdd;
        VecSetValues(petscRhsVec, 1, &index, &value, ADD_VALUES);
      } else {
        int index = mapGlobalInteriorDof[globalRowDof] * dispMult + dispAdd;
        VecSetValues(petscRhsVec1, 1, &index, &value, ADD_VALUES);
        int index = (rStartBoundaryDofs + mapGlobalInteriorDof[globalRowDof]) * dispMult + dispAdd;
        VecSetValues(petscRhsVec, 1, &index, &value, ADD_VALUES);
      }
    }
  }
...
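Note: the index-set based PCFIELDSPLIT setup introduced in PetscSolverSchur::solvePetscMatrix() above can be reduced to the standalone sketch below. The matrix is a trivial AIJ placeholder, the per-rank sizes stand in for nInteriorDofs and nBoundaryDofs, error checks are omitted, and the code targets a current PETSc API rather than the 2011-era calls of the diff. The names passed to PCFieldSplitSetIS become option prefixes (for example -fieldsplit_interior_pc_type), so each block's preconditioner can be chosen at run time once KSPSetFromOptions is called.

// Reduced, standalone sketch of an index-set based PCFIELDSPLIT setup
// (not AMDiS code; sizes and matrix entries are hypothetical).
#include <petscksp.h>

int main(int argc, char **argv)
{
  PetscInitialize(&argc, &argv, NULL, NULL);

  const PetscInt nInterior = 6, nBoundary = 2;   // per-rank sizes (placeholders)
  const PetscInt nLocal = nInterior + nBoundary;

  // Each rank owns a contiguous range: interior rows first, boundary rows after.
  Mat A;
  MatCreateAIJ(PETSC_COMM_WORLD, nLocal, nLocal, PETSC_DETERMINE, PETSC_DETERMINE,
               1, NULL, 0, NULL, &A);
  PetscInt rStart, rEnd;
  MatGetOwnershipRange(A, &rStart, &rEnd);
  for (PetscInt i = rStart; i < rEnd; i++)
    MatSetValue(A, i, i, 2.0, INSERT_VALUES);   // diagonal placeholder entries
  MatAssemblyBegin(A, MAT_FINAL_ASSEMBLY);
  MatAssemblyEnd(A, MAT_FINAL_ASSEMBLY);

  Vec b, x;
  MatCreateVecs(A, &x, &b);
  VecSet(b, 1.0);

  KSP ksp;
  PC pc;
  KSPCreate(PETSC_COMM_WORLD, &ksp);
  KSPSetOperators(ksp, A, A);
  KSPGetPC(ksp, &pc);
  PCSetType(pc, PCFIELDSPLIT);

  // Named splits defined by strided index sets over the global numbering,
  // mirroring the "interior"/"boundary" splits registered in the commit.
  IS interiorIs, boundaryIs;
  ISCreateStride(PETSC_COMM_WORLD, nInterior, rStart, 1, &interiorIs);
  ISCreateStride(PETSC_COMM_WORLD, nBoundary, rStart + nInterior, 1, &boundaryIs);
  PCFieldSplitSetIS(pc, "interior", interiorIs);
  PCFieldSplitSetIS(pc, "boundary", boundaryIs);

  // The split names become option prefixes, e.g.
  //   -fieldsplit_interior_pc_type ilu -fieldsplit_boundary_ksp_type preonly
  KSPSetFromOptions(ksp);
  KSPSolve(ksp, b, x);

  ISDestroy(&interiorIs);
  ISDestroy(&boundaryIs);
  KSPDestroy(&ksp);
  VecDestroy(&b);
  VecDestroy(&x);
  MatDestroy(&A);
  PetscFinalize();
  return 0;
}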
AMDiS/src/parallel/PetscSolver.h
...
@@ -41,6 +41,7 @@ namespace AMDiS {
  using namespace std;


  class PetscSolver
  {
  public:
...
@@ -68,8 +69,26 @@ namespace AMDiS {
    /// Use PETSc to solve the linear system of equations
    virtual void solvePetscMatrix(SystemVector &vec, AdaptInfo *adaptInfo) = 0;

  protected:
    void printSolutionInfo(AdaptInfo *adaptInfo,
                           bool iterationCounter = true,
                           bool residual = true);

  protected:
    MeshDistributor *meshDistributor;

    /// Petsc's matrix structure.
    Mat petscMatrix;

    /** \brief
     * PETSc's vector structures for the rhs vector, the solution vector and a
     * temporary vector for calculating the final residuum.
     */
    Vec petscRhsVec, petscSolVec, petscTmpVec;

    KSP solver;

    PC pc;
  };
...
@@ -103,15 +122,6 @@ namespace AMDiS {
                      int disMult = 1,
                      int dispAdd = 0,
                      bool rankOnly = false);

  protected:
    /// Petsc's matrix structure.
    Mat petscMatrix;

    /** \brief
     * PETSc's vector structures for the rhs vector, the solution vector and a
     * temporary vector for calculating the final residuum.
     */
    Vec petscRhsVec, petscSolVec, petscTmpVec;

    /// Arrays definig the non zero pattern of Petsc's matrix.
    int *d_nnz, *o_nnz;
...
@@ -172,14 +182,6 @@ namespace AMDiS {
    map<DegreeOfFreedom, DegreeOfFreedom> mapGlobalInteriorDof;

    Mat matA11, matA12, matA21, matA22;

    Mat petscMatrix;

    Vec petscRhsVec1, petscRhsVec2;

    Vec petscSolVec1, petscSolVec2;

    Vec petscRhsVec, petscSolVec;
  };

#endif
...