Commit 8b20a02e
authored May 02, 2011 by Thomas Witkowski

Added comments to the very first FETI-DP implementation.

parent c7503545
Changes: 2 files
AMDiS/src/parallel/PetscSolverFeti.cc
@@ -94,6 +94,9 @@ namespace AMDiS {
   {
     FUNCNAME("PetscSolverFeti::createPrimals()");

+    // === Define all vertices on the interior boundaries of the macro mesh ===
+    // === to be primal variables.                                          ===
+
     primals.clear();
     DofContainerSet& vertices =
       meshDistributor->getBoundaryDofInfo().geoDofs[VERTEX];

@@ -102,6 +105,10 @@ namespace AMDiS {
 	 it != vertices.end(); ++it)
       primals.insert(**it);

+    // === Calculate the number of primals that are owned by the rank and ===
+    // === create local indices of the primals starting at zero.          ===
+
     globalPrimalIndex.clear();
     nRankPrimals = 0;
     for (DofIndexSet::iterator it = primals.begin(); it != primals.end(); ++it)

@@ -111,11 +118,17 @@ namespace AMDiS {
     }

+    // === Get overall number of primals and rank's displacement in the ===
+    // === numbering of the primals.                                     ===
+
     nOverallPrimals = 0;
     rStartPrimals = 0;
     mpi::getDofNumbering(meshDistributor->getMpiComm(),
			 nRankPrimals, rStartPrimals, nOverallPrimals);

+    // === Create global primal index for all primals. ===
+
     for (DofMapping::iterator it = globalPrimalIndex.begin();
	 it != globalPrimalIndex.end(); ++it)
       it->second += rStartPrimals;
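The helper mpi::getDofNumbering itself is not part of this diff. As a rough illustration of what such a rank-offset numbering amounts to, here is a minimal, hypothetical sketch based on a plain MPI prefix sum (the function name and details below are assumptions for illustration, not the AMDiS implementation):

  #include <mpi.h>

  // Sketch: given the number of primals owned by this rank, compute the
  // rank's starting offset in the global numbering and the overall count.
  void getDofNumberingSketch(MPI_Comm comm, int nRankPrimals,
                             int &rStartPrimals, int &nOverallPrimals)
  {
    int inclusiveSum = 0;
    // Inclusive prefix sum over all ranks up to and including this one.
    MPI_Scan(&nRankPrimals, &inclusiveSum, 1, MPI_INT, MPI_SUM, comm);
    // Exclusive offset: primals owned by ranks with a smaller rank number.
    rStartPrimals = inclusiveSum - nRankPrimals;
    // Total number of primals over all ranks.
    MPI_Allreduce(&nRankPrimals, &nOverallPrimals, 1, MPI_INT, MPI_SUM, comm);
  }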
@@ -123,6 +136,11 @@ namespace AMDiS {
     MSG_DBG("nRankPrimals = %d   nOverallPrimals = %d\n",
	     nRankPrimals, nOverallPrimals);

+    // === Communicate the primals' global indices from the ranks that own ===
+    // === the primals to the ranks that contain these primals but do not  ===
+    // === own them.                                                        ===
+
     StdMpi<vector<int> > stdMpi(meshDistributor->getMpiComm());
     RankToDofContainer& sendDofs = meshDistributor->getSendDofs();
     for (RankToDofContainer::iterator it = sendDofs.begin();

@@ -175,8 +193,8 @@ namespace AMDiS {
   {
     FUNCNAME("PetscSolverFeti::createDuals()");

-    // === Create for each dual node the set of ranks that contain this ===
-    // === node (denoted by W(x_j)).                                     ===
+    // === Create for each dual node that is owned by the rank, the set ===
+    // === of ranks that contain this node (denoted by W(x_j)).          ===

     boundaryDofRanks.clear();

@@ -193,6 +211,10 @@ namespace AMDiS {
       }
     }

+    // === Communicate these sets for all rank-owned dual nodes to other ===
+    // === ranks that also have this node.                                ===
+
     StdMpi<vector<std::set<int> > > stdMpi(meshDistributor->getMpiComm());
     for (RankToDofContainer::iterator it = sendDofs.begin();
	 it != sendDofs.end(); ++it)

@@ -266,6 +288,9 @@ namespace AMDiS {
   {
     FUNCNAME("PetscSolverFeti::createLagrange()");

+    // === Reserve for each dual node, on the rank that owns this node, the ===
+    // === appropriate number of Lagrange constraints.                       ===
+
     nRankLagrange = 0;
     for (DofIndexSet::iterator it = duals.begin(); it != duals.end(); ++it) {
       if (meshDistributor->getIsRankDof(*it)) {

@@ -275,6 +300,11 @@ namespace AMDiS {
       }
     }

+    // === Get the overall number of Lagrange constraints and create the ===
+    // === mapping dofFirstLagrange, which defines for each dual boundary ===
+    // === node the global index of its first Lagrange constraint.        ===
+
     nOverallLagrange = 0;
     rStartLagrange = 0;
     mpi::getDofNumbering(meshDistributor->getMpiComm(),

@@ -288,7 +318,7 @@ namespace AMDiS {
			 nRankLagrange, nOverallLagrange);

-    // ===  ===
+    // === Communicate dofFirstLagrange to all other ranks. ===

     StdMpi<vector<int> > stdMpi(meshDistributor->getMpiComm());
     RankToDofContainer& sendDofs = meshDistributor->getSendDofs();

@@ -327,7 +357,6 @@ namespace AMDiS {
	  if (primals.count(*(it->second[i])) == 0)
	    dofFirstLagrange[*(it->second[i])] =
	      stdMpi.getRecvData(it->first)[counter++];
       }
     }
@@ -338,11 +367,18 @@ namespace AMDiS {
     globalIndexB.clear();
     DOFAdmin* admin = meshDistributor->getFeSpace()->getAdmin();

+    // === To ensure that all interior nodes on each rank are listed first ===
+    // === in the global index of all B nodes, insert all interior nodes   ===
+    // === first, without defining a correct index.                         ===
+
     for (int i = 0; i < admin->getUsedSize(); i++)
       if (admin->isDofFree(i) == false && primals.count(i) == 0)
	 if (duals.count(i) == 0 && primals.count(i) == 0)
	   globalIndexB[i] = -1;

+    // === Get correct index for all interior nodes. ===
+
     int nInterior = 0;
     for (DofMapping::iterator it = globalIndexB.begin();
	 it != globalIndexB.end(); ++it) {

@@ -354,28 +390,45 @@ namespace AMDiS {
		   static_cast<unsigned int>(admin->getUsedDofs()))
       ("Should not happen!\n");

+    // === And finally, add the global indices of all dual nodes. ===
+
     for (DofIndexSet::iterator it = duals.begin(); it != duals.end(); ++it)
       globalIndexB[*it] = globalDualIndex[*it];
   }


-  void PetscSolverFeti::createMatLagrange(int nComponents)
+  void PetscSolverFeti::createMatLagrange()
   {
     FUNCNAME("PetscSolverFeti::createMatLagrange()");

+    // === Create distributed matrix for Lagrange constraints. ===
+
     MatCreateMPIAIJ(PETSC_COMM_WORLD,
		     nRankLagrange * nComponents, nRankB * nComponents,
		     nOverallLagrange * nComponents, nOverallB * nComponents,
		     2, PETSC_NULL, 2, PETSC_NULL,
		     &mat_lagrange);

+    // === Create for all duals the corresponding Lagrange constraints. On ===
+    // === each rank we traverse all pairs (n, m) of ranks, with n < m,    ===
+    // === that contain this node. If the current rank number is r, and    ===
+    // === n == r, the rank sets 1.0 for the corresponding constraint; if  ===
+    // === m == r, then the rank sets -1.0 for the corresponding           ===
+    // === constraint.                                                      ===
+
     for (DofIndexSet::iterator it = duals.begin(); it != duals.end(); ++it) {
       TEST_EXIT_DBG(dofFirstLagrange.count(*it))("Should not happen!\n");
       TEST_EXIT_DBG(boundaryDofRanks.count(*it))("Should not happen!\n");

+      // Global index of the first Lagrange constraint for this node.
       int index = dofFirstLagrange[*it];
+      // Copy set of all ranks that contain this dual node.
       vector<int> W(boundaryDofRanks[*it].begin(), boundaryDofRanks[*it].end());
+      // Number of ranks that contain this dual node.
       int degree = W.size();

       TEST_EXIT_DBG(globalDualIndex.count(*it))("Should not happen!\n");

@@ -384,6 +437,7 @@ namespace AMDiS {
       for (int i = 0; i < degree; i++) {
	 for (int j = i + 1; j < degree; j++) {
	   if (W[i] == mpiRank || W[j] == mpiRank) {
+	    // Set the constraint for all components of the system.
	     for (int k = 0; k < nComponents; k++) {
	       int rowIndex = index * nComponents + k;
	       int colIndex = dualCol * nComponents + k;
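To make the pair convention described in the comment above concrete, the following small stand-alone sketch (illustration only, not part of the commit; the rank number and the set W are made up) enumerates the rank pairs for one dual node and prints which sign the current rank would contribute to each constraint row:

  #include <cstdio>
  #include <vector>

  int main()
  {
    int mpiRank = 2;                          // hypothetical current rank
    int wArr[] = { 0, 2, 3 };                 // ranks containing this dual node
    std::vector<int> W(wArr, wArr + 3);
    int index = 0;                            // first Lagrange constraint index
    int degree = (int) W.size();

    // Each pair (W[i], W[j]) with i < j defines one Lagrange constraint.
    for (int i = 0; i < degree; i++) {
      for (int j = i + 1; j < degree; j++) {
        if (W[i] == mpiRank)
          std::printf("constraint %d: this rank sets +1.0\n", index);
        else if (W[j] == mpiRank)
          std::printf("constraint %d: this rank sets -1.0\n", index);
        index++;
      }
    }
    return 0;
  }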
@@ -403,7 +457,7 @@ namespace AMDiS {
   }


-  void PetscSolverFeti::createSchurPrimalKsp(int nComponents)
+  void PetscSolverFeti::createSchurPrimalKsp()
   {
     FUNCNAME("PetscSolverFeti::createSchurPrimal()");

@@ -412,10 +466,8 @@ namespace AMDiS {
     petscSchurPrimalData.mat_b_primal = &mat_b_primal;
     petscSchurPrimalData.ksp_b = &ksp_b;

-    MatGetVecs(mat_b_b, PETSC_NULL, &(petscSchurPrimalData.tmp_vec_b));
-    MatGetVecs(mat_primal_primal, PETSC_NULL, &(petscSchurPrimalData.tmp_vec_primal));
+    VecDuplicate(f_b, &(petscSchurPrimalData.tmp_vec_b));
+    VecDuplicate(f_primal, &(petscSchurPrimalData.tmp_vec_primal));

     MatCreateShell(PETSC_COMM_WORLD,
		    nRankPrimals * nComponents, nRankPrimals * nComponents,
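The shell matrix created here only stores the PetscSchurPrimalData context; the actual product y = (K_PiPi - K_PiB * inv(K_BB) * K_BPi) x is computed in petscMultMatSchurPrimal, which is not shown in this diff. A hedged sketch of what such a multiplication routine typically looks like, written against the fields of PetscSchurPrimalData (the function name, include path, and details below are assumptions, not the AMDiS implementation):

  #include <petscmat.h>
  #include <petscksp.h>
  #include "parallel/PetscSolverFeti.h"   // for PetscSchurPrimalData (path assumed)

  // Sketch of a MatShell multiplication for the primal Schur complement:
  //   y = K_PiPi * x  -  K_PiB * inv(K_BB) * K_BPi * x
  PetscErrorCode multMatSchurPrimalSketch(Mat mat, Vec x, Vec y)
  {
    void *ctx;
    MatShellGetContext(mat, &ctx);
    AMDiS::PetscSchurPrimalData *data =
      static_cast<AMDiS::PetscSchurPrimalData*>(ctx);

    // tmp_vec_b = K_BPi * x, then overwrite it with inv(K_BB) * K_BPi * x.
    MatMult(*(data->mat_b_primal), x, data->tmp_vec_b);
    KSPSolve(*(data->ksp_b), data->tmp_vec_b, data->tmp_vec_b);

    // tmp_vec_primal = K_PiB * inv(K_BB) * K_BPi * x
    MatMult(*(data->mat_primal_b), data->tmp_vec_b, data->tmp_vec_primal);

    // y = K_PiPi * x - tmp_vec_primal
    MatMult(*(data->mat_primal_primal), x, y);
    VecAXPY(y, -1.0, data->tmp_vec_primal);

    return 0;
  }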
@@ -461,8 +513,8 @@ namespace AMDiS {
     petscFetiData.ksp_schur_primal = &ksp_schur_primal;

-    MatGetVecs(mat_b_b, PETSC_NULL, &(petscFetiData.tmp_vec_b));
-    MatGetVecs(mat_primal_primal, PETSC_NULL, &(petscFetiData.tmp_vec_primal));
+    VecDuplicate(f_b, &(petscFetiData.tmp_vec_b));
+    VecDuplicate(f_primal, &(petscFetiData.tmp_vec_primal));
     MatGetVecs(mat_lagrange, PETSC_NULL, &(petscFetiData.tmp_vec_lagrange));

@@ -491,12 +543,10 @@ namespace AMDiS {
     petscFetiData.ksp_b = PETSC_NULL;
     petscFetiData.ksp_schur_primal = PETSC_NULL;

     VecDestroy(petscFetiData.tmp_vec_b);
     VecDestroy(petscFetiData.tmp_vec_primal);
     VecDestroy(petscFetiData.tmp_vec_lagrange);

     MatDestroy(mat_feti);
     KSPDestroy(ksp_feti);
   }

@@ -508,15 +558,14 @@ namespace AMDiS {
   {
     FUNCNAME("PetscSolverFeti::recoverSolution()");

     int nComponents = vec.getSize();

-    // ===  ===
+    // === Get local part of the solution for B variables. ===

     PetscScalar *localSolB;
     VecGetArray(vec_sol_b, &localSolB);

-    // ===  ===
+    // === Create scatter to get solutions of all primal nodes that are ===
+    // === contained in rank's domain.                                   ===

     vector<PetscInt> globalIsIndex, localIsIndex;
     globalIsIndex.reserve(globalPrimalIndex.size() * nComponents);

@@ -549,7 +598,6 @@ namespace AMDiS {
     Vec local_sol_primal;
     VecCreateSeq(PETSC_COMM_SELF, localIsIndex.size(), &local_sol_primal);

     VecScatter primalScatter;
     VecScatterCreate(vec_sol_primal, globalIs, local_sol_primal, localIs,
		      &primalScatter);
     VecScatterBegin(primalScatter, vec_sol_primal, local_sol_primal,

@@ -561,12 +609,11 @@ namespace AMDiS {
     ISDestroy(localIs);
     VecScatterDestroy(primalScatter);

     PetscScalar *localSolPrimal;
     VecGetArray(local_sol_primal, &localSolPrimal);

+    // === And copy from PETSc local vectors to the DOF vectors. ===

     for (int i = 0; i < nComponents; i++) {
       DOFVector<double>& dofVec = *(vec.getDOFVector(i));

@@ -588,7 +635,6 @@ namespace AMDiS {
     VecRestoreArray(vec_sol_b, &localSolB);
     VecRestoreArray(local_sol_primal, &localSolPrimal);
     VecDestroy(local_sol_primal);
   }
@@ -599,33 +645,41 @@ namespace AMDiS {
   {
     FUNCNAME("PetscSolverFeti::fillPetscMatrix()");

-    nComponents = vec->getSize();
+    // === Create all sets and indices. ===

     updateDofData();

+    int nComponents = vec->getSize();
+
+    // === Create matrices for the FETI-DP method. ===
+
+    int nRowsRankB = nRankB * nComponents;
+    int nRowsOverallB = nOverallB * nComponents;
+    int nRowsRankPrimal = nRankPrimals * nComponents;
+    int nRowsOverallPrimal = nOverallPrimals * nComponents;

     MatCreateMPIAIJ(PETSC_COMM_WORLD,
-		    nRankB * nComponents, nRankB * nComponents,
-		    nOverallB * nComponents, nOverallB * nComponents,
-		    100, PETSC_NULL, 100, PETSC_NULL, &mat_b_b);
+		    nRowsRankB, nRowsRankB,
+		    nRowsOverallB, nRowsOverallB,
+		    100, PETSC_NULL, 100, PETSC_NULL, &mat_b_b);

     MatCreateMPIAIJ(PETSC_COMM_WORLD,
-		    nRankPrimals * nComponents, nRankPrimals * nComponents,
-		    nOverallPrimals * nComponents, nOverallPrimals * nComponents,
-		    10, PETSC_NULL, 10, PETSC_NULL, &mat_primal_primal);
+		    nRowsRankPrimal, nRowsRankPrimal,
+		    nRowsOverallPrimal, nRowsOverallPrimal,
+		    10, PETSC_NULL, 10, PETSC_NULL, &mat_primal_primal);

     MatCreateMPIAIJ(PETSC_COMM_WORLD,
-		    nRankB * nComponents, nRankPrimals * nComponents,
-		    nOverallB * nComponents, nOverallPrimals * nComponents,
-		    100, PETSC_NULL, 100, PETSC_NULL, &mat_b_primal);
+		    nRowsRankB, nRowsRankPrimal,
+		    nRowsOverallB, nRowsOverallPrimal,
+		    100, PETSC_NULL, 100, PETSC_NULL, &mat_b_primal);

     MatCreateMPIAIJ(PETSC_COMM_WORLD,
-		    nRankPrimals * nComponents, nRankB * nComponents,
-		    nOverallPrimals * nComponents, nOverallB * nComponents,
-		    100, PETSC_NULL, 100, PETSC_NULL, &mat_primal_b);
+		    nRowsRankPrimal, nRowsRankB,
+		    nRowsOverallPrimal, nRowsOverallB,
+		    100, PETSC_NULL, 100, PETSC_NULL, &mat_primal_b);

     // === Prepare traverse of sequentially created matrices. ===

     using mtl::tag::row;
     using mtl::tag::nz;
     using mtl::begin;
     using mtl::end;

     namespace traits = mtl::traits;
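Taken together, the four distributed matrices created above are the blocks of the system matrix split into B (interior plus dual) and primal variables. In the usual FETI-DP notation (the symbols below are the standard ones from the literature, inferred from the matrix dimensions rather than taken from the source):

$$K = \begin{pmatrix} K_{BB} & K_{B\Pi} \\ K_{\Pi B} & K_{\Pi\Pi} \end{pmatrix}, \qquad
\texttt{mat\_b\_b} = K_{BB},\;\; \texttt{mat\_b\_primal} = K_{B\Pi},\;\;
\texttt{mat\_primal\_b} = K_{\Pi B},\;\; \texttt{mat\_primal\_primal} = K_{\Pi\Pi}.$$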
@@ -641,12 +695,19 @@ namespace AMDiS {
     values.reserve(300);
     valuesOther.reserve(300);

-    for (int i = 0; i < nComponents; i++)
-      for (int j = 0; j < nComponents; j++)
-	if ((*mat)[i][j]) {
+    // === Traverse all sequentially created matrices and add the values to ===
+    // === the global PETSc matrices.                                        ===
+
+    for (int i = 0; i < nComponents; i++) {
+      for (int j = 0; j < nComponents; j++) {
+	if (!(*mat)[i][j])
+	  continue;

	 traits::col<Matrix>::type col((*mat)[i][j]->getBaseMatrix());
	 traits::const_value<Matrix>::type value((*mat)[i][j]->getBaseMatrix());

+	// Traverse all rows.
	 for (cursor_type cursor = begin<row>((*mat)[i][j]->getBaseMatrix()),
	        cend = end<row>((*mat)[i][j]->getBaseMatrix());
	      cursor != cend; ++cursor) {
	   bool rowPrimal = primals.count(*cursor) != 0;

@@ -657,9 +718,13 @@ namespace AMDiS {
	   colsOther.clear();
	   valuesOther.clear();

+	  // Traverse all columns.
	   for (icursor_type icursor = begin<nz>(cursor),
		  icend = end<nz>(cursor); icursor != icend; ++icursor) {

	     if (primals.count(col(*icursor)) != 0) {
+	      // Column is a primal variable.
	       TEST_EXIT_DBG(globalPrimalIndex.count(col(*icursor)))
		 ("No global primal index for DOF %d!\n", col(*icursor));

@@ -673,6 +738,8 @@ namespace AMDiS {
		 valuesOther.push_back(value(*icursor));
	       }
	     } else {
+	      // Column is not a primal variable.
	       TEST_EXIT_DBG(globalIndexB.count(col(*icursor)))
		 ("No global B index for DOF %d!\n", col(*icursor));

@@ -713,7 +780,10 @@ namespace AMDiS {
	   }
	 }
       }
     }

+    // === Start global assembly procedure. ===
+
     MatAssemblyBegin(mat_b_b, MAT_FINAL_ASSEMBLY);
     MatAssemblyEnd(mat_b_b, MAT_FINAL_ASSEMBLY);

@@ -728,7 +798,7 @@ namespace AMDiS {
     MatAssemblyEnd(mat_primal_b, MAT_FINAL_ASSEMBLY);

-    // ===  ===
+    // === Create and fill PETSc's right hand side vectors. ===

     VecCreate(PETSC_COMM_WORLD, &f_b);
     VecSetSizes(f_b, nRankB * nComponents, nOverallB * nComponents);
@@ -768,29 +838,33 @@ namespace AMDiS {
     VecAssemblyEnd(f_primal);

-    createMatLagrange(nComponents);
+    // === Create and fill PETSc matrix for Lagrange constraints. ===
+    createMatLagrange();

-    createSchurPrimalKsp(nComponents);
+    // === Create PETSc solver for the Schur complement on primal variables. ===
+    createSchurPrimalKsp();

+    // === Create PETSc solver for the FETI-DP operator. ===
     createFetiKsp();
   }


-  void PetscSolverFeti::solvePetscMatrix(SystemVector &vec, AdaptInfo *adaptInfo)
+  void PetscSolverFeti::solveFetiMatrix(SystemVector &vec)
   {
-    FUNCNAME("PetscSolverFeti::solvePetscMatrix()");
-
-    int debug = 0;
-    Parameters::get("parallel->debug feti", debug);
-
-    if (debug) {
-      int nComponents = vec.getSize();
+    FUNCNAME("PetscSolverFeti::solveFetiMatrix()");

+    // Create transpose of Lagrange matrix.
     Mat mat_lagrange_transpose;
     MatTranspose(mat_lagrange, MAT_INITIAL_MATRIX, &mat_lagrange_transpose);

+    // === Create nested matrix which will contain the overall FETI system. ===
+
     Mat A;
     Mat nestedA[3][3];
     nestedA[0][0] = mat_b_b;
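For orientation: the 3x3 nested layout started here is not written out as a formula in the source, but it corresponds to the standard FETI-DP saddle-point system (symbols below follow the usual literature convention, with B the jump operator stored in mat_lagrange and B^T its transpose mat_lagrange_transpose):

$$\begin{pmatrix}
K_{BB} & K_{B\Pi} & B^{T} \\
K_{\Pi B} & K_{\Pi\Pi} & 0 \\
B & 0 & 0
\end{pmatrix}
\begin{pmatrix} u_B \\ u_\Pi \\ \lambda \end{pmatrix}
=
\begin{pmatrix} f_B \\ f_\Pi \\ 0 \end{pmatrix}.$$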
@@ -815,6 +889,8 @@ namespace AMDiS {
     int rStartNest = (rStartB + rStartPrimals) * nComponents + rStartLagrange;

     {
+      // === Test some matrix sizes. ===
+
       int matRow, matCol;
       MatGetLocalSize(A, &matRow, &matCol);
       TEST_EXIT_DBG(matRow == nRankNest)("Should not happen!\n");

@@ -825,6 +901,8 @@ namespace AMDiS {
       TEST_EXIT_DBG(matRow == rStartNest)("Should not happen!\n");
     }

+    // === Create rhs and solution vectors for the overall FETI system. ===
+
     Vec f;
     VecCreate(PETSC_COMM_WORLD, &f);
     VecSetSizes(f, nRankNest, nOverallNest);

@@ -833,20 +911,19 @@ namespace AMDiS {
     Vec b;
     VecDuplicate(f, &b);

+    // === Fill rhs vector by copying the primal and non-primal PETSc vectors. ===
+
     PetscScalar *local_f_b;
     VecGetArray(f_b, &local_f_b);

-    {
-      int size;
-      VecGetLocalSize(f_b, &size);
-      TEST_EXIT_DBG(size == nRankB * nComponents)("Should not happen!\n");
-    }

     PetscScalar *local_f_primal;
     VecGetArray(f_primal, &local_f_primal);

+    {
+      int size;
+      VecGetLocalSize(f_b, &size);
+      TEST_EXIT_DBG(size == nRankB * nComponents)("Should not happen!\n");
+      VecGetLocalSize(f_primal, &size);
+      TEST_EXIT_DBG(size == nRankPrimals * nComponents)("Should not happen!\n");
+    }

@@ -865,17 +942,18 @@ namespace AMDiS {
     VecRestoreArray(f_primal, &local_f_primal);

+    // === Create solver and solve the overall FETI system. ===
+
     KSP ksp;
     KSPCreate(PETSC_COMM_WORLD, &ksp);
     KSPSetOperators(ksp, A, A, SAME_NONZERO_PATTERN);
     KSPSetFromOptions(ksp);
     KSPSolve(ksp, f, b);

+    // === Reconstruct FETI solution vectors. ===
+
     Vec u_b, u_primal;
     VecDuplicate(f_b, &u_b);

@@ -896,15 +974,11 @@ namespace AMDiS {
       local_u_b[i] = local_b[index++];

     for (int i = 0; i < nRankPrimals * nComponents; i++)
       local_u_primal[i] = local_b[index++];

-    for (int i = 0; i < nRankLagrange; i++) {
-      MSG("MY %d-ith Lagrange: %f\n", i, local_b[index++]);
-    }

     VecRestoreArray(f, &local_b);
     VecRestoreArray(u_b, &local_u_b);
     VecRestoreArray(u_primal, &local_u_primal);

     recoverSolution(u_b, u_primal, vec);

     VecDestroy(u_b);

@@ -913,17 +987,24 @@ namespace AMDiS {
     VecDestroy(f);
     KSPDestroy(ksp);
   }
-  }
-  else
-  {
+
+
+  void PetscSolverFeti::solveReducedFetiMatrix(SystemVector &vec)
+  {
+    FUNCNAME("PetscSolverFeti::solveReducedFetiMatrix()");
+
+    // === Create solver for the non-primal (thus local) variables. ===
+
     KSPCreate(PETSC_COMM_WORLD, &ksp_b);
     KSPSetOperators(ksp_b, mat_b_b, mat_b_b, SAME_NONZERO_PATTERN);
     KSPSetOptionsPrefix(ksp_b, "solver_b_");
     KSPSetFromOptions(ksp_b);

+    // RHS and solution vector.
     Vec vec_rhs;

+    // Some temporary vectors.
     Vec tmp_b0, tmp_b1, tmp_lagrange0, tmp_primal0, tmp_primal1;
     MatGetVecs(mat_lagrange, PETSC_NULL, &tmp_lagrange0);
     MatGetVecs(mat_lagrange, PETSC_NULL, &vec_rhs);

@@ -943,7 +1024,6 @@ namespace AMDiS {
     MatMult(mat_primal_b, tmp_b0, tmp_primal0);

     // tmp_primal0 = f_Pi - M_PiB * inv(K_BB) * f_b
-    // VecAXPBY(tmp_primal0, 1.0, -1.0, f_primal);
     VecAXPBY(tmp_primal0, -1.0, 1.0, f_primal);

     // tmp_primal0 = inv(S_PiPi) (f_Pi - M_PiB * inv(K_BB) * f_b)
@@ -999,7 +1079,6 @@ namespace AMDiS {
     // === Destroy all data structures. ===

     VecDestroy(vec_rhs);

     VecDestroy(tmp_b0);
     VecDestroy(tmp_b1);

@@ -1024,6 +1103,22 @@ namespace AMDiS {
     destroyFetiKsp();
   }


+  void PetscSolverFeti::solvePetscMatrix(SystemVector &vec, AdaptInfo *adaptInfo)
+  {
+    FUNCNAME("PetscSolverFeti::solvePetscMatrix()");
+
+    int debug = 0;
+    Parameters::get("parallel->debug feti", debug);
+
+    if (debug) {
+      WARNING("FETI matrix is solved globally, thus without reducing to the lagrange multipliers!\n");
+      solveFetiMatrix(vec);
+    } else {
+      solveReducedFetiMatrix(vec);
+    }
+  }

 #endif

AMDiS/src/parallel/PetscSolverFeti.h

@@ -33,47 +33,81 @@ namespace AMDiS {
 #ifdef HAVE_PETSC_DEV

   /** \brief
    * This structure is used when defining the MatShell operation for solving
    * the primal Schur complement. \ref petscMultMatSchurPrimal
    */
   struct PetscSchurPrimalData {
-    Mat *mat_primal_b, *mat_b_primal, *mat_primal_primal;
+    /// Pointer to the matrix containing the primal variables.
+    Mat *mat_primal_primal;
+
+    /// Coupling matrices between the primal and the B variables.
+    Mat *mat_primal_b, *mat_b_primal;

+    /// Temporary vector on the B variables.
     Vec tmp_vec_b;

+    /// Temporary vector on the primal variables.
     Vec tmp_vec_primal;

+    /// Pointer to the solver for \ref PetscSolverFeti::mat_bb.
     KSP *ksp_b;
   };
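For context (this wiring is not part of the header shown here): such a context struct is typically handed to PETSc when the shell matrix for the primal Schur complement is created. A minimal sketch, assuming the referenced function petscMultMatSchurPrimal and using the same PETSc calls that appear in the .cc diff above (function and variable names are illustrative, not the AMDiS code):

  #include <petscksp.h>
  // Assumes PetscSchurPrimalData and petscMultMatSchurPrimal are visible.

  void createSchurPrimalKspSketch(AMDiS::PetscSchurPrimalData *data,
                                  int nRankPrimals, int nOverallPrimals,
                                  int nComponents, KSP *ksp_schur_primal)
  {
    // Shell matrix that represents S_PiPi without assembling it.
    Mat mat_schur_primal;
    MatCreateShell(PETSC_COMM_WORLD,
                   nRankPrimals * nComponents, nRankPrimals * nComponents,
                   nOverallPrimals * nComponents, nOverallPrimals * nComponents,
                   data, &mat_schur_primal);
    MatShellSetOperation(mat_schur_primal, MATOP_MULT,
                         (void(*)(void)) petscMultMatSchurPrimal);

    // Krylov solver acting on the shell matrix.
    KSPCreate(PETSC_COMM_WORLD, ksp_schur_primal);
    KSPSetOperators(*ksp_schur_primal, mat_schur_primal, mat_schur_primal,
                    SAME_NONZERO_PATTERN);
    KSPSetFromOptions(*ksp_schur_primal);
  }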
   /** \brief
    * This structure is used when defining the FETI-DP operator for solving
    * the system matrix reduced to the Lagrange multipliers.
    * \ref petscMultMatFeti
    */
   struct PetscFetiData {
-    Mat *mat_primal_b, *mat_b_primal, *mat_primal_primal;
+    /// Pointer to the matrix containing the primal variables.
+    Mat *mat_primal_primal;
+
+    /// Coupling matrices between the primal and the B variables.
+    Mat *mat_primal_b, *mat_b_primal;
+
+    /// Matrix of Lagrange variables.
     Mat *mat_lagrange;