Commit 805f4b62, authored May 16, 2012 by Thomas Witkowski
Added FETI-DP timings.
parent d00516d6
Showing 4 changed files with 166 additions and 80 deletions:

  AMDiS/src/parallel/PetscSolverFeti.cc            +119  -39
  AMDiS/src/parallel/PetscSolverFeti.h               +3   -1
  AMDiS/src/parallel/PetscSolverGlobalMatrix.cc     +33  -37
  AMDiS/src/parallel/PetscSolverGlobalMatrix.h      +11   -3
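All four files apply the same instrumentation pattern: take MPI::Wtime() before a phase and, when the new printTimings flag is set, report the elapsed time through MSG() under a numbered "FETI-DP timing NN" label. The following is a minimal stand-alone sketch of that pattern, not AMDiS code: it uses the plain C MPI API and printf as a stand-in for AMDiS's MSG macro, and the function timedPhase is purely illustrative.

#include <cstdio>
#include <mpi.h>

// Stand-in for AMDiS's MSG() logging macro (assumption for this sketch).
#define MSG std::printf

static void timedPhase(bool printTimings)
{
  double wtime = MPI_Wtime();   // the commit uses the C++ binding MPI::Wtime()

  // ... work of the phase: assemble a matrix, factor it, solve, ...

  if (printTimings)
    MSG("FETI-DP timing 05: %.5f seconds (creation of lagrange constraint matrix)\n",
        MPI_Wtime() - wtime);
}

int main(int argc, char **argv)
{
  MPI_Init(&argc, &argv);
  timedPhase(true);
  MPI_Finalize();
  return 0;
}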
AMDiS/src/parallel/PetscSolverFeti.cc
@@ -189,10 +189,11 @@ namespace AMDiS {
     : PetscSolver(),
       schurPrimalSolver(0),
       multiLevelTest(false),
-      subDomainSolver(NULL),
+      subdomain(NULL),
       meshLevel(0),
       rStartInterior(0),
-      nGlobalOverallInterior(0)
+      nGlobalOverallInterior(0),
+      printTimings(false)
   {
     FUNCNAME("PetscSolverFeti::PetscSolverFeti()");
@@ -220,6 +221,8 @@ namespace AMDiS {
     Parameters::get("parallel->multi level test", multiLevelTest);
     if (multiLevelTest)
       meshLevel = 1;
+
+    Parameters::get("parallel->print timings", printTimings);
   }
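The new switch is read with Parameters::get, mirroring the existing "parallel->multi level test" option in the hunk above. Assuming the usual AMDiS init-file syntax (key: value; the 0/1 encoding of the bool is also an assumption here), it would be enabled with a line like:

  parallel->print timings: 1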
@@ -232,17 +235,17 @@ namespace AMDiS {
     MeshLevelData& levelData = meshDistributor->getMeshLevelData();

-    if (subDomainSolver == NULL) {
-      subDomainSolver = new PetscSolverGlobalMatrix();
+    if (subdomain == NULL) {
+      subdomain = new PetscSolverGlobalMatrix();

       if (meshLevel == 0) {
-        subDomainSolver->setMeshDistributor(meshDistributor,
+        subdomain->setMeshDistributor(meshDistributor,
                                             mpiCommGlobal, mpiCommLocal);
       } else {
-        subDomainSolver->setMeshDistributor(meshDistributor,
+        subdomain->setMeshDistributor(meshDistributor,
                                             levelData.getMpiComm(meshLevel - 1),
                                             levelData.getMpiComm(meshLevel));
-        subDomainSolver->setLevel(meshLevel);
+        subdomain->setLevel(meshLevel);
       }
     }
@@ -289,6 +292,8 @@ namespace AMDiS {
   {
     FUNCNAME("PetscSolverFeti::createFetiData()");

+    double timeCounter = MPI::Wtime();
+
     TEST_EXIT(meshDistributor)("No mesh distributor object defined!\n");
     TEST_EXIT(meshDistributor->getFeSpaces().size() > 0)
       ("No FE space defined in mesh distributor!\n");
@@ -384,7 +389,13 @@ namespace AMDiS {
     }

     // If multi level test, inform sub domain solver about coarse space.
-    subDomainSolver->setDofMapping(&localDofMap, &primalDofMap);
+    subdomain->setDofMapping(&localDofMap, &primalDofMap);
+
+    if (printTimings) {
+      timeCounter = MPI::Wtime() - timeCounter;
+      MSG("FETI-DP timing 01: %.5f seconds (creation of basic data structures)",
+          timeCounter);
+    }
   }
@@ -644,6 +655,8 @@ namespace AMDiS {
   {
     FUNCNAME("PetscSolverFeti::createMatLagrange()");

+    double wtime = MPI::Wtime();
+
     // === Create distributed matrix for Lagrange constraints. ===

     MatCreateMPIAIJ(mpiCommGlobal,
@@ -694,6 +707,10 @@ namespace AMDiS {
     MatAssemblyBegin(mat_lagrange, MAT_FINAL_ASSEMBLY);
     MatAssemblyEnd(mat_lagrange, MAT_FINAL_ASSEMBLY);
+
+    if (printTimings)
+      MSG("FETI-DP timing 05: %.5f seconds (creation of lagrange constraint matrix)\n",
+          MPI::Wtime() - wtime);
   }
@@ -704,7 +721,7 @@ namespace AMDiS {
     if (schurPrimalSolver == 0) {
       MSG("Create iterative schur primal solver!\n");

-      schurPrimalData.subSolver = subDomainSolver;
+      schurPrimalData.subSolver = subdomain;

       VecCreateMPI(mpiCommGlobal,
                    localDofMap.getRankDofs(),
@@ -757,13 +774,13 @@ namespace AMDiS {
       for (int i = 0; i < nRowsRankB; i++) {
         PetscInt row = localDofMap.getStartDofs() + i;
-        MatGetRow(subDomainSolver->getMatIntCoarse(), row, &nCols, &cols, &values);
+        MatGetRow(subdomain->getMatIntCoarse(), row, &nCols, &cols, &values);

         for (int j = 0; j < nCols; j++)
           if (values[j] != 0.0)
             mat_b_primal_cols[cols[j]].push_back(make_pair(i, values[j]));

-        MatRestoreRow(subDomainSolver->getMatIntCoarse(), row, &nCols, &cols, &values);
+        MatRestoreRow(subdomain->getMatIntCoarse(), row, &nCols, &cols, &values);
       }

       TEST_EXIT(static_cast<int>(mat_b_primal_cols.size()) ==
@@ -782,7 +799,7 @@ namespace AMDiS {
         VecAssemblyBegin(tmpVec);
         VecAssemblyEnd(tmpVec);

-        subDomainSolver->solve(tmpVec, tmpVec);
+        subdomain->solve(tmpVec, tmpVec);

         PetscScalar *tmpValues;
         VecGetArray(tmpVec, &tmpValues);
@@ -800,17 +817,13 @@ namespace AMDiS {
       MatAssemblyBegin(matBPi, MAT_FINAL_ASSEMBLY);
       MatAssemblyEnd(matBPi, MAT_FINAL_ASSEMBLY);

-      MatDuplicate(subDomainSolver->getMatCoarseCoarse(), MAT_COPY_VALUES, &mat_schur_primal);
-      MatMatMult(subDomainSolver->getMatCoarseInt(), matBPi, MAT_INITIAL_MATRIX, PETSC_DEFAULT, &matPrimal);
+      MatDuplicate(subdomain->getMatCoarseCoarse(), MAT_COPY_VALUES, &mat_schur_primal);
+      MatMatMult(subdomain->getMatCoarseInt(), matBPi, MAT_INITIAL_MATRIX, PETSC_DEFAULT, &matPrimal);
       MatAXPY(mat_schur_primal, -1.0, matPrimal, DIFFERENT_NONZERO_PATTERN);

       MatDestroy(&matPrimal);
       MatDestroy(&matBPi);

-      MatInfo minfo;
-      MatGetInfo(mat_schur_primal, MAT_GLOBAL_SUM, &minfo);
-      MSG("Schur primal matrix nnz = %f\n", minfo.nz_used);
-
       KSPCreate(mpiCommGlobal, &ksp_schur_primal);
       KSPSetOperators(ksp_schur_primal, mat_schur_primal, mat_schur_primal,
                       SAME_NONZERO_PATTERN);
@@ -822,8 +835,20 @@ namespace AMDiS {
       PCFactorSetMatSolverPackage(pc_schur_primal, MATSOLVERMUMPS);
       KSPSetFromOptions(ksp_schur_primal);

-      MSG("Creating Schur primal matrix needed %.5f seconds.\n",
-          MPI::Wtime() - wtime);
+      if (printTimings) {
+        MatInfo minfo;
+        MatGetInfo(mat_schur_primal, MAT_GLOBAL_SUM, &minfo);
+        MSG("Schur primal matrix nnz = %f\n", minfo.nz_used);
+
+        MSG("FETI-DP timing 06: %.5f seconds (creation of schur primal matrix)\n",
+            MPI::Wtime() - wtime);
+
+        wtime = MPI::Wtime();
+        KSPSetUp(ksp_schur_primal);
+        KSPSetUpOnBlocks(ksp_schur_primal);
+        MSG("FETI-DP timing 07: %.5f seconds (factorization of primal schur matrix).\n",
+            MPI::Wtime() - wtime);
+      }
     }
   }
@@ -851,7 +876,7 @@ namespace AMDiS {
     // === Create FETI-DP solver object. ===

     fetiData.mat_lagrange = &mat_lagrange;
-    fetiData.subSolver = subDomainSolver;
+    fetiData.subSolver = subdomain;
     fetiData.ksp_schur_primal = &ksp_schur_primal;

     VecCreateMPI(mpiCommGlobal,
@@ -942,6 +967,17 @@ namespace AMDiS {
       PCSetType(precon_feti, PCSHELL);
       PCShellSetContext(precon_feti, static_cast<void*>(&fetiDirichletPreconData));
       PCShellSetApply(precon_feti, petscApplyFetiDirichletPrecon);

+      // For the case, that we want to print the timings, we force the LU
+      // factorization of the local problems to be done here explicitly.
+      if (printTimings) {
+        double wtime = MPI::Wtime();
+        KSPSetUp(ksp_interior);
+        KSPSetUpOnBlocks(ksp_interior);
+        MSG("FETI-DP timing 08: %.5f seconds (factorization of Dirichlet preconditoner matrices)\n",
+            MPI::Wtime() - wtime);
+      }
+
       break;
@@ -1142,13 +1178,21 @@ namespace AMDiS {
     createFetiData();

     // === Create matrices for the FETI-DP method. ===

-    subDomainSolver->fillPetscMatrix(mat);
+    double wtime = MPI::Wtime();
+    subdomain->fillPetscMatrix(mat);
+    if (printTimings)
+      MSG("FETI-DP timing 02: %.5f seconds (creation of interior matrices)\n",
+          MPI::Wtime() - wtime);

     // === Create matrices for FETI-DP preconditioner. ===

     if (fetiPreconditioner != FETI_NONE) {
+      wtime = MPI::Wtime();
+
       int nRowsDual = dualDofMap.getRankDofs();

       MatCreateSeqAIJ(PETSC_COMM_SELF,
@@ -1298,8 +1342,24 @@ namespace AMDiS {
       MatAssemblyBegin(mat_duals_interior, MAT_FINAL_ASSEMBLY);
       MatAssemblyEnd(mat_duals_interior, MAT_FINAL_ASSEMBLY);
       }

+      if (printTimings)
+        MSG("FETI-DP timing 03: %.5f seconds (creation of preconditioner matrices)\n",
+            MPI::Wtime() - wtime);
     }

+    // For the case, that we want to print the timings, we force the LU
+    // factorization of the local problems to be done here explicitly.
+    if (printTimings) {
+      wtime = MPI::Wtime();
+      KSPSetUp(subdomain->getSolver());
+      KSPSetUpOnBlocks(subdomain->getSolver());
+      MSG("FETI-DP timing 04: %.5f seconds (factorization of subdomain matrices)\n",
+          MPI::Wtime() - wtime);
+    }
+
     // === Create and fill PETSc matrix for Lagrange constraints. ===
     createMatLagrange(feSpaces);
@@ -1320,7 +1380,7 @@ namespace AMDiS {
   {
     FUNCNAME("PetscSolverFeti::fillPetscRhs()");

-    subDomainSolver->fillPetscRhs(vec);
+    subdomain->fillPetscRhs(vec);
   }
@@ -1364,41 +1424,56 @@ namespace AMDiS {
     // === Create new rhs ===

+    double wtime = MPI::Wtime();
+
     // d = L inv(K_BB) f_B - L inv(K_BB) K_BPi inv(S_PiPi) [f_Pi - K_PiB inv(K_BB) f_B]

     // vec_rhs = L * inv(K_BB) * f_B
-    subDomainSolver->solveGlobal(subDomainSolver->getRhsInterior(), tmp_b0);
+    subdomain->solveGlobal(subdomain->getRhsInterior(), tmp_b0);
     MatMult(mat_lagrange, tmp_b0, vec_rhs);

     // tmp_primal0 = M_PiB * inv(K_BB) * f_B
-    MatMult(subDomainSolver->getMatCoarseInt(), tmp_b0, tmp_primal0);
+    MatMult(subdomain->getMatCoarseInt(), tmp_b0, tmp_primal0);

     // tmp_primal0 = f_Pi - M_PiB * inv(K_BB) * f_B
-    VecAXPBY(tmp_primal0, 1.0, -1.0, subDomainSolver->getRhsCoarseSpace());
+    VecAXPBY(tmp_primal0, 1.0, -1.0, subdomain->getRhsCoarseSpace());

     // tmp_primal0 = inv(S_PiPi) (f_Pi - M_PiB * inv(K_BB) * f_B)
     KSPSolve(ksp_schur_primal, tmp_primal0, tmp_primal0);

     //
-    MatMult(subDomainSolver->getMatIntCoarse(), tmp_primal0, tmp_b0);
-    subDomainSolver->solveGlobal(tmp_b0, tmp_b0);
+    MatMult(subdomain->getMatIntCoarse(), tmp_primal0, tmp_b0);
+    subdomain->solveGlobal(tmp_b0, tmp_b0);
     MatMult(mat_lagrange, tmp_b0, tmp_lagrange0);

     //
     VecAXPBY(vec_rhs, -1.0, 1.0, tmp_lagrange0);

+    if (printTimings) {
+      MSG("FETI-DP timing 09: %.5f seconds (create rhs vector)\n",
+          MPI::Wtime() - wtime);
+      wtime = MPI::Wtime();
+    }
+
     // === Solve with FETI-DP operator. ===
     KSPSolve(ksp_feti, vec_rhs, vec_rhs);

+    if (printTimings) {
+      MSG("FETI-DP timing 10: %.5f seconds (application of FETI-DP operator)\n",
+          MPI::Wtime() - wtime);
+      wtime = MPI::Wtime();
+    }
+
     // === Solve for u_primals. ===

-    VecCopy(subDomainSolver->getRhsCoarseSpace(), tmp_primal0);
-    subDomainSolver->solveGlobal(subDomainSolver->getRhsInterior(), tmp_b0);
-    MatMult(subDomainSolver->getMatCoarseInt(), tmp_b0, tmp_primal1);
+    VecCopy(subdomain->getRhsCoarseSpace(), tmp_primal0);
+    subdomain->solveGlobal(subdomain->getRhsInterior(), tmp_b0);
+    MatMult(subdomain->getMatCoarseInt(), tmp_b0, tmp_primal1);
     VecAXPBY(tmp_primal0, -1.0, 1.0, tmp_primal1);

     MatMultTranspose(mat_lagrange, vec_rhs, tmp_b0);
-    subDomainSolver->solveGlobal(tmp_b0, tmp_b0);
-    MatMult(subDomainSolver->getMatCoarseInt(), tmp_b0, tmp_primal1);
+    subdomain->solveGlobal(tmp_b0, tmp_b0);
+    MatMult(subdomain->getMatCoarseInt(), tmp_b0, tmp_primal1);
     VecAXPBY(tmp_primal0, 1.0, 1.0, tmp_primal1);

     KSPSolve(ksp_schur_primal, tmp_primal0, tmp_primal0);
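For reference, the right-hand-side formula compressed into the comment at the top of the hunk above can be written out as follows, where L is the Lagrange constraint matrix mat_lagrange, K_BB, K_BPi, K_PiB are the interior/primal blocks of the subdomain matrix, S_PiPi is the primal Schur complement, and f_B, f_Pi are the corresponding parts of the right-hand side (the LaTeX rendering is added here for readability only):

  d = L K_{BB}^{-1} f_B - L K_{BB}^{-1} K_{B\Pi}\, S_{\Pi\Pi}^{-1} \left( f_\Pi - K_{\Pi B} K_{BB}^{-1} f_B \right)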
@@ -1406,18 +1481,23 @@ namespace AMDiS {
     // === Solve for u_b. ===

-    VecCopy(subDomainSolver->getRhsInterior(), tmp_b0);
+    VecCopy(subdomain->getRhsInterior(), tmp_b0);
     MatMultTranspose(mat_lagrange, vec_rhs, tmp_b1);
     VecAXPBY(tmp_b0, -1.0, 1.0, tmp_b1);

-    MatMult(subDomainSolver->getMatIntCoarse(), tmp_primal0, tmp_b1);
+    MatMult(subdomain->getMatIntCoarse(), tmp_primal0, tmp_b1);
     VecAXPBY(tmp_b0, -1.0, 1.0, tmp_b1);
-    subDomainSolver->solveGlobal(tmp_b0, tmp_b0);
+    subdomain->solveGlobal(tmp_b0, tmp_b0);

     // === And recover AMDiS solution vectors. ===
     recoverSolution(tmp_b0, tmp_primal0, vec);

+    if (printTimings) {
+      MSG("FETI-DP timing 11: %.5f seconds (Inner solve and solution recovery)\n",
+          MPI::Wtime() - wtime);
+    }
+
     VecDestroy(&vec_rhs);
     VecDestroy(&tmp_b0);
     VecDestroy(&tmp_b1);
@@ -1448,7 +1528,7 @@ namespace AMDiS {
     destroyFetiKsp();

-    subDomainSolver->destroyMatrixData();
+    subdomain->destroyMatrixData();
   }
@@ -1456,7 +1536,7 @@ namespace AMDiS {
   {
     FUNCNAME("PetscSolverFeti::destroyVectorData()");

-    subDomainSolver->destroyVectorData();
+    subdomain->destroyVectorData();
   }
AMDiS/src/parallel/PetscSolverFeti.h
@@ -258,13 +258,15 @@ namespace AMDiS {
     bool multiLevelTest;

-    PetscSolver* subDomainSolver;
+    PetscSolver* subdomain;

     int meshLevel;

     int rStartInterior;

     int nGlobalOverallInterior;
+
+    bool printTimings;
   };
 }
AMDiS/src/parallel/PetscSolverGlobalMatrix.cc
@@ -332,14 +332,16 @@ namespace AMDiS {
     TEST_EXIT_DBG(interiorMap)("No parallel DOF map defined!\n");

-    // === Transfer values from DOF vector to the PETSc vector. ===
     if (coarseSpaceMap) {
-      fillPetscRhsWithCoarseSpace(vec);
+      for (int i = 0; i < vec->getSize(); i++)
+        setDofVector(rhsInterior, rhsCoarseSpace, vec->getDOFVector(i), i);
     } else {
+      // === Transfer values from DOF vector to the PETSc vector. ===
       for (int i = 0; i < vec->getSize(); i++)
         setDofVector(rhsInterior, vec->getDOFVector(i), i);
     }

     VecAssemblyBegin(rhsInterior);
     VecAssemblyEnd(rhsInterior);
@@ -360,27 +362,6 @@ namespace AMDiS {
   }

-  void PetscSolverGlobalMatrix::fillPetscRhsWithCoarseSpace(SystemVector *vec)
-  {
-    FUNCNAME("SubDomainSolver::fillPetscRhs()");
-
-    for (int i = 0; i < vec->getSize(); i++) {
-      const FiniteElemSpace *feSpace = vec->getDOFVector(i)->getFeSpace();
-      DOFVector<double>::Iterator dofIt(vec->getDOFVector(i), USED_DOFS);
-
-      for (dofIt.reset(); !dofIt.end(); ++dofIt) {
-        int index = dofIt.getDOFIndex();
-
-        if (isCoarseSpace(feSpace, index)) {
-          index = coarseSpaceMap->getMatIndex(i, index);
-          VecSetValue(rhsCoarseSpace, index, *dofIt, ADD_VALUES);
-        } else {
-          index = interiorMap->getMatIndex(i, index) + rStartInterior;
-          VecSetValue(rhsInterior, index, *dofIt, ADD_VALUES);
-        }
-      }
-    }
-  }
-
   void PetscSolverGlobalMatrix::solvePetscMatrix(SystemVector &vec,
                                                  AdaptInfo *adaptInfo)
   {
@@ -704,7 +685,8 @@ namespace AMDiS {
...
@@ -704,7 +685,8 @@ namespace AMDiS {
}
}
void
PetscSolverGlobalMatrix
::
setDofVector
(
Vec
&
petscVec
,
void
PetscSolverGlobalMatrix
::
setDofVector
(
Vec
vecInterior
,
Vec
vecCoarse
,
DOFVector
<
double
>*
vec
,
DOFVector
<
double
>*
vec
,
int
nRowVec
,
int
nRowVec
,
bool
rankOnly
)
bool
rankOnly
)
@@ -726,24 +708,38 @@ namespace AMDiS {
       // Get PETSc's mat index of the row DOF.
       int index = 0;
       if (interiorMap->isMatIndexFromGlobal())
-        index = interiorMap->getMatIndex(nRowVec, globalRowDof);
+        index = interiorMap->getMatIndex(nRowVec, globalRowDof) + rStartInterior;
       else
-        index = interiorMap->getMatIndex(nRowVec, dofIt.getDOFIndex());
+        index = interiorMap->getMatIndex(nRowVec, dofIt.getDOFIndex()) + rStartInterior;