Backofen, Rainer / amdis · Commits

Commit 537e10f4
Authored Sep 28, 2012 by Thomas Witkowski

    Moved debug function in FETI-DP code to own debugging class.

parent e0bc0d8a
8 changed files
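The pattern of the change, visible at the call sites in the diff below, is to turn member debug functions of PetscSolverFeti into static functions of a dedicated PetscSolverFetiDebug class that receives the solver as an explicit argument. A minimal sketch of that pattern (with hypothetical names Solver/SolverDebug, not the actual AMDiS declarations):

    // Before: debug-only members clutter the solver class itself.
    class Solver {
    public:
      void debugNullSpace();          // debug code mixed into solver logic
    };

    // After: a separate debugging class with static functions that take
    // the solver as an argument, so the solver header stays clean.
    class SolverDebug {
    public:
      static void debugNullSpace(Solver &solver);
    };

    // Call sites are typically guarded so release builds skip the work:
    #if (DEBUG != 0)
      SolverDebug::debugNullSpace(solver);
    #endif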
AMDiS/CMakeLists.txt
@@ -261,6 +261,8 @@ if(ENABLE_PARALLEL_DOMAIN)
  ${SOURCE_DIR}/parallel/PetscSolver.cc
  ${SOURCE_DIR}/parallel/PetscProblemStat.cc
  ${SOURCE_DIR}/parallel/PetscSolverFeti.cc
  ${SOURCE_DIR}/parallel/PetscSolverFetiDebug.cc
  ${SOURCE_DIR}/parallel/PetscSolverFetiMonitor.cc
  ${SOURCE_DIR}/parallel/PetscSolverFetiOperators.cc
  ${SOURCE_DIR}/parallel/PetscSolverFetiTimings.cc
  ${SOURCE_DIR}/parallel/PetscSolverGlobalMatrix.cc
AMDiS/src/AMDiS_fwd.h
@@ -103,6 +103,8 @@ namespace AMDiS {
  class FeSpaceDofMap;
  class MatrixNnzStructure;
  class MeshLevelData;
  class PetscSolverFeti;
  class PetscSolverFetiDebug;
#endif
  struct BoundaryObject;
AMDiS/src/parallel/PetscSolverFeti.cc
@@ -13,6 +13,8 @@
#include "AMDiS.h"
#include "MatrixVector.h"
#include "parallel/PetscSolverFeti.h"
#include "parallel/PetscSolverFetiDebug.h"
#include "parallel/PetscSolverFetiMonitor.h"
#include "parallel/PetscSolverFetiStructs.h"
#include "parallel/PetscSolverFetiOperators.h"
#include "parallel/PetscSolverFetiTimings.h"
@@ -25,32 +27,6 @@ namespace AMDiS {

  using namespace std;

  PetscErrorCode KSPMonitorFeti(KSP ksp, PetscInt n, PetscReal rnorm, void *data)
  {
    Vec Br, v, w;
    VecDuplicate(static_cast<FetiKspData*>(data)->draft, &v);
    VecDuplicate(static_cast<FetiKspData*>(data)->draft, &w);
    KSPBuildResidual(ksp, v, w, &Br);

    Vec nest0, nest1;
    VecNestGetSubVec(Br, 0, &nest0);
    VecNestGetSubVec(Br, 1, &nest1);

    PetscScalar norm, norm0, norm1;
    VecNorm(Br, NORM_2, &norm);
    VecNorm(nest0, NORM_2, &norm0);
    VecNorm(nest1, NORM_2, &norm1);

    VecDestroy(&v);
    VecDestroy(&w);

    PetscPrintf(PETSC_COMM_WORLD,
                "%3D KSP residual norm %1.12e [%1.12e %1.12e] and preconditioned norm [%1.12e]\n",
                n, norm, norm0, norm1, rnorm);

    return 0;
  }

  PetscSolverFeti::PetscSolverFeti()
    : PetscSolver(),
      schurPrimalSolver(0),
...
@@ -1162,22 +1138,9 @@ namespace AMDiS {
    Vec nullSpaceBasis;
    VecCreateNest(mpiCommGlobal, 2, PETSC_NULL, vecArray, &nullSpaceBasis);

    int writeFetiSystem = 0;
    Parameters::get("parallel->debug->write feti system", writeFetiSystem);
    if (writeFetiSystem) {
      Vec vecTmp;
      createExplicitVec(nullSpaceBasis, vecTmp);

      PetscViewer petscView;
      PetscViewerBinaryOpen(PETSC_COMM_WORLD, "nullspace.vec", FILE_MODE_WRITE, &petscView);
      VecView(vecTmp, petscView);
      PetscViewerDestroy(&petscView);
      VecDestroy(&vecTmp);

      MSG("Written FETI-DP null space basis vector!\n");
    }

#if (DEBUG != 0)
    PetscSolverFetiDebug::writeNullSpace(*this, nullSpaceBasis);
#endif

    MatNullSpace matNullSpace;
    MatNullSpaceCreate(mpiCommGlobal, PETSC_FALSE, 1, &nullSpaceBasis,
...
@@ -1186,27 +1149,6 @@ namespace AMDiS {
    KSPSetNullSpace(ksp_feti, matNullSpace);
    MatNullSpaceDestroy(&matNullSpace);

    //#if (DEBUG != 0)
    Vec vecSol;
    VecDuplicate(nullSpaceBasis, &vecSol);
    MatMult(mat_feti, nullSpaceBasis, vecSol);

    Vec vecSol0, vecSol1;
    VecNestGetSubVec(vecSol, 0, &vecSol0);
    VecNestGetSubVec(vecSol, 1, &vecSol1);

    PetscReal norm, norm0, norm1;
    VecNorm(vecSol, NORM_2, &norm);
    VecNorm(vecSol0, NORM_2, &norm0);
    VecNorm(vecSol1, NORM_2, &norm1);
    MSG("NORM: %e (%e %e)\n", norm, norm0, norm1);

    VecDestroy(&vecSol);
    //#endif

    // AMDiS::finalize();
    // exit(0);

    VecDestroy(&ktest0);
    VecDestroy(&ktest1);
    VecDestroy(&ktest2);
...
@@ -1772,7 +1714,10 @@ namespace AMDiS {
  {
    FUNCNAME("PetscSolverFeti::solveReducedFetiMatrix()");

    debugNullSpace(vec);
#if (DEBUG != 0)
    if (stokesMode)
      PetscSolverFetiDebug::debugNullSpace(*this, vec);
#endif

    // === Some temporary vectors. ===
...
@@ -1883,7 +1828,7 @@ namespace AMDiS {
    }

    // === Optionally run some debug procedures on the FETI-DP system. ===
    debugFeti(vecRhs);
    PetscSolverFetiDebug::debugFeti(*this, vecRhs);

    // === Solve with FETI-DP operator. ===
    KSPSolve(ksp_feti, vecRhs, vecSol);
...
@@ -2067,447 +2012,4 @@ namespace AMDiS {
    }
  }

  void PetscSolverFeti::createInteriorMat(Mat &mat)
  {
    FUNCNAME("PetscSolverFeti::createInteriorMat()");

    Mat &localMat = subdomain->getMatInterior();

    int nRow, nCol;
    MatGetSize(localMat, &nRow, &nCol);
    TEST_EXIT_DBG(nRow == nCol)("Should not happen!\n");

    int *dnnz = new int[nRow];
    for (int i = 0; i < nRow; i++) {
      MatGetRow(localMat, i, &nCol, PETSC_NULL, PETSC_NULL);
      dnnz[i] = nCol;
      MatRestoreRow(localMat, i, &nCol, PETSC_NULL, PETSC_NULL);
    }

    MatCreateAIJ(PETSC_COMM_WORLD, nRow, nRow,
                 nGlobalOverallInterior, nGlobalOverallInterior,
                 0, dnnz, 0, PETSC_NULL, &mat);

    PetscInt rStart, rEnd;
    MatGetOwnershipRange(mat, &rStart, &rEnd);

    for (int i = 0; i < nRow; i++) {
      const PetscInt *cols;
      const PetscScalar *vals;
      MatGetRow(localMat, i, &nCol, &cols, &vals);
      for (int j = 0; j < nCol; j++)
        MatSetValue(mat, i + rStart, cols[j] + rStart, vals[j], INSERT_VALUES);
    }

    MatAssemblyBegin(mat, MAT_FINAL_ASSEMBLY);
    MatAssemblyEnd(mat, MAT_FINAL_ASSEMBLY);

    delete [] dnnz;
  }
  void PetscSolverFeti::createNestedFetiMat(Mat &mat)
  {
    FUNCNAME("PetscSolverFeti::createNestedFetiMat()");

    if (stokesMode) {
      vector<Mat> nestMat;
      nestMat.resize(16);

      createInteriorMat(nestMat[0]);
      nestMat[1] = subdomain->getMatInteriorCoarse(0);
      nestMat[2] = subdomain->getMatInteriorCoarse(1);
      MatTranspose(mat_lagrange, MAT_INITIAL_MATRIX, &nestMat[3]);
      nestMat[4] = subdomain->getMatCoarseInterior(0);
      nestMat[5] = subdomain->getMatCoarse(0, 0);
      nestMat[6] = subdomain->getMatCoarse(0, 1);
      nestMat[7] = PETSC_NULL;
      nestMat[8] = subdomain->getMatCoarseInterior(1);
      nestMat[9] = subdomain->getMatCoarse(1, 0);
      nestMat[10] = PETSC_NULL;
      nestMat[11] = PETSC_NULL;
      nestMat[12] = mat_lagrange;
      nestMat[13] = PETSC_NULL;
      nestMat[14] = PETSC_NULL;
      nestMat[15] = PETSC_NULL;

      Mat nestFetiMat;
      MatCreateNest(mpiCommGlobal, 4, PETSC_NULL, 4, PETSC_NULL,
                    &(nestMat[0]), &mat);
    } else {
      vector<Mat> nestMat;
      nestMat.resize(9);

      createInteriorMat(nestMat[0]);
      nestMat[1] = subdomain->getMatInteriorCoarse(0);
      MatTranspose(mat_lagrange, MAT_INITIAL_MATRIX, &nestMat[2]);
      nestMat[3] = subdomain->getMatCoarseInterior(0);
      nestMat[4] = subdomain->getMatCoarse(0, 0);
      nestMat[5] = PETSC_NULL;
      nestMat[6] = mat_lagrange;
      nestMat[7] = PETSC_NULL;
      nestMat[8] = PETSC_NULL;

      Mat nestFetiMat;
      MatCreateNest(mpiCommGlobal, 3, PETSC_NULL, 3, PETSC_NULL,
                    &(nestMat[0]), &mat);
    }
  }
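For orientation: the 16 nest blocks in the stokesMode branch form a 4x4 block operator, the 9 blocks in the else branch a 3x3 one. Assuming coarse space 0 is the primal space (Pi) and coarse space 1 the interface space (Gamma), with B = mat_lagrange, the stokesMode layout read off from the indices above is, in LaTeX notation,

    \begin{pmatrix}
      A_{II}       & A_{I\Pi}      & A_{I\Gamma}   & B^{T} \\
      A_{\Pi I}    & A_{\Pi\Pi}    & A_{\Pi\Gamma} & 0     \\
      A_{\Gamma I} & A_{\Gamma\Pi} & 0             & 0     \\
      B            & 0             & 0             & 0
    \end{pmatrix}

where row k collects nestMat[4k] through nestMat[4k+3].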
  void PetscSolverFeti::debugNullSpace(SystemVector &vec)
  {
    FUNCNAME("PetscSolverFeti::debugNullSpace()");

    TEST_EXIT(stokesMode)("This function works only for the stokes mode!\n");

    Mat fetiMat;
    createNestedFetiMat(fetiMat);

    Vec vecArray[4];
    Vec ktest0, ktest1, ktest2;
    localDofMap.createLocalVec(ktest0);
    localDofMap.createLocalVec(ktest1);
    localDofMap.createVec(ktest2, nGlobalOverallInterior);

    DofMap &m = localDofMap[pressureFeSpace].getMap();
    for (DofMap::iterator it = m.begin(); it != m.end(); ++it) {
      if (meshDistributor->getDofMap()[pressureFeSpace].isRankDof(it->first)) {
        int index = localDofMap.getLocalMatIndex(pressureComponent, it->first);
        VecSetValue(ktest0, index, 1.0, INSERT_VALUES);
      }
    }
    VecAssemblyBegin(ktest0);
    VecAssemblyEnd(ktest0);
    MatMult(subdomain->getMatInterior(), ktest0, ktest1);

    PetscScalar *valarray;
    VecGetArray(ktest0, &valarray);
    VecCreateMPIWithArray(PETSC_COMM_WORLD, 1, localDofMap.getRankDofs(),
                          nGlobalOverallInterior, valarray, &vecArray[0]);

    Vec ktest3;
    VecGetArray(ktest1, &valarray);
    VecCreateMPIWithArray(PETSC_COMM_WORLD, 1, localDofMap.getRankDofs(),
                          nGlobalOverallInterior, valarray, &ktest3);

    primalDofMap.createVec(vecArray[1]);
    VecSet(vecArray[1], 0.0);
    interfaceDofMap.createVec(vecArray[2]);
    VecSet(vecArray[2], 1.0);
    lagrangeMap.createVec(vecArray[3]);

    MatMult(subdomain->getMatInteriorCoarse(1), vecArray[2], ktest2);
    VecAXPY(ktest2, 1.0, ktest3);
    MatMult(mat_lagrange_scaled, ktest2, vecArray[3]);
    VecScale(vecArray[3], -1.0);

    Vec nullSpaceBasis;
    VecCreateNest(mpiCommGlobal, 4, PETSC_NULL, vecArray, &nullSpaceBasis);

    Vec vecSol;
    VecDuplicate(nullSpaceBasis, &vecSol);
    MatMult(fetiMat, nullSpaceBasis, vecSol);

    PetscReal norm;
    VecNorm(vecSol, NORM_2, &norm);
    MSG("Null space norm: %e\n", norm);

    Vec vec0, vec1, vec2, vec3;
    VecNestGetSubVec(vecSol, 0, &vec0);
    VecNestGetSubVec(vecSol, 1, &vec1);
    VecNestGetSubVec(vecSol, 2, &vec2);
    VecNestGetSubVec(vecSol, 3, &vec3);

    PetscReal norm0, norm1, norm2, norm3;
    VecNorm(vec0, NORM_2, &norm0);
    VecNorm(vec1, NORM_2, &norm1);
    VecNorm(vec2, NORM_2, &norm2);
    VecNorm(vec3, NORM_2, &norm3);
    MSG("Splitted null space norm: %e %e %e %e\n", norm0, norm1, norm2, norm3);

    recoverSolution(vec0, vec1, vec);
    recoverInterfaceSolution(vec2, vec);
    VtkWriter::writeFile(&vec, "nullspace.vtu");

    MatDestroy(&fetiMat);
    VecDestroy(&nullSpaceBasis);
    VecDestroy(&vecSol);
  }
  void PetscSolverFeti::createExplicitFetiMat(Mat fetiMat, Mat &explicitMat, int &nnzCounter)
  {
    FUNCNAME("PetscSolverFeti::createExplicitFetiMat()");

    nnzCounter = 0;

    if (stokesMode == false) {
      int nRankRows = lagrangeMap.getRankDofs();
      int nOverallRows = lagrangeMap.getOverallDofs();
      MatCreateAIJ(mpiCommGlobal, nRankRows, nRankRows, nOverallRows, nOverallRows,
                   nOverallRows, PETSC_NULL, nOverallRows, PETSC_NULL, &explicitMat);

      Vec unitVector, resultVector;
      lagrangeMap.createVec(unitVector);
      lagrangeMap.createVec(resultVector);

      PetscInt low, high;
      VecGetOwnershipRange(unitVector, &low, &high);
      int nLocal = high - low;
      int nnzCounter = 0;

      for (int i = 0; i < lagrangeMap.getOverallDofs(); i++) {
        VecSet(unitVector, 0.0);
        if (i >= low && i < high)
          VecSetValue(unitVector, i, 1.0, INSERT_VALUES);
        VecAssemblyBegin(unitVector);
        VecAssemblyEnd(unitVector);

        MatMult(fetiMat, unitVector, resultVector);

        if (fetiPreconditioner != FETI_NONE) {
          PCApply(precon_feti, resultVector, unitVector);
          VecCopy(unitVector, resultVector);
        }

        PetscScalar *vals;
        VecGetArray(resultVector, &vals);
        for (int j = 0; j < nLocal; j++) {
          if (fabs(vals[j]) > 1e-30) {
            MatSetValue(explicitMat, low + j, i, vals[j], INSERT_VALUES);
            nnzCounter++;
          }
        }
        VecRestoreArray(resultVector, &vals);
      }

      VecDestroy(&unitVector);
      VecDestroy(&resultVector);
    } else {
      int nRankRows = interfaceDofMap.getRankDofs() + lagrangeMap.getRankDofs();
      int nOverallRows = interfaceDofMap.getOverallDofs() + lagrangeMap.getOverallDofs();

      MatCreateAIJ(mpiCommGlobal, nRankRows, nRankRows, nOverallRows, nOverallRows,
                   nOverallRows, PETSC_NULL, nOverallRows, PETSC_NULL, &explicitMat);

      Vec unitVector[2], resultVector[2];
      interfaceDofMap.createVec(unitVector[0]);
      interfaceDofMap.createVec(resultVector[0]);
      lagrangeMap.createVec(unitVector[1]);
      lagrangeMap.createVec(resultVector[1]);

      Vec unitNestVec, resultNestVec;
      VecCreateNest(mpiCommGlobal, 2, PETSC_NULL, unitVector, &unitNestVec);
      VecCreateNest(mpiCommGlobal, 2, PETSC_NULL, resultVector, &resultNestVec);

      PetscInt lowInterface, highInterface;
      VecGetOwnershipRange(unitVector[0], &lowInterface, &highInterface);
      int nLocalInterface = highInterface - lowInterface;

      PetscInt lowLagrange, highLagrange;
      VecGetOwnershipRange(unitVector[1], &lowLagrange, &highLagrange);
      int nLocalLagrange = highLagrange - lowLagrange;

      VecSet(unitVector[1], 0.0);
      for (int i = 0; i < interfaceDofMap.getOverallDofs(); i++) {
        VecSet(unitVector[0], 0.0);
        if (i >= lowInterface && i < highInterface)
          VecSetValue(unitVector[0], i, 1.0, INSERT_VALUES);
        VecAssemblyBegin(unitVector[0]);
        VecAssemblyEnd(unitVector[0]);
        VecAssemblyBegin(unitNestVec);
        VecAssemblyEnd(unitNestVec);

        MatMult(fetiMat, unitNestVec, resultNestVec);

        PetscScalar *vals;
        VecGetArray(resultVector[0], &vals);
        for (int j = 0; j < nLocalInterface; j++) {
          if (fabs(vals[j]) > 1e-30) {
            MatSetValue(explicitMat, lowInterface + j, i, vals[j], INSERT_VALUES);
            nnzCounter++;
          }
        }
        VecRestoreArray(resultVector[0], &vals);

        VecGetArray(resultVector[1], &vals);
        for (int j = 0; j < nLocalLagrange; j++) {
          if (fabs(vals[j]) > 1e-30) {
            MatSetValue(explicitMat,
                        interfaceDofMap.getOverallDofs() + lowLagrange + j, i,
                        vals[j], INSERT_VALUES);
            nnzCounter++;
          }
        }
        VecRestoreArray(resultVector[1], &vals);
      }

      VecSet(unitVector[0], 0.0);
      for (int i = 0; i < lagrangeMap.getOverallDofs(); i++) {
        VecSet(unitVector[1], 0.0);
        if (i >= lowLagrange && i < highLagrange)
          VecSetValue(unitVector[1], i, 1.0, INSERT_VALUES);
        VecAssemblyBegin(unitVector[1]);
        VecAssemblyEnd(unitVector[1]);
        VecAssemblyBegin(unitNestVec);
        VecAssemblyEnd(unitNestVec);

        MatMult(fetiMat, unitNestVec, resultNestVec);

        PetscScalar *vals;
        VecGetArray(resultVector[0], &vals);
        for (int j = 0; j < nLocalInterface; j++) {
          if (fabs(vals[j]) > 1e-30) {
            MatSetValue(explicitMat, lowInterface + j,
                        interfaceDofMap.getOverallDofs() + i,
                        vals[j], INSERT_VALUES);
            nnzCounter++;
          }
        }
        VecRestoreArray(resultVector[0], &vals);

        VecGetArray(resultVector[1], &vals);
        for (int j = 0; j < nLocalLagrange; j++) {
          if (fabs(vals[j]) > 1e-30) {
            MatSetValue(explicitMat,
                        interfaceDofMap.getOverallDofs() + lowLagrange + j,
                        interfaceDofMap.getOverallDofs() + i,
                        vals[j], INSERT_VALUES);
            nnzCounter++;
          }
        }
        VecRestoreArray(resultVector[1], &vals);
      }

      VecDestroy(&unitVector[0]);
      VecDestroy(&unitVector[1]);
      VecDestroy(&resultVector[0]);
      VecDestroy(&resultVector[1]);
      VecDestroy(&unitNestVec);
      VecDestroy(&resultNestVec);
    }

    MatAssemblyBegin(explicitMat, MAT_FINAL_ASSEMBLY);
    MatAssemblyEnd(explicitMat, MAT_FINAL_ASSEMBLY);
    mpi::globalAdd(nnzCounter);
  }
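The function above materializes the otherwise implicit (nested, effectively matrix-free) FETI-DP operator by probing: column i of the explicit matrix is the operator applied to the i-th unit vector. A self-contained sketch of that general probing technique for any square PETSc Mat, using current PETSc API (MatCreateVecs, VecGetArrayRead) rather than the 2012-era calls above, and assuming matching row/column layouts:

    #include <petscmat.h>

    // Build an explicit AIJ copy E of a (possibly matrix-free) square
    // operator A by applying A to every unit vector e_i; entries with
    // magnitude below tol are dropped.
    PetscErrorCode makeExplicit(Mat A, PetscReal tol, Mat *E)
    {
      PetscInt n, low, high;
      Vec e, r;
      MatGetSize(A, &n, NULL);
      MatCreateVecs(A, &e, &r);
      VecGetOwnershipRange(e, &low, &high);
      // Crude preallocation (full rows), mirroring the code above.
      MatCreateAIJ(PETSC_COMM_WORLD, high - low, high - low, n, n,
                   n, NULL, n, NULL, E);
      for (PetscInt i = 0; i < n; i++) {
        VecSet(e, 0.0);
        if (i >= low && i < high)
          VecSetValue(e, i, 1.0, INSERT_VALUES);
        VecAssemblyBegin(e);
        VecAssemblyEnd(e);
        MatMult(A, e, r);                 // column i of E is A * e_i
        const PetscScalar *vals;
        VecGetArrayRead(r, &vals);
        for (PetscInt j = 0; j < high - low; j++)
          if (PetscAbsScalar(vals[j]) > tol)
            MatSetValue(*E, low + j, i, vals[j], INSERT_VALUES);
        VecRestoreArrayRead(r, &vals);
      }
      MatAssemblyBegin(*E, MAT_FINAL_ASSEMBLY);
      MatAssemblyEnd(*E, MAT_FINAL_ASSEMBLY);
      VecDestroy(&e);
      VecDestroy(&r);
      return 0;
    }

This costs one operator application per global column, so it is strictly a debugging device for small systems, which is exactly how the removed code gates it behind a parameter.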
  void PetscSolverFeti::createExplicitVec(Vec nestedVec, Vec &explicitVec)
  {
    FUNCNAME("PetscSolverFeti::createExplicitVec()");

    int nNested = 0;
    VecNestGetSize(nestedVec, &nNested);
    TEST_EXIT_DBG(nNested == 2)
      ("Only supported for 2-nest vectors, not for %d-nest vectors!\n", nNested);

    Vec v0, v1;
    VecNestGetSubVec(nestedVec, 0, &v0);
    VecNestGetSubVec(nestedVec, 1, &v1);

    int s0, s1;
    VecGetSize(v0, &s0);
    VecGetSize(v1, &s1);

    int l0, l1;
    VecGetLocalSize(v0, &l0);
    VecGetLocalSize(v1, &l1);

    int rStart0, rStart1;
    VecGetOwnershipRange(v0, &rStart0, PETSC_NULL);
    VecGetOwnershipRange(v1, &rStart1, PETSC_NULL);

    VecCreateMPI(mpiCommGlobal, l0 + l1, s0 + s1, &explicitVec);

    double *val;
    VecGetArray(v0, &val);
    for (int i = 0; i < l0; i++)
      VecSetValue(explicitVec, rStart0 + i, val[i], INSERT_VALUES);
    VecRestoreArray(v0, &val);

    VecGetArray(v1, &val);
    for (int i = 0; i < l1; i++)
      VecSetValue(explicitVec, s0 + rStart1 + i, val[i], INSERT_VALUES);
    VecRestoreArray(v1, &val);

    VecAssemblyBegin(explicitVec);
    VecAssemblyEnd(explicitVec);
  }
  void PetscSolverFeti::debugFeti(Vec dbgRhsVec)
  {
    FUNCNAME("PetscSolverFeti:::debugFeti()");

    int writeFetiSystem = 0;
    Parameters::get("parallel->debug->write feti system", writeFetiSystem);
    if (!writeFetiSystem)
      return;

    MSG("Start creating explicit FETI-DP matrix!\n");

    Mat fetiMat;
    int nnzCounter = 0;
    createExplicitFetiMat(mat_feti, fetiMat, nnzCounter);

    PetscViewer petscView;
    PetscViewerBinaryOpen(PETSC_COMM_WORLD, "feti.mat", FILE_MODE_WRITE, &petscView);
    MatView(fetiMat, petscView);
    PetscViewerDestroy(&petscView);

    bool a = testMatrixSymmetric(fetiMat, true);
    MSG("SYMMETRIC TEST: %d\n", a);

    int r, c;
    MatGetSize(fetiMat, &r, &c);
    MSG("FETI-DP matrix written: %d x %d mat with %d nnz\n", r, c, nnzCounter);

    MatDestroy(&fetiMat);

    Vec rhsVec;
    createExplicitVec(dbgRhsVec, rhsVec);

    PetscViewerBinaryOpen(PETSC_COMM_WORLD, "feti_rhs.vec", FILE_MODE_WRITE, &petscView);
    VecView(rhsVec, petscView);
    PetscViewerDestroy(&petscView);
    VecDestroy(&rhsVec);
  }
}
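Since debugFeti() writes "feti.mat" and "feti_rhs.vec" with PETSc's binary viewer, the dumped system can be read back in a small standalone program for offline inspection (symmetry checks, conditioning, direct solves). A sketch using the standard PETSc load calls, assuming only the file names used above:

    #include <petscmat.h>

    int main(int argc, char **argv)
    {
      Mat A;
      Vec b;
      PetscViewer viewer;
      PetscInitialize(&argc, &argv, NULL, NULL);

      // Read the explicit FETI-DP matrix written by debugFeti().
      PetscViewerBinaryOpen(PETSC_COMM_WORLD, "feti.mat", FILE_MODE_READ, &viewer);
      MatCreate(PETSC_COMM_WORLD, &A);
      MatLoad(A, viewer);
      PetscViewerDestroy(&viewer);

      // Read the matching right-hand side.
      PetscViewerBinaryOpen(PETSC_COMM_WORLD, "feti_rhs.vec", FILE_MODE_READ, &viewer);
      VecCreate(PETSC_COMM_WORLD, &b);
      VecLoad(b, viewer);
      PetscViewerDestroy(&viewer);

      // ... inspect A and b here, e.g. MatIsSymmetric, norms, a KSP solve ...

      VecDestroy(&b);
      MatDestroy(&A);
      PetscFinalize();
      return 0;
    }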
AMDiS/src/parallel/PetscSolverFeti.h
@@ -21,6 +21,7 @@
/** \file PetscSolverFeti.h */

#include <map>
#include "AMDiS_fwd.h"
#include "parallel/MpiHelper.h"
#include "parallel/PetscSolver.h"
#include "parallel/PetscSolverFetiStructs.h"
@@ -223,24 +224,6 @@ namespace AMDiS {
    /// order of: 1/h^2
    void createH2vec(DOFVector<double> &vec);

    /// For debugging only!
    void createInteriorMat(Mat &mat);