iwr / amdis · Commits · 11c8bbc9

Commit 11c8bbc9, authored Sep 25, 2012 by Thomas Witkowski

    Work on Stokes FETI-DP

Parent: 297ae0f3
Changes: 6 files
AMDiS/src/parallel/MeshDistributor.h

...
@@ -256,12 +256,47 @@ namespace AMDiS {
          vec[it.getDofIndex()] =
            stdMpi.getRecvData(it.getRank())[it.getDofCounter()];
    }
/// Works in the same way as the function above defined for DOFVectors. Due
/// to performance, this function does not call \ref synchVector for each
/// DOFVector, but instead sends all values of all DOFVectors all at once.
    void synchVector(SystemVector &vec, int level = 0);
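A typical call site, as a minimal sketch (editor's sketch, not part of this commit; a MeshDistributor instance `meshDistributor` and an assembled SystemVector `solution` are assumed):

    // Synchronize all components of a SystemVector with a single
    // communication step; owning ranks overwrite the boundary values
    // held by non-owning ranks.
    meshDistributor->synchVector(solution);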
    /// Works quite similarly to the function \ref synchVector, but instead the
    /// values of subdomain vectors are added along the boundaries.
    template<typename T>
    void synchAddVector(DOFVector<T> &vec)
    {
      StdMpi<vector<T> > stdMpi(mpiComm);

      const FiniteElemSpace *fe = vec.getFeSpace();

      for (DofComm::Iterator it(dofComm.getRecvDofs(), fe);
           !it.end(); it.nextRank()) {
        vector<T> dofs;
        dofs.reserve(it.getDofs().size());

        for (; !it.endDofIter(); it.nextDof())
          dofs.push_back(vec[it.getDofIndex()]);

        stdMpi.send(it.getRank(), dofs);
      }

      for (DofComm::Iterator it(dofComm.getSendDofs());
           !it.end(); it.nextRank())
        stdMpi.recv(it.getRank());

      stdMpi.startCommunication();

      for (DofComm::Iterator it(dofComm.getSendDofs(), fe);
           !it.end(); it.nextRank())
        for (; !it.endDofIter(); it.nextDof())
          vec[it.getDofIndex()] +=
            stdMpi.getRecvData(it.getRank())[it.getDofCounter()];

      synchVector(vec);
    }
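A minimal usage sketch for the new accumulation variant (editor's sketch, not part of this commit; `feSpace` and the distributor instance are assumed to exist):

    // Each rank adds its local contributions; synchAddVector() then sums
    // the entries of boundary DOFs over all ranks and finally calls
    // synchVector() so that every rank sees the same result.
    DOFVector<double> weights(feSpace, "weights");
    weights.set(0.0);
    // ... accumulate element-local contributions into weights ...
    meshDistributor->synchAddVector(weights);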
/// In 3D, a subdomain may not be a valid AMDiS mesh if it contains two
/// parts which are only connected by an edge. In this case, the standard
/// refinement algorithm does not work correctly, as two elements connected
...
AMDiS/src/parallel/PetscSolver.cc

...
@@ -96,6 +96,8 @@ namespace AMDiS {
    Mat matTrans;
    MatTranspose(mat, MAT_INITIAL_MATRIX, &matTrans);

+   bool isSym = true;

    if (advancedTest) {
      int rowStart, rowEnd;
      MatGetOwnershipRange(mat, &rowStart, &rowEnd);
...
@@ -105,7 +107,6 @@ namespace AMDiS {
      const PetscScalar *vals, *valsTrans;

      for (int i = rowStart; i < rowEnd; i++) {
-       bool isSym = true;
        MatGetRow(mat, i, &nCols, &cols, &vals);
        MatGetRow(matTrans, i, &nColsTrans, &colsTrans, &valsTrans);
...
@@ -140,9 +141,6 @@ namespace AMDiS {
        MatRestoreRow(mat, i, &nCols, &cols, &vals);
        MatRestoreRow(matTrans, i, &nColsTrans, &colsTrans, &valsTrans);

-       if (!isSym)
-         return false;
      }
    }
...
@@ -154,6 +152,6 @@ namespace AMDiS {
    MSG("Matrix norm test: %e %e\n", norm1, norm2);

-   return (norm1 < 1e-10 && norm2 < 1e-10);
+   return (isSym && norm1 < 1e-10 && norm2 < 1e-10);
  }
}
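For comparison, the same symmetry check can be expressed with PETSc's built-in predicate instead of the hand-rolled row comparison (editor's sketch, not part of this commit; it assumes the PETSc build provides MatIsTranspose for the matrix type in use):

    // Test mat against its own transpose within a tolerance.
    PetscBool isSymmetric;
    MatIsTranspose(mat, mat, 1e-10, &isSymmetric);
    if (isSymmetric == PETSC_TRUE) {
      // mat is symmetric up to 1e-10
    }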
AMDiS/src/parallel/PetscSolverFeti.cc

...
@@ -112,8 +112,6 @@ namespace AMDiS {
    TEST_EXIT(pressureComponent >= 0)
      ("FETI-DP in Stokes mode, no pressure component defined!\n");

-   MSG("========= !!!! SUPER WARNING !!! ========\n");
-   MSG("Use make use of stokes mode which is work in progress! We guarantee for nothing and even less than this!\n");

    pressureFeSpace = feSpaces[pressureComponent];
  }
...
@@ -152,6 +150,11 @@ namespace AMDiS {
  {
    FUNCNAME("PetscSolverFeti::createDirichletData()");

+   bool removeDirichletRows = false;
+   Parameters::get("parallel->feti->remove dirichlet", removeDirichletRows);
+   if (!removeDirichletRows)
+     return;

    int nComponents = mat.getSize();
    for (int i = 0; i < nComponents; i++) {
      DOFMatrix *dofMat = mat[i][i];
...
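The new code path is off by default and is read via Parameters::get, so it can presumably be enabled from the init file in the usual AMDiS key/value syntax:

    parallel->feti->remove dirichlet: 1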
@@ -978,7 +981,6 @@ namespace AMDiS {
    if (stokesMode) {
      DOFVector<double> tmpvec(pressureFeSpace, "tmpvec");
      createH2vec(tmpvec);
      VtkWriter::writeFile(&tmpvec, "tmpvec.vtu");

      interfaceDofMap.createVec(fetiInterfaceLumpedPreconData.h2vec);

      DofMap &m = interfaceDofMap[pressureFeSpace].getMap();
...
@@ -992,15 +994,12 @@ namespace AMDiS {
      VecAssemblyBegin(fetiInterfaceLumpedPreconData.h2vec);
      VecAssemblyEnd(fetiInterfaceLumpedPreconData.h2vec);

      localDofMap.createVec(fetiInterfaceLumpedPreconData.tmp_vec_b1);
      primalDofMap.createVec(fetiInterfaceLumpedPreconData.tmp_primal);
      fetiInterfaceLumpedPreconData.subSolver = subdomain;
    }

-   KSPGetPC(ksp_feti, &precon_feti);
-   PCSetType(precon_feti, PCSHELL);

    if (stokesMode) {
      KSPCreate(PETSC_COMM_WORLD, &fetiInterfaceLumpedPreconData.ksp_primal);
      KSPSetOperators(fetiInterfaceLumpedPreconData.ksp_primal,
                      subdomain->getMatCoarse(0, 0),
...
@@ -1008,7 +1007,12 @@ namespace AMDiS {
                      SAME_NONZERO_PATTERN);
      KSPSetOptionsPrefix(fetiInterfaceLumpedPreconData.ksp_primal, "primal_");
      KSPSetFromOptions(fetiInterfaceLumpedPreconData.ksp_primal);
    }

+   KSPGetPC(ksp_feti, &precon_feti);
+   PCSetType(precon_feti, PCSHELL);

    if (stokesMode) {
      PCShellSetContext(precon_feti,
                        static_cast<void*>(&fetiInterfaceLumpedPreconData));
      PCShellSetApply(precon_feti, petscApplyFetiInterfaceLumpedPrecon);
    } else {
...
@@ -1129,6 +1133,23 @@ namespace AMDiS {
    Vec nullSpaceBasis;
    VecCreateNest(mpiCommGlobal, 2, PETSC_NULL, vecArray, &nullSpaceBasis);

+   int writeFetiSystem = 0;
+   Parameters::get("parallel->debug->write feti system", writeFetiSystem);
+   if (writeFetiSystem) {
+     Vec vecTmp;
+     createExplicitVec(nullSpaceBasis, vecTmp);
+
+     PetscViewer petscView;
+     PetscViewerBinaryOpen(PETSC_COMM_WORLD, "nullspace.vec",
+                           FILE_MODE_WRITE, &petscView);
+     VecView(vecTmp, petscView);
+     PetscViewerDestroy(&petscView);
+     VecDestroy(&vecTmp);
+
+     MSG("Written FETI-DP null space basis vector!\n");
+   }

    MatNullSpace matNullSpace;
    MatNullSpaceCreate(mpiCommGlobal, PETSC_FALSE, 1, &nullSpaceBasis,
                       &matNullSpace);
...
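The dumped basis vector can later be loaded back for offline inspection (editor's sketch, not part of this commit; it assumes a PETSc version with the VecLoad(Vec, PetscViewer) calling convention introduced in 3.2):

    Vec v;
    PetscViewer viewer;
    PetscViewerBinaryOpen(PETSC_COMM_WORLD, "nullspace.vec",
                          FILE_MODE_READ, &viewer);
    VecCreate(PETSC_COMM_WORLD, &v);
    VecLoad(v, viewer);                      // read the dumped vector
    PetscViewerDestroy(&viewer);
    VecView(v, PETSC_VIEWER_STDOUT_WORLD);   // print for inspection
    VecDestroy(&v);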
@@ -1264,81 +1285,6 @@ namespace AMDiS {
      MatView(mat_schur_primal, petscView);
      PetscViewerDestroy(&petscView);
    }

    int writeFetiMatrix = 0;
    Parameters::get("parallel->debug->write feti matrix", writeFetiMatrix);

    if (writeFetiMatrix) {
      MSG("Start creating explicit FETI-DP matrix!\n");

      Vec unitVector;
      Vec resultVector;
      Mat fetiMat;
      MatCreateAIJ(mpiCommGlobal,
                   lagrangeMap.getRankDofs(), lagrangeMap.getRankDofs(),
                   lagrangeMap.getOverallDofs(), lagrangeMap.getOverallDofs(),
                   lagrangeMap.getOverallDofs(), PETSC_NULL,
                   lagrangeMap.getOverallDofs(), PETSC_NULL,
                   &fetiMat);

      lagrangeMap.createVec(unitVector);
      lagrangeMap.createVec(resultVector);

      PetscInt low, high;
      VecGetOwnershipRange(unitVector, &low, &high);
      int nLocal = high - low;
      int nnzCounter = 0;

      for (int i = 0; i < lagrangeMap.getOverallDofs(); i++) {
        VecSet(unitVector, 0.0);
        if (i >= low && i < high)
          VecSetValue(unitVector, i, 1.0, INSERT_VALUES);
        VecAssemblyBegin(unitVector);
        VecAssemblyEnd(unitVector);

        MatMult(mat_feti, unitVector, resultVector);

        if (fetiPreconditioner != FETI_NONE) {
          PCApply(precon_feti, resultVector, unitVector);
          VecCopy(unitVector, resultVector);
        }

        PetscScalar *vals;
        VecGetArray(resultVector, &vals);
        for (int j = 0; j < nLocal; j++)
          if (fabs(vals[j]) > 1e-30) {
            MatSetValue(fetiMat, low + j, i, vals[j], INSERT_VALUES);
            nnzCounter++;
          }
        VecRestoreArray(resultVector, &vals);
      }

      MatAssemblyBegin(fetiMat, MAT_FINAL_ASSEMBLY);
      MatAssemblyEnd(fetiMat, MAT_FINAL_ASSEMBLY);

      VecDestroy(&unitVector);
      VecDestroy(&resultVector);

      mpi::globalAdd(nnzCounter);

      PetscViewer petscView;
      PetscViewerBinaryOpen(PETSC_COMM_WORLD, "feti.mat",
                            FILE_MODE_WRITE, &petscView);
      MatView(fetiMat, petscView);
      PetscViewerDestroy(&petscView);
      MatDestroy(&fetiMat);

      MSG("FETI-DP matrix written: %d x %d mat with %d nnz\n",
          lagrangeMap.getOverallDofs(), lagrangeMap.getOverallDofs(),
          nnzCounter);
    }
  }
...
@@ -1525,21 +1471,12 @@ namespace AMDiS {
    // === Create matrices for the FETI-DP method. ===

-   if (printTimings) {
+   if (printTimings)
      MPI::COMM_WORLD.Barrier();
-   }

    double wtime = MPI::Wtime();
    subdomain->fillPetscMatrix(mat);

-   // MSG("MATRIX IS SYM: %d\n", testMatrixSymmetric(subdomain->getMatInterior(), true));
-   // if (MPI::COMM_WORLD.Get_rank() == 0)
-   //   MatView(subdomain->getMatInterior(), PETSC_VIEWER_STDOUT_SELF);
-   // AMDiS::finalize();
-   // exit(0);

    if (printTimings) {
      MPI::COMM_WORLD.Barrier();
      MSG("FETI-DP timing 02: %.5f seconds (creation of interior matrices)\n",
...
@@ -1558,23 +1495,18 @@ namespace AMDiS {
    // === Create matrices for FETI-DP preconditioner. ===
    createPreconditionerMatrix(mat);

-   // MSG("DUALS MATRIX IS SYM: %d\n", testMatrixSymmetric(mat_duals_duals, true));
-   // AMDiS::finalize();
-   // exit(0);

    // === Create and fill PETSc matrix for Lagrange constraints. ===
    createMatLagrange(feSpaces);

-   // === If required, run debug tests. ===
-   dbgMatrix();

    // === Create PETSc solver for the Schur complement on primal variables. ===
    createSchurPrimalKsp(feSpaces);

    // === Create PETSc solver for the FETI-DP operator. ===
    createFetiKsp(feSpaces);

+   // === If required, run debug tests. ===
+   dbgMatrix();
  }
...
@@ -1920,6 +1852,8 @@ namespace AMDiS {
      FetiTimings::reset();
    }

+   // === Optionally run some debug procedures on the FETI-DP system. ===
+   debugFeti(vecRhs);

    // === Solve with FETI-DP operator. ===
    KSPSolve(ksp_feti, vecRhs, vecSol);
...
@@ -2055,27 +1989,18 @@ namespace AMDiS {
    vec.set(0.0);

    DOFVector<int> cvec(vec.getFeSpace(), "cvec");
    cvec.set(0.0);

    Mesh *mesh = vec.getFeSpace()->getMesh();
    TEST_EXIT(mesh->getDim() == 2)("This works only in 2D!\n");

    int n0 = vec.getFeSpace()->getAdmin()->getNumberOfPreDofs(VERTEX);

    TraverseStack stack;
-   ElInfo *elInfo = stack.traverseFirst(mesh, -1,
-       Mesh::CALL_LEAF_EL | Mesh::FILL_COORDS | Mesh::FILL_DET);
+   ElInfo *elInfo = stack.traverseFirst(mesh, -1,
+       Mesh::CALL_LEAF_EL | Mesh::FILL_COORDS);
    while (elInfo) {
      Element *el = elInfo->getElement();

      for (int i = 0; i < 3; i++) {
        DegreeOfFreedom dof = el->getDof(i, n0);
        vec[dof] += elInfo->getDet() * 0.5;
        cvec[dof] += 1;
      }

      std::vector<double> length(3, 0.0);
      double sumLength = 0.0;

      /*
      for (int i = 0; i < 3; i++) {
        int idx0 = el->getVertexOfEdge(i, 0);
        int idx1 = el->getVertexOfEdge(i, 1);
...
@@ -2085,22 +2010,27 @@ namespace AMDiS {
        WorldVector<double> c0 = elInfo->getCoord(idx0);
        WorldVector<double> c1 = elInfo->getCoord(idx1);
        c0 -= c1;
-       double length = norm(c0);
-       vec[dof0] += length;
-       vec[dof1] += length;
-       cvec[dof0] += 1;
-       cvec[dof1] += 1;
+       length[i] = norm(c0);
+       sumLength += length[i];
      }

      sumLength *= 0.5;
      double area = sqrt(sumLength * (sumLength - length[0])
                         * (sumLength - length[1]) * (sumLength - length[2]));
      for (int i = 0; i < 3; i++) {
        DegreeOfFreedom d = el->getDof(i, n0);
        vec[d] += area;
      }
      */

      elInfo = stack.traverseNext(elInfo);
    }

    meshDistributor->synchAddVector(vec);

    DOFIterator<double> dofIt(&vec, USED_DOFS);
    for (dofIt.reset(); !dofIt.end(); ++dofIt) {
      DegreeOfFreedom d = dofIt.getDOFIndex();
      vec[d] /= static_cast<double>(cvec[d]);
      vec[d] = 1.0 / pow(vec[d], 2.0);
    }
  }
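In the active branch, each vertex DOF d thus ends up with the average area of its adjacent elements (getDet() is twice the triangle area in 2D), inverted and squared:

    \mathrm{vec}[d] = \Bigl( \frac{1}{|\mathcal{E}_d|} \sum_{T \in \mathcal{E}_d} |T| \Bigr)^{-2},

where \mathcal{E}_d is the set of leaf elements containing d; since |T| scales like h^2, the entry scales like h^{-4} for local mesh size h.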
...
@@ -2287,4 +2217,265 @@ namespace AMDiS {
    VecDestroy(&nullSpaceBasis);
    VecDestroy(&vecSol);
  }
  void PetscSolverFeti::createExplicitFetiMat(Mat fetiMat,
                                              Mat &explicitMat,
                                              int &nnzCounter)
  {
    FUNCNAME("PetscSolverFeti::createExplicitFetiMat()");

    nnzCounter = 0;

    if (stokesMode == false) {
      int nRankRows = lagrangeMap.getRankDofs();
      int nOverallRows = lagrangeMap.getOverallDofs();

      MatCreateAIJ(mpiCommGlobal,
                   nRankRows, nRankRows, nOverallRows, nOverallRows,
                   nOverallRows, PETSC_NULL, nOverallRows, PETSC_NULL,
                   &explicitMat);

      Vec unitVector;
      Vec resultVector;
      lagrangeMap.createVec(unitVector);
      lagrangeMap.createVec(resultVector);

      PetscInt low, high;
      VecGetOwnershipRange(unitVector, &low, &high);
      int nLocal = high - low;

      // Probe the implicit operator column by column with unit vectors.
      for (int i = 0; i < lagrangeMap.getOverallDofs(); i++) {
        VecSet(unitVector, 0.0);
        if (i >= low && i < high)
          VecSetValue(unitVector, i, 1.0, INSERT_VALUES);
        VecAssemblyBegin(unitVector);
        VecAssemblyEnd(unitVector);

        MatMult(fetiMat, unitVector, resultVector);

        if (fetiPreconditioner != FETI_NONE) {
          PCApply(precon_feti, resultVector, unitVector);
          VecCopy(unitVector, resultVector);
        }

        PetscScalar *vals;
        VecGetArray(resultVector, &vals);
        for (int j = 0; j < nLocal; j++) {
          if (fabs(vals[j]) > 1e-30) {
            MatSetValue(explicitMat, low + j, i, vals[j], INSERT_VALUES);
            nnzCounter++;
          }
        }
        VecRestoreArray(resultVector, &vals);
      }

      VecDestroy(&unitVector);
      VecDestroy(&resultVector);
    } else {
      int nRankRows =
        interfaceDofMap.getRankDofs() + lagrangeMap.getRankDofs();
      int nOverallRows =
        interfaceDofMap.getOverallDofs() + lagrangeMap.getOverallDofs();

      MatCreateAIJ(mpiCommGlobal,
                   nRankRows, nRankRows, nOverallRows, nOverallRows,
                   nOverallRows, PETSC_NULL, nOverallRows, PETSC_NULL,
                   &explicitMat);

      Vec unitVector[2];
      Vec resultVector[2];
      interfaceDofMap.createVec(unitVector[0]);
      interfaceDofMap.createVec(resultVector[0]);
      lagrangeMap.createVec(unitVector[1]);
      lagrangeMap.createVec(resultVector[1]);

      Vec unitNestVec, resultNestVec;
      VecCreateNest(mpiCommGlobal, 2, PETSC_NULL, unitVector, &unitNestVec);
      VecCreateNest(mpiCommGlobal, 2, PETSC_NULL, resultVector, &resultNestVec);

      PetscInt lowInterface, highInterface;
      VecGetOwnershipRange(unitVector[0], &lowInterface, &highInterface);
      int nLocalInterface = highInterface - lowInterface;

      PetscInt lowLagrange, highLagrange;
      VecGetOwnershipRange(unitVector[1], &lowLagrange, &highLagrange);
      int nLocalLagrange = highLagrange - lowLagrange;

      // First sweep: unit vectors in the interface block of the nest vector.
      VecSet(unitVector[1], 0.0);
      for (int i = 0; i < interfaceDofMap.getOverallDofs(); i++) {
        VecSet(unitVector[0], 0.0);
        if (i >= lowInterface && i < highInterface)
          VecSetValue(unitVector[0], i, 1.0, INSERT_VALUES);
        VecAssemblyBegin(unitVector[0]);
        VecAssemblyEnd(unitVector[0]);
        VecAssemblyBegin(unitNestVec);
        VecAssemblyEnd(unitNestVec);

        MatMult(fetiMat, unitNestVec, resultNestVec);

        PetscScalar *vals;
        VecGetArray(resultVector[0], &vals);
        for (int j = 0; j < nLocalInterface; j++) {
          if (fabs(vals[j]) > 1e-30) {
            MatSetValue(explicitMat, lowInterface + j, i,
                        vals[j], INSERT_VALUES);
            nnzCounter++;
          }
        }
        VecRestoreArray(resultVector[0], &vals);

        VecGetArray(resultVector[1], &vals);
        for (int j = 0; j < nLocalLagrange; j++) {
          if (fabs(vals[j]) > 1e-30) {
            MatSetValue(explicitMat,
                        interfaceDofMap.getOverallDofs() + lowLagrange + j,
                        i, vals[j], INSERT_VALUES);
            nnzCounter++;
          }
        }
        VecRestoreArray(resultVector[1], &vals);
      }

      // Second sweep: unit vectors in the Lagrange block of the nest vector.
      VecSet(unitVector[0], 0.0);
      for (int i = 0; i < lagrangeMap.getOverallDofs(); i++) {
        VecSet(unitVector[1], 0.0);
        if (i >= lowLagrange && i < highLagrange)
          VecSetValue(unitVector[1], i, 1.0, INSERT_VALUES);
        VecAssemblyBegin(unitVector[1]);
        VecAssemblyEnd(unitVector[1]);
        VecAssemblyBegin(unitNestVec);
        VecAssemblyEnd(unitNestVec);

        MatMult(fetiMat, unitNestVec, resultNestVec);

        PetscScalar *vals;
        VecGetArray(resultVector[0], &vals);
        for (int j = 0; j < nLocalInterface; j++) {
          if (fabs(vals[j]) > 1e-30) {
            MatSetValue(explicitMat, lowInterface + j,
                        interfaceDofMap.getOverallDofs() + i,
                        vals[j], INSERT_VALUES);
            nnzCounter++;
          }
        }
        VecRestoreArray(resultVector[0], &vals);

        VecGetArray(resultVector[1], &vals);
        for (int j = 0; j < nLocalLagrange; j++) {
          if (fabs(vals[j]) > 1e-30) {
            MatSetValue(explicitMat,
                        interfaceDofMap.getOverallDofs() + lowLagrange + j,
                        interfaceDofMap.getOverallDofs() + i,
                        vals[j], INSERT_VALUES);
            nnzCounter++;
          }
        }
        VecRestoreArray(resultVector[1], &vals);
      }

      VecDestroy(&unitVector[0]);
      VecDestroy(&unitVector[1]);
      VecDestroy(&resultVector[0]);
      VecDestroy(&resultVector[1]);
      VecDestroy(&unitNestVec);
      VecDestroy(&resultNestVec);
    }

    MatAssemblyBegin(explicitMat, MAT_FINAL_ASSEMBLY);
    MatAssemblyEnd(explicitMat, MAT_FINAL_ASSEMBLY);

    mpi::globalAdd(nnzCounter);
  }
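A plausible call site (editor's sketch, not part of this commit): dump the explicit operator the same way the removed writeFetiMatrix block did; mat_feti, mpiCommGlobal and MSG come from the surrounding class and AMDiS:

    Mat explicitMat;
    int nnz = 0;
    createExplicitFetiMat(mat_feti, explicitMat, nnz);

    PetscViewer viewer;
    PetscViewerBinaryOpen(PETSC_COMM_WORLD, "feti.mat",
                          FILE_MODE_WRITE, &viewer);
    MatView(explicitMat, viewer);
    PetscViewerDestroy(&viewer);
    MatDestroy(&explicitMat);

    MSG("FETI-DP matrix written with %d nnz\n", nnz);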
  void PetscSolverFeti::createExplicitVec(Vec nestedVec, Vec &explicitVec)
  {
    FUNCNAME("PetscSolverFeti::createExplicitVec()");

    int nNested = 0;
    VecNestGetSize(nestedVec, &nNested);
    TEST_EXIT_DBG(nNested == 2)
      ("Only supported for 2-nest vectors, not for %d-nest vectors!\n",
       nNested);

    Vec v0, v1;
    VecNestGetSubVec(nestedVec, 0, &v0);
    VecNestGetSubVec(nestedVec, 1, &v1);

    int s0, s1;
    VecGetSize(v0, &s0);
    VecGetSize(v1, &s1);

    int l0, l1;
    VecGetLocalSize(v0, &l0);
    VecGetLocalSize(v1, &l1);

    int rStart0, rStart1;
    VecGetOwnershipRange(v0, &rStart0, PETSC_NULL);
    VecGetOwnershipRange(v1, &rStart1, PETSC_NULL);

    VecCreateMPI(mpiCommGlobal, l0 + l1, s0 + s1, &explicitVec);

    // Copy the first sub-vector into [0, s0) and the second one,
    // shifted by s0, into [s0, s0 + s1).
    double *val;
    VecGetArray(v0, &val);
    for (int i = 0; i < l0; i++)
      VecSetValue(explicitVec, rStart0 + i, val[i], INSERT_VALUES);
    VecRestoreArray(v0, &val);

    VecGetArray(v1, &val);
    for (int i = 0; i < l1; i++)
      VecSetValue(explicitVec, s0 + rStart1 + i, val[i], INSERT_VALUES);
    VecRestoreArray(v1, &val);

    VecAssemblyBegin(explicitVec);
    VecAssemblyEnd(explicitVec);
  }
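A small round-trip check for the flattening (editor's sketch, not part of this commit; sizes are arbitrary and mpiCommGlobal is assumed to equal PETSC_COMM_WORLD): both sub-vectors are set to one, so the flattened vector of global size 13 must have 2-norm sqrt(13):

    Vec sub[2], nested, flat;
    VecCreateMPI(PETSC_COMM_WORLD, PETSC_DECIDE, 8, &sub[0]);
    VecCreateMPI(PETSC_COMM_WORLD, PETSC_DECIDE, 5, &sub[1]);
    VecSet(sub[0], 1.0);
    VecSet(sub[1], 1.0);
    VecCreateNest(PETSC_COMM_WORLD, 2, PETSC_NULL, sub, &nested);

    createExplicitVec(nested, flat);

    PetscReal nrm;
    VecNorm(flat, NORM_2, &nrm);   // expect sqrt(13) ~= 3.6056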
  void PetscSolverFeti::debugFeti(Vec dbgRhsVec)
  {
    FUNCNAME("PetscSolverFeti:::debugFeti()");

    int writeFetiSystem = 0;
    Parameters::get("parallel->debug->write feti system", writeFetiSystem);

    if (
...