iwr / amdis · Commits

Commit 53c5f965, authored Jun 26, 2012 by Thomas Witkowski

Some work on using the nnz structure of the matrix in FETI-DP method.

parent 81d8d089
Changes: 7 files
AMDiS/src/parallel/MeshDistributor.cc
```diff
@@ -85,6 +85,7 @@ namespace AMDiS {
       repartitionIthChange(20),
       nMeshChangesAfterLastRepartitioning(0),
       repartitioningCounter(0),
+      repartitioningFailed(0),
       debugOutputDir(""),
       lastMeshChangeIndex(0),
       createBoundaryDofFlag(0),
```
```diff
@@ -1006,9 +1007,13 @@ namespace AMDiS {
     INFO(info, 8)("Parallel mesh adaption needed %.5f seconds\n",
                   MPI::Wtime() - first);
 
-    if (tryRepartition && repartitioningAllowed &&
-        nMeshChangesAfterLastRepartitioning >= repartitionIthChange) {
+    if (repartitioningFailed > 0) {
+      MSG_DBG("Repartitioning not tried because it has failed in the past!\n");
+      repartitioningFailed--;
+    } else if (tryRepartition && repartitioningAllowed &&
+               nMeshChangesAfterLastRepartitioning >= repartitionIthChange) {
       repartitionMesh();
       nMeshChangesAfterLastRepartitioning = 0;
     } else {
```
```diff
@@ -1314,6 +1319,7 @@ namespace AMDiS {
     bool partitioningSucceed =
       partitioner->partition(elemWeights, ADAPTIVE_REPART);
     if (!partitioningSucceed) {
+      repartitioningFailed = 20;
       MSG("Mesh partitioner created empty partition!\n");
       return;
     }
```
```diff
@@ -1321,8 +1327,9 @@ namespace AMDiS {
     // In the case the partitioner does not create a new mesh partition, return
     // without any changes.
     if (!partitioner->meshChanged()) {
+      repartitioningFailed = 20;
       MSG("Mesh partition does not create a new partition!\n");
       return;
     }
 
     TEST_EXIT_DBG(!(partitioner->getSendElements().size() ==
                     mesh->getMacroElements().size() &&
```
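The repartitioningFailed counter added above implements a simple retry backoff: a failed repartitioning sets it to 20, and each later mesh change decrements it before repartitioning may be attempted again. A minimal self-contained sketch of the pattern (the constant 20 comes from the diff; the class and function names are illustrative, not AMDiS code):

```cpp
#include <iostream>

// Hypothetical stand-in for the backoff logic in MeshDistributor.cc.
struct RepartitionBackoff {
  int repartitioningFailed;   // 0 means: repartitioning may be tried

  RepartitionBackoff() : repartitioningFailed(0) {}

  void onMeshChange(bool attemptFails)
  {
    if (repartitioningFailed > 0) {
      // Skip this opportunity and wait out the backoff window.
      std::cout << "skip, " << repartitioningFailed << " changes left\n";
      repartitioningFailed--;
      return;
    }
    if (attemptFails)
      repartitioningFailed = 20;  // same constant as in MeshDistributor.cc
    else
      std::cout << "mesh repartitioned\n";
  }
};

int main()
{
  RepartitionBackoff b;
  b.onMeshChange(true);   // attempt fails -> back off for 20 mesh changes
  b.onMeshChange(false);  // skipped, counter now 19
  b.onMeshChange(false);  // skipped, counter now 18
  return 0;
}
```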
AMDiS/src/parallel/MeshDistributor.h
```diff
@@ -527,6 +527,11 @@ namespace AMDiS {
     /// variable is used only for debug outputs.
     int repartitioningCounter;
 
+    /// If repartitioning of the mesh fails, this variable is set to a positive
+    /// value that gives the number of mesh changes the mesh distributor will
+    /// wait before trying a new mesh repartitioning.
+    int repartitioningFailed;
+
     /// Directory name where all debug output files should be written to.
     string debugOutputDir;
```
AMDiS/src/parallel/ParMetisPartitioner.h
```diff
@@ -171,7 +171,7 @@ namespace AMDiS {
     ParMetisPartitioner(MPI::Intracomm *comm)
       : MeshPartitioner(comm),
         parMetisMesh(NULL),
-        itr(1000.0)
+        itr(1000000.0)
     {}
 
     ~ParMetisPartitioner();
```
AMDiS/src/parallel/ParallelDofMapping.h
```diff
@@ -131,6 +131,25 @@ namespace AMDiS {
       return dofMap[d];
     }
 
+    /** \brief
+     * Searches the map for a given DOF. It does not fail if the DOF is not
+     * mapped by this mapping; in this case it returns false. If the DOF is
+     * mapped, the result is stored and the function returns true.
+     *
+     * \param[in]   dof    DOF index for which a mapping is searched.
+     * \param[out]  index  In the case that the DOF is mapped, the result
+     *                     is stored here.
+     */
+    inline bool find(DegreeOfFreedom dof, MultiIndex& index)
+    {
+      DofMap::iterator it = dofMap.find(dof);
+      if (it == dofMap.end())
+        return false;
+
+      index = it->second;
+      return true;
+    }
+
     /// Inserts a new DOF to rank's mapping. The DOF is assumed to be owned by
     /// the rank.
```
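Later hunks in PetscSolverGlobalMatrix.cc call this as dofMap[feSpaces[i]].find(*cursor, rowDofIndex) to skip DOFs that a mapping does not contain. A self-contained analogue of the idiom, with simplified stand-ins for the AMDiS types (not the real classes): unlike operator[], the lookup never inserts, and the return value signals whether the DOF is mapped.

```cpp
#include <iostream>
#include <map>

// Simplified stand-ins for the AMDiS types used by find().
typedef int DegreeOfFreedom;
struct MultiIndex { int local; int global; };
typedef std::map<DegreeOfFreedom, MultiIndex> DofMap;

// Same contract as the new ParallelDofMapping::find().
bool find(const DofMap &dofMap, DegreeOfFreedom dof, MultiIndex &index)
{
  DofMap::const_iterator it = dofMap.find(dof);
  if (it == dofMap.end())
    return false;

  index = it->second;
  return true;
}

int main()
{
  DofMap m;
  MultiIndex mi = { 0, 42 };
  m[3] = mi;

  MultiIndex idx;
  if (find(m, 3, idx))
    std::cout << "DOF 3 -> global index " << idx.global << "\n";
  if (!find(m, 7, idx))
    std::cout << "DOF 7 is not mapped here, skip it\n";
  return 0;
}
```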
AMDiS/src/parallel/PetscSolverFeti.cc
```diff
@@ -269,11 +269,11 @@ namespace AMDiS {
     if (meshLevel == 0) {
       subdomain->setMeshDistributor(meshDistributor,
                                     mpiCommGlobal, mpiCommLocal);
     } else {
       subdomain->setMeshDistributor(meshDistributor,
                                     levelData.getMpiComm(meshLevel - 1),
                                     levelData.getMpiComm(meshLevel));
       subdomain->setLevel(meshLevel);
     }
   }
```
```diff
@@ -417,12 +417,11 @@ namespace AMDiS {
         ("Should not happen!\n");
     }
 
     // If multi level test, inform sub domain solver about coarse space.
     subdomain->setDofMapping(&localDofMap, &primalDofMap);
 
     if (printTimings) {
       timeCounter = MPI::Wtime() - timeCounter;
-      MSG("FETI-DP timing 01: %.5f seconds (creation of basic data structures)",
-          timeCounter);
+      MSG("FETI-DP timing 01: %.5f seconds (creation of basic data structures)\n",
+          timeCounter);
     }
   }
```
...
@@ -693,6 +692,7 @@ namespace AMDiS {
lagrangeMap
.
getOverallDofs
(),
nGlobalOverallInterior
,
2
,
PETSC_NULL
,
2
,
PETSC_NULL
,
&
mat_lagrange
);
MatSetOption
(
mat_lagrange
,
MAT_NEW_NONZERO_ALLOCATION_ERR
,
PETSC_FALSE
);
// === Create for all duals the corresponding Lagrange constraints. On ===
// === each rank we traverse all pairs (n, m) of ranks, with n < m, ===
...
...
@@ -792,7 +792,9 @@ namespace AMDiS {
MatCreateAIJ
(
mpiCommGlobal
,
nRowsRankB
,
nRowsRankPrimal
,
nGlobalOverallInterior
,
nRowsOverallPrimal
,
30
,
PETSC_NULL
,
30
,
PETSC_NULL
,
&
matBPi
);
150
,
PETSC_NULL
,
150
,
PETSC_NULL
,
&
matBPi
);
MatSetOption
(
matBPi
,
MAT_NEW_NONZERO_ALLOCATION_ERR
,
PETSC_FALSE
);
Mat
matPrimal
;
PetscInt
nCols
;
...
...
@@ -1209,7 +1211,7 @@ namespace AMDiS {
// === Create matrices for the FETI-DP method. ===
double
wtime
=
MPI
::
Wtime
();
subdomain
->
fillPetscMatrix
(
mat
);
if
(
printTimings
)
...
...
```diff
@@ -1225,23 +1227,30 @@ namespace AMDiS {
     int nRowsDual = dualDofMap.getRankDofs();
 
     MatCreateSeqAIJ(PETSC_COMM_SELF, nRowsDual, nRowsDual,
-                    60, PETSC_NULL, &mat_duals_duals);
+                    100, PETSC_NULL, &mat_duals_duals);
+    MatSetOption(mat_duals_duals, MAT_NEW_NONZERO_ALLOCATION_ERR, PETSC_FALSE);
 
     if (fetiPreconditioner == FETI_DIRICHLET) {
       int nRowsInterior = interiorDofMap.getRankDofs();
 
       MatCreateSeqAIJ(PETSC_COMM_SELF, nRowsInterior, nRowsInterior,
-                      60, PETSC_NULL, &mat_interior_interior);
+                      100, PETSC_NULL, &mat_interior_interior);
+      MatSetOption(mat_interior_interior,
+                   MAT_NEW_NONZERO_ALLOCATION_ERR, PETSC_FALSE);
 
       MatCreateSeqAIJ(PETSC_COMM_SELF, nRowsInterior, nRowsDual,
-                      60, PETSC_NULL, &mat_interior_duals);
+                      100, PETSC_NULL, &mat_interior_duals);
+      MatSetOption(mat_interior_duals,
+                   MAT_NEW_NONZERO_ALLOCATION_ERR, PETSC_FALSE);
 
       MatCreateSeqAIJ(PETSC_COMM_SELF, nRowsDual, nRowsInterior,
-                      60, PETSC_NULL, &mat_duals_interior);
+                      100, PETSC_NULL, &mat_duals_interior);
+      MatSetOption(mat_duals_interior,
+                   MAT_NEW_NONZERO_ALLOCATION_ERR, PETSC_FALSE);
     }
 
     // === Prepare traverse of sequentially created matrices. ===
```
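Context for the PETSc calls above, for readers who don't know the API: MatCreateSeqAIJ preallocates either a constant number of nonzeros per row (the style used here, with the estimate raised from 60 to 100) or an exact per-row count array. A minimal sketch against the real PETSc API; the matrix sizes and counts are made up for illustration:

```cpp
#include <petscmat.h>

int main(int argc, char **argv)
{
  PetscInitialize(&argc, &argv, PETSC_NULL, PETSC_NULL);

  // Variant 1 (as in the hunk above): a constant estimate of 100 nonzeros
  // per row of a 4x4 sequential matrix.
  Mat A;
  MatCreateSeqAIJ(PETSC_COMM_SELF, 4, 4, 100, PETSC_NULL, &A);
  // As in the commit: do not abort if assembly still hits a slot outside
  // the preallocated pattern.
  MatSetOption(A, MAT_NEW_NONZERO_ALLOCATION_ERR, PETSC_FALSE);

  // Variant 2 (the direction the commit takes elsewhere): exact per-row
  // counts, so no preallocated slot is wasted.
  PetscInt nnz[4] = { 3, 2, 4, 1 };
  Mat B;
  MatCreateSeqAIJ(PETSC_COMM_SELF, 4, 4, 0, nnz, &B);

  MatDestroy(&A);
  MatDestroy(&B);
  PetscFinalize();
  return 0;
}
```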
AMDiS/src/parallel/PetscSolverGlobalMatrix.cc
```diff
@@ -33,31 +33,10 @@ namespace AMDiS {
     double wtime = MPI::Wtime();
 
-    // === Check if mesh was changed and, in this case, recompute matrix ===
-    // === nnz structure and matrix indices.                             ===
-
-    int recvAllValues = 0;
-    int sendValue =
-      static_cast<int>(meshDistributor->getLastMeshChangeIndex() != lastMeshNnz);
-    mpiCommGlobal.Allreduce(&sendValue, &recvAllValues, 1, MPI_INT, MPI_SUM);
-
-    // === If required, recompute non zero structure of the matrix. ===
-    if (!d_nnz || recvAllValues != 0 || alwaysCreateNnzStructure) {
-      vector<const FiniteElemSpace*> feSpaces = getFeSpaces(mat);
-
-      interiorMap->setComputeMatIndex(true, true);
-      interiorMap->update(feSpaces);
-
-      if (d_nnz) {
-        delete [] d_nnz;
-        d_nnz = NULL;
-        delete [] o_nnz;
-        o_nnz = NULL;
-      }
-
-      updateSubdomainData();
-      createPetscNnzStructure(mat);
-      lastMeshNnz = meshDistributor->getLastMeshChangeIndex();
-    }
+    // === If required, recompute non zero structure of the matrix. ===
+    if (checkMeshChange(mat))
+      createPetscNnzStructure(mat);
 
     // === Create PETSc vector (solution and a temporary vector). ===
```
```diff
@@ -72,7 +51,7 @@ namespace AMDiS {
     MatCreateAIJ(mpiCommGlobal, nRankRows, nRankRows,
                  nOverallRows, nOverallRows,
-                 0, d_nnz, 0, o_nnz, &matIntInt);
+                 0, matInteriorDiagNnz, 0, matInteriorOffdiagNnz, &matIntInt);
     MatSetOption(matIntInt, MAT_NEW_NONZERO_ALLOCATION_ERR, PETSC_FALSE);
```
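For a parallel AIJ matrix the preallocation is split in two: the "diagonal" counts cover columns owned by the calling rank, the "off-diagonal" counts cover all other columns. That is exactly what the renamed matInteriorDiagNnz/matInteriorOffdiagNnz arrays hold. A self-contained sketch against the real PETSc API (the row counts are illustrative; run under mpirun):

```cpp
#include <petscmat.h>

int main(int argc, char **argv)
{
  PetscInitialize(&argc, &argv, PETSC_NULL, PETSC_NULL);

  // Each rank owns 4 rows. diagNnz[i] counts nonzeros of local row i whose
  // column is also owned by this rank; offdiagNnz[i] counts the rest.
  const PetscInt nRankRows = 4;
  PetscInt diagNnz[4]    = { 3, 2, 4, 1 };
  PetscInt offdiagNnz[4] = { 1, 0, 2, 1 };

  Mat mat;
  MatCreateAIJ(PETSC_COMM_WORLD, nRankRows, nRankRows,
               PETSC_DETERMINE, PETSC_DETERMINE,
               0, diagNnz, 0, offdiagNnz, &mat);
  // Tolerate entries outside the preallocated pattern, as the commit does.
  MatSetOption(mat, MAT_NEW_NONZERO_ALLOCATION_ERR, PETSC_FALSE);

  MatDestroy(&mat);
  PetscFinalize();
  return 0;
}
```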
```diff
@@ -149,16 +128,34 @@ namespace AMDiS {
     int nRowsRankInterior = interiorMap->getRankDofs();
     int nRowsOverallInterior = interiorMap->getOverallDofs();
 
-    if (subdomainLevel == 0) {
+    // === If required, recompute non zero structure of the matrix. ===
+    bool localMatrix = (subdomainLevel == 0);
+    if (checkMeshChange(mat, localMatrix)) {
+      createPetscNnzStructureWithCoarseSpace(mat, *interiorMap,
+                                             matInteriorDiagNnz,
+                                             matInteriorOffdiagNnz,
+                                             localMatrix);
+
+      if (coarseSpaceMap)
+        createPetscNnzStructureWithCoarseSpace(mat, *coarseSpaceMap,
+                                               matCoarseDiagNnz,
+                                               matCoarseOffdiagNnz);
+    }
+
+    if (localMatrix) {
       MatCreateSeqAIJ(mpiCommLocal, nRowsRankInterior, nRowsRankInterior,
-                      60, PETSC_NULL, &matIntInt);
+                      0, matInteriorDiagNnz, &matIntInt);
     } else {
       MatCreateAIJ(mpiCommLocal, nRowsRankInterior, nRowsRankInterior,
                    nRowsOverallInterior, nRowsOverallInterior,
-                   60, PETSC_NULL, 60, PETSC_NULL, &matIntInt);
+                   0, matInteriorDiagNnz, 0, matInteriorOffdiagNnz, &matIntInt);
     }
 
     if (coarseSpaceMap) {
       int nRowsRankCoarse = coarseSpaceMap->getRankDofs();
       int nRowsOverallCoarse = coarseSpaceMap->getOverallDofs();
```
```diff
@@ -166,17 +163,23 @@ namespace AMDiS {
       MatCreateAIJ(mpiCommGlobal,
                    nRowsRankCoarse, nRowsRankCoarse,
                    nRowsOverallCoarse, nRowsOverallCoarse,
-                   60, PETSC_NULL, 60, PETSC_NULL, &matCoarseCoarse);
+                   0, matCoarseDiagNnz, 0, matCoarseOffdiagNnz,
+                   &matCoarseCoarse);
 
       MatCreateAIJ(mpiCommGlobal,
                    nRowsRankCoarse, nRowsRankInterior,
                    nRowsOverallCoarse, nGlobalOverallInterior,
-                   60, PETSC_NULL, 60, PETSC_NULL, &matCoarseInt);
+                   100, PETSC_NULL, 100, PETSC_NULL, &matCoarseInt);
+      MatSetOption(matCoarseInt, MAT_NEW_NONZERO_ALLOCATION_ERR, PETSC_FALSE);
 
       MatCreateAIJ(mpiCommGlobal,
                    nRowsRankInterior, nRowsRankCoarse,
                    nGlobalOverallInterior, nRowsOverallCoarse,
-                   60, PETSC_NULL, 60, PETSC_NULL, &matIntCoarse);
+                   100, PETSC_NULL, 100, PETSC_NULL,
+                   // 0, matInteriorDiagNnz, 0, matCoarseOffdiagNnz,
+                   &matIntCoarse);
+      MatSetOption(matIntCoarse, MAT_NEW_NONZERO_ALLOCATION_ERR, PETSC_FALSE);
     }
 
     // === Prepare traverse of sequentially created matrices. ===
```
```diff
@@ -564,6 +567,52 @@ namespace AMDiS {
   }
 
 
+  bool PetscSolverGlobalMatrix::checkMeshChange(Matrix<DOFMatrix*> *mat,
+                                                bool localMatrix)
+  {
+    FUNCNAME("PetscSolverGlobalMatrix::checkMeshChange()");
+
+    int recvAllValues = 0;
+    int sendValue =
+      static_cast<int>(meshDistributor->getLastMeshChangeIndex() != lastMeshNnz);
+    mpiCommGlobal.Allreduce(&sendValue, &recvAllValues, 1, MPI_INT, MPI_SUM);
+
+    if (!matInteriorDiagNnz || recvAllValues != 0 || alwaysCreateNnzStructure) {
+      vector<const FiniteElemSpace*> feSpaces = getFeSpaces(mat);
+
+      interiorMap->setComputeMatIndex(true, !localMatrix);
+      interiorMap->update(feSpaces);
+
+      if (matInteriorDiagNnz) {
+        delete [] matInteriorDiagNnz;
+        matInteriorDiagNnz = NULL;
+      }
+
+      if (matInteriorOffdiagNnz) {
+        delete [] matInteriorOffdiagNnz;
+        matInteriorOffdiagNnz = NULL;
+      }
+
+      if (matCoarseDiagNnz) {
+        delete [] matCoarseDiagNnz;
+        matCoarseDiagNnz = NULL;
+      }
+
+      if (matCoarseOffdiagNnz) {
+        delete [] matCoarseOffdiagNnz;
+        matCoarseOffdiagNnz = NULL;
+      }
+
+      updateSubdomainData();
+      lastMeshNnz = meshDistributor->getLastMeshChangeIndex();
+
+      return true;
+    }
+
+    return false;
+  }
+
+
   void PetscSolverGlobalMatrix::createFieldSplit(PC pc)
   {
     FUNCNAME("PetscSolverGlobalMatrix::createFieldSplit()");
```
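checkMeshChange() uses an Allreduce so that all ranks take the same decision: each rank contributes 1 if its mesh changed since the last nnz computation, and the sum is nonzero on every rank if any rank changed. A self-contained sketch of just this collective step, using the plain MPI C API rather than the C++ bindings in the code (the "mesh changed" flag is faked for the example):

```cpp
#include <mpi.h>
#include <iostream>

int main(int argc, char **argv)
{
  MPI_Init(&argc, &argv);

  int rank;
  MPI_Comm_rank(MPI_COMM_WORLD, &rank);

  // Pretend only rank 0 observed a mesh change since the last nnz build.
  int sendValue = (rank == 0) ? 1 : 0;
  int recvAllValues = 0;

  // After the sum reduction every rank sees the same value, so either all
  // ranks rebuild the nnz structure or none does.
  MPI_Allreduce(&sendValue, &recvAllValues, 1, MPI_INT, MPI_SUM,
                MPI_COMM_WORLD);

  if (recvAllValues != 0)
    std::cout << "rank " << rank << ": recompute nnz structure\n";

  MPI_Finalize();
  return 0;
}
```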
```diff
@@ -985,18 +1034,17 @@ namespace AMDiS {
   {
     FUNCNAME("PetscSolverGlobalMatrix::createPetscNnzStructure()");
 
-    TEST_EXIT_DBG(!d_nnz)("There is something wrong!\n");
-    TEST_EXIT_DBG(!o_nnz)("There is something wrong!\n");
+    TEST_EXIT_DBG(!matInteriorDiagNnz)("There is something wrong!\n");
 
     vector<const FiniteElemSpace*> feSpaces = getFeSpaces(mat);
     int nRankRows = interiorMap->getRankDofs();
     int rankStartIndex = interiorMap->getStartDofs();
 
-    d_nnz = new int[nRankRows];
-    o_nnz = new int[nRankRows];
+    matInteriorDiagNnz = new int[nRankRows];
+    matInteriorOffdiagNnz = new int[nRankRows];
     for (int i = 0; i < nRankRows; i++) {
-      d_nnz[i] = 0;
-      o_nnz[i] = 0;
+      matInteriorDiagNnz[i] = 0;
+      matInteriorOffdiagNnz[i] = 0;
     }
 
     using mtl::tag::row;
     using mtl::tag::nz;
     using mtl::begin;
     using mtl::end;
```
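The counting loops that follow classify each nonzero by its global column index: columns inside the rank's contiguous block [rankStartIndex, rankStartIndex + nRankRows) go to the diagonal-block count, all others to the off-diagonal count. A self-contained sketch of that classification (the indices and data are made up):

```cpp
#include <iostream>
#include <vector>

int main()
{
  // Assume this rank owns the contiguous global rows/columns [8, 12).
  const int rankStartIndex = 8;
  const int nRankRows = 4;

  std::vector<int> matInteriorDiagNnz(nRankRows, 0);
  std::vector<int> matInteriorOffdiagNnz(nRankRows, 0);

  // Illustrative nonzeros of local row 0 (global row 8), by global column.
  const int cols[] = { 2, 8, 9, 15 };
  const int localPetscRowIdx = 0;

  for (int k = 0; k < 4; k++) {
    int petscColIdx = cols[k];
    if (petscColIdx >= rankStartIndex &&
        petscColIdx < rankStartIndex + nRankRows)
      matInteriorDiagNnz[localPetscRowIdx]++;    // column owned by this rank
    else
      matInteriorOffdiagNnz[localPetscRowIdx]++; // column owned elsewhere
  }

  std::cout << "diag: " << matInteriorDiagNnz[0]
            << ", offdiag: " << matInteriorOffdiagNnz[0] << "\n";  // 2, 2
  return 0;
}
```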
```diff
@@ -1091,13 +1139,13 @@ namespace AMDiS {
           if (value(*icursor) != 0.0 || petscRowIdx == petscColIdx) {
             // The row DOF is a rank DOF; if the column is also a rank DOF,
-            // increment the d_nnz value for this row, otherwise the
-            // o_nnz value.
+            // increment the matInteriorDiagNnz value for this row,
+            // otherwise the matInteriorOffdiagNnz value.
             if (petscColIdx >= rankStartIndex &&
                 petscColIdx < rankStartIndex + nRankRows)
-              d_nnz[localPetscRowIdx]++;
+              matInteriorDiagNnz[localPetscRowIdx]++;
             else
-              o_nnz[localPetscRowIdx]++;
+              matInteriorOffdiagNnz[localPetscRowIdx]++;
           }
         }
       } else {
```
```diff
@@ -1154,9 +1202,9 @@ namespace AMDiS {
                      r, localRowIdx, nRankRows, it->first);
 
           if (c < rankStartIndex || c >= rankStartIndex + nRankRows)
-            o_nnz[localRowIdx]++;
+            matInteriorOffdiagNnz[localRowIdx]++;
           else
-            d_nnz[localRowIdx]++;
+            matInteriorDiagNnz[localRowIdx]++;
         }
       }
     }
```
```diff
@@ -1169,7 +1217,254 @@ namespace AMDiS {
     if (nRankRows < 100)
       for (int i = 0; i < nRankRows; i++)
-        d_nnz[i] = std::min(d_nnz[i], nRankRows);
+        matInteriorDiagNnz[i] = std::min(matInteriorDiagNnz[i], nRankRows);
   }
 
 
+  void PetscSolverGlobalMatrix::createPetscNnzStructureWithCoarseSpace(
+      Matrix<DOFMatrix*> *mat,
+      ParallelDofMapping &dofMap,
+      int *&diagNnz, int *&offdiagNnz,
+      bool localMatrix)
+  {
+    FUNCNAME("PetscSolverGlobalMatrix::createPetscNnzStructure()");
+
+    TEST_EXIT_DBG(!diagNnz)("There is something wrong!\n");
+
+    vector<const FiniteElemSpace*> feSpaces = getFeSpaces(mat);
+    int nRankRows = dofMap.getRankDofs();
+    int rankStartIndex = dofMap.getStartDofs();
+
+    diagNnz = new int[nRankRows];
+    for (int i = 0; i < nRankRows; i++)
+      diagNnz[i] = 0;
+
+    if (localMatrix == false) {
+      offdiagNnz = new int[nRankRows];
+      for (int i = 0; i < nRankRows; i++)
+        offdiagNnz[i] = 0;
+    }
+
+    using mtl::tag::row;
+    using mtl::tag::nz;
+    using mtl::begin;
+    using mtl::end;
+    namespace traits = mtl::traits;
+    typedef DOFMatrix::base_matrix_type Matrix;
+    typedef vector<pair<int, int> > MatrixNnzEntry;
+    typedef map<int, DofContainer> RankToDofContainer;
+
+    // Stores for each rank a list of nnz entries (i.e. pairs of row and
+    // column index) that this rank will send to it. These nnz entries will
+    // be assembled on this rank, but because the row DOFs are not DOFs of
+    // this rank they will be sent to the owner of the row DOFs.
+    map<int, MatrixNnzEntry> sendMatrixEntry;
+
+    // Maps each DOF that must be sent to another rank to the rank number
+    // of the receiving rank.
+    map<pair<DegreeOfFreedom, int>, int> sendDofToRank;
+
+    // First, create for all ranks we send data to a MatrixNnzEntry
+    // object with 0 entries.
+    for (unsigned int i = 0; i < feSpaces.size(); i++) {
+      for (DofComm::Iterator it(meshDistributor->getDofComm().getRecvDofs(),
+                                feSpaces[i]);
+           !it.end(); it.nextRank()) {
+        sendMatrixEntry[it.getRank()].resize(0);
+
+        for (; !it.endDofIter(); it.nextDof())
+          sendDofToRank[make_pair(it.getDofIndex(), i)] = it.getRank();
+      }
+    }
+
+    // Create the list of ranks from which we receive data.
+    std::set<int> recvFromRank;
+    for (unsigned int i = 0; i < feSpaces.size(); i++)
+      for (DofComm::Iterator it(meshDistributor->getDofComm().getSendDofs(),
+                                feSpaces[i]);
+           !it.end(); it.nextRank())
+        recvFromRank.insert(it.getRank());
+
+    // === Traverse matrices to create nnz data. ===
+
+    int nComponents = mat->getNumRows();
+    for (int i = 0; i < nComponents; i++) {
+      for (int j = 0; j < nComponents; j++) {
+        if (!(*mat)[i][j])
+          continue;
+
+        TEST_EXIT_DBG((*mat)[i][j]->getRowFeSpace() == feSpaces[i])
+          ("Should not happen!\n");
+        TEST_EXIT_DBG((*mat)[i][j]->getColFeSpace() == feSpaces[j])
+          ("Should not happen!\n");
+
+        Matrix bmat = (*mat)[i][j]->getBaseMatrix();
+
+        traits::col<Matrix>::type col(bmat);
+        traits::const_value<Matrix>::type value(bmat);
+
+        typedef traits::range_generator<row, Matrix>::type cursor_type;
+        typedef traits::range_generator<nz, cursor_type>::type icursor_type;
+
+        MultiIndex rowDofIndex;
+
+        for (cursor_type cursor = begin<row>(bmat),
+               cend = end<row>(bmat); cursor != cend; ++cursor) {
+          if (dofMap[feSpaces[i]].find(*cursor, rowDofIndex) == false)
+            continue;
+
+          // The corresponding global matrix row index of the current row DOF.
+          int petscRowIdx = 0;
+          if (localMatrix) {
+            petscRowIdx = dofMap.getLocalMatIndex(i, *cursor);
+          } else {
+            if (dofMap.isMatIndexFromGlobal())
+              petscRowIdx = dofMap.getMatIndex(i, rowDofIndex.global);
+            else
+              petscRowIdx = dofMap.getMatIndex(i, *cursor);
+          }
+
+          if (localMatrix || dofMap[feSpaces[i]].isRankDof(*cursor)) {
+            // === The current row DOF is a rank DOF, so create the      ===
+            // === corresponding nnz values directly on rank's nnz data. ===
+
+            // This is the local row index of the local PETSc matrix.
+            int localPetscRowIdx = petscRowIdx;
+
+            if (localMatrix == false) {
+              localPetscRowIdx -= rankStartIndex;
+
+              TEST_EXIT_DBG(localPetscRowIdx >= 0 &&
+                            localPetscRowIdx < nRankRows)
+                ("Should not happen!\n Debug info: DOF = %d  globalRowIndx = %d  petscRowIdx = %d  localPetscRowIdx = %d  rStart = %d  components = %d from %d  nRankRows = %d\n",
+                 *cursor,
+                 dofMap[feSpaces[i]][*cursor].global,
+                 petscRowIdx,
+                 localPetscRowIdx,
+                 rankStartIndex,
+                 i,
+                 nComponents,
+                 nRankRows);
+            }
+
+            if (localMatrix) {
+              for (icursor_type icursor = begin<nz>(cursor),
+                     icend = end<nz>(cursor); icursor != icend; ++icursor)
+                if (dofMap[feSpaces[j]].isSet(col(*icursor)))
+                  diagNnz[localPetscRowIdx]++;
+            } else {
+              MultiIndex colDofIndex;
+
+              // Traverse all non zero entries in this row.
+              for (icursor_type icursor = begin<nz>(cursor),
+                     icend = end<nz>(cursor); icursor != icend; ++icursor) {
+                if (dofMap[feSpaces[j]].find(col(*icursor), colDofIndex) == false)
+                  continue;
+
+                int petscColIdx = (dofMap.isMatIndexFromGlobal() ?
+                                   dofMap.getMatIndex(j, colDofIndex.global) :
+                                   dofMap.getMatIndex(j, col(*icursor)));
+
+                if (value(*icursor) != 0.0 || petscRowIdx == petscColIdx) {
+                  // The row DOF is a rank DOF; if the column is also a rank
+                  // DOF, increment the diagNnz value for this row, otherwise
+                  // the offdiagNnz value.
+                  if (petscColIdx >= rankStartIndex &&
+                      petscColIdx < rankStartIndex + nRankRows)
+                    diagNnz[localPetscRowIdx]++;
+                  else
+                    offdiagNnz[localPetscRowIdx]++;
+                }
+              }
+            }
+          } else {
+            // === The current row DOF is not a rank DOF, i.e., its values ===
+            // === are also created on this rank, but afterwards they will ===
+            // === be sent to another rank. So we need to send also the    ===
+            // === corresponding nnz structure of this row to the corres-  ===
+            // === ponding rank.                                           ===
+
+            // Send all non zero entries to the owner of the row DOF.
+            int sendToRank = sendDofToRank[make_pair(*cursor, i)];
+
+            MultiIndex colDofIndex;
+            for (icursor_type icursor = begin<nz>(cursor),
+                   icend = end<nz>(cursor); icursor != icend; ++icursor) {
+              if (dofMap[feSpaces[j]].find(col(*icursor), colDofIndex) == false)
+                continue;
+
+              if (value(*icursor) != 0.0) {
+                int petscColIdx = (dofMap.isMatIndexFromGlobal() ?
+                                   dofMap.getMatIndex(j, colDofIndex.global) :
+                                   dofMap.getMatIndex(j, col(*icursor)));
+
+                sendMatrixEntry[sendToRank].
+                  push_back(make_pair(petscRowIdx, petscColIdx));
+              }
+            }
+          } // if (isRankDof[*cursor]) ... else ...
+        } // for each row in mat[i][j]
+      }
+    }
+
+    if (localMatrix == false) {
+      // === Send and recv the nnz row structure to/from other ranks. ===
+
+      StdMpi<MatrixNnzEntry> stdMpi(mpiCommGlobal, true);
+      stdMpi.send(sendMatrixEntry);
+      for (std::set<int>::iterator it = recvFromRank.begin();
```