Project: Aland, Sebastian / amdis
Commit 53c5f965, parent 81d8d089
Authored Jun 26, 2012 by Thomas Witkowski

    Some work on using the nnz structure of the matrix in FETI-DP method.

Changes: 7 files
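The theme of this commit is PETSc matrix preallocation: fixed per-row estimates are either raised or replaced by exact nonzero (nnz) counts, and newly created matrices get MatSetOption(..., MAT_NEW_NONZERO_ALLOCATION_ERR, PETSC_FALSE) so an underestimate degrades performance instead of aborting. A minimal, self-contained sketch of the pattern follows; this is illustrative code, not AMDiS's, and all sizes and counts are made up:

#include <petscmat.h>

int main(int argc, char **argv)
{
  PetscInitialize(&argc, &argv, PETSC_NULL, PETSC_NULL);

  // Per-row nnz counts for the locally owned rows: dnnz[i] entries fall in
  // the diagonal block (columns owned by this rank), onnz[i] in the
  // off-diagonal block (columns owned by other ranks).
  PetscInt nLocal = 4;
  PetscInt dnnz[4] = {3, 3, 3, 3};
  PetscInt onnz[4] = {1, 1, 1, 1};

  Mat A;
  MatCreateAIJ(PETSC_COMM_WORLD, nLocal, nLocal,
               PETSC_DETERMINE, PETSC_DETERMINE,
               0, dnnz, 0, onnz, &A);
  // As in this commit: if an entry falls outside the preallocated pattern,
  // let PETSc allocate it on the fly instead of raising an error.
  MatSetOption(A, MAT_NEW_NONZERO_ALLOCATION_ERR, PETSC_FALSE);

  MatDestroy(&A);
  PetscFinalize();
  return 0;
}

Exact preallocation matters because every MatSetValues call that exceeds a row's preallocated budget triggers a reallocation and copy of that row; computing the nnz structure once per mesh change, as PetscSolverGlobalMatrix.cc does below, keeps assembly linear in the number of entries.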
AMDiS/src/parallel/MeshDistributor.cc

@@ -85,6 +85,7 @@ namespace AMDiS {
     repartitionIthChange(20),
     nMeshChangesAfterLastRepartitioning(0),
     repartitioningCounter(0),
+    repartitioningFailed(0),
     debugOutputDir(""),
     lastMeshChangeIndex(0),
     createBoundaryDofFlag(0),

@@ -1006,9 +1007,13 @@ namespace AMDiS {
     INFO(info, 8)("Parallel mesh adaption needed %.5f seconds\n",
                   MPI::Wtime() - first);

-    if (tryRepartition &&
-        repartitioningAllowed &&
-        nMeshChangesAfterLastRepartitioning >= repartitionIthChange) {
+    if (repartitioningFailed > 0) {
+      MSG_DBG("Repartitioning not tried because it has failed in the past!\n");
+      repartitioningFailed--;
+    } else if (tryRepartition &&
+               repartitioningAllowed &&
+               nMeshChangesAfterLastRepartitioning >= repartitionIthChange) {
       repartitionMesh();
       nMeshChangesAfterLastRepartitioning = 0;
     } else {

@@ -1314,6 +1319,7 @@ namespace AMDiS {
     bool partitioningSucceed =
       partitioner->partition(elemWeights, ADAPTIVE_REPART);
     if (!partitioningSucceed) {
+      repartitioningFailed = 20;
       MSG("Mesh partitioner created empty partition!\n");
       return;
     }

@@ -1321,8 +1327,9 @@ namespace AMDiS {
     // In the case the partitioner does not create a new mesh partition, return
     // without any changes.
     if (!partitioner->meshChanged()) {
+      repartitioningFailed = 20;
       MSG("Mesh partition does not create a new partition!\n");
       return;
     }

     TEST_EXIT_DBG(!(partitioner->getSendElements().size() ==
                     mesh->getMacroElements().size() &&
AMDiS/src/parallel/MeshDistributor.h

@@ -527,6 +527,11 @@ namespace AMDiS {
     /// variable is used only for debug outputs.
     int repartitioningCounter;

+    /// If repartitioning of the mesh fails, this variable has a positive value
+    /// that gives the number of mesh changes the mesh distributor will wait
+    /// before trying a new mesh repartitioning.
+    int repartitioningFailed;
+
     /// Directory name where all debug output files should be written to.
     string debugOutputDir;
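Taken together, the two changes above implement a simple back-off: a failed repartitioning sets repartitioningFailed = 20, each subsequent mesh change decrements it, and repartitioning is attempted again only once the counter reaches zero. A self-contained sketch of the same pattern (names are illustrative, not AMDiS's):

#include <iostream>

// Illustrative back-off counter, mirroring the logic this commit adds to
// MeshDistributor: after a failure, skip the next `backoff` attempts.
struct RepartitionBackoff {
  int failed;                     // > 0: number of attempts still to skip
  static const int backoff = 20; // value used in this commit

  RepartitionBackoff() : failed(0) {}

  bool shouldTry() {
    if (failed > 0) {
      failed--;                   // wait one more mesh change
      return false;
    }
    return true;
  }

  void reportFailure() { failed = backoff; }
};

int main()
{
  RepartitionBackoff b;
  b.reportFailure();
  int skipped = 0;
  while (!b.shouldTry())
    skipped++;
  std::cout << "skipped " << skipped << " attempts\n";  // prints 20
  return 0;
}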
AMDiS/src/parallel/ParMetisPartitioner.h

@@ -171,7 +171,7 @@ namespace AMDiS {
     ParMetisPartitioner(MPI::Intracomm *comm)
       : MeshPartitioner(comm),
        parMetisMesh(NULL),
-       itr(1000.0)
+       itr(1000000.0)
     {}

     ~ParMetisPartitioner();
AMDiS/src/parallel/ParallelDofMapping.h

@@ -131,6 +131,25 @@ namespace AMDiS {
       return dofMap[d];
     }

+    /** \brief
+     * Searches the map for a given DOF. It does not fail if the DOF is not
+     * mapped by this mapping; in this case, it returns false. If the DOF is
+     * mapped, the result is stored and the function returns true.
+     *
+     * \param[in]  dof    DOF index for which a mapping is searched.
+     * \param[out] index  In the case that the DOF is mapped, the result
+     *                    is stored here.
+     */
+    inline bool find(DegreeOfFreedom dof, MultiIndex& index)
+    {
+      DofMap::iterator it = dofMap.find(dof);
+      if (it == dofMap.end())
+        return false;
+
+      index = it->second;
+      return true;
+    }
+
     /// Inserts a new DOF into the rank's mapping. The DOF is assumed to be
     /// owned by the rank.
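The new find() is the standard non-throwing map lookup: unlike operator[], which would silently insert a default-constructed MultiIndex for an unmapped DOF, it reports via the return value whether the DOF is mapped at all. The same pattern with a plain std::map, using int stand-ins for DegreeOfFreedom and MultiIndex (an assumption for illustration only):

#include <iostream>
#include <map>

// Non-throwing lookup: returns false for unmapped keys instead of
// inserting a default-constructed value the way operator[] would.
bool find(const std::map<int, int>& dofMap, int dof, int& index)
{
  std::map<int, int>::const_iterator it = dofMap.find(dof);
  if (it == dofMap.end())
    return false;

  index = it->second;
  return true;
}

int main()
{
  std::map<int, int> dofMap;
  dofMap[42] = 7;

  int index;
  if (find(dofMap, 42, index))
    std::cout << "DOF 42 -> " << index << "\n";  // DOF 42 -> 7
  if (!find(dofMap, 13, index))
    std::cout << "DOF 13 is not mapped here\n";
  return 0;
}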
AMDiS/src/parallel/PetscSolverFeti.cc

@@ -269,11 +269,11 @@ namespace AMDiS {
     if (meshLevel == 0) {
       subdomain->setMeshDistributor(meshDistributor,
                                     mpiCommGlobal, mpiCommLocal);
     } else {
       subdomain->setMeshDistributor(meshDistributor,
                                     levelData.getMpiComm(meshLevel - 1),
                                     levelData.getMpiComm(meshLevel));
       subdomain->setLevel(meshLevel);
     }
   }

@@ -417,12 +417,11 @@ namespace AMDiS {
       ("Should not happen!\n");
     }

-    // If multi level test, inform sub domain solver about coarse space.
     subdomain->setDofMapping(&localDofMap, &primalDofMap);

     if (printTimings) {
       timeCounter = MPI::Wtime() - timeCounter;
-      MSG("FETI-DP timing 01: %.5f seconds (creation of basic data structures)",
+      MSG("FETI-DP timing 01: %.5f seconds (creation of basic data structures)\n",
          timeCounter);
     }
   }

@@ -693,6 +692,7 @@ namespace AMDiS {
                  lagrangeMap.getOverallDofs(), nGlobalOverallInterior,
                  2, PETSC_NULL, 2, PETSC_NULL,
                  &mat_lagrange);
+    MatSetOption(mat_lagrange, MAT_NEW_NONZERO_ALLOCATION_ERR, PETSC_FALSE);

     // === Create for all duals the corresponding Lagrange constraints. On ===
     // === each rank we traverse all pairs (n, m) of ranks, with n < m,    ===

@@ -792,7 +792,9 @@ namespace AMDiS {
     MatCreateAIJ(mpiCommGlobal,
                  nRowsRankB, nRowsRankPrimal,
                  nGlobalOverallInterior, nRowsOverallPrimal,
-                 30, PETSC_NULL, 30, PETSC_NULL, &matBPi);
+                 150, PETSC_NULL, 150, PETSC_NULL, &matBPi);
+    MatSetOption(matBPi, MAT_NEW_NONZERO_ALLOCATION_ERR, PETSC_FALSE);
+
     Mat matPrimal;
     PetscInt nCols;

@@ -1209,7 +1211,7 @@ namespace AMDiS {
     // === Create matrices for the FETI-DP method. ===

     double wtime = MPI::Wtime();
     subdomain->fillPetscMatrix(mat);

     if (printTimings)

@@ -1225,23 +1227,30 @@ namespace AMDiS {
     int nRowsDual = dualDofMap.getRankDofs();

     MatCreateSeqAIJ(PETSC_COMM_SELF,
-                    nRowsDual, nRowsDual, 60, PETSC_NULL,
+                    nRowsDual, nRowsDual, 100, PETSC_NULL,
                     &mat_duals_duals);
+    MatSetOption(mat_duals_duals, MAT_NEW_NONZERO_ALLOCATION_ERR, PETSC_FALSE);

     if (fetiPreconditioner == FETI_DIRICHLET) {
       int nRowsInterior = interiorDofMap.getRankDofs();

       MatCreateSeqAIJ(PETSC_COMM_SELF,
-                      nRowsInterior, nRowsInterior, 60, PETSC_NULL,
+                      nRowsInterior, nRowsInterior, 100, PETSC_NULL,
                       &mat_interior_interior);
+      MatSetOption(mat_interior_interior, MAT_NEW_NONZERO_ALLOCATION_ERR, PETSC_FALSE);

       MatCreateSeqAIJ(PETSC_COMM_SELF,
-                      nRowsInterior, nRowsDual, 60, PETSC_NULL,
+                      nRowsInterior, nRowsDual, 100, PETSC_NULL,
                       &mat_interior_duals);
+      MatSetOption(mat_interior_duals, MAT_NEW_NONZERO_ALLOCATION_ERR, PETSC_FALSE);

       MatCreateSeqAIJ(PETSC_COMM_SELF,
-                      nRowsDual, nRowsInterior, 60, PETSC_NULL,
+                      nRowsDual, nRowsInterior, 100, PETSC_NULL,
                       &mat_duals_interior);
+      MatSetOption(mat_duals_interior, MAT_NEW_NONZERO_ALLOCATION_ERR, PETSC_FALSE);
     }

     // === Prepare traverse of sequentially created matrices. ===
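The MatCreateSeqAIJ calls above keep a scalar per-row estimate (now 100 instead of 60) rather than exact counts, but pair it with MAT_NEW_NONZERO_ALLOCATION_ERR disabled, so an underestimated row costs extra allocations instead of an abort. A minimal sequential sketch of that combination (illustrative, not AMDiS code; the matrix size is arbitrary):

#include <petscmat.h>

int main(int argc, char **argv)
{
  PetscInitialize(&argc, &argv, PETSC_NULL, PETSC_NULL);

  Mat A;
  PetscInt n = 100;
  // Guess 100 nonzeros per row, as the FETI-DP preconditioner matrices
  // now do; PETSC_NULL means "use the scalar estimate for every row".
  MatCreateSeqAIJ(PETSC_COMM_SELF, n, n, 100, PETSC_NULL, &A);
  // If a row turns out to need more entries than guessed, allocate on
  // the fly instead of raising an error.
  MatSetOption(A, MAT_NEW_NONZERO_ALLOCATION_ERR, PETSC_FALSE);

  MatDestroy(&A);
  PetscFinalize();
  return 0;
}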
AMDiS/src/parallel/PetscSolverGlobalMatrix.cc

@@ -33,31 +33,10 @@ namespace AMDiS {
     double wtime = MPI::Wtime();

-    // === Check if mesh was changed and, in this case, recompute matrix ===
-    // === nnz structure and matrix indices.                             ===
-    int recvAllValues = 0;
-    int sendValue =
-      static_cast<int>(meshDistributor->getLastMeshChangeIndex() != lastMeshNnz);
-    mpiCommGlobal.Allreduce(&sendValue, &recvAllValues, 1, MPI_INT, MPI_SUM);
-
-    if (!d_nnz || recvAllValues != 0 || alwaysCreateNnzStructure) {
-      vector<const FiniteElemSpace*> feSpaces = getFeSpaces(mat);
-
-      interiorMap->setComputeMatIndex(true, true);
-      interiorMap->update(feSpaces);
-
-      if (d_nnz) {
-        delete [] d_nnz;
-        d_nnz = NULL;
-        delete [] o_nnz;
-        o_nnz = NULL;
-      }
-
-      updateSubdomainData();
-      createPetscNnzStructure(mat);
-      lastMeshNnz = meshDistributor->getLastMeshChangeIndex();
-    }
+    // === If required, recompute non zero structure of the matrix. ===
+    if (checkMeshChange(mat))
+      createPetscNnzStructure(mat);

     // === Create PETSc vector (solution and a temporary vector). ===

@@ -72,7 +51,7 @@ namespace AMDiS {
     MatCreateAIJ(mpiCommGlobal, nRankRows, nRankRows,
                  nOverallRows, nOverallRows,
-                 0, d_nnz, 0, o_nnz, &matIntInt);
+                 0, matInteriorDiagNnz, 0, matInteriorOffdiagNnz, &matIntInt);

     MatSetOption(matIntInt, MAT_NEW_NONZERO_ALLOCATION_ERR, PETSC_FALSE);

@@ -149,16 +128,34 @@ namespace AMDiS {
     int nRowsRankInterior = interiorMap->getRankDofs();
     int nRowsOverallInterior = interiorMap->getOverallDofs();

-    if (subdomainLevel == 0) {
-      MatCreateSeqAIJ(mpiCommLocal, nRowsRankInterior, nRowsRankInterior,
-                      60, PETSC_NULL, &matIntInt);
+    // === If required, recompute non zero structure of the matrix. ===
+    bool localMatrix = (subdomainLevel == 0);
+    if (checkMeshChange(mat, localMatrix)) {
+      createPetscNnzStructureWithCoarseSpace(mat, *interiorMap,
+                                             matInteriorDiagNnz,
+                                             matInteriorOffdiagNnz,
+                                             localMatrix);
+
+      if (coarseSpaceMap)
+        createPetscNnzStructureWithCoarseSpace(mat, *coarseSpaceMap,
+                                               matCoarseDiagNnz,
+                                               matCoarseOffdiagNnz);
+    }
+
+    if (localMatrix) {
+      MatCreateSeqAIJ(mpiCommLocal, nRowsRankInterior, nRowsRankInterior,
+                      0, matInteriorDiagNnz, &matIntInt);
     } else {
       MatCreateAIJ(mpiCommLocal,
                    nRowsRankInterior, nRowsRankInterior,
                    nRowsOverallInterior, nRowsOverallInterior,
-                   60, PETSC_NULL, 60, PETSC_NULL, &matIntInt);
+                   0, matInteriorDiagNnz, 0, matInteriorOffdiagNnz,
+                   &matIntInt);
     }

     if (coarseSpaceMap) {
       int nRowsRankCoarse = coarseSpaceMap->getRankDofs();
       int nRowsOverallCoarse = coarseSpaceMap->getOverallDofs();

@@ -166,17 +163,23 @@ namespace AMDiS {
       MatCreateAIJ(mpiCommGlobal,
                    nRowsRankCoarse, nRowsRankCoarse,
                    nRowsOverallCoarse, nRowsOverallCoarse,
-                   60, PETSC_NULL, 60, PETSC_NULL, &matCoarseCoarse);
+                   0, matCoarseDiagNnz, 0, matCoarseOffdiagNnz,
+                   &matCoarseCoarse);

       MatCreateAIJ(mpiCommGlobal,
                    nRowsRankCoarse, nRowsRankInterior,
                    nRowsOverallCoarse, nGlobalOverallInterior,
-                   60, PETSC_NULL, 60, PETSC_NULL, &matCoarseInt);
+                   100, PETSC_NULL, 100, PETSC_NULL,
+                   &matCoarseInt);
+      MatSetOption(matCoarseInt, MAT_NEW_NONZERO_ALLOCATION_ERR, PETSC_FALSE);

       MatCreateAIJ(mpiCommGlobal,
                    nRowsRankInterior, nRowsRankCoarse,
                    nGlobalOverallInterior, nRowsOverallCoarse,
-                   60, PETSC_NULL, 60, PETSC_NULL, &matIntCoarse);
+                   100, PETSC_NULL, 100, PETSC_NULL,
+                   // 0, matInteriorDiagNnz, 0, matCoarseOffdiagNnz,
+                   &matIntCoarse);
+      MatSetOption(matIntCoarse, MAT_NEW_NONZERO_ALLOCATION_ERR, PETSC_FALSE);
     }

     // === Prepare traverse of sequentially created matrices. ===

@@ -564,6 +567,52 @@ namespace AMDiS {
   }

+  bool PetscSolverGlobalMatrix::checkMeshChange(Matrix<DOFMatrix*> *mat,
+                                                bool localMatrix)
+  {
+    FUNCNAME("PetscSolverGlobalMatrix::checkMeshChange()");
+
+    int recvAllValues = 0;
+    int sendValue =
+      static_cast<int>(meshDistributor->getLastMeshChangeIndex() != lastMeshNnz);
+    mpiCommGlobal.Allreduce(&sendValue, &recvAllValues, 1, MPI_INT, MPI_SUM);
+
+    if (!matInteriorDiagNnz || recvAllValues != 0 || alwaysCreateNnzStructure) {
+      vector<const FiniteElemSpace*> feSpaces = getFeSpaces(mat);
+
+      interiorMap->setComputeMatIndex(true, !localMatrix);
+      interiorMap->update(feSpaces);
+
+      if (matInteriorDiagNnz) {
+        delete [] matInteriorDiagNnz;
+        matInteriorDiagNnz = NULL;
+      }
+
+      if (matInteriorOffdiagNnz) {
+        delete [] matInteriorOffdiagNnz;
+        matInteriorOffdiagNnz = NULL;
+      }
+
+      if (matCoarseDiagNnz) {
+        delete [] matCoarseDiagNnz;
+        matCoarseDiagNnz = NULL;
+      }
+
+      if (matCoarseOffdiagNnz) {
+        delete [] matCoarseOffdiagNnz;
+        matCoarseOffdiagNnz = NULL;
+      }
+
+      updateSubdomainData();
+      lastMeshNnz = meshDistributor->getLastMeshChangeIndex();
+
+      return true;
+    }
+
+    return false;
+  }
+
   void PetscSolverGlobalMatrix::createFieldSplit(PC pc)
   {
     FUNCNAME("PetscSolverGlobalMatrix::createFieldSplit()");

@@ -985,18 +1034,17 @@ namespace AMDiS {
   {
     FUNCNAME("PetscSolverGlobalMatrix::createPetscNnzStructure()");

-    TEST_EXIT_DBG(!d_nnz)("There is something wrong!\n");
-    TEST_EXIT_DBG(!o_nnz)("There is something wrong!\n");
+    TEST_EXIT_DBG(!matInteriorDiagNnz)("There is something wrong!\n");

     vector<const FiniteElemSpace*> feSpaces = getFeSpaces(mat);
     int nRankRows = interiorMap->getRankDofs();
     int rankStartIndex = interiorMap->getStartDofs();

-    d_nnz = new int[nRankRows];
-    o_nnz = new int[nRankRows];
+    matInteriorDiagNnz = new int[nRankRows];
+    matInteriorOffdiagNnz = new int[nRankRows];
     for (int i = 0; i < nRankRows; i++) {
-      d_nnz[i] = 0;
-      o_nnz[i] = 0;
+      matInteriorDiagNnz[i] = 0;
+      matInteriorOffdiagNnz[i] = 0;
     }

     using mtl::tag::row; using mtl::tag::nz; using mtl::begin; using mtl::end;

@@ -1091,13 +1139,13 @@ namespace AMDiS {
           if (value(*icursor) != 0.0 || petscRowIdx == petscColIdx) {
             // The row DOF is a rank DOF, if also the column is a rank DOF,
-            // increment the d_nnz values for this row, otherwise the
-            // o_nnz value.
+            // increment the matInteriorDiagNnz values for this row,
+            // otherwise the matInteriorOffdiagNnz value.
             if (petscColIdx >= rankStartIndex &&
                 petscColIdx < rankStartIndex + nRankRows)
-              d_nnz[localPetscRowIdx]++;
+              matInteriorDiagNnz[localPetscRowIdx]++;
             else
-              o_nnz[localPetscRowIdx]++;
+              matInteriorOffdiagNnz[localPetscRowIdx]++;
           }
         }
       } else {

@@ -1154,9 +1202,9 @@ namespace AMDiS {
               r, localRowIdx, nRankRows, it->first);

           if (c < rankStartIndex || c >= rankStartIndex + nRankRows)
-            o_nnz[localRowIdx]++;
+            matInteriorOffdiagNnz[localRowIdx]++;
           else
-            d_nnz[localRowIdx]++;
+            matInteriorDiagNnz[localRowIdx]++;
         }
       }
     }

@@ -1169,7 +1217,254 @@ namespace AMDiS {
     if (nRankRows < 100)
       for (int i = 0; i < nRankRows; i++)
-        d_nnz[i] = std::min(d_nnz[i], nRankRows);
+        matInteriorDiagNnz[i] = std::min(matInteriorDiagNnz[i], nRankRows);
   }

+  void PetscSolverGlobalMatrix::createPetscNnzStructureWithCoarseSpace(
+      Matrix<DOFMatrix*> *mat,
+      ParallelDofMapping &dofMap,
+      int *&diagNnz, int *&offdiagNnz,
+      bool localMatrix)
+  {
+    FUNCNAME("PetscSolverGlobalMatrix::createPetscNnzStructure()");
+
+    TEST_EXIT_DBG(!diagNnz)("There is something wrong!\n");
+
+    vector<const FiniteElemSpace*> feSpaces = getFeSpaces(mat);
+    int nRankRows = dofMap.getRankDofs();
+    int rankStartIndex = dofMap.getStartDofs();
+
+    diagNnz = new int[nRankRows];
+    for (int i = 0; i < nRankRows; i++)
+      diagNnz[i] = 0;
+
+    if (localMatrix == false) {
+      offdiagNnz = new int[nRankRows];
+      for (int i = 0; i < nRankRows; i++)
+        offdiagNnz[i] = 0;
+    }
+
+    using mtl::tag::row; using mtl::tag::nz; using mtl::begin; using mtl::end;
+    namespace traits = mtl::traits;
+    typedef DOFMatrix::base_matrix_type Matrix;
+    typedef vector<pair<int, int> > MatrixNnzEntry;
+    typedef map<int, DofContainer> RankToDofContainer;
+
+    // Stores to each rank a list of nnz entries (i.e. pairs of row and column
+    // index) that this rank will send to. These nnz entries will be assembled
+    // on this rank, but because the row DOFs are not DOFs of this rank they
+    // will be sent to the owner of the row DOFs.
+    map<int, MatrixNnzEntry> sendMatrixEntry;
+
+    // Maps to each DOF that must be sent to another rank the rank number of
+    // the receiving rank.
+    map<pair<DegreeOfFreedom, int>, int> sendDofToRank;
+
+    // First, create for all ranks we send data to a MatrixNnzEntry
+    // object with 0 entries.
+    for (unsigned int i = 0; i < feSpaces.size(); i++) {
+      for (DofComm::Iterator it(meshDistributor->getDofComm().getRecvDofs(),
+                                feSpaces[i]);
+           !it.end(); it.nextRank()) {
+        sendMatrixEntry[it.getRank()].resize(0);
+
+        for (; !it.endDofIter(); it.nextDof())
+          sendDofToRank[make_pair(it.getDofIndex(), i)] = it.getRank();
+      }
+    }
+
+    // Create a list of ranks from which we receive data.
+    std::set<int> recvFromRank;
+    for (unsigned int i = 0; i < feSpaces.size(); i++)
+      for (DofComm::Iterator it(meshDistributor->getDofComm().getSendDofs(),
+                                feSpaces[i]);
+           !it.end(); it.nextRank())
+        recvFromRank.insert(it.getRank());
+
+    // === Traverse matrices to create nnz data. ===
+
+    int nComponents = mat->getNumRows();
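The counting that createPetscNnzStructure and the new createPetscNnzStructureWithCoarseSpace perform reduces, per locally owned row, to classifying each column index as inside the rank's own index range (diagonal block) or outside it (off-diagonal block). A self-contained sketch with made-up indices; rankStartIndex and nRankRows play the same roles as in the code above:

#include <iostream>
#include <utility>
#include <vector>

int main()
{
  // This rank owns global rows/columns [rankStartIndex, rankStartIndex + nRankRows).
  int rankStartIndex = 10;
  int nRankRows = 4;

  // Global (row, col) positions of nonzeros assembled on this rank.
  std::vector<std::pair<int, int> > entries;
  entries.push_back(std::make_pair(10, 10));  // diagonal block
  entries.push_back(std::make_pair(10, 3));   // off-diagonal block
  entries.push_back(std::make_pair(12, 13));  // diagonal block
  entries.push_back(std::make_pair(13, 42));  // off-diagonal block

  std::vector<int> diagNnz(nRankRows, 0), offdiagNnz(nRankRows, 0);
  for (size_t i = 0; i < entries.size(); i++) {
    int localRow = entries[i].first - rankStartIndex;
    int col = entries[i].second;
    if (col >= rankStartIndex && col < rankStartIndex + nRankRows)
      diagNnz[localRow]++;      // column owned by this rank
    else
      offdiagNnz[localRow]++;   // column owned by another rank
  }

  for (int r = 0; r < nRankRows; r++)
    std::cout << "row " << r << ": diag " << diagNnz[r]
              << ", offdiag " << offdiagNnz[r] << "\n";
  return 0;
}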