Backofen, Rainer / amdis / Commits

Commit ac80508b
authored Apr 20, 2012 by Thomas Witkowski

    Code revision of parallel DOF mapping finished.

parent 5a7ff74f
Showing 5 changed files with 80 additions and 145 deletions:

    AMDiS/src/parallel/ParallelDofMapping.cc        +16   -6
    AMDiS/src/parallel/ParallelDofMapping.h         +21   -3
    AMDiS/src/parallel/PetscSolverFeti.cc           +11   -1
    AMDiS/src/parallel/PetscSolverGlobalMatrix.cc   +32   -129
    AMDiS/src/parallel/PetscSolverGlobalMatrix.h    +0    -6
AMDiS/src/parallel/ParallelDofMapping.cc

@@ -252,7 +252,8 @@ namespace AMDiS {
     rStartDofs = computeStartDofs();

-    // And finally, compute the matrix indices.
-    computeMatIndex();
+    if (needMatIndex)
+      computeMatIndex(needMatIndexFromGlobal);
   }
@@ -271,10 +272,10 @@ namespace AMDiS {
   }

-  void ParallelDofMapping::computeMatIndex()
+  void ParallelDofMapping::computeMatIndex(bool globalIndex)
   {
     FUNCNAME("ParallelDofMapping::computeMatIndex()");

     dofToMatIndex.clear();

     // The offset is always added to the local matrix index. The offset for the
@@ -293,7 +294,10 @@ namespace AMDiS {
     for (DofMap::iterator it = dofMap.begin(); it != dofMap.end(); ++it) {
       if (data[feSpaces[i]].isRankDof(it->first)) {
         int globalMatIndex = it->second.local + offset;
-        dofToMatIndex.add(i, it->first, globalMatIndex);
+        if (globalIndex)
+          dofToMatIndex.add(i, it->second.global, globalMatIndex);
+        else
+          dofToMatIndex.add(i, it->first, globalMatIndex);
       }
     }
@@ -318,7 +322,10 @@ namespace AMDiS {
       for (; !it.endDofIter(); it.nextDof())
         if (dofMap.count(it.getDofIndex()))
-          sendGlobalDofs.push_back(dofToMatIndex.get(i, it.getDofIndex()));
+          if (globalIndex)
+            sendGlobalDofs.push_back(
+              dofToMatIndex.get(i, dofMap[it.getDofIndex()].global));
+          else
+            sendGlobalDofs.push_back(dofToMatIndex.get(i, it.getDofIndex()));

       stdMpi.send(it.getRank(), sendGlobalDofs);
     }
@@ -336,7 +343,10 @@ namespace AMDiS {
       for (; !it.endDofIter(); it.nextDof()) {
         if (dofMap.count(it.getDofIndex())) {
           DegreeOfFreedom d = stdMpi.getRecvData(it.getRank())[counter++];
-          dofToMatIndex.add(i, it.getDofIndex(), d);
+          if (globalIndex)
+            dofToMatIndex.add(i, dofMap[it.getDofIndex()].global, d);
+          else
+            dofToMatIndex.add(i, it.getDofIndex(), d);
         }
       }
     }
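Taken together, these hunks make computeMatIndex() opt-in and let it key dofToMatIndex either by local or by global DOF indices. A minimal usage sketch of the resulting API, assuming the call order implied by the first hunk (update() triggers computeMatIndex() only when the flag is set); feSpaces and the component index 0 are placeholders:

    // Sketch only, not code from this commit.
    ParallelDofMapping dofMap;
    dofMap.setComputeMatIndex(true, true);  // needMatIndex = true, key by global indices
    dofMap.update(feSpaces);                // internally runs computeMatIndex(true)
    // Query the matrix index of a DOF for component 0:
    int matIndex = dofMap.getMatIndex(0, globalDofIndex);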
AMDiS/src/parallel/ParallelDofMapping.h
@@ -260,6 +260,8 @@ namespace AMDiS {
       sendDofs(NULL),
       recvDofs(NULL),
       hasNonLocalDofs(false),
+      needMatIndex(false),
+      needMatIndexFromGlobal(false),
       feSpaces(0),
       nRankDofs(-1),
       nLocalDofs(-1),
@@ -374,6 +376,15 @@ namespace AMDiS {
       ERROR_EXIT("MUST BE IMPLEMENTED!\n");
     }

+    void setComputeMatIndex(bool b, bool global = false)
+    {
+      needMatIndex = b;
+      needMatIndexFromGlobal = global;
+    }
+
+    /// Compute local and global matrix indices.
+    void computeMatIndex(bool globalIndex);
+
   protected:
     /// Insert a new FE space DOF mapping for a given FE space.
     void addFeSpace(const FiniteElemSpace* feSpace);
@@ -390,9 +401,6 @@ namespace AMDiS {
     /// Compute \ref rStartDofs.
     int computeStartDofs();

-    /// Compute local and global matrix indices.
-    void computeMatIndex();
-
   private:
     /// MPI communicator object;
     MPI::Intracomm mpiComm;
@@ -405,6 +413,16 @@ namespace AMDiS {
     /// are also owned by this rank.
     bool hasNonLocalDofs;

+    /// If true, matrix indices for the stored DOFs are computed, see
+    /// \ref computeMatIndex.
+    bool needMatIndex;
+
+    /// If matrix indices are to be computed, this variable defines whether
+    /// the mapping from DOF indices to matrix row indices is built on local
+    /// or on global DOF indices. If true, the mapping is specified and used
+    /// on global DOF indices, otherwise on local ones.
+    bool needMatIndexFromGlobal;
+
     /// Maps from FE space pointers to DOF mappings.
     map<const FiniteElemSpace*, FeSpaceDofMap> data;
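The interaction of the two new flags mirrors the computeMatIndex() hunk in ParallelDofMapping.cc above; as an illustration (not additional code from the commit), the effective branch for a rank-owned DOF it of component i is:

    if (globalIndex)  // globalIndex == needMatIndexFromGlobal
      dofToMatIndex.add(i, it->second.global, globalMatIndex);  // key: global DOF index
    else
      dofToMatIndex.add(i, it->first, globalMatIndex);          // key: local DOF index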
AMDiS/src/parallel/PetscSolverFeti.cc
@@ -244,15 +244,25 @@ namespace AMDiS {
     primalDofMap.init(mpiComm, feSpaces, meshDistributor->getFeSpaces(),
                       true, true);
+    primalDofMap.setComputeMatIndex(true);

     dualDofMap.init(mpiComm, feSpaces, meshDistributor->getFeSpaces(),
                     false, false);
+    dualDofMap.setComputeMatIndex(true);

     lagrangeMap.init(mpiComm, feSpaces, meshDistributor->getFeSpaces(),
                      true, true);
+    lagrangeMap.setComputeMatIndex(true);

     localDofMap.init(mpiComm, feSpaces, meshDistributor->getFeSpaces(),
                      false, false);
+    if (fetiPreconditioner == FETI_DIRICHLET)
+      localDofMap.setComputeMatIndex(true);

-    if (fetiPreconditioner == FETI_DIRICHLET)
+    if (fetiPreconditioner == FETI_DIRICHLET) {
       interiorDofMap.init(mpiComm, feSpaces, meshDistributor->getFeSpaces(),
                           false, false);
+      interiorDofMap.setComputeMatIndex(true);
+    }
   }
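Every FETI-specific mapping now requests matrix indices explicitly. The ordering matters: as the first hunk of ParallelDofMapping.cc shows, update() is what triggers computeMatIndex(), so setComputeMatIndex() must be called before the mapping is updated. A sketch of that contract, assuming the same update(feSpaces) entry point used in PetscSolverGlobalMatrix.cc:

    // Sketch of the ordering contract, not code from this commit.
    primalDofMap.init(mpiComm, feSpaces, meshDistributor->getFeSpaces(), true, true);
    primalDofMap.setComputeMatIndex(true);  // must precede update()
    // ... once DOFs have been distributed:
    primalDofMap.update(feSpaces);          // now also computes the matrix indices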
AMDiS/src/parallel/PetscSolverGlobalMatrix.cc
@@ -26,37 +26,20 @@ namespace AMDiS {
     TEST_EXIT_DBG(mat)("No DOF matrix defined!\n");

     double wtime = MPI::Wtime();

-    vector<const FiniteElemSpace*> feSpaces = getFeSpaces(mat);
-    dofMap->update(feSpaces);
-
-    int nRankRows = dofMap->getRankDofs();
-    int nOverallRows = dofMap->getOverallDofs();
-
-    // === Create PETSc vector (solution and a temporary vector). ===
-
-    VecCreateMPI(mpiComm, nRankRows, nOverallRows, &petscSolVec);
-    VecCreateMPI(mpiComm, nRankRows, nOverallRows, &petscTmpVec);
-
-    int testddd = 1;
-    Parameters::get("block size", testddd);
-    if (testddd > 1) {
-      VecSetBlockSize(petscSolVec, testddd);
-      VecSetBlockSize(petscTmpVec, testddd);
-    }
-
     // === Check if mesh was changed and, in this case, recompute matrix ===
     // === nnz structure and matrix indices.                             ===

     int recvAllValues = 0;
     int sendValue =
       static_cast<int>(meshDistributor->getLastMeshChangeIndex() != lastMeshNnz);
     mpiComm.Allreduce(&sendValue, &recvAllValues, 1, MPI_INT, MPI_SUM);

     recvAllValues = 1;

     if (!d_nnz || recvAllValues != 0 || alwaysCreateNnzStructure) {
-      // Global DOF mapping must only be recomputed, if the NNZ structure has
-      // changed (thus, also the mesh has changed).
-      createGlobalDofMapping(feSpaces);
+      vector<const FiniteElemSpace*> feSpaces = getFeSpaces(mat);
+      dofMap->setComputeMatIndex(true, true);
+      dofMap->update(feSpaces);

       if (d_nnz) {
         delete[] d_nnz;
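The essential change of this hunk: the solver no longer builds its own global DOF-to-matrix-index map. The hand-rolled createGlobalDofMapping() (deleted at the end of this file) is replaced by the ParallelDofMapping machinery introduced above. Side by side:

    // Before this commit:
    createGlobalDofMapping(feSpaces);

    // After this commit:
    dofMap->setComputeMatIndex(true, true);  // matrix indices, keyed by global DOFs
    dofMap->update(feSpaces);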
@@ -70,6 +53,22 @@ namespace AMDiS {
     }

     // === Create PETSc vector (solution and a temporary vector). ===

+    int nRankRows = dofMap->getRankDofs();
+    int nOverallRows = dofMap->getOverallDofs();
+
+    VecCreateMPI(mpiComm, nRankRows, nOverallRows, &petscSolVec);
+    VecCreateMPI(mpiComm, nRankRows, nOverallRows, &petscTmpVec);
+
+    int testddd = 1;
+    Parameters::get("block size", testddd);
+    if (testddd > 1) {
+      VecSetBlockSize(petscSolVec, testddd);
+      VecSetBlockSize(petscTmpVec, testddd);
+    }
+
     // === Create PETSc matrix with the computed nnz data structure. ===

     MatCreateMPIAIJ(mpiComm, nRankRows, nRankRows,
@@ -134,7 +133,6 @@ namespace AMDiS {
     TEST_EXIT_DBG(vec)("No DOF vector defined!\n");
     TEST_EXIT_DBG(dofMap)("No parallel DOF map defined!\n");

     vector<const FiniteElemSpace*> feSpaces = getFeSpaces(vec);
-
     int nRankRows = dofMap->getRankDofs();
     int nOverallRows = dofMap->getOverallDofs();
@@ -279,7 +277,7 @@ namespace AMDiS {
     // === Row DOF index is not periodic. ===

     // Get PETSc's mat row index.
-    int rowIndex = dofToMatIndex.get(nRowMat, globalRowDof);
+    int rowIndex = dofMap->getMatIndex(nRowMat, globalRowDof);

     cols.clear();
     values.clear();
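From here on, every dofToMatIndex.get(...) call site is rewritten to dofMap->getMatIndex(...). The implementation of getMatIndex() is not part of this diff; since DofToMatIndex now lives inside ParallelDofMapping, it is presumably a thin accessor along these lines (an assumption, not the actual source):

    // Assumed shape of the new accessor (hypothetical, not in this diff):
    int ParallelDofMapping::getMatIndex(int ithComponent, DegreeOfFreedom d)
    {
      return dofToMatIndex.get(ithComponent, d);
    }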
@@ -292,7 +290,7 @@ namespace AMDiS {
        // Test if the current col dof is a periodic dof.
        bool periodicCol = perMap.isPeriodic(colFe, globalColDof);

        // Get PETSc's mat col index.
-       int colIndex = dofToMatIndex.get(nColMat, globalColDof);
+       int colIndex = dofMap->getMatIndex(nColMat, globalColDof);

        // Ignore all zero entries, except if it is a diagonal entry.
        if (value(*icursor) == 0.0 && rowIndex != colIndex)
@@ -341,7 +339,7 @@ namespace AMDiS {
          }

          for (unsigned int i = 0; i < newCols.size(); i++) {
-           cols.push_back(dofToMatIndex.get(nColMat, newCols[i]));
+           cols.push_back(dofMap->getMatIndex(nColMat, newCols[i]));
            values.push_back(scaledValue);
          }
        }
@@ -428,8 +426,8 @@ namespace AMDiS {
          // === Translate the matrix entries to PETSc's matrix.

          for (unsigned int i = 0; i < entry.size(); i++) {
-           int rowIdx = dofToMatIndex.get(nRowMat, entry[i].first);
-           int colIdx = dofToMatIndex.get(nColMat, entry[i].second);
+           int rowIdx = dofMap->getMatIndex(nRowMat, entry[i].first);
+           int colIdx = dofMap->getMatIndex(nColMat, entry[i].second);

            colsMap[rowIdx].push_back(colIdx);
            valsMap[rowIdx].push_back(scaledValue);
@@ -474,7 +472,7 @@ namespace AMDiS {
      DegreeOfFreedom globalRowDof =
        (*dofMap)[feSpace][dofIt.getDOFIndex()].global;

      // Get PETSc's mat index of the row DOF.
-     int index = dofToMatIndex.get(nRowVec, globalRowDof);
+     int index = dofMap->getMatIndex(nRowVec, globalRowDof);

      if (perMap.isPeriodic(feSpace, globalRowDof)) {
        std::set<int>& perAsc = perMap.getAssociations(feSpace, globalRowDof);
@@ -484,7 +482,7 @@ namespace AMDiS {
        for (std::set<int>::iterator perIt = perAsc.begin();
             perIt != perAsc.end(); ++perIt) {
          int mappedDof = perMap.map(feSpace, *perIt, globalRowDof);
-         int mappedIndex = dofToMatIndex.get(nRowVec, mappedDof);
+         int mappedIndex = dofMap->getMatIndex(nRowVec, mappedDof);

          VecSetValues(petscVec, 1, &mappedIndex, &value, ADD_VALUES);
        }
      } else {
@@ -577,7 +575,7 @@ namespace AMDiS {
        int globalRowDof = (*dofMap)[feSpaces[i]][*cursor].global;

        // The corresponding global matrix row index of the current row DOF.
-       int petscRowIdx = dofToMatIndex.get(i, globalRowDof);
+       int petscRowIdx = dofMap->getMatIndex(i, globalRowDof);

        if ((*dofMap)[feSpaces[i]].isRankDof(*cursor)) {
          // === The current row DOF is a rank DOF, so create the ===
@@ -601,7 +599,7 @@ namespace AMDiS {
          for (icursor_type icursor = begin<nz>(cursor),
                 icend = end<nz>(cursor); icursor != icend; ++icursor) {
            int globalColDof = (*dofMap)[feSpaces[j]][col(*icursor)].global;
-           int petscColIdx = dofToMatIndex.get(j, globalColDof);
+           int petscColIdx = dofMap->getMatIndex(j, globalColDof);

            if (value(*icursor) != 0.0 || petscRowIdx == petscColIdx) {
              // The row DOF is a rank DOF, if also the column is a rank DOF,
@@ -629,7 +627,7 @@ namespace AMDiS {
            if (value(*icursor) != 0.0) {
              int globalColDof = (*dofMap)[feSpaces[j]][col(*icursor)].global;
-             int petscColIdx = dofToMatIndex.get(j, globalColDof);
+             int petscColIdx = dofMap->getMatIndex(j, globalColDof);

              sendMatrixEntry[sendToRank].
                push_back(make_pair(petscRowIdx, petscColIdx));
@@ -686,99 +684,4 @@ namespace AMDiS {
       d_nnz[i] = std::min(d_nnz[i], nRankRows);
   }

-
-  void PetscSolverGlobalMatrix::createGlobalDofMapping(vector<const FiniteElemSpace*> &feSpaces)
-  {
-    FUNCNAME("PetscSolverGlobalMatrix::createGlobalDofMapping()");
-
-    int offset = dofMap->getStartDofs();
-    Mesh *mesh = feSpaces[0]->getMesh();
-    dofToMatIndex.clear();
-
-    for (unsigned int i = 0; i < feSpaces.size(); i++) {
-
-      // === Create indices for all DOFs in rank's domain. ===
-      std::set<const DegreeOfFreedom*> rankDofSet;
-      mesh->getAllDofs(feSpaces[i], rankDofSet);
-      for (std::set<const DegreeOfFreedom*>::iterator it = rankDofSet.begin();
-           it != rankDofSet.end(); ++it)
-        if ((*dofMap)[feSpaces[i]].isRankDof(**it)) {
-          int globalIndex = (*dofMap)[feSpaces[i]][**it].global;
-          int globalMatIndex =
-            globalIndex - (*dofMap)[feSpaces[i]].rStartDofs + offset;
-          dofToMatIndex.add(i, globalIndex, globalMatIndex);
-        }
-
-      // === Communicate interior boundary DOFs between domains. ===
-
-      StdMpi<vector<int> > stdMpi(mpiComm);
-
-      for (DofComm::Iterator it(meshDistributor->getSendDofs(), feSpaces[i]);
-           !it.end(); it.nextRank()) {
-        vector<DegreeOfFreedom> sendGlobalDofs;
-        for (; !it.endDofIter(); it.nextDof()) {
-          int globalIndex = (*dofMap)[feSpaces[i]][it.getDofIndex()].global;
-          int globalMatIndex = dofToMatIndex.get(i, globalIndex);
-          sendGlobalDofs.push_back(globalMatIndex);
-        }
-        stdMpi.send(it.getRank(), sendGlobalDofs);
-      }
-
-      for (DofComm::Iterator it(meshDistributor->getRecvDofs(), feSpaces[i]);
-           !it.end(); it.nextRank())
-        stdMpi.recv(it.getRank());
-
-      stdMpi.startCommunication();
-
-      for (DofComm::Iterator it(meshDistributor->getRecvDofs(), feSpaces[i]);
-           !it.end(); it.nextRank())
-        for (; !it.endDofIter(); it.nextDof()) {
-          int globalIndex = (*dofMap)[feSpaces[i]][it.getDofIndex()].global;
-          int globalMatIndex =
-            stdMpi.getRecvData(it.getRank())[it.getDofCounter()];
-          dofToMatIndex.add(i, globalIndex, globalMatIndex);
-        }
-
-      // === Communicate DOFs on periodic boundaries. ===
-
-      stdMpi.clear();
-
-      for (DofComm::Iterator it(meshDistributor->getPeriodicDofs(), feSpaces[i]);
-           !it.end(); it.nextRank()) {
-        vector<DegreeOfFreedom> sendGlobalDofs;
-        for (; !it.endDofIter(); it.nextDof()) {
-          int ind0 = (*dofMap)[feSpaces[i]][it.getDofIndex()].global;
-          int ind1 = dofToMatIndex.get(i, ind0);
-          sendGlobalDofs.push_back(ind0);
-          sendGlobalDofs.push_back(ind1);
-        }
-        stdMpi.send(it.getRank(), sendGlobalDofs);
-        stdMpi.recv(it.getRank());
-      }
-
-      stdMpi.startCommunication();
-
-      for (DofComm::Iterator it(meshDistributor->getPeriodicDofs(), feSpaces[i]);
-           !it.end(); it.nextRank())
-        for (; !it.endDofIter(); it.nextDof()) {
-          int ind0 = stdMpi.getRecvData(it.getRank())[it.getDofCounter() * 2];
-          int ind1 = stdMpi.getRecvData(it.getRank())[it.getDofCounter() * 2 + 1];
-          dofToMatIndex.add(i, ind0, ind1);
-        }
-
-      // === Update offset. ===
-      offset += (*dofMap)[feSpaces[i]].nRankDofs;
-    }
-  }
 }
AMDiS/src/parallel/PetscSolverGlobalMatrix.h
@@ -68,8 +68,6 @@ namespace AMDiS {
     void setDofVector(Vec& petscVec, DOFVector<double>* vec,
                       int nRowVec, bool rankOnly = false);

-    void createGlobalDofMapping(vector<const FiniteElemSpace*> &feSpaces);
-
   protected:
     /// Arrays defining the non-zero pattern of PETSc's matrix.
     int *d_nnz, *o_nnz;
@@ -91,10 +89,6 @@ namespace AMDiS {
     /// operators using DOFVectors from old timestep containing many zeros due to
     /// some phase fields.
     bool alwaysCreateNnzStructure;
-
-    /// Mapping from global DOF indices to global matrix indices under
-    /// consideration of possibly multiple components.
-    DofToMatIndex dofToMatIndex;
   };