Commit 06c5b791
authored Oct 24, 2012 by Thomas Witkowski
Fixed a lot of small problems.
parent aa82de2b
Showing 5 changed files with 100 additions and 30 deletions
AMDiS/src/parallel/MeshDistributor.cc          +39  -22
AMDiS/src/parallel/MeshDistributor.h            +5   -0
AMDiS/src/parallel/ParallelDofMapping.cc       +35   -3
AMDiS/src/parallel/ParallelDofMapping.h        +18   -1
AMDiS/src/parallel/PetscSolverGlobalMatrix.cc   +3   -4
AMDiS/src/parallel/MeshDistributor.cc
...
...
@@ -88,7 +88,8 @@ namespace AMDiS {
      lastMeshChangeIndex(0),
      createBoundaryDofFlag(0),
      boundaryDofInfo(1),
-     meshAdaptivity(true)
+     meshAdaptivity(true),
+     hasPeriodicBoundary(false)
  {
    FUNCNAME("MeshDistributor::ParalleDomainBase()");
...
...
@@ -319,9 +320,6 @@ namespace AMDiS {
MSG
(
"Debug mode tests finished!
\n
"
);
#endif
// Create periodic DOF mapping, if there are periodic boundaries.
createPeriodicMap
();
// Remove periodic boundary conditions in sequential problem definition.
removePeriodicBoundaryConditions
();
...
...
@@ -339,9 +337,6 @@ namespace AMDiS {
updateLocalGlobalNumbering
();
// === Update periodic mapping, if there are periodic boundaries. ===
createPeriodicMap
();
#if (DEBUG != 0)
ParallelDebug
::
testPeriodicBoundary
(
*
this
);
#endif
...
...
@@ -990,18 +985,13 @@ namespace AMDiS {
MPI
::
COMM_WORLD
.
Barrier
();
MSG
(
"Parallel mesh adaption needed %.5f seconds
\n
"
,
MPI
::
Wtime
()
-
first
);
#if (DEBUG != 0)
debug
::
writeMesh
(
feSpaces
[
0
],
-
1
,
debugOutputDir
+
"mesh"
);
#endif
// Because the mesh has been changed, update the DOF numbering and mappings.
updateLocalGlobalNumbering
();
// Update periodic mapping, if there are periodic boundaries.
createPeriodicMap
();
// === Update the DOF numbering and mappings. ===
updateLocalGlobalNumbering
();
#if (DEBUG != 0)
debug
::
writeMesh
(
feSpaces
[
0
],
-
1
,
debugOutputDir
+
"mesh"
);
ParallelDebug
::
testPeriodicBoundary
(
*
this
);
#endif
...
...
@@ -1518,6 +1508,7 @@ namespace AMDiS {
createInteriorBoundary
(
false
);
updateLocalGlobalNumbering
();
...
...
@@ -1538,9 +1529,6 @@ namespace AMDiS {
}
}
// === Update periodic mapping, if there are periodic boundaries. ===
createPeriodicMap
();
#if (DEBUG != 0)
MSG
(
"AMDiS runs in debug mode, so make some test ...
\n
"
);
...
...
@@ -1584,6 +1572,13 @@ namespace AMDiS {
intBoundarySd
.
create
(
levelData
,
1
,
elObjDb
);
ParallelDebug
::
printBoundaryInfo
(
intBoundarySd
,
0
,
true
);
}
if
(
firstCall
)
{
int
tmpSend
=
static_cast
<
int
>
(
intBoundary
.
hasPeriodic
());
int
tmpRecv
=
0
;
mpiComm
.
Allreduce
(
&
tmpSend
,
&
tmpRecv
,
1
,
MPI_INT
,
MPI_MAX
);
hasPeriodicBoundary
=
static_cast
<
bool
>
(
tmpRecv
);
}
}
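The new block above turns a rank-local property into a global one: each rank contributes 0 or 1, and the Allreduce with MPI_MAX acts as a logical OR, so hasPeriodicBoundary becomes true on every rank as soon as any rank's interior boundary is periodic. Below is a minimal standalone sketch of that pattern, not AMDiS code: it uses the plain MPI C API instead of the C++ bindings, and the rank-local predicate is a made-up stand-in for intBoundary.hasPeriodic().

// Minimal sketch: reduce a per-rank boolean to a global flag via MPI_MAX.
#include <mpi.h>
#include <cstdio>

int main(int argc, char** argv)
{
  MPI_Init(&argc, &argv);

  int rank = 0;
  MPI_Comm_rank(MPI_COMM_WORLD, &rank);

  // Stand-in for intBoundary.hasPeriodic(): pretend only rank 0 sees a
  // periodic boundary in its local subdomain.
  int localHasPeriodic = (rank == 0) ? 1 : 0;

  // MPI_MAX over 0/1 values behaves like a logical OR across all ranks.
  int globalHasPeriodic = 0;
  MPI_Allreduce(&localHasPeriodic, &globalHasPeriodic, 1, MPI_INT, MPI_MAX,
                MPI_COMM_WORLD);

  std::printf("rank %d: hasPeriodicBoundary = %d\n", rank, globalHasPeriodic);

  MPI_Finalize();
  return 0;
}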
...
...
@@ -1680,6 +1675,8 @@ namespace AMDiS {
debug
::
createSortedDofs
(
mesh
,
elMap
);
#endif
// === Update DOF communicator objects. ===
createBoundaryDofs
();
...
...
@@ -1691,14 +1688,30 @@ namespace AMDiS {
vector
<
const
FiniteElemSpace
*>&
dofMapSpaces
=
dofMaps
[
i
]
->
getFeSpaces
();
dofMaps
[
i
]
->
clear
();
if
(
hasPeriodicBoundary
)
dofMaps
[
i
]
->
setComputeMatIndex
(
true
);
for
(
int
j
=
0
;
j
<
static_cast
<
int
>
(
dofMapSpaces
.
size
());
j
++
)
updateLocalGlobalNumbering
(
*
(
dofMaps
[
i
]),
dofMapSpaces
[
j
]);
dofMaps
[
i
]
->
update
();
#if (DEBUG != 0)
dofMaps
[
i
]
->
printInfo
();
#endif
}
// === Create periodic DOF maps, if there are periodic boundaries. ===
if
(
hasPeriodicBoundary
)
{
createPeriodicMap
();
for
(
int
i
=
0
;
i
<
static_cast
<
int
>
(
dofMaps
.
size
());
i
++
)
dofMaps
[
i
]
->
updateMatIndex
();
}
// === Update DOF admins due to new number of DOFs. ===
lastMeshChangeIndex
=
mesh
->
getChangeIndex
();
...
...
@@ -1716,10 +1729,11 @@ namespace AMDiS {
vector
<
const
FiniteElemSpace
*>&
dofMapSpaces
=
dofMaps
[
i
]
->
getFeSpaces
();
for
(
int
j
=
0
;
j
<
static_cast
<
int
>
(
dofMapSpaces
.
size
());
j
++
)
{
MSG
(
"| FE space = %d (pointer adr %p):
\n
"
,
j
,
feSpaces
[
j
]);
MSG
(
"| nRankDofs = %d
\n
"
,
(
*
(
dofMaps
[
i
]))[
feSpaces
[
j
]].
nRankDofs
);
MSG
(
"| nOverallDofs = %d
\n
"
,
(
*
(
dofMaps
[
i
]))[
feSpaces
[
j
]].
nOverallDofs
);
MSG
(
"| rStartDofs = %d
\n
"
,
(
*
(
dofMaps
[
i
]))[
feSpaces
[
j
]].
rStartDofs
);
const
FiniteElemSpace
*
feSpace
=
dofMapSpaces
[
j
];
MSG
(
"| FE space = %d (pointer adr %p):
\n
"
,
j
,
feSpace
);
MSG
(
"| nRankDofs = %d
\n
"
,
(
*
(
dofMaps
[
i
]))[
feSpace
].
nRankDofs
);
MSG
(
"| nOverallDofs = %d
\n
"
,
(
*
(
dofMaps
[
i
]))[
feSpace
].
nOverallDofs
);
MSG
(
"| rStartDofs = %d
\n
"
,
(
*
(
dofMaps
[
i
]))[
feSpace
].
rStartDofs
);
}
}
...
...
@@ -1828,6 +1842,7 @@ namespace AMDiS {
ComponentDofMap
&
dofMap
=
(
*
(
dofMaps
[
0
]))[
feSpace
];
StdMpi
<
vector
<
int
>
>
stdMpi
(
mpiComm
,
false
);
// === Each rank traverse its periodic boundaries and sends the DOF ===
// === indices to the rank "on the other side" of the periodic boundary. ===
...
...
@@ -2035,6 +2050,7 @@ namespace AMDiS {
SerUtil
::
serialize
(
out
,
nMeshChangesAfterLastRepartitioning
);
SerUtil
::
serialize
(
out
,
repartitioningCounter
);
SerUtil
::
serialize
(
out
,
hasPeriodicBoundary
);
}
...
...
@@ -2085,6 +2101,7 @@ namespace AMDiS {
SerUtil
::
deserialize
(
in
,
nMeshChangesAfterLastRepartitioning
);
SerUtil
::
deserialize
(
in
,
repartitioningCounter
);
SerUtil
::
deserialize
(
in
,
hasPeriodicBoundary
);
deserialized
=
true
;
}
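Both serialization hunks follow the same rule: the newly added member hasPeriodicBoundary must be written and read at the same position in the stream, otherwise every field behind it is reconstructed from the wrong bytes. Here is a minimal standalone sketch of that symmetry; it is not AMDiS's SerUtil, and the struct, the raw-byte streaming, and the values are made up for illustration.

// Minimal sketch: serialize() and deserialize() must stay in lock step.
#include <sstream>
#include <cassert>

struct State {
  int repartitioningCounter;
  bool hasPeriodicBoundary;   // newly added member: written and read last

  void serialize(std::ostream& out) const {
    out.write(reinterpret_cast<const char*>(&repartitioningCounter),
              sizeof(repartitioningCounter));
    out.write(reinterpret_cast<const char*>(&hasPeriodicBoundary),
              sizeof(hasPeriodicBoundary));
  }

  void deserialize(std::istream& in) {
    in.read(reinterpret_cast<char*>(&repartitioningCounter),
            sizeof(repartitioningCounter));
    in.read(reinterpret_cast<char*>(&hasPeriodicBoundary),
            sizeof(hasPeriodicBoundary));
  }
};

int main()
{
  State a = { 7, true };
  std::stringstream buf;
  a.serialize(buf);

  State b = { 0, false };
  b.deserialize(buf);
  assert(b.repartitioningCounter == 7 && b.hasPeriodicBoundary);
  return 0;
}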
...
...
AMDiS/src/parallel/MeshDistributor.h
...
...
@@ -574,6 +574,11 @@ namespace AMDiS {
/// is set to true, and thus no special assumption are made.
bool
meshAdaptivity
;
/// Specifies whether the global domain has periodic boundaries. Thus, this
/// variable is not related to rank's subdomain but to the global problem
/// and therefore the value if the same on all ranks.
bool
hasPeriodicBoundary
;
/// Set of all parallel DOF mapping object that are registered by parallel
/// solver objects and must be updated automatically after mesh change.
vector
<
ParallelDofMapping
*>
dofMaps
;
...
...
AMDiS/src/parallel/ParallelDofMapping.cc
...
...
@@ -358,6 +358,14 @@ namespace AMDiS {
}
void
ParallelDofMapping
::
updateMatIndex
()
{
FUNCNAME
(
"ParallelDofMapping::updateMatIndex()"
);
computeMatIndex
(
needMatIndexFromGlobal
);
}
void
ParallelDofMapping
::
computeMatIndex
(
bool
globalIndex
)
{
FUNCNAME
(
"ParallelDofMapping::computeMatIndex()"
);
...
...
@@ -488,7 +496,7 @@ namespace AMDiS {
int
counter
=
0
;
for
(;
!
it
.
endDofIter
();
it
.
nextDof
())
for
(;
!
it
.
endDofIter
();
it
.
nextDof
())
{
if
(
dofMap
.
count
(
it
.
getDofIndex
()))
{
if
(
globalIndex
)
{
TEST_EXIT_DBG
(
counter
+
2
<=
stdMpi
.
getRecvData
(
it
.
getRank
()).
size
())
...
...
@@ -504,6 +512,7 @@ namespace AMDiS {
stdMpi
.
getRecvData
(
it
.
getRank
())[
counter
++
]);
}
}
}
}
}
}
...
...
@@ -543,10 +552,33 @@ namespace AMDiS {
{
FUNCNAME
(
"ParallelDofMapping::printInfo()"
);
MSG
(
"=== Parallel DOF mapping debug information ===
\n
"
);
if
(
mode
==
COMPONENT_WISE
)
{
MSG
(
"
M
apping is defined by component numbers!
\n
"
);
MSG
(
"
m
apping is defined by component numbers!
\n
"
);
}
else
{
MSG
(
"Mapping is defined by FE spaces!
\n
"
);
MSG
(
" mapping is defined by FE spaces!
\n
"
);
}
MSG
(
" matrix index is based on global DOF indices: %d
\n
"
,
needMatIndexFromGlobal
);
MSG
(
" nRankDofs = %d nLocalDofs = %d nOverallDofs = %d rStartDofs = %d
\n
"
,
nRankDofs
,
nLocalDofs
,
nOverallDofs
,
rStartDofs
);
int
nComponents
=
componentSpaces
.
size
();
int
nFeSpaces
=
feSpaces
.
size
();
MSG
(
" number of components: %d number of different FE spaces: %d
\n
"
,
nComponents
,
nFeSpaces
);
for
(
int
i
=
0
;
i
<
nComponents
;
i
++
)
{
MSG
(
" component %d:
\n
"
,
i
);
MSG
(
" dof-to-mat-index has %d mappings
\n
"
,
dofToMatIndex
.
getSize
(
i
));
if
(
dofToMatIndex
.
getSize
(
i
)
>
0
)
{
MSG
(
" dof-to-mat-index starts with (%d -> %d) and ends with (%d -> %d)
\n
"
,
dofToMatIndex
.
getData
(
i
).
begin
()
->
first
,
dofToMatIndex
.
getData
(
i
).
begin
()
->
second
,
(
dofToMatIndex
.
getData
(
i
).
end
()
-
1
)
->
first
,
(
dofToMatIndex
.
getData
(
i
).
end
()
-
1
)
->
second
);
}
}
}
}
AMDiS/src/parallel/ParallelDofMapping.h
...
...
@@ -50,6 +50,8 @@ namespace AMDiS {
class
DofToMatIndex
{
public:
typedef
boost
::
container
::
flat_map
<
DegreeOfFreedom
,
int
>
MapType
;
DofToMatIndex
()
{}
/// Reset the data structure.
...
...
@@ -86,6 +88,18 @@ namespace AMDiS {
return
data
[
component
][
dof
];
}
/// Returns the number of DOF mappings in one component
inline
int
getSize
(
int
component
)
{
return
data
[
component
].
size
();
}
/// Returns the whole mapping for one component
inline
MapType
&
getData
(
int
component
)
{
return
data
[
component
];
}
/// Returns for a given matrix index the component and (local or global) DOF
/// index. As the data structure is not made for this kind of reverse
/// search, this is very slow and should be only used for debugging.
...
...
@@ -94,7 +108,7 @@ namespace AMDiS {
private:
/// The mapping data. For each system component there is a specific map that
/// maps global DOF indices to global matrix indices.
map
<
int
,
boost
::
container
::
flat_map
<
DegreeOfFreedom
,
int
>
>
data
;
map
<
int
,
MapType
>
data
;
};
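A short standalone sketch of the DofToMatIndex idea behind the new MapType typedef and the getSize()/getData() accessors; this is not AMDiS code, and the component map and entries are made up. boost::container::flat_map keeps its entries sorted in contiguous storage with random-access iterators, which is why printInfo() above can read the first and last mapping via begin() and end() - 1.

// Minimal sketch: one flat_map per component from DOF index to matrix index.
#include <boost/container/flat_map.hpp>
#include <map>
#include <cstdio>

typedef int DegreeOfFreedom;  // stand-in for the AMDiS typedef
typedef boost::container::flat_map<DegreeOfFreedom, int> MapType;

int main()
{
  std::map<int, MapType> data;  // component -> (DOF index -> matrix index)

  // Hypothetical entries for component 0; flat_map stores them sorted by key.
  data[0][12] = 100;
  data[0][3]  = 97;
  data[0][27] = 105;

  const MapType& m = data[0];
  std::printf("component 0: %d mappings, first (%d -> %d), last (%d -> %d)\n",
              static_cast<int>(m.size()),
              m.begin()->first, m.begin()->second,
              (m.end() - 1)->first, (m.end() - 1)->second);
  return 0;
}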
...
...
@@ -754,6 +768,9 @@ namespace AMDiS {
/// Update the mapping.
void
update
();
/// Updates only the DOF to matrix index mapping
void
updateMatIndex
();
/// Returns the global matrix index of a given DOF for a given
/// component number.
inline
int
getMatIndex
(
int
ithComponent
,
DegreeOfFreedom
d
)
...
...
AMDiS/src/parallel/PetscSolverGlobalMatrix.cc
...
...
@@ -526,12 +526,11 @@ namespace AMDiS {
      }
    }

-   MatZeroRows(mpiMat, dirichletRows.size(), &(dirichletRows[0]), 1.0,
+   MatZeroRows(mpiMat, dirichletRows.size(), &(dirichletRows[0]), 0.0,
                PETSC_NULL, PETSC_NULL);

-   for (int i = 0; i < static_cast<int>(dirichletRows.size()); i++) {
-     MatSetValue(mpiMat, dirichletRows[i], dirichletCols[i], dirichletValues[i],
-                 INSERT_VALUES);
-   }
+   for (int i = 0; i < static_cast<int>(dirichletRows.size()); i++)
+     MatSetValue(mpiMat, dirichletRows[i], dirichletCols[i], dirichletValues[i],
+                 INSERT_VALUES);

    MatAssemblyBegin(mpiMat, MAT_FINAL_ASSEMBLY);
    MatAssemblyEnd(mpiMat, MAT_FINAL_ASSEMBLY);
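The change above passes 0.0 instead of 1.0 as the diagonal value to MatZeroRows(), so the Dirichlet rows are fully cleared and the subsequent MatSetValue() loop alone decides which boundary entries end up in the matrix. Below is a minimal standalone sketch of that pattern on a small sequential matrix; it is not AMDiS code, and the matrix size, rows, columns, and values are made up. Error codes are ignored for brevity, and PETSC_NULL is used as in the code above (newer PETSc releases spell it PETSC_NULLPTR).

// Minimal sketch: zero Dirichlet rows with a 0.0 diagonal, then insert the
// desired boundary entries explicitly and reassemble.
#include <petscmat.h>
#include <vector>

int main(int argc, char** argv)
{
  PetscInitialize(&argc, &argv, PETSC_NULL, PETSC_NULL);

  // Small sequential 4x4 AIJ matrix as a stand-in for mpiMat, filled with 1.0.
  Mat mat;
  MatCreateSeqAIJ(PETSC_COMM_SELF, 4, 4, 4, PETSC_NULL, &mat);
  for (int i = 0; i < 4; i++)
    for (int j = 0; j < 4; j++)
      MatSetValue(mat, i, j, 1.0, INSERT_VALUES);
  MatAssemblyBegin(mat, MAT_FINAL_ASSEMBLY);
  MatAssemblyEnd(mat, MAT_FINAL_ASSEMBLY);

  // Hypothetical Dirichlet data: row 1 should keep only the entry (1,1) = 1.0.
  std::vector<PetscInt> dirichletRows(1, 1);
  std::vector<PetscInt> dirichletCols(1, 1);
  std::vector<PetscScalar> dirichletValues(1, 1.0);

  // Clear the rows without writing anything on the diagonal (diag = 0.0) ...
  MatZeroRows(mat, static_cast<PetscInt>(dirichletRows.size()),
              &(dirichletRows[0]), 0.0, PETSC_NULL, PETSC_NULL);

  // ... and insert the wanted boundary entries explicitly.
  for (size_t i = 0; i < dirichletRows.size(); i++)
    MatSetValue(mat, dirichletRows[i], dirichletCols[i], dirichletValues[i],
                INSERT_VALUES);

  MatAssemblyBegin(mat, MAT_FINAL_ASSEMBLY);
  MatAssemblyEnd(mat, MAT_FINAL_ASSEMBLY);

  MatView(mat, PETSC_VIEWER_STDOUT_SELF);
  MatDestroy(&mat);
  PetscFinalize();
  return 0;
}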
...
...