Backofen, Rainer / amdis · Commits

Commit 23333bca, authored Jul 08, 2010 by Thomas Witkowski
parent 82ab1918

Periodic boundaries should now work in 2D in parallel computations.

7 changed files

AMDiS/src/FirstOrderAssembler.cc
@@ -144,6 +144,10 @@ namespace AMDiS {

  void Stand10::calculateElementVector(const ElInfo *elInfo, ElementVector& vec)
  {
    FUNCNAME("Stand10::calculateElementVector()");

+   MSG("calc vec here!\n");
+
    DimVec<double> grdPsi(dim, DEFAULT_VALUE, 0.0);
    int nPoints = quadrature->getNumPoints();
    int myRank = omp_get_thread_num();

@@ -153,7 +157,7 @@ namespace AMDiS {

    for (int iq = 0; iq < nPoints; iq++)
      Lb[iq].set(0.0);
    for (unsigned int i = 0; i < terms[myRank].size(); i++)
-     (static_cast<FirstOrderTerm*>((terms[myRank][i])))->getLb(elInfo, nPoints, Lb);
+     (static_cast<FirstOrderTerm*>((terms[myRank][i])))->getLb(elInfo, nPoints, Lb);

    for (int iq = 0; iq < nPoints; iq++) {
      Lb[iq] *= elInfo->getDet();

@@ -218,6 +222,10 @@ namespace AMDiS {

  void Quad10::calculateElementVector(const ElInfo *elInfo, ElementVector& vec)
  {
    FUNCNAME("Quad10::calculateElementVector()");

+   MSG("calc vec here!\n");
+
    VectorOfFixVecs<DimVec<double> > *grdPsi;

    if (firstCall) {

@@ -441,6 +449,10 @@ namespace AMDiS {

  void Pre10::calculateElementVector(const ElInfo *elInfo, ElementVector& vec)
  {
    FUNCNAME("Pre10::calculateElementVector()");

+   MSG("calc vec here!\n");
+
    const int *k;
    const double *values;

AMDiS/src/parallel/GlobalMatrixSolver.cc
@@ -91,9 +91,9 @@ namespace AMDiS {

      values.clear();

      // Global index of the current row dof.
-     DegreeOfFreedom globalRowDof = meshDistributor->mapLocalToGlobal(*cursor);
+     int globalRowDof = meshDistributor->mapLocalToGlobal(*cursor);
      // Test if the current row dof is a periodic dof.
-     bool periodicRow = (meshDistributor->getPeriodicDofMap().count(globalRowDof) > 0);
+     bool periodicRow = meshDistributor->isPeriodicDof(globalRowDof);

      // === Traverse all non zero entries of the row and produce vector cols ===

@@ -112,24 +112,23 @@ namespace AMDiS {

        // If the current row is not periodic, but the current dof index is periodic,
        // we have to duplicate the value to the other corresponding periodic columns.
-       if (periodicRow == false && meshDistributor->getPeriodicDofMap().count(globalColDof) > 0) {
+       if (!periodicRow && meshDistributor->isPeriodicDof(globalColDof)) {
          // The value is assigned to n matrix entries, therefore every entry
          // gets only 1/n of the original value.
-         double scalFactor = 1.0 / pow(2.0, meshDistributor->getPeriodicDof(globalColDof).size());
+         std::set<int>& perAsc = meshDistributor->getPerDofAssociations(globalColDof);
+         double scalFactor = 1.0 / (perAsc.size() + 1.0);

          // Insert original entry.
          cols.push_back(colIndex);
          values.push_back(value(*icursor) * scalFactor);

          // Insert the periodic entries.
-         for (std::set<DegreeOfFreedom>::iterator it = meshDistributor->getPeriodicDof(globalColDof).begin();
-              it != meshDistributor->getPeriodicDof(globalColDof).end(); ++it) {
-           cols.push_back(*it * dispMult + dispAddCol);
+         for (std::set<int>::iterator perIt = perAsc.begin(); perIt != perAsc.end(); ++perIt) {
+           int mappedDof = meshDistributor->getPeriodicMapping(*perIt, globalColDof);
+           cols.push_back(mappedDof * dispMult + dispAddCol);
            values.push_back(value(*icursor) * scalFactor);
          }
        } else {
          // The col dof index is not periodic, simply add the entry.
          cols.push_back(colIndex);

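The scaling rule above is easy to check with numbers: a DOF with n periodic associations contributes to n + 1 columns (the original one plus one mapped column per association), so each entry carries the factor 1/(n + 1) and the total value is preserved. Below is a minimal, self-contained sketch of that arithmetic with toy values (not AMDiS code); note that the old factor 1/2^n coincides with the new one only for n = 1.

    #include <iostream>
    #include <set>
    #include <vector>

    int main()
    {
      // Periodic associations of one DOF, e.g. x-periodic, y-periodic and
      // the diagonal corner association (the type values are made up here).
      std::set<int> perAsc;
      perAsc.insert(1);
      perAsc.insert(2);
      perAsc.insert(3);

      double value = 1.0;   // original matrix entry
      double scalFactor = 1.0 / (perAsc.size() + 1.0);

      // One entry for the original column plus one per mapped column.
      std::vector<double> entries(perAsc.size() + 1, value * scalFactor);

      double sum = 0.0;
      for (unsigned int i = 0; i < entries.size(); i++)
        sum += entries[i];

      // Prints "each entry: 0.25, sum: 1" -- the total value is preserved.
      std::cout << "each entry: " << entries[0] << ", sum: " << sum << std::endl;
      return 0;
    }
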
@@ -147,9 +146,10 @@ namespace AMDiS {

      if (periodicRow) {
        // The row dof is periodic, so send dof to all the corresponding rows.
-       double scalFactor = 1.0 / pow(2.0, meshDistributor->getPeriodicDof(globalRowDof).size());
+       std::set<int>& perAsc = meshDistributor->getPerDofAssociations(globalRowDof);
+       double scalFactor = 1.0 / (perAsc.size() + 1.0);

        for (unsigned int i = 0; i < values.size(); i++)
          values[i] *= scalFactor;

@@ -157,29 +157,23 @@ namespace AMDiS {

      MatSetValues(petscMatrix, 1, &rowIndex, cols.size(),
                   &(cols[0]), &(values[0]), ADD_VALUES);

-     std::vector<int> perCols;
-     perCols.reserve(300);
-     std::vector<double> perValues;
-     perValues.reserve(300);
-     for (unsigned int i = 0; i < cols.size(); i++) {
-       int tmp = (cols[i] - dispAddCol) / dispMult;
-       if (meshDistributor->getPeriodicDofMap().count(tmp) == 0) {
-         perCols.push_back(cols[i]);
-         perValues.push_back(values[i]);
-       } else {
-         for (std::set<DegreeOfFreedom>::iterator it = meshDistributor->getPeriodicDof(tmp).begin();
-              it != meshDistributor->getPeriodicDof(tmp).end(); ++it) {
-           perValues.push_back(values[i]);
-           perCols.push_back((*it * dispMult) + dispAddCol);
-         }
-       }
-     }
+     for (std::set<int>::iterator perIt = perAsc.begin(); perIt != perAsc.end(); ++perIt) {
+       std::vector<int> perCols;
+       perCols.reserve(300);
+       std::vector<double> perValues;
+       perValues.reserve(300);
+       for (unsigned int i = 0; i < cols.size(); i++) {
+         int tmp = (cols[i] - dispAddCol) / dispMult;
+         if (meshDistributor->isPeriodicDof(tmp, *perIt))
+           perCols.push_back((meshDistributor->getPeriodicMapping(*perIt, tmp) * dispMult) + dispAddCol);
+         else
+           perCols.push_back(cols[i]);
+         perValues.push_back(values[i]);
+       }

        // Send the row to all periodic row indices.
-       for (std::set<DegreeOfFreedom>::iterator it = meshDistributor->getPeriodicDof(globalRowDof).begin();
-            it != meshDistributor->getPeriodicDof(globalRowDof).end(); ++it) {
-         int perRowIndex = *it * dispMult + dispAddRow;
+       int perRowIndex = (meshDistributor->getPeriodicMapping(*perIt, globalRowDof) * dispMult) + dispAddRow;

        MatSetValues(petscMatrix, 1, &perRowIndex, perCols.size(),
                     &(perCols[0]), &(perValues[0]), ADD_VALUES);
      }

@@ -202,23 +196,21 @@ namespace AMDiS {

      DOFVector<double>::Iterator dofIt(vec, USED_DOFS);
      for (dofIt.reset(); !dofIt.end(); ++dofIt) {
        // Calculate global row index of the dof.
-       DegreeOfFreedom globalRow = meshDistributor->mapLocalToGlobal(dofIt.getDOFIndex());
+       DegreeOfFreedom globalRowDof = meshDistributor->mapLocalToGlobal(dofIt.getDOFIndex());
        // Calculate petsc index of the row dof.
-       int index = globalRow * dispMult + dispAdd;
+       int index = globalRowDof * dispMult + dispAdd;

-       if (meshDistributor->getPeriodicDofMap().count(globalRow) > 0) {
-         // The dof index is periodic, so divide the value to all dof entries.
-         double value = *dofIt / (meshDistributor->getPeriodicDof(globalRow).size() + 1.0);
+       if (meshDistributor->isPeriodicDof(globalRowDof)) {
+         std::set<int>& perAsc = meshDistributor->getPerDofAssociations(globalRowDof);
+         double value = *dofIt / (perAsc.size() + 1.0);

          VecSetValues(petscVec, 1, &index, &value, ADD_VALUES);

-         for (std::set<DegreeOfFreedom>::iterator it = meshDistributor->getPeriodicDof(globalRow).begin();
-              it != meshDistributor->getPeriodicDof(globalRow).end(); ++it) {
-           index = *it * dispMult + dispAdd;
-           VecSetValues(petscVec, 1, &index, &value, ADD_VALUES);
-         }
+         for (std::set<int>::iterator perIt = perAsc.begin(); perIt != perAsc.end(); ++perIt) {
+           int mappedDof = meshDistributor->getPeriodicMapping(*perIt, globalRowDof);
+           int mappedIndex = mappedDof * dispMult + dispAdd;
+           VecSetValues(petscVec, 1, &mappedIndex, &value, ADD_VALUES);
+         }
        } else {
          // The dof index is not periodic.
          double value = *dofIt;

AMDiS/src/parallel/InteriorBoundary.cc
@@ -112,6 +112,8 @@ namespace AMDiS {

        SerUtil::serialize(out, bound.neighObj.ithObj);
        SerUtil::serialize(out, bound.neighObj.reverseMode);
        serializeExcludeList(out, bound.neighObj.excludedSubstructures);
+
+       SerUtil::serialize(out, bound.type);
      }
    }
  }

@@ -148,6 +150,8 @@ namespace AMDiS {

        SerUtil::deserialize(in, bound.neighObj.reverseMode);
        deserializeExcludeList(in, bound.neighObj.excludedSubstructures);
+
+       SerUtil::deserialize(in, bound.type);

        TEST_EXIT_DBG(elIndexMap.count(bound.rankObj.elIndex) == 1)
          ("Cannot find element with index %d for deserialization!\n", bound.rankObj.elIndex);

AMDiS/src/parallel/InteriorBoundary.h
@@ -28,6 +28,7 @@

  #include "AMDiS_fwd.h"
  #include "MacroElement.h"
  #include "Element.h"
+ #include "Boundary.h"

  namespace AMDiS {

@@ -106,6 +107,11 @@ namespace AMDiS {

    /// The object on the other side of the boundary.
    BoundaryObject neighObj;

+   /// Integer flag that is used to distinguish between different types of
+   /// boundaries. Until now it is used only for periodic boundaries, which are
+   /// also handled as interior boundaries.
+   BoundaryType type;
  };

  /** \brief

AMDiS/src/parallel/MeshDistributor.cc
@@ -1035,15 +1035,20 @@ namespace AMDiS {

      // === sible for this boundary. ===

      if (BoundaryManager::isBoundaryPeriodic(elInfo->getBoundary(subObj, i))) {
-       // We have found an element that is at an interior, periodic boundary.
+       // We have found an element that is at a periodic boundary, which is
+       // also handled as an interior boundary between two ranks.
        AtomicBoundary& b = periodicBoundary.getNewAtomic(otherElementRank);
        b = bound;
+       b.type = elInfo->getBoundary(subObj, i);

        if (mpiRank > otherElementRank)
          b.rankObj.setReverseMode(b.neighObj, feSpace);
        else
          b.neighObj.setReverseMode(b.rankObj, feSpace);
      } else {
        // We have found an element that is at an interior, non-periodic boundary.
        if (mpiRank > otherElementRank) {
          AtomicBoundary& b = myIntBoundary.getNewAtomic(otherElementRank);
          b = bound;

@@ -1780,6 +1785,7 @@ namespace AMDiS {

      MSG("nRankDofs = %d\n", nRankDofs);
      MSG("nOverallDofs = %d\n", nOverallDofs);
      MSG("rstart %d\n", rstart);
+     /*
      for (RankToDofContainer::iterator it = sendDofs.begin();
           it != sendDofs.end(); ++it) {
        MSG("send %d DOFs to rank %d\n", it->second.size(), it->first);

@@ -1794,6 +1800,7 @@ namespace AMDiS {

        for (unsigned int i = 0; i < it->second.size(); i++)
          MSG("%d recv DOF: %d\n", i, *(it->second[i]));
      }
+     */

      std::set<const DegreeOfFreedom*> testDofs;
      debug::getAllDofs(feSpace, testDofs);

@@ -1954,6 +1961,7 @@ namespace AMDiS {

      // Clear all periodic dof mappings calculated before. We do it from scratch.
      periodicDof.clear();
+     perDofAssociations.clear();

      StdMpi<std::vector<int> > stdMpi(mpiComm, false);

@@ -1961,6 +1969,7 @@ namespace AMDiS {

      // === to the rank "on the other side" of the periodic boundary. ===

      RankToDofContainer rankPeriodicDofs;
+     std::map<int, std::vector<int> > rankToDofType;

      for (RankToBoundMap::iterator it = periodicBoundary.boundary.begin();
           it != periodicBoundary.boundary.end(); ++it) {

@@ -1988,6 +1997,9 @@ namespace AMDiS {

        }

        el->getVertexDofs(feSpace, boundIt->rankObj, dofs);
        el->getNonVertexDofs(feSpace, boundIt->rankObj, dofs);
+
+       for (unsigned int i = 0; i < dofs.size(); i++)
+         rankToDofType[it->first].push_back(boundIt->type);
      }

      // Send the global indices to the rank on the other side.

@@ -1997,7 +2009,6 @@ namespace AMDiS {

        // Receive from this rank the same number of dofs.
        stdMpi.recv(it->first, dofs.size());
-       // rankPeriodicDofs[it->first] = dofs;
      }

      stdMpi.updateSendDataSize();

@@ -2010,20 +2021,106 @@ namespace AMDiS {

      // === dofs from the other ranks. ===

+     std::map<DegreeOfFreedom, std::set<int> > dofFromRank;

      for (RankToBoundMap::iterator it = periodicBoundary.boundary.begin();
           it != periodicBoundary.boundary.end(); ++it) {
        DofContainer& dofs = rankPeriodicDofs[it->first];
+       std::vector<int>& types = rankToDofType[it->first];

+       TEST_EXIT_DBG(dofs.size() == types.size())("Should not happen!\n");

        // Add the received dofs to the mapping.
        for (unsigned int i = 0; i < dofs.size(); i++) {
          int globalDofIndex = mapLocalGlobalDofs[*(dofs[i])];
-         periodicDof[globalDofIndex].insert(stdMpi.getRecvData(it->first)[i]);
+         int mapGlobalDofIndex = stdMpi.getRecvData(it->first)[i];
+         BoundaryType type = types[i];
+
+         periodicDof[type][globalDofIndex] = mapGlobalDofIndex;
+         perDofAssociations[globalDofIndex].insert(type);
+         dofFromRank[globalDofIndex].insert(it->first);
        }
      }

+     if (dofFromRank.size() > 0)
+       TEST_EXIT_DBG(mesh->getDim() == 2)
+         ("Periodic boundary corner problem must be generalized to 3d!\n");
+
+     MPI::Request request[min(static_cast<int>(periodicBoundary.boundary.size() * 2), 4)];
+     int requestCounter = 0;
+     std::vector<int*> sendBuffers, recvBuffers;
+
+     for (std::map<DegreeOfFreedom, std::set<int> >::iterator it = dofFromRank.begin();
+          it != dofFromRank.end(); ++it) {
+       if (it->second.size() == 2) {
+         TEST_EXIT_DBG(perDofAssociations[it->first].size() == 2)("Missing periodic dof!\n");
+
+         int type0 = *(perDofAssociations[it->first].begin());
+         int type1 = *(++(perDofAssociations[it->first].begin()));
+
+         int *sendbuf = new int[2];
+         sendbuf[0] = periodicDof[type0][it->first];
+         sendbuf[1] = periodicDof[type1][it->first];
+
+         request[requestCounter++] =
+           mpiComm.Isend(sendbuf, 2, MPI_INT, *(it->second.begin()), 0);
+         request[requestCounter++] =
+           mpiComm.Isend(sendbuf, 2, MPI_INT, *(++(it->second.begin())), 0);
+
+         sendBuffers.push_back(sendbuf);
+
+         int *recvbuf1 = new int[2];
+         int *recvbuf2 = new int[2];
+
+         request[requestCounter++] =
+           mpiComm.Irecv(recvbuf1, 2, MPI_INT, *(it->second.begin()), 0);
+         request[requestCounter++] =
+           mpiComm.Irecv(recvbuf2, 2, MPI_INT, *(++(it->second.begin())), 0);
+
+         recvBuffers.push_back(recvbuf1);
+         recvBuffers.push_back(recvbuf2);
+       }
+     }

-     for (PeriodicDofMap::iterator it = periodicDof.begin(); it != periodicDof.end(); ++it)
-       if (it->second.size() == 2)
-         periodicDof.erase(it++);

+     MPI::Request::Waitall(requestCounter, request);
+
+     int i = 0;
+     for (std::map<DegreeOfFreedom, std::set<int> >::iterator it = dofFromRank.begin();
+          it != dofFromRank.end(); ++it) {
+       if (it->second.size() == 2) {
+         for (int k = 0; k < 2; k++)
+           for (int j = 0; j < 2; j++)
+             if (recvBuffers[i + k][j] != it->first) {
+               int globalDofIndex = it->first;
+               int mapGlobalDofIndex = recvBuffers[i + k][j];
+               int type = 3;
+
+               periodicDof[type][globalDofIndex] = mapGlobalDofIndex;
+               perDofAssociations[globalDofIndex].insert(type);
+             }
+
+         i++;
+       }
+     }
+
+     for (unsigned int i = 0; i < sendBuffers.size(); i++)
+       delete [] sendBuffers[i];
+     for (unsigned int i = 0; i < recvBuffers.size(); i++)
+       delete [] recvBuffers[i];
+
+ #if (DEBUG != 0)
+     for (std::map<int, std::set<int> >::iterator it = perDofAssociations.begin();
+          it != perDofAssociations.end(); ++it) {
+       int nAssoc = it->second.size();
+       TEST_EXIT_DBG(nAssoc == 1 || nAssoc == 3)
+         ("Should not happen! DOF %d has %d periodic associations!\n", it->first, nAssoc);
+     }
+ #endif
    }

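The corner handling above is specific to 2D: a DOF in the corner of a doubly periodic domain is received from two ranks, gets one direct association per boundary, and the nonblocking Isend/Irecv round then supplies the index of the diagonally opposite corner, which is stored under the combined association type 3. Below is a toy model of the resulting bookkeeping (the type values 1 and 2 are assumed for illustration; only type 3 is fixed in the code above):

    #include <cassert>
    #include <map>
    #include <set>

    // Toy stand-in for the AMDiS typedefs: type -> (dof -> mapped dof).
    typedef std::map<int, std::map<int, int> > PeriodicDofMap;

    int main()
    {
      PeriodicDofMap periodicDof;
      std::map<int, std::set<int> > perDofAssociations;

      int corner = 0;               // global index of a corner DOF
      periodicDof[1][corner] = 10;  // partner across the x-periodic boundary
      periodicDof[2][corner] = 20;  // partner across the y-periodic boundary
      perDofAssociations[corner].insert(1);
      perDofAssociations[corner].insert(2);

      // The MPI exchange above would deliver the diagonally opposite corner
      // (hard-coded here); it is stored under the combined type 3.
      periodicDof[3][corner] = 30;
      perDofAssociations[corner].insert(3);

      // Mirrors the #if (DEBUG != 0) check: in 2D every periodic DOF ends up
      // with either 1 or 3 associations.
      int nAssoc = perDofAssociations[corner].size();
      assert(nAssoc == 1 || nAssoc == 3);
      return 0;
    }
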
@@ -2049,6 +2146,7 @@ namespace AMDiS {

      serialize(out, vertexDof);
      serialize(out, periodicDof);
+     serialize(out, perDofAssociations);

      SerUtil::serialize(out, rstart);
      SerUtil::serialize(out, macroElementStructureConsisten);

@@ -2098,6 +2196,7 @@ namespace AMDiS {

      deserialize(in, vertexDof, dofMap);
      deserialize(in, periodicDof);
+     deserialize(in, perDofAssociations);

      SerUtil::deserialize(in, rstart);
      SerUtil::deserialize(in, macroElementStructureConsisten);

@@ -2112,10 +2211,25 @@ namespace AMDiS {

      SerUtil::serialize(out, mapSize);

      for (PeriodicDofMap::iterator it = data.begin(); it != data.end(); ++it) {
-       DegreeOfFreedom dof = it->first;
-       std::set<DegreeOfFreedom> dofSet = it->second;
+       int type = it->first;
+       DofMapping dofMap = it->second;

-       SerUtil::serialize(out, dof);
-       SerUtil::serialize(out, dofSet);
+       SerUtil::serialize(out, type);
+       SerUtil::serialize(out, dofMap);
      }
    }

+   void MeshDistributor::serialize(std::ostream &out, std::map<int, std::set<int> >& data)
+   {
+     int mapSize = data.size();
+     SerUtil::serialize(out, mapSize);
+
+     for (std::map<int, std::set<int> >::iterator it = data.begin(); it != data.end(); ++it) {
+       int dof = it->first;
+       std::set<int> typeSet = it->second;
+
+       SerUtil::serialize(out, dof);
+       SerUtil::serialize(out, typeSet);
+     }
+   }

@@ -2128,14 +2242,34 @@ namespace AMDiS {

      SerUtil::deserialize(in, mapSize);

      for (int i = 0; i < mapSize; i++) {
-       DegreeOfFreedom dof = 0;
-       std::set<DegreeOfFreedom> dofSet;
+       int type;
+       DofMapping dofMap;

-       SerUtil::deserialize(in, dof);
-       SerUtil::deserialize(in, dofSet);
+       SerUtil::deserialize(in, type);
+       SerUtil::deserialize(in, dofMap);

-       data[dof] = dofSet;
+       data[type] = dofMap;
      }
    }

+   void MeshDistributor::deserialize(std::istream &in, std::map<int, std::set<int> >& data)
+   {
+     data.clear();
+
+     int mapSize = 0;
+     SerUtil::deserialize(in, mapSize);
+
+     for (int i = 0; i < mapSize; i++) {
+       int dof;
+       std::set<int> typeSet;
+
+       SerUtil::deserialize(in, dof);
+       SerUtil::deserialize(in, typeSet);
+
+       data[dof] = typeSet;
+     }
+   }

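The two new overloads follow the same length-prefixed layout as the existing PeriodicDofMap serializer: the map size first, then each key followed by its value. A self-contained sketch of that layout, with plain binary stream writes standing in for AMDiS's SerUtil helpers (whose exact on-disk format is not shown in this diff):

    #include <iostream>
    #include <map>
    #include <set>
    #include <sstream>

    static void writeInt(std::ostream& out, int v)
    {
      out.write(reinterpret_cast<const char*>(&v), sizeof(int));
    }

    // Length-prefixed layout: map size, then per entry the dof, the size of
    // its type set, and the set elements.
    static void serializeAssoc(std::ostream& out,
                               const std::map<int, std::set<int> >& data)
    {
      writeInt(out, static_cast<int>(data.size()));
      for (std::map<int, std::set<int> >::const_iterator it = data.begin();
           it != data.end(); ++it) {
        writeInt(out, it->first);
        writeInt(out, static_cast<int>(it->second.size()));
        for (std::set<int>::const_iterator tIt = it->second.begin();
             tIt != it->second.end(); ++tIt)
          writeInt(out, *tIt);
      }
    }

    int main()
    {
      std::map<int, std::set<int> > perDofAssociations;
      perDofAssociations[7].insert(1);
      perDofAssociations[7].insert(2);
      perDofAssociations[7].insert(3);

      std::ostringstream out;
      serializeAssoc(out, perDofAssociations);
      // 6 ints in total (size, dof, set size, 3 elements) = 24 bytes
      // with a 4-byte int.
      std::cout << out.str().size() << " bytes written" << std::endl;
      return 0;
    }
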
AMDiS/src/parallel/MeshDistributor.h
@@ -68,7 +68,7 @@ namespace AMDiS {

    typedef std::map<const DegreeOfFreedom*, DegreeOfFreedom> DofIndexMap;

-   typedef std::map<DegreeOfFreedom, std::set<DegreeOfFreedom> > PeriodicDofMap;
+   typedef std::map<int, DofMapping> PeriodicDofMap;

    typedef std::vector<MeshStructure> MeshCodeVec;

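This typedef change is the core of the commit: the periodic map is no longer keyed by DOF with an undifferentiated set of partners, but by boundary type, with one DOF-to-DOF mapping per type. A minimal sketch of the difference, with the typedefs spelled out on int (DofMapping as a DOF-to-DOF map follows the local typedef in ParallelDebug.cc below; the real AMDiS definitions live elsewhere):

    #include <iostream>
    #include <map>
    #include <set>

    typedef int DegreeOfFreedom;
    typedef std::map<DegreeOfFreedom, DegreeOfFreedom> DofMapping;

    // Old: one undifferentiated set of partner DOFs per DOF.
    typedef std::map<DegreeOfFreedom, std::set<DegreeOfFreedom> > OldPeriodicDofMap;

    // New: one DOF-to-DOF mapping per boundary type, so the partner across
    // the x-boundary can be told apart from the partner across the y-boundary.
    typedef std::map<int, DofMapping> PeriodicDofMap;

    int main()
    {
      PeriodicDofMap periodicDof;
      periodicDof[1][0] = 10;  // across boundary type 1, DOF 0 maps to DOF 10
      periodicDof[2][0] = 20;  // across boundary type 2, DOF 0 maps to DOF 20

      // With the old layout {0 -> {10, 20}} this distinction was lost.
      std::cout << "type 1 partner of DOF 0: " << periodicDof[1][0] << std::endl;
      return 0;
    }
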
@@ -133,14 +133,27 @@ namespace AMDiS {

      return mapLocalDofIndex[dof];
    }

-   inline std::set<DegreeOfFreedom>& getPeriodicDof(DegreeOfFreedom dof)
+   inline int getPeriodicMapping(int type, int globalDofIndex)
    {
-     return periodicDof[dof];
+     TEST_EXIT_DBG(periodicDof[type].count(globalDofIndex) == 1)("Should not happen!\n");
+
+     return periodicDof[type][globalDofIndex];
    }

+   inline std::set<int>& getPerDofAssociations(int globalDofIndex)
+   {
+     return perDofAssociations[globalDofIndex];
+   }
+
-   inline PeriodicDofMap& getPeriodicDofMap()
+   inline bool isPeriodicDof(int globalDofIndex)
    {
-     return periodicDof;
+     return (perDofAssociations.count(globalDofIndex) > 0);
    }

+   inline bool isPeriodicDof(int globalDofIndex, int type)
+   {
+     return (periodicDof[type].count(globalDofIndex) > 0);
+   }
+
    inline bool getIsRankDof(DegreeOfFreedom dof)

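The intended call sequence for these accessors mirrors the matrix assembly code in GlobalMatrixSolver.cc above. A compilable sketch with a mock distributor standing in for MeshDistributor (the mock class is hypothetical; only the accessor signatures come from the header):

    #include <iostream>
    #include <map>
    #include <set>

    // Mock with the same accessor signatures as MeshDistributor above.
    struct MockDistributor {
      std::map<int, std::map<int, int> > periodicDof;
      std::map<int, std::set<int> > perDofAssociations;

      bool isPeriodicDof(int dof) { return perDofAssociations.count(dof) > 0; }
      std::set<int>& getPerDofAssociations(int dof) { return perDofAssociations[dof]; }
      int getPeriodicMapping(int type, int dof) { return periodicDof[type][dof]; }
    };

    int main()
    {
      MockDistributor md;
      md.periodicDof[1][0] = 10;
      md.perDofAssociations[0].insert(1);

      // Query sequence: is the DOF periodic, which associations does it
      // have, and where does each association map it to?
      if (md.isPeriodicDof(0)) {
        std::set<int>& perAsc = md.getPerDofAssociations(0);
        double scalFactor = 1.0 / (perAsc.size() + 1.0);
        for (std::set<int>::iterator it = perAsc.begin(); it != perAsc.end(); ++it)
          std::cout << "DOF 0 -> DOF " << md.getPeriodicMapping(*it, 0)
                    << " (factor " << scalFactor << ")" << std::endl;
      }
      return 0;
    }
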
@@ -336,9 +349,13 @@ namespace AMDiS {

      /// Writes a periodic dof mapping to an output stream.
      void serialize(std::ostream &out, PeriodicDofMap &data);

+     void serialize(std::ostream &out, std::map<int, std::set<int> >& data);
+
      /// Reads a periodic dof mapping from an input stream.
      void deserialize(std::istream &in, PeriodicDofMap &data);

+     void deserialize(std::istream &in, std::map<int, std::set<int> >& data);
+
      /// Writes a mapping from dof pointers to some values to an output stream.
      template<typename T>
      void serialize(std::ostream &out, std::map<const DegreeOfFreedom*, T> &data)

@@ -501,6 +518,8 @@ namespace AMDiS {

      */
      PeriodicDofMap periodicDof;

+     std::map<int, std::set<int> > perDofAssociations;
+
      /// The index of the first row of the linear system that is owned by the rank.
      int rstart;

AMDiS/src/parallel/ParallelDebug.cc
@@ -374,6 +374,9 @@ namespace AMDiS {

    {
      FUNCNAME("ParallelDebug::printMapPeriodic()");

+     ERROR_EXIT("Function must be rewritten!\n");
+
+ #if 0
      typedef std::map<DegreeOfFreedom, DegreeOfFreedom> DofMapping;
      typedef std::map<DegreeOfFreedom, std::set<DegreeOfFreedom> > PeriodicDofMap;

@@ -401,6 +404,7 @@ namespace AMDiS {

          coords.print();
        }
      }
+ #endif
    }