Backofen, Rainer / amdis / Commits / 2c96156e

Commit 2c96156e authored May 11, 2012 by Thomas Witkowski

Commit message: Blub
Parent: a2e41205
Changes: 56
AMDiS/src/parallel/CheckerPartitioner.cc
@@ -43,6 +43,11 @@ namespace AMDiS {
   }
 
+  if (multilevel) {
+    TEST_EXIT(MPI::COMM_WORLD.Get_size() == 16)
+      ("Multilevel partitioning is implemented for 16 nodes only!\n");
+  }
+
   int dim = mesh->getDim();
   TraverseStack stack;
   ElInfo *elInfo = stack.traverseFirst(mesh, 0, Mesh::CALL_EL_LEVEL);
@@ -54,7 +59,13 @@ namespace AMDiS {
     switch (mode) {
     case 0:
-      elInRank = boxIndex;
+      if (!multilevel) {
+        elInRank = boxIndex;
+      } else {
+        TEST_EXIT_DBG(boxIndex >= 0 && boxIndex <= 15)("Wrong box index!\n");
+
+        int rs[16] = {0, 1, 4, 5, 2, 3, 6, 7, 8, 9, 12, 13, 10, 11, 14, 15};
+        elInRank = rs[boxIndex];
+      }
       break;
 
     case 1:
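For orientation (this note is not part of the commit): the `rs` table above reassigns the 16 boxes of the 4x4 checkerboard so that each 2x2 block of boxes lands on four consecutive ranks, which is exactly the contiguous grouping the multilevel code in MeshDistributor.cc and MeshLevelData.h relies on. A minimal standalone sketch, assuming the same row-major box numbering:

#include <cstdio>

int main()
{
  // rs[boxIndex] = rank, copied from the hunk above.
  int rs[16] = {0, 1, 4, 5, 2, 3, 6, 7, 8, 9, 12, 13, 10, 11, 14, 15};

  // Boxes are numbered row-major on a 4x4 grid; each 2x2 block of
  // boxes should map to one contiguous group of four ranks.
  for (int by = 0; by < 4; by += 2)
    for (int bx = 0; bx < 4; bx += 2) {
      printf("block (%d,%d): ranks", bx, by);
      for (int y = by; y < by + 2; y++)
        for (int x = bx; x < bx + 2; x++)
          printf(" %d", rs[y * 4 + x]);
      printf("\n");
    }
  return 0;
}

Running it prints ranks {0,1,2,3}, {4,5,6,7}, {8,9,10,11}, {12,13,14,15} for the four blocks, matching the level groups added below.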
AMDiS/src/parallel/CheckerPartitioner.h
@@ -38,22 +38,27 @@ namespace AMDiS {
     CheckerPartitioner(MPI::Intracomm *comm)
       : MeshPartitioner(comm),
         mpiRank(mpiComm->Get_rank()),
-        mpiSize(mpiComm->Get_size())
+        mpiSize(mpiComm->Get_size()),
+        mode(0),
+        multilevel(false)
     {
       string modestr = "";
       Parameters::get("parallel->partitioner->mode", modestr);
-      if (modestr == "")
-        mode = 0;
-      else if (modestr == "x-stripes")
+
+      if (modestr == "x-stripes")
         mode = 1;
       else if (modestr == "y-stripes")
         mode = 2;
       else if (modestr == "z-stripes")
         mode = 3;
+      else if (modestr == "multilevel")
+        multilevel = true;
       else {
         if (modestr != "") {
          ERROR_EXIT("No partitioner mode \"%s\"!\n", modestr.c_str());
        }
      }
    }
 
    ~CheckerPartitioner() {}
@@ -77,6 +82,8 @@ namespace AMDiS {
     /// 1: x-stripes: each node gets one x-stripe of boxes
     /// 2: y-stripes: each node gets one y-stripe of boxes
     int mode;
+
+    bool multilevel;
   };
 }
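For reference (an illustration, not part of the commit): after this change the accepted values of `parallel->partitioner->mode` are the empty string (plain checkerboard, the default from the initializer list), `x-stripes`, `y-stripes`, `z-stripes`, and the new `multilevel`; any other non-empty string aborts. A standalone sketch of the same dispatch:

#include <cstdio>
#include <string>

// Mirrors the constructor's dispatch on "parallel->partitioner->mode":
// returns the mode number, sets the multilevel flag, and reports unknown
// values the way ERROR_EXIT does in the original.
static int parseMode(const std::string &modestr, bool &multilevel)
{
  multilevel = false;
  if (modestr == "")           return 0;   // default checkerboard
  if (modestr == "x-stripes")  return 1;
  if (modestr == "y-stripes")  return 2;
  if (modestr == "z-stripes")  return 3;
  if (modestr == "multilevel") { multilevel = true; return 0; }

  fprintf(stderr, "No partitioner mode \"%s\"!\n", modestr.c_str());
  return -1;
}

int main()
{
  bool ml = false;
  int mode = parseMode("multilevel", ml);
  printf("mode = %d, multilevel = %d\n", mode, (int) ml);
  return 0;
}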
AMDiS/src/parallel/DofComm.cc
@@ -91,22 +91,29 @@ namespace AMDiS {
   int DofComm::getNumberDofs(LevelDataType &data, int level,
-                             const FiniteElemSpace *feSpace)
+                             const FiniteElemSpace *feSpace,
+                             bool countDouble)
   {
     FUNCNAME("DofComm::getNumberDofs()");
 
     TEST_EXIT_DBG(level < data.size())("Should not happen!\n");
 
-    DofContainerSet dofs;
+    DofContainerSet dofSet;
+    DofContainer dofVec;
 
     for (DataIter rankIt = data[level].begin();
          rankIt != data[level].end(); ++rankIt)
       for (FeMapIter feIt = rankIt->second.begin();
            feIt != rankIt->second.end(); ++feIt)
         if (feIt->first == feSpace)
-          dofs.insert(feIt->second.begin(), feIt->second.end());
+          if (countDouble)
+            dofVec.insert(dofVec.end(),
+                          feIt->second.begin(), feIt->second.end());
+          else
+            dofSet.insert(feIt->second.begin(), feIt->second.end());
 
-    return static_cast<int>(dofs.size());
+    if (countDouble)
+      return static_cast<int>(dofVec.size());
+    return static_cast<int>(dofSet.size());
   }
AMDiS/src/parallel/DofComm.h
@@ -87,7 +87,8 @@ namespace AMDiS {
     int getNumberDofs(LevelDataType &data, int level,
-                      const FiniteElemSpace *feSpace);
+                      const FiniteElemSpace *feSpace,
+                      bool countDouble = false);
 
   protected:
     void createContainer(RankToBoundMap &boundary, LevelDataType &data);
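The point of the new flag, sketched here outside the AMDiS types for illustration: collecting DOFs into a set collapses indices that occur in several rank entries, while collecting them into a vector keeps every occurrence, so `countDouble = true` reports multiplicities. A minimal standalone analogue:

#include <cstdio>
#include <set>
#include <vector>

// Toy analogue of DofComm::getNumberDofs(): the same DOF index may be
// listed for several neighbouring ranks; countDouble decides whether
// such repetitions are counted once or per occurrence.
static int countDofs(const std::vector<int> &dofsPerRank, bool countDouble)
{
  if (countDouble)
    return static_cast<int>(dofsPerRank.size());   // keep duplicates

  std::set<int> unique(dofsPerRank.begin(), dofsPerRank.end());
  return static_cast<int>(unique.size());          // collapse duplicates
}

int main()
{
  std::vector<int> dofs = {3, 7, 7, 11};           // DOF 7 is shared twice
  printf("unique: %d, with doubles: %d\n",
         countDofs(dofs, false), countDofs(dofs, true));
  return 0;
}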
AMDiS/src/parallel/MeshDistributor.cc
@@ -176,7 +176,8 @@ namespace AMDiS {
     createBoundaryDofs();
 
 #if (DEBUG != 0)
-    ParallelDebug::writeDebugFile(*this, debugOutputDir + "mpi-dbg", "dat");
+    ParallelDebug::writeDebugFile(feSpaces[feSpaces.size() - 1], dofMap,
+                                  debugOutputDir + "mpi-dbg", "dat");
 #endif
 
     initialized = true;
@@ -501,13 +502,18 @@ namespace AMDiS {
   }
 
-  void MeshDistributor::synchVector(SystemVector &vec)
+  void MeshDistributor::synchVector(SystemVector &vec, int level)
   {
     FUNCNAME("MeshDistributor::synchVector()");
 
-    StdMpi<vector<double> > stdMpi(mpiComm);
+    TEST_EXIT_DBG(level >= 0 && level <= 1)("Wrong level number!\n");
 
-    for (DofComm::Iterator it(dofComm.getSendDofs());
+    MPI::Intracomm &levelComm = levelData.getMpiComm(level);
+    DofComm &dc = (level == 0 ? dofComm : dofCommSd);
+
+    StdMpi<vector<double> > stdMpi(levelComm);
+
+    for (DofComm::Iterator it(dc.getSendDofs());
          !it.end(); it.nextRank())
     {
       vector<double> dofs;
@@ -518,22 +524,33 @@ namespace AMDiS {
         dofs.push_back(dofVec[it.getDofIndex()]);
       }
 
-      stdMpi.send(it.getRank(), dofs);
+      int rank = it.getRank();
+      if (level > 0)
+        rank = levelData.mapRank(rank, 0, level);
+      stdMpi.send(rank, dofs);
     }
 
-    for (DofComm::Iterator it(dofComm.getRecvDofs()); !it.end(); it.nextRank())
-      stdMpi.recv(it.getRank());
+    for (DofComm::Iterator it(dc.getRecvDofs()); !it.end(); it.nextRank()) {
+      int rank = it.getRank();
+      if (level > 0)
+        rank = levelData.mapRank(rank, 0, level);
+      stdMpi.recv(rank);
+    }
 
     stdMpi.startCommunication();
 
-    for (DofComm::Iterator it(dofComm.getRecvDofs()); !it.end(); it.nextRank()) {
+    for (DofComm::Iterator it(dc.getRecvDofs()); !it.end(); it.nextRank()) {
+      int rank = it.getRank();
+      if (level > 0)
+        rank = levelData.mapRank(rank, 0, level);
+
       int counter = 0;
 
       for (int i = 0; i < vec.getSize(); i++) {
         DOFVector<double> &dofVec = *(vec.getDOFVector(i));
 
         for (it.beginDofIter(vec.getFeSpace(i)); !it.endDofIter(); it.nextDof())
-          dofVec[it.getDofIndex()] = stdMpi.getRecvData(it.getRank())[counter++];
+          dofVec[it.getDofIndex()] = 0.0; //stdMpi.getRecvData(rank)[counter++];
       }
     }
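A note on the rank remapping above (illustrative; the exact semantics of `levelData.mapRank(rank, 0, level)` are assumed here, not confirmed by the commit): on level 1 the communication runs inside a 4-rank sub-communicator, so global neighbour ranks must be translated into that communicator's numbering. With the contiguous grouping this commit introduces, a toy analogue of the translation is plain arithmetic:

#include <cstdio>

// Hypothetical analogue of levelData.mapRank(rank, 0, level): translate
// a rank of the global communicator into its index inside a 4-rank level
// group, using the grouping {0..3}, {4..7}, {8..11}, {12..15} from this
// commit. The real mapRank() may differ; this only shows the idea.
static int mapToLevelRank(int globalRank)
{
  return globalRank % 4;   // position inside its contiguous 4-rank block
}

int main()
{
  for (int r = 0; r < 16; r++)
    printf("global rank %2d -> level-1 rank %d (group %d)\n",
           r, mapToLevelRank(r), r / 4);
  return 0;
}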
@@ -777,60 +794,60 @@ namespace AMDiS {
     std::set<int> neighbours;
     switch (mpiRank) {
     case 0:
-      neighbours.insert(1);   neighbours.insert(4);   neighbours.insert(5);
+      neighbours.insert(1);   neighbours.insert(2);   neighbours.insert(3);
       break;
     case 1:
-      neighbours.insert(0);   neighbours.insert(2);   neighbours.insert(4);
-      neighbours.insert(5);   neighbours.insert(6);
+      neighbours.insert(0);   neighbours.insert(4);   neighbours.insert(2);
+      neighbours.insert(3);   neighbours.insert(6);
       break;
     case 2:
-      neighbours.insert(1);   neighbours.insert(3);   neighbours.insert(5);
-      neighbours.insert(6);   neighbours.insert(7);
+      neighbours.insert(0);   neighbours.insert(1);   neighbours.insert(3);
+      neighbours.insert(8);   neighbours.insert(9);
       break;
     case 3:
-      neighbours.insert(2);   neighbours.insert(6);   neighbours.insert(7);
+      neighbours.insert(0);   neighbours.insert(1);   neighbours.insert(4);
+      neighbours.insert(2);   neighbours.insert(6);   neighbours.insert(8);
+      neighbours.insert(9);   neighbours.insert(12);
       break;
     case 4:
-      neighbours.insert(0);   neighbours.insert(1);   neighbours.insert(5);
-      neighbours.insert(8);   neighbours.insert(9);
+      neighbours.insert(1);   neighbours.insert(3);   neighbours.insert(6);
+      neighbours.insert(7);   neighbours.insert(5);
       break;
     case 5:
-      neighbours.insert(0);   neighbours.insert(1);   neighbours.insert(2);
-      neighbours.insert(4);   neighbours.insert(6);   neighbours.insert(8);
-      neighbours.insert(9);   neighbours.insert(10);
+      neighbours.insert(4);   neighbours.insert(6);   neighbours.insert(7);
       break;
     case 6:
-      neighbours.insert(1);   neighbours.insert(2);   neighbours.insert(3);
-      neighbours.insert(5);   neighbours.insert(7);   neighbours.insert(9);
-      neighbours.insert(10);  neighbours.insert(11);
+      neighbours.insert(1);   neighbours.insert(4);   neighbours.insert(5);
+      neighbours.insert(3);   neighbours.insert(7);   neighbours.insert(9);
+      neighbours.insert(12);  neighbours.insert(13);
       break;
     case 7:
-      neighbours.insert(2);   neighbours.insert(3);   neighbours.insert(6);
-      neighbours.insert(10);  neighbours.insert(11);
+      neighbours.insert(4);   neighbours.insert(5);   neighbours.insert(6);
+      neighbours.insert(12);  neighbours.insert(13);
       break;
     case 8:
-      neighbours.insert(4);   neighbours.insert(5);   neighbours.insert(9);
-      neighbours.insert(12);  neighbours.insert(13);
+      neighbours.insert(2);   neighbours.insert(3);   neighbours.insert(9);
+      neighbours.insert(10);  neighbours.insert(11);
       break;
     case 9:
-      neighbours.insert(4);   neighbours.insert(5);   neighbours.insert(6);
-      neighbours.insert(8);   neighbours.insert(10);  neighbours.insert(12);
-      neighbours.insert(13);  neighbours.insert(14);
+      neighbours.insert(2);   neighbours.insert(3);   neighbours.insert(6);
+      neighbours.insert(8);   neighbours.insert(12);  neighbours.insert(10);
+      neighbours.insert(11);  neighbours.insert(14);
       break;
     case 10:
-      neighbours.insert(5);   neighbours.insert(6);   neighbours.insert(7);
-      neighbours.insert(9);   neighbours.insert(11);  neighbours.insert(13);
-      neighbours.insert(14);  neighbours.insert(15);
+      neighbours.insert(8);   neighbours.insert(9);   neighbours.insert(11);
       break;
     case 11:
-      neighbours.insert(6);   neighbours.insert(7);   neighbours.insert(10);
-      neighbours.insert(14);  neighbours.insert(15);
+      neighbours.insert(8);   neighbours.insert(9);   neighbours.insert(12);
+      neighbours.insert(10);  neighbours.insert(14);
       break;
     case 12:
-      neighbours.insert(8);   neighbours.insert(9);   neighbours.insert(13);
+      neighbours.insert(3);   neighbours.insert(6);   neighbours.insert(7);
+      neighbours.insert(9);   neighbours.insert(13);  neighbours.insert(11);
+      neighbours.insert(14);  neighbours.insert(15);
       break;
     case 13:
-      neighbours.insert(8);   neighbours.insert(9);   neighbours.insert(10);
-      neighbours.insert(12);  neighbours.insert(14);
+      neighbours.insert(6);   neighbours.insert(7);   neighbours.insert(12);
+      neighbours.insert(14);  neighbours.insert(15);
       break;
     case 14:
-      neighbours.insert(9);   neighbours.insert(10);  neighbours.insert(11);
-      neighbours.insert(13);  neighbours.insert(15);
+      neighbours.insert(9);   neighbours.insert(12);  neighbours.insert(13);
+      neighbours.insert(11);  neighbours.insert(15);
       break;
     case 15:
-      neighbours.insert(10);  neighbours.insert(11);  neighbours.insert(14);
+      neighbours.insert(12);  neighbours.insert(13);  neighbours.insert(14);
       break;
     }
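For orientation (not part of the commit): the rewritten tables are exactly the old ones pushed through the `rs` renumbering from CheckerPartitioner.cc. Rank `rs[b]` now owns box `b`, so its neighbours are the old neighbours of box `b`, each mapped through `rs`. A standalone check that regenerates the '+' lines above from the '-' lines:

#include <cstdio>
#include <vector>

int main()
{
  // Box-to-rank renumbering from CheckerPartitioner.cc.
  int rs[16] = {0, 1, 4, 5, 2, 3, 6, 7, 8, 9, 12, 13, 10, 11, 14, 15};

  // Old neighbour table (rank == box index), copied from the '-' lines.
  std::vector<std::vector<int>> oldNb = {
    {1, 4, 5},                     // box 0
    {0, 2, 4, 5, 6},               // box 1
    {1, 3, 5, 6, 7},               // box 2
    {2, 6, 7},                     // box 3
    {0, 1, 5, 8, 9},               // box 4
    {0, 1, 2, 4, 6, 8, 9, 10},     // box 5
    {1, 2, 3, 5, 7, 9, 10, 11},    // box 6
    {2, 3, 6, 10, 11},             // box 7
    {4, 5, 9, 12, 13},             // box 8
    {4, 5, 6, 8, 10, 12, 13, 14},  // box 9
    {5, 6, 7, 9, 11, 13, 14, 15},  // box 10
    {6, 7, 10, 14, 15},            // box 11
    {8, 9, 13},                    // box 12
    {8, 9, 10, 12, 14},            // box 13
    {9, 10, 11, 13, 15},           // box 14
    {10, 11, 14},                  // box 15
  };

  // New set of rank rs[b] = old set of box b, mapped through rs.
  for (int b = 0; b < 16; b++) {
    printf("rank %2d:", rs[b]);
    for (int n : oldNb[b])
      printf(" %2d", rs[n]);
    printf("\n");
  }
  return 0;
}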
@@ -844,31 +861,31 @@ namespace AMDiS {
     Parameters::get("parallel->multi level test", multiLevelTest);
     if (multiLevelTest) {
       switch (mpiRank) {
-      case 0: case 1: case 4: case 5:
+      case 0: case 1: case 2: case 3:
         {
           std::set<int> m;
-          m.insert(0);  m.insert(1);  m.insert(4);  m.insert(5);
+          m.insert(0);  m.insert(1);  m.insert(2);  m.insert(3);
           levelData.addLevel(m, 0);
         }
         break;
-      case 2: case 3: case 6: case 7:
+      case 4: case 5: case 6: case 7:
         {
           std::set<int> m;
-          m.insert(2);  m.insert(3);  m.insert(6);  m.insert(7);
+          m.insert(4);  m.insert(5);  m.insert(6);  m.insert(7);
           levelData.addLevel(m, 1);
         }
         break;
-      case 8: case 9: case 12: case 13:
+      case 8: case 9: case 10: case 11:
         {
           std::set<int> m;
-          m.insert(8);  m.insert(9);  m.insert(12);  m.insert(13);
+          m.insert(8);  m.insert(9);  m.insert(10);  m.insert(11);
           levelData.addLevel(m, 2);
         }
         break;
-      case 10: case 11: case 14: case 15:
+      case 12: case 13: case 14: case 15:
        {
          std::set<int> m;
-          m.insert(10);  m.insert(11);  m.insert(14);  m.insert(15);
+          m.insert(12);  m.insert(13);  m.insert(14);  m.insert(15);
          levelData.addLevel(m, 3);
        }
        break;
@@ -1672,7 +1689,8 @@ namespace AMDiS {
     debug::writeElementIndexMesh(mesh, debugOutputDir + "elementIndex-" +
                                  lexical_cast<string>(mpiRank) + ".vtu");
-    ParallelDebug::writeDebugFile(*this, debugOutputDir + "mpi-dbg", "dat");
+    ParallelDebug::writeDebugFile(feSpaces[feSpaces.size() - 1], dofMap,
+                                  debugOutputDir + "mpi-dbg", "dat");
     debug::testSortedDofs(mesh, elMap);
     ParallelDebug::testCommonDofs(*this, true);
     ParallelDebug::testGlobalIndexByCoords(*this);
@@ -1680,7 +1698,8 @@ namespace AMDiS {
     int tmp = 0;
     Parameters::get(name + "->write parallel debug file", tmp);
     if (tmp)
-      ParallelDebug::writeDebugFile(*this, debugOutputDir + "mpi-dbg", "dat");
+      ParallelDebug::writeDebugFile(feSpaces[feSpaces.size() - 1], dofMap,
+                                    debugOutputDir + "mpi-dbg", "dat");
 #endif
   }
AMDiS/src/parallel/MeshDistributor.h
@@ -254,12 +254,10 @@ namespace AMDiS {
         stdMpi.getRecvData(it.getRank())[it.getDofCounter()];
     }
 
-    /** \brief
-     * Works in the same way as the function above defined for DOFVectors. Due
-     * to performance, this function does not call \ref synchVector for each
-     * DOFVector, but instead sends all values of all DOFVectors all at once.
-     */
-    void synchVector(SystemVector &vec);
+    /// Works in the same way as the function above defined for DOFVectors. Due
+    /// to performance, this function does not call \ref synchVector for each
+    /// DOFVector, but instead sends all values of all DOFVectors all at once.
+    void synchVector(SystemVector &vec, int level = 0);
 
     void check3dValidMesh();
AMDiS/src/parallel/MeshLevelData.cc
@@ -70,11 +70,13 @@ namespace AMDiS {
   void MeshLevelData::serialize(ostream &out)
   {
+    ERROR_EXIT("Not yet implemented!\n");
   }
 
   void MeshLevelData::deserialize(istream &in)
   {
+    ERROR_EXIT("Not yet implemented!\n");
   }
AMDiS/src/parallel/MeshLevelData.h
@@ -92,16 +92,16 @@ namespace AMDiS {
       TEST_EXIT_DBG(level == 1 && rank <= 15)("Should not happen!\n");
       TEST_EXIT_DBG(MPI::COMM_WORLD.Get_size() == 16)("Should not happen!\n");
 
-      if (rank == 0 || rank == 1 || rank == 4 || rank == 5)
+      if (rank == 0 || rank == 1 || rank == 2 || rank == 3)
         return 0;
 
-      if (rank == 2 || rank == 3 || rank == 6 || rank == 7)
+      if (rank == 4 || rank == 5 || rank == 6 || rank == 7)
         return 1;
 
-      if (rank == 8 || rank == 9 || rank == 12 || rank == 13)
+      if (rank == 8 || rank == 9 || rank == 10 || rank == 11)
         return 2;
 
-      if (rank == 10 || rank == 11 || rank == 14 || rank == 15)
+      if (rank == 12 || rank == 13 || rank == 14 || rank == 15)
         return 3;
 
       ERROR_EXIT("Should not happen!\n");
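The same contiguous grouping shows up here as plain arithmetic (an illustration, not AMDiS code): with the new numbering, the level group of a rank is simply `rank / 4`, which is what the rewritten if-chain encodes. A standalone check:

#include <cassert>

// Toy check: the rewritten if-chain in MeshLevelData.h is equivalent to
// integer division, because this commit makes each group of four ranks
// contiguous ({0..3} -> 0, {4..7} -> 1, {8..11} -> 2, {12..15} -> 3).
static int groupOf(int rank)
{
  if (rank == 0 || rank == 1 || rank == 2 || rank == 3)
    return 0;
  if (rank == 4 || rank == 5 || rank == 6 || rank == 7)
    return 1;
  if (rank == 8 || rank == 9 || rank == 10 || rank == 11)
    return 2;
  return 3;   // ranks 12..15
}

int main()
{
  for (int rank = 0; rank < 16; rank++)
    assert(groupOf(rank) == rank / 4);
  return 0;
}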
AMDiS/src/parallel/ParallelDebug.cc
@@ -758,25 +758,27 @@ namespace AMDiS {
   }
 
-  void ParallelDebug::writeDebugFile(MeshDistributor &pdb,
+  void ParallelDebug::writeDebugFile(const FiniteElemSpace *feSpace,
+                                     ParallelDofMapping &dofMap,
                                      string prefix, string postfix)
   {
     FUNCNAME("ParallelDebug::writeCoordsFile()");
 
-    const FiniteElemSpace *feSpace = pdb.feSpaces[pdb.feSpaces.size() - 1];
+    Mesh *mesh = feSpace->getMesh();
 
     stringstream filename;
-    filename << prefix << "-" << pdb.mpiRank << "." << postfix;
+    filename << prefix << "-" << MPI::COMM_WORLD.Get_rank() << "." << postfix;
 
     DOFVector<WorldVector<double> > coords(feSpace, "tmp");
-    pdb.mesh->getDofIndexCoords(feSpace, coords);
+    mesh->getDofIndexCoords(feSpace, coords);
 
     typedef map<int, vector<DegreeOfFreedom> > ElDofMap;
     ElDofMap elDofMap;
     TraverseStack stack;
     const BasisFunction *basisFcts = feSpace->getBasisFcts();
     vector<DegreeOfFreedom> localIndices(basisFcts->getNumber());
-    ElInfo *elInfo = stack.traverseFirst(pdb.mesh, -1, Mesh::CALL_LEAF_EL);
+    ElInfo *elInfo = stack.traverseFirst(mesh, -1, Mesh::CALL_LEAF_EL);
     while (elInfo) {
       basisFcts->getLocalIndices(elInfo->getElement(), feSpace->getAdmin(),
                                  localIndices);
@@ -789,17 +791,20 @@ namespace AMDiS {
     ofstream file;
     file.open(filename.str().c_str());
 
-    // file << "# First line contains number of DOFs, than each line has the format\n";
-    // file << "# Local DOF index  Global DOF index  Is rank DOF  x-coord y-coord z-coord\n";
-    file << coords.getUsedSize() << "\n";
+    // file << "# DOF index  Local DOF index  Global DOF index  Is rank DOF  x-coord y-coord z-coord\n";
+    file << dofMap[feSpace].size() << "\n";
 
     DOFIterator<WorldVector<double> > it(&coords, USED_DOFS);
     for (it.reset(); !it.end(); ++it) {
       if (dofMap[feSpace].isSet(it.getDOFIndex())) {
         file << it.getDOFIndex() << " "
-             << pdb.dofMap[feSpace][it.getDOFIndex()].global << " "
-             << pdb.dofMap[feSpace].isRankDof(it.getDOFIndex());
-        for (int i = 0; i < pdb.mesh->getDim(); i++)
+             << dofMap[feSpace][it.getDOFIndex()].local << " "
+             << dofMap[feSpace][it.getDOFIndex()].global << " "
+             << dofMap[feSpace].isRankDof(it.getDOFIndex());
+        for (int i = 0; i < mesh->getDim(); i++)
           file << " " << (*it)[i];
         file << "\n";
       }
     }
 
     // === Write to all elements in ranks mesh the included dofs. ===
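A short usage note: the debug writer no longer needs the whole MeshDistributor, only an FE space and a DOF mapping, so it can also be called where no distributor object is at hand. The call form below is copied from the MeshDistributor.cc hunks above, where `feSpaces`, `dofMap`, and `debugOutputDir` are members of MeshDistributor:

// Call site as it appears in MeshDistributor.cc after this commit.
ParallelDebug::writeDebugFile(feSpaces[feSpaces.size() - 1], dofMap,
                              debugOutputDir + "mpi-dbg", "dat");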
AMDiS/src/parallel/ParallelDebug.h
@@ -28,13 +28,15 @@
 namespace AMDiS {
 
+  using namespace std;
+
   class ParallelDebug
   {
   protected:
-    typedef std::vector<WorldVector<double> > CoordsVec;
+    typedef vector<WorldVector<double> > CoordsVec;
 
     /// Defines a mapping type from rank numbers to sets of coordinates.
-    typedef std::map<int, CoordsVec> RankToCoords;
+    typedef map<int, CoordsVec> RankToCoords;
 
   public:
 
     /** \brief

@@ -154,8 +156,11 @@ namespace AMDiS {
                                int level = 0,
                                bool force = false);
 
-    static void writeDebugFile(MeshDistributor &pdb,
-                               std::string prefix, std::string postfix);
+    static void writeDebugFile(const FiniteElemSpace *feSpace,
+                               ParallelDofMapping &dofMap,
+                               string prefix, string postfix);
 
     /** \brief
      * This functions create a Paraview file with the macro mesh where the

@@ -172,7 +177,7 @@ namespace AMDiS {
      * added to the filename.
      * \param[in] feSpace
      */
-    static void writePartitioningFile(std::string filename,
+    static void writePartitioningFile(string filename,
                                       int counter,
                                       const FiniteElemSpace *feSpace);
AMDiS/src/parallel/PetscSolverFeti.cc
@@ -330,12 +330,13 @@ namespace AMDiS {
     if (fetiPreconditioner != FETI_NONE)
       interiorDofMap.update();
 
     for (unsigned int i = 0; i < meshDistributor->getFeSpaces().size(); i++) {
       const FiniteElemSpace *feSpace = meshDistributor->getFeSpace(i);
       createLagrange(feSpace);
     }
 
     lagrangeMap.update();
 
     // ===  ===
@@ -415,13 +416,19 @@ namespace AMDiS {
          it != vertices.end(); ++it) {
       WorldVector<double> c;
       feSpace->getMesh()->getDofIndexCoords(*it, feSpace, c);
-      MSG("COORD %f %f\n", c[0], c[1]);
 
       double e = 1e-8;
 
-      if (fabs(c[0]) < e || fabs(c[1]) < e ||
-          fabs(c[0] - 25.0) < e || fabs(c[1] - 25.0) < e ||
-          (fabs(c[0] - 12.5) < e && fabs(c[1] - 12.5) < e)) {
-        primals.insert(**it);
-      }
+      if (meshLevel == 0) {
+        primals.insert(**it);
+      } else {
+        if (fabs(c[0]) < e || fabs(c[1]) < e ||
+            fabs(c[0] - 25.0) < e || fabs(c[1] - 25.0) < e ||
+            (fabs(c[0] - 12.5) < e && fabs(c[1] - 12.5) < e)) {
+          MSG("PRIMAL COORD %f %f\n", c[0], c[1]);
+          primals.insert(**it);
+        }
+      }
     }
 
     // === Calculate the number of primals that are owned by the rank and ===
@@ -462,24 +469,12 @@ namespace AMDiS {
     boundaryDofRanks[feSpace].clear();
 
     if (dualDofMap[feSpace].nLocalDofs == 0)
       return;
 
     // === Create for each dual node that is owned by the rank, the set ===
     // === of ranks that contain this node (denoted by W(x_j)).         ===
 
+    // Stores, for all rank-owned communication DOFs, whether the counterpart
+    // is a rank-owned DOF in its subdomain. Thus, the following map stores
+    // for each rank number all DOFs that fulfill this requirement.
     map<int, std::set<DegreeOfFreedom> > sdRankDofs;
 
-    if (meshLevel == 0) {
-      for (DofComm::Iterator it(meshDistributor->getDofComm().getSendDofs(),
-                                feSpace);
-           !it.end(); it.nextRank()) {
-        for (; !it.endDofIter(); it.nextDof()) {
-          if (!isPrimal(feSpace, it.getDofIndex())) {
-            boundaryDofRanks[feSpace][it.getDofIndex()].insert(mpiRank);
-            boundaryDofRanks[feSpace][it.getDofIndex()].insert(it.getRank());
-          }
-        }
-      }
-    } else {
+    if (meshLevel > 0) {
       StdMpi<vector<int> > stdMpi(mpiComm);
 
       for (DofComm::Iterator it(meshDistributor->getDofComm().getRecvDofs(),
@@ -490,16 +485,11 @@ namespace AMDiS {
         subdomainRankDofs.reserve(it.getDofs().size());
 
         for (; !it.endDofIter(); it.nextDof()) {
-          if (meshDistributor->getDofMapSd()[feSpace].isRankDof(it.getDofIndex())) {
-            MSG("IN SD DOF\n");
+          if (meshDistributor->getDofMapSd()[feSpace].isRankDof(it.getDofIndex()))
             subdomainRankDofs.push_back(1);
-          } else {
-            MSG("NOT IN SD DOF\n");
+          else
             subdomainRankDofs.push_back(0);
-          }
         }
 
-        MSG("SEND SIZE %d TO RANK %d\n", subdomainRankDofs.size(), it.getRank());
         stdMpi.send(it.getRank(), subdomainRankDofs);
       }
@@ -511,6 +501,23 @@ namespace AMDiS {
     stdMpi.startCommunication();
 
+    for (DofComm::Iterator it(meshDistributor->getDofComm().getSendDofs(),
+                              meshLevel, feSpace);
+         !it.end(); it.nextRank())
+      for (; !it.endDofIter(); it.nextDof())
+        if (!isPrimal(feSpace, it.getDofIndex()))
+          if (stdMpi.getRecvData(it.getRank())[it.getDofCounter()] == 1)