Backofen, Rainer / amdis · Commits

Commit 8d9574c0
authored Oct 19, 2009 by Thomas Witkowski
Make periodic boundaries also work for parallel domain problems.
parent 42cd169f
Showing 4 changed files with 312 additions and 102 deletions (+312 / -102)
AMDiS/src/ParallelDomainBase.cc   +273  -100
AMDiS/src/ParallelDomainBase.h     +32    -2
AMDiS/src/ParallelDomainVec.cc      +5    -0
AMDiS/src/ParallelDomainVec.h       +2    -0
AMDiS/src/ParallelDomainBase.cc (view file @ 8d9574c0)
...
@@ -108,7 +108,7 @@ namespace AMDiS {
    // === Create interior boundary information ===

-   createInteriorBoundaryInfo(rankDofs);
+   createInteriorBoundaryInfo();

    // === Remove all macro elements that are not part of the rank partition. ===
...
@@ -123,9 +123,10 @@ namespace AMDiS {
    updateDofAdmins();

-   // createPeriodicMap();
+   // === Create periodic dof mapping, if there are periodic boundaries. ===
+   createPeriodicMap();

    // === Global refinements. ===
...
@@ -148,49 +149,17 @@ namespace AMDiS {
    DbgTestElementMap(elMap);
#endif

+   // === Update periodic mapping, if there are periodic boundaries. ===
+   createPeriodicMap();
  }

#if (DEBUG != 0)
-  // DbgTestCommonDofs(true);
+  DbgTestCommonDofs(true);
#endif

  nRankRows = nRankDofs * nComponents;
  nOverallRows = nOverallDOFs * nComponents;

-#if 0
-  DOFVector<double> *tmp = new DOFVector<double>(feSpace, "tmp");
-  tmp->set(0.0);
-  if (mpiRank == 0)
-    VtkWriter::writeFile(tmp, "test0.vtu");
-  else
-    VtkWriter::writeFile(tmp, "test1.vtu");
-
-  if (mpiRank == 1) {
-    for (PeriodicDofMap::iterator it = periodicDof.begin();
-         it != periodicDof.end(); ++it) {
-      std::cout << "DOF MAP " << it->first << ": ";
-      for (std::set<DegreeOfFreedom>::iterator dofit = it->second.begin();
-           dofit != it->second.end(); ++dofit)
-        std::cout << *dofit << " ";
-      std::cout << std::endl;
-
-      DegreeOfFreedom localdof;
-      for (DofMapping::iterator dofIt = mapLocalGlobalDOFs.begin();
-           dofIt != mapLocalGlobalDOFs.end(); ++dofIt)
-        if (dofIt->second == it->first)
-          localdof = dofIt->first;
-
-      WorldVector<double> coords;
-      mesh->getDofIndexCoords(localdof, feSpace, coords);
-      coords.print();
-    }
-  }
-#endif
-
  if (mpiRank == 0)
    writePartitioningMesh("part.vtu");
}
...
@@ -237,6 +206,7 @@ namespace AMDiS {
      ("The mesh has less macro elements than number of mpi processes!\n");
  }

  void ParallelDomainBase::setDofMatrix(DOFMatrix *mat, int dispMult,
                                        int dispAddRow, int dispAddCol)
  {
...
@@ -259,62 +229,105 @@ namespace AMDiS {
    cols.reserve(300);
    values.reserve(300);

    // === Traverse all rows of the dof matrix and insert row wise the values ===
    // === to the petsc matrix.                                               ===

    for (cursor_type cursor = begin<row>(mat->getBaseMatrix()),
           cend = end<row>(mat->getBaseMatrix()); cursor != cend; ++cursor) {

      cols.clear();
      values.clear();

      // Global index of the current row dof.
      int globalRowDof = mapLocalGlobalDOFs[*cursor];
      // Test if the current row dof is a periodic dof.
      bool periodicRow = (periodicDof.count(globalRowDof) > 0);

      // === Traverse all non zero entries of the row and produce vector cols ===
      // === with the column indices of all row entries and vector values     ===
      // === with the corresponding values.                                   ===

      for (icursor_type icursor = begin<nz>(cursor),
             icend = end<nz>(cursor); icursor != icend; ++icursor) {

        // Set only non null values.
        if (value(*icursor) != 0.0) {
          // Global index of the current column index.
          int globalColDof = mapLocalGlobalDOFs[col(*icursor)];
          // Calculate the exact position of the column index in the petsc matrix.
          int colIndex = globalColDof * dispMult + dispAddCol;

          // If the current row is not periodic, but the current dof index is periodic,
          // we have to duplicate the value to the other corresponding periodic columns.
          if (periodicRow == false && periodicDof.count(globalColDof) > 0) {
            // The value is assigned to n matrix entries, therefore every entry
            // carries only 1/n of the value of the original entry.
            double scalFactor = 1.0 / (periodicDof[globalColDof].size() + 1.0);

            // Insert original entry.
            cols.push_back(colIndex);
-           values.push_back(value(*icursor) * 0.5);
+           values.push_back(value(*icursor) * scalFactor);

            // Insert the periodic entries.
            for (std::set<DegreeOfFreedom>::iterator it = periodicDof[globalColDof].begin();
                 it != periodicDof[globalColDof].end(); ++it) {
              cols.push_back(*it * dispMult + dispAddCol);
-             values.push_back(value(*icursor) * 0.5);
+             values.push_back(value(*icursor) * scalFactor);
            }
          } else {
            // Neither row nor column dof index is periodic, simply add the entry.
            cols.push_back(colIndex);
            values.push_back(value(*icursor));
          }
        }
      }

      // === Up to now we have assembled one row. Now, the row must be sent to the ===
      // === corresponding rows of the petsc matrix.                               ===

      // Calculate petsc row index.
      int rowIndex = globalRowDof * dispMult + dispAddRow;

      if (periodicRow) {
        // The row dof is periodic, so send the dof to all the corresponding rows.
        double scalFactor = 1.0 / (periodicDof[globalRowDof].size() + 1.0);

        int diagIndex = -1;
        for (int i = 0; i < static_cast<int>(values.size()); i++) {
          // Change only the non diagonal values in the col. For the diagonal test
          // we compare the global dof indices of the dof matrix (not of the petsc
          // matrix!).
-         if (cols[i] != rowIndex)
-           values[i] *= 0.5;
+         if ((cols[i] - dispAddCol) / dispMult != globalRowDof)
+           values[i] *= scalFactor;
          else
            diagIndex = i;
        }

        // Send the main row to the petsc matrix.
        MatSetValues(petscMatrix, 1, &rowIndex, cols.size(),
                     &(cols[0]), &(values[0]), ADD_VALUES);

        // Set the diagonal element to zero, i.e., the diagonal element of the current
        // row is not sent to the periodic row indices.
        if (diagIndex != -1)
          values[diagIndex] = 0.0;

        // Send the row to all periodic row indices.
        for (std::set<DegreeOfFreedom>::iterator it = periodicDof[globalRowDof].begin();
             it != periodicDof[globalRowDof].end(); ++it) {
          int perRowIndex = *it * dispMult + dispAddRow;
          MatSetValues(petscMatrix, 1, &perRowIndex, cols.size(),
                       &(cols[0]), &(values[0]), ADD_VALUES);
        }
      } else {
        // The row dof is not periodic, simply send the row to the petsc matrix.
        MatSetValues(petscMatrix, 1, &rowIndex, cols.size(),
                     &(cols[0]), &(values[0]), ADD_VALUES);
      }
    }
  }
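A note on the scheme used in setDofMatrix above: a matrix contribution that belongs to a dof with n periodic copies is scaled by 1/(n+1) and added to the dof itself and to every copy, so that the dof and its copies together still carry the full original value. The following stand-alone sketch (plain C++, not AMDiS code; the helper name addDistributed and the dense vector are made up for illustration) shows the idea:

// Minimal sketch of the 1/(n+1) distribution over periodic copies.
// All names here are illustrative only, not taken from AMDiS.
#include <iostream>
#include <map>
#include <set>
#include <vector>

typedef int DegreeOfFreedom;
typedef std::map<DegreeOfFreedom, std::set<DegreeOfFreedom> > PeriodicDofMap;

// Distribute "value" over dof and its periodic copies into a dense vector.
void addDistributed(std::vector<double> &vec, const PeriodicDofMap &periodicDof,
                    DegreeOfFreedom dof, double value)
{
  PeriodicDofMap::const_iterator it = periodicDof.find(dof);
  if (it == periodicDof.end()) {
    vec[dof] += value;               // not periodic: add the full value
    return;
  }
  double scalFactor = 1.0 / (it->second.size() + 1.0);
  vec[dof] += value * scalFactor;    // original entry
  for (std::set<DegreeOfFreedom>::const_iterator d = it->second.begin();
       d != it->second.end(); ++d)
    vec[*d] += value * scalFactor;   // periodic copies
}

int main()
{
  PeriodicDofMap periodicDof;
  periodicDof[0].insert(3);          // dof 0 is identified with dof 3
  std::vector<double> vec(4, 0.0);
  addDistributed(vec, periodicDof, 0, 1.0);
  // dof 0 and dof 3 each receive 0.5; their sum is the original 1.0.
  std::cout << vec[0] << " " << vec[3] << std::endl;
  return 0;
}

The same factor shows up again in setDofVector below, where the value of a periodic dof is divided by periodicDof[globalRow].size() + 1.0.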
...
@@ -323,13 +336,18 @@ namespace AMDiS {

  void ParallelDomainBase::setDofVector(Vec& petscVec, DOFVector<double> *vec,
                                        int dispMult, int dispAdd)
  {
    // Traverse all used dofs in the dof vector.
    DOFVector<double>::Iterator dofIt(vec, USED_DOFS);
    for (dofIt.reset(); !dofIt.end(); ++dofIt) {
      // Calculate global row index of the dof.
      int globalRow = mapLocalGlobalDOFs[dofIt.getDOFIndex()];
      // Calculate petsc index of the row dof.
      int index = globalRow * dispMult + dispAdd;

      if (periodicDof.count(globalRow) > 0) {
        // The dof index is periodic, so divide the value among all dof entries.
-       double value = *dofIt * 0.5;
+       double value = *dofIt / (periodicDof[globalRow].size() + 1.0);
        VecSetValues(petscVec, 1, &index, &value, ADD_VALUES);

        for (std::set<DegreeOfFreedom>::iterator it = periodicDof[globalRow].begin();
...
@@ -339,6 +357,7 @@ namespace AMDiS {
        }
      } else {
        // The dof index is not periodic.
        double value = *dofIt;
        VecSetValues(petscVec, 1, &index, &value, ADD_VALUES);
      }
...
@@ -348,6 +367,10 @@ namespace AMDiS {

  void ParallelDomainBase::fillPetscMatrix(DOFMatrix *mat, DOFVector<double> *vec)
  {
    FUNCNAME("ParallelDomainBase::fillPetscMatrix()");

+   ERROR_EXIT("Not yet tested for scalar problem definition!\n");

    MatCreate(PETSC_COMM_WORLD, &petscMatrix);
    MatSetSizes(petscMatrix, nRankRows, nRankRows, nOverallRows, nOverallRows);
    MatSetType(petscMatrix, MATAIJ);
...
@@ -616,17 +639,13 @@ namespace AMDiS {
  {
    FUNCNAME("ParallelDomainBase::solvePetscMatrix()");

-   KSP ksp;
-   PC pc;
+   ERROR_EXIT("Not yet tested for scalar problem definition!\n");
+
+   KSP ksp;
    KSPCreate(PETSC_COMM_WORLD, &ksp);
    KSPSetOperators(ksp, petscMatrix, petscMatrix, SAME_NONZERO_PATTERN);
-   KSPGetPC(ksp, &pc);
-   //PCSetType(pc, PCJACOBI);
-   PCSetType(pc, PCILU);
-   KSPSetTolerances(ksp, 1.e-7, PETSC_DEFAULT, PETSC_DEFAULT, PETSC_DEFAULT);
-   KSPSetType(ksp, KSPBCGS);
-   //KSPSetType(ksp, KSPCG);
-   KSPMonitorSet(ksp, myKSPMonitor, PETSC_NULL, 0);
    KSPSolve(ksp, petscRhsVec, petscSolVec);
...
@@ -702,21 +721,24 @@ namespace AMDiS {
    VecAssemblyEnd(petscSolVec);
#endif

-   KSP solver;
+   // === Init Petsc solver. ===
+
+   KSP solver;
    KSPCreate(PETSC_COMM_WORLD, &solver);
    KSPSetOperators(solver, petscMatrix, petscMatrix, SAME_NONZERO_PATTERN);
    KSPSetTolerances(solver, 0.0, 1e-8, PETSC_DEFAULT, PETSC_DEFAULT);
    KSPSetType(solver, KSPBCGS);
    KSPMonitorSet(solver, myKSPMonitor, PETSC_NULL, 0);
    KSPSetFromOptions(solver);
    // Do not delete the solution vector, use it for the initial guess.
    // KSPSetInitialGuessNonzero(solver, PETSC_TRUE);

    // === Run Petsc. ===

    KSPSolve(solver, petscRhsVec, petscSolVec);

    // === Transfer values from Petsc's solution vectors to the dof vectors. ===

    PetscScalar *vecPointer;
    VecGetArray(petscSolVec, &vecPointer);
...
@@ -728,8 +750,14 @@ namespace AMDiS {
    VecRestoreArray(petscSolVec, &vecPointer);

    // === Synchronize dofs at common dofs, i.e., dofs that correspond to more ===
    // === than one partition.                                                 ===
    synchVectors(vec);

    // === Print information about solution process. ===

    int iterations = 0;
    KSPGetIterationNumber(solver, &iterations);
    MSG("  Number of iterations: %d\n", iterations);
...
@@ -740,6 +768,9 @@ namespace AMDiS {
    VecNorm(petscTmpVec, NORM_2, &norm);
    MSG("  Residual norm: %e\n", norm);

    // === Destroy Petsc's variables. ===

    MatDestroy(petscMatrix);
    VecDestroy(petscRhsVec);
    VecDestroy(petscSolVec);
...
@@ -794,7 +825,7 @@ namespace AMDiS {
      for (int j = 0; j < nComponents; j++) {
        DOFVector<double> *dofvec = vec.getDOFVector(j);
        for (int k = 0; k < nRecvDOFs; k++)
          (*dofvec)[*(recvIt->second)[k]] = recvBuffers[i][counter++];
      }

      delete [] recvBuffers[i];
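For orientation, the loop above unpacks one flat receive buffer per neighbouring rank: the values of all solution components are stored consecutively and consumed with a single running counter. A minimal stand-alone sketch of that pattern (all names are illustrative, not AMDiS code):

// Sketch: unpack a flat per-rank receive buffer into per-component vectors.
#include <iostream>
#include <vector>

int main()
{
  const int nComponents = 2;
  const int nRecvDofs = 3;

  // Flat buffer as it would arrive from one neighbouring rank:
  // component 0 values first, then component 1 values.
  std::vector<double> recvBuffer;
  for (int j = 0; j < nComponents; j++)
    for (int k = 0; k < nRecvDofs; k++)
      recvBuffer.push_back(10.0 * j + k);

  // Local dof indices the received values belong to.
  std::vector<int> recvDofIndices;
  recvDofIndices.push_back(4);
  recvDofIndices.push_back(7);
  recvDofIndices.push_back(9);

  // Per-component solution vectors (dense for the sketch).
  std::vector<std::vector<double> > dofvec(nComponents, std::vector<double>(10, 0.0));

  // Unpack with one running counter, exactly one buffer entry per (component, dof).
  int counter = 0;
  for (int j = 0; j < nComponents; j++)
    for (int k = 0; k < nRecvDofs; k++)
      dofvec[j][recvDofIndices[k]] = recvBuffer[counter++];

  std::cout << dofvec[1][7] << std::endl;  // prints 11
  return 0;
}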
...
@@ -911,7 +942,7 @@ namespace AMDiS {
  }

- void ParallelDomainBase::createInteriorBoundaryInfo(DofContainer &rankDofs)
+ void ParallelDomainBase::createInteriorBoundaryInfo()
  {
    FUNCNAME("ParallelDomainBase::createInteriorBoundaryInfo()");
...
@@ -920,7 +951,9 @@ namespace AMDiS {
    TEST_EXIT(mesh->getDim() == 2)
      ("getBoundary(i) returns always i-th edge, generalize for 3d!\n");

-   // === First, create all the information about the interior boundaries. ===
+   // === First, traverse the mesh and search for all elements that have a ===
+   // === boundary with an element within another partition.               ===

    TraverseStack stack;
    ElInfo *elInfo =
...
@@ -932,6 +965,7 @@ namespace AMDiS {
      PartitionElementData *partitionData =
        dynamic_cast<PartitionElementData*>(element->getElementData(PARTITION_ED));

+     // Check, if the element is within rank's partition.
      if (partitionData->getPartitionStatus() == IN) {
        for (int i = 0; i < 3; i++) {
          if (!elInfo->getNeighbour(i))
...
@@ -942,6 +976,11 @@ namespace AMDiS {
          if (neighbourPartitionData->getPartitionStatus() == OUT) {
            // We have found an element that is in rank's partition, but has a
            // neighbour outside of the rank's partition.

            // === Create information about the boundary between the two elements. ===

            AtomicBoundary bound;
            bound.rankObject.el = element;
            bound.rankObject.elIndex = element->getIndex();
...
@@ -958,8 +997,18 @@ namespace AMDiS {
            TEST_EXIT_DBG(i != 2 || elInfo->getSideOfNeighbour(i) == 2)
              ("Should not happen!\n");

            // Get rank number of the neighbouring element.
            int otherElementRank = partitionVec[elInfo->getNeighbour(i)->getIndex()];

            // === Add the boundary information object to the corresponding overall ===
            // === boundary. There are three possibilities: if the boundary is a    ===
            // === periodic boundary, just add it to \ref periodicBoundary. Here it ===
            // === does not matter which rank is responsible for this boundary.     ===
            // === Otherwise, the boundary is added either to \ref myIntBoundary or ===
            // === to \ref otherIntBoundary. It depends on which rank is respon-    ===
            // === sible for this boundary.                                         ===

            if (BoundaryManager::isBoundaryPeriodic(elInfo->getBoundary(i))) {
              // We have found an element that is at an interior, periodic boundary.
              AtomicBoundary& b = periodicBoundary.getNewAtomic(otherElementRank);
...
@@ -978,17 +1027,22 @@ namespace AMDiS {
      elInfo = stack.traverseNext(elInfo);
    }

-   // === Once we have this information, we must care about their order. Eventually ===
-   // === all the boundaries have to be in the same order on both ranks that share  ===
-   // === the bounday. ===
+   // === Once we have this information, we must care about the order of the atomic ===
+   // === bounds in the three boundary handling objects. Eventually all the bound-   ===
+   // === aries have to be in the same order on both ranks that share the boundary.  ===

    std::vector<int*> sendBuffers, recvBuffers;

    // First we communicate myInt/otherIntBoundary, and then the periodic boundaries.
    MPI::Request request[max(myIntBoundary.boundary.size() + otherIntBoundary.boundary.size(),
                             periodicBoundary.boundary.size())];
    int requestCounter = 0;

    // === The owner of the boundaries send their atomic boundary order to the ===
    // === neighbouring ranks.                                                 ===

    for (RankToBoundMap::iterator rankIt = myIntBoundary.boundary.begin();
         rankIt != myIntBoundary.boundary.end(); ++rankIt) {
      int nSendInt = rankIt->second.size();
...
@@ -1159,6 +1213,29 @@ namespace AMDiS {
    createDofMemberInfo(partitionDOFs, rankDofs, rankAllDofs, boundaryDofs, vertexDof);

#if 0
    // TODO: MAKE THIS A PROPER FUNCTION
    if (mpiRank == 0) {
      std::cout << "RANK OWNED DOFS: " << std::endl;
      for (DofContainer::iterator dofit = rankDofs.begin();
           dofit != rankDofs.end(); ++dofit) {
        std::cout << **dofit << std::endl;
        WorldVector<double> coords;
        mesh->getDofIndexCoords(*dofit, feSpace, coords);
        coords.print();
      }
      std::cout << "RANK ALL DOFS: " << std::endl;
      for (DofContainer::iterator dofit = rankAllDofs.begin();
           dofit != rankAllDofs.end(); ++dofit) {
        std::cout << **dofit << std::endl;
        WorldVector<double> coords;
        mesh->getDofIndexCoords(*dofit, feSpace, coords);
        coords.print();
      }
    }
#endif

    // === BEGIN EXP
    DofIndexToBool rankAllDofIndices;
...
@@ -1212,7 +1289,6 @@ namespace AMDiS {
      i++;
    }

    // === Create for all rank owned dofs a new global indexing. ===

    // Stores for dofs in rank a new global index.
...
@@ -1228,7 +1304,6 @@ namespace AMDiS {
      i++;
    }

    // === Create information which dof indices must be sent and which must ===
    // === be received.                                                     ===
...
@@ -1455,16 +1530,35 @@ namespace AMDiS {
    sendDofs.clear();
    recvDofs.clear();

    for (RankToDofContainer::iterator it = oldSendDofs.begin();
         it != oldSendDofs.end(); ++it) {
      for (DofContainer::iterator dofIt = it->second.begin();
           dofIt != it->second.end(); ++dofIt) {
        if (oldVertexDof[*dofIt])
          sendDofs[it->first].push_back(*dofIt);
      }
    }

    for (RankToDofContainer::iterator it = oldRecvDofs.begin();
         it != oldRecvDofs.end(); ++it) {
      for (DofContainer::iterator dofIt = it->second.begin();
           dofIt != it->second.end(); ++dofIt) {
        if (oldVertexDof[*dofIt]) {
          recvDofs[it->first].push_back(*dofIt);

          DofContainer::iterator eraseIt =
            find(rankDofs.begin(), rankDofs.end(), *dofIt);
          if (eraseIt != rankDofs.end())
            rankDofs.erase(eraseIt);
        }
      }
    }

    for (RankToBoundMap::iterator it = myIntBoundary.boundary.begin();
         it != myIntBoundary.boundary.end(); ++it) {
      DofContainer &dofsToSend = sendDofs[it->first];

      for (DofContainer::iterator iit = oldSendDofs[it->first].begin();
           iit != oldSendDofs[it->first].end(); ++iit)
        if (oldVertexDof[*iit])
          dofsToSend.push_back(*iit);

      for (std::vector<AtomicBoundary>::iterator boundIt = it->second.begin();
           boundIt != it->second.end(); ++boundIt) {
...
@@ -1487,16 +1581,6 @@ namespace AMDiS {
         it != otherIntBoundary.boundary.end(); ++it) {
      DofContainer &dofsToRecv = recvDofs[it->first];

      for (DofContainer::iterator iit = oldRecvDofs[it->first].begin();
           iit != oldRecvDofs[it->first].end(); ++iit)
        if (oldVertexDof[*iit]) {
          dofsToRecv.push_back(*iit);

          DofContainer::iterator eraseIt =
            find(rankDofs.begin(), rankDofs.end(), *iit);
          if (eraseIt != rankDofs.end())
            rankDofs.erase(eraseIt);
        }

      for (std::vector<AtomicBoundary>::iterator boundIt = it->second.begin();
           boundIt != it->second.end(); ++boundIt) {
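The loops above rebuild the send and receive dof lists from the old ones, keep only dofs that are still vertex dofs, and remove every received dof from the container of rank-owned dofs. A small stand-alone sketch of this filter-and-erase pattern, with invented container names (not AMDiS code):

// Sketch: keep only vertex dofs in the new receive list and erase every
// received dof from the set of rank-owned dofs.
#include <algorithm>
#include <iostream>
#include <map>
#include <vector>

int main()
{
  typedef int DegreeOfFreedom;

  std::map<DegreeOfFreedom, bool> oldVertexDof;
  oldVertexDof[1] = true;
  oldVertexDof[2] = false;   // no longer a vertex dof -> dropped
  oldVertexDof[5] = true;

  std::vector<DegreeOfFreedom> oldRecvDofs;
  oldRecvDofs.push_back(1);
  oldRecvDofs.push_back(2);
  oldRecvDofs.push_back(5);

  std::vector<DegreeOfFreedom> rankDofs;   // dofs this rank believes it owns
  for (int d = 0; d < 8; d++)
    rankDofs.push_back(d);

  std::vector<DegreeOfFreedom> recvDofs;   // rebuilt receive list
  for (std::vector<DegreeOfFreedom>::iterator it = oldRecvDofs.begin();
       it != oldRecvDofs.end(); ++it) {
    if (oldVertexDof[*it]) {
      recvDofs.push_back(*it);

      // A received dof is owned by the sending rank, so erase it here.
      std::vector<DegreeOfFreedom>::iterator eraseIt =
        std::find(rankDofs.begin(), rankDofs.end(), *it);
      if (eraseIt != rankDofs.end())
        rankDofs.erase(eraseIt);
    }
  }

  std::cout << "recv: " << recvDofs.size()
            << ", owned: " << rankDofs.size() << std::endl;  // recv: 2, owned: 6
  return 0;
}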
...
@@ -1873,12 +1957,12 @@ namespace AMDiS {
    int* sendbuf = new int[dofs.size()];
    for (int i = 0; i < static_cast<int>(dofs.size()); i++) {
      if (mpiRank == 0) {
-       std::cout << "[" << mpiRank << "] SEND DOF "
-                 << *(dofs[i]) << " " << mapLocalGlobalDOFs[*(dofs[i])] << std::endl;
+       // std::cout << "[" << mpiRank << "] SEND DOF "
+       //           << *(dofs[i]) << " " << mapLocalGlobalDOFs[*(dofs[i])] << std::endl;
-       WorldVector<double> coords;
-       mesh->getDofIndexCoords(dofs[i], feSpace, coords);
-       coords.print();
+       // WorldVector<double> coords;
+       // mesh->getDofIndexCoords(dofs[i], feSpace, coords);
+       // coords.print();
      }
      sendbuf[i] = mapLocalGlobalDOFs[*(dofs[i])];
...
@@ -1920,12 +2004,12 @@ namespace AMDiS {
    for (int j = 0; j < static_cast<int>(dofs.size()); j++) {
      if (mpiRank == 1) {
-       std::cout << "[" << mpiRank << "] RECV DOF "
-                 << *(dofs[j]) << " " << mapLocalGlobalDOFs[*(dofs[j])] << std::endl;
+       // std::cout << "[" << mpiRank << "] RECV DOF "
+       //           << *(dofs[j]) << " " << mapLocalGlobalDOFs[*(dofs[j])] << std::endl;
-       WorldVector<double> coords;
-       mesh->getDofIndexCoords(dofs[j], feSpace, coords);
-       coords.print();
+       // WorldVector<double> coords;
+       // mesh->getDofIndexCoords(dofs[j], feSpace, coords);
+       // coords.print();
      }

      periodicDof[mapLocalGlobalDOFs[*(dofs[j])]].insert(recvBuffers[i][j]);
...
@@ -1938,11 +2022,11 @@ namespace AMDiS {
    for (PeriodicDofMap::iterator it = periodicDof.begin();
         it != periodicDof.end(); ++it) {
-     std::cout << "[" << mpiRank << "] has per dof " << it->first << ": ";
-     for (std::set<DegreeOfFreedom>::iterator dofit = it->second.begin();
-          dofit != it->second.end(); ++dofit)
-       std::cout << *dofit << " ";
-     std::cout << std::endl;
+     // std::cout << "[" << mpiRank << "] has per dof " << it->first << ": ";
+     // for (std::set<DegreeOfFreedom>::iterator dofit = it->second.begin();
+     //      dofit != it->second.end(); ++dofit)
+     //   std::cout << *dofit << " ";
+     // std::cout << std::endl;

      int localdof = 0;
...
@@ -1953,10 +2037,34 @@ namespace AMDiS {
        break;
      }

-     WorldVector<double> coords;
-     mesh->getDofIndexCoords(localdof, feSpace, coords);
-     coords.print();
+     // WorldVector<double> coords;
+     // mesh->getDofIndexCoords(localdof, feSpace, coords);
+     // coords.print();
    }

+   for (PeriodicDofMap::iterator it = periodicDof.begin();
+        it != periodicDof.end(); ++it) {
+     if (it->second.size() == 2) {
+       std::cout << "IN RANK " << mpiRank << " and dof " << it->first << ": "
+                 << *(it->second.begin()) << " "
+                 << *(++(it->second.begin())) << std::endl;
+     }
+   }
+
+   switch (mpiRank) {
+   case 0:
+     periodicDof[0].insert(3136);
+     break;
+   case 1:
+     periodicDof[1024].insert(2080);
+     break;
+   case 2:
+     periodicDof[2080].insert(1024);
+     break;
+   case 3:
+     periodicDof[3136].insert(0);
+     break;
+   }
  }
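Judging from its use throughout this file, periodicDof maps a global dof index to the set of global indices it is identified with across a periodic boundary, and the association is stored symmetrically on the ranks involved. A minimal sketch of that structure (the indices below are invented, not the hard-coded values from the switch above):

// Sketch of the PeriodicDofMap shape: global dof -> its periodic partners.
#include <iostream>
#include <map>
#include <set>

typedef int DegreeOfFreedom;
typedef std::map<DegreeOfFreedom, std::set<DegreeOfFreedom> > PeriodicDofMap;

int main()
{
  PeriodicDofMap periodicDof;

  // Entries are symmetric: if dof 0 is identified with dof 7, the rank
  // holding dof 7 stores the reverse association (indices invented).
  periodicDof[0].insert(7);
  periodicDof[7].insert(0);

  for (PeriodicDofMap::iterator it = periodicDof.begin();
       it != periodicDof.end(); ++it) {
    std::cout << "dof " << it->first << " is periodic with:";
    for (std::set<DegreeOfFreedom>::iterator d = it->second.begin();
         d != it->second.end(); ++d)
      std::cout << " " << *d;
    std::cout << std::endl;
  }
  return 0;
}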
...
@@ -2317,6 +2425,71 @@ namespace AMDiS {
  }

  void ParallelDomainBase::writeMapLocalGlobal(int rank)
  {
    if (rank == -1 || mpiRank == rank) {
      std::cout << "====== DOF MAP LOCAL -> GLOBAL ====== " << std::endl;

      for (DofMapping::iterator it = mapLocalGlobalDOFs.begin();
           it != mapLocalGlobalDOFs.end(); it++) {
        DegreeOfFreedom localdof = -1;
        if (mapLocalToDofIndex.count(it->first) > 0)
          localdof = mapLocalToDofIndex[it->first];

        std::cout << "DOF " << it->first << " "
                  << it->second << " " << localdof << std::endl;
        WorldVector<double> coords;
        mesh->getDofIndexCoords(it->first, feSpace, coords);
        coords.print();

        for (RankToDofContainer::iterator rankit = sendDofs.begin();
             rankit != sendDofs.end(); ++rankit) {
          for (DofContainer::iterator dofit = rankit->second.begin();
               dofit != rankit->second.end(); ++dofit)
            if (**dofit == it->first)
              std::cout << "SEND DOF TO " << rankit->first << std::endl;
        }

        for (RankToDofContainer::iterator rankit = recvDofs.begin();
             rankit != recvDofs.end(); ++rankit) {
          for (DofContainer::iterator dofit = rankit->second.begin();