Skip to content
GitLab
Menu
Projects
Groups
Snippets
Help
Help
Support
Community forum
Keyboard shortcuts
?
Submit feedback
Contribute to GitLab
Sign in
Toggle navigation
Menu
Open sidebar
Backofen, Rainer
amdis
Commits
23da3114
Commit
23da3114
authored
Oct 05, 2009
by
Thomas Witkowski
Browse files
Bugfix in parallel domain decomposition with higher order elements.
parent
5f6c2d39
Changes
2
Hide whitespace changes
Inline
Side-by-side
AMDiS/src/ParallelDomainBase.cc
View file @
23da3114
...
...
@@ -241,7 +241,7 @@ namespace AMDiS {
}
void
ParallelDomainBase
::
setDofVector
(
DOFVector
<
double
>*
vec
,
void
ParallelDomainBase
::
setDofVector
(
Vec
&
petscVec
,
DOFVector
<
double
>*
vec
,
int
dispMult
,
int
dispAdd
)
{
DOFVector
<
double
>::
Iterator
dofIt
(
vec
,
USED_DOFS
);
...
...
@@ -249,7 +249,7 @@ namespace AMDiS {
int
index
=
mapLocalGlobalDOFs
[
dofIt
.
getDOFIndex
()]
*
dispMult
+
dispAdd
;
double
value
=
*
dofIt
;
VecSetValues
(
petsc
Rhs
Vec
,
1
,
&
index
,
&
value
,
ADD_VALUES
);
VecSetValues
(
petscVec
,
1
,
&
index
,
&
value
,
ADD_VALUES
);
}
}
...
...
@@ -277,7 +277,7 @@ namespace AMDiS {
MatAssemblyBegin
(
petscMatrix
,
MAT_FINAL_ASSEMBLY
);
MatAssemblyEnd
(
petscMatrix
,
MAT_FINAL_ASSEMBLY
);
setDofVector
(
vec
);
setDofVector
(
petscRhsVec
,
vec
);
VecAssemblyBegin
(
petscRhsVec
);
VecAssemblyEnd
(
petscRhsVec
);
...
...
@@ -512,7 +512,7 @@ namespace AMDiS {
MatAssemblyEnd
(
petscMatrix
,
MAT_FINAL_ASSEMBLY
);
for
(
int
i
=
0
;
i
<
nComponents
;
i
++
)
setDofVector
(
vec
->
getDOFVector
(
i
),
nComponents
,
i
);
setDofVector
(
petscRhsVec
,
vec
->
getDOFVector
(
i
),
nComponents
,
i
);
VecAssemblyBegin
(
petscRhsVec
);
VecAssemblyEnd
(
petscRhsVec
);
...
...
@@ -602,6 +602,15 @@ namespace AMDiS {
{
FUNCNAME
(
"ParallelDomainBase::solvePetscMatrix()"
);
#if 0
// Set old solution to be initial guess for petsc solver.
for (int i = 0; i < nComponents; i++)
setDofVector(petscSolVec, vec->getDOFVector(i), nComponents, i);
VecAssemblyBegin(petscSolVec);
VecAssemblyEnd(petscSolVec);
#endif
KSP
solver
;
KSPCreate
(
PETSC_COMM_WORLD
,
&
solver
);
...
...
@@ -612,6 +621,8 @@ namespace AMDiS {
KSPSetType
(
solver
,
KSPBCGS
);
KSPMonitorSet
(
solver
,
myKSPMonitor
,
PETSC_NULL
,
0
);
KSPSetFromOptions
(
solver
);
// Do not delete the solution vector, use it for the initial guess.
// KSPSetInitialGuessNonzero(solver, PETSC_TRUE);
KSPSolve
(
solver
,
petscRhsVec
,
petscSolVec
);
...
...
@@ -1188,10 +1199,11 @@ namespace AMDiS {
ElementDofIterator
elDofIt
(
feSpace
);
DofSet
rankDofSet
;
// The vertexDof list must be recreated from scratch. Otherwise, it is possible
// that it maps dofs that were removed (this is also possible if the mesh was
// refined, e.g., center dofs of an element are not dofs of the children).
DofToBool
oldVertexDof
=
vertexDof
;
vertexDof
.
clear
();
TraverseStack
stack
;
...
...
@@ -1237,7 +1249,7 @@ namespace AMDiS {
for
(
DofContainer
::
iterator
iit
=
oldSendDofs
[
it
->
first
].
begin
();
iit
!=
oldSendDofs
[
it
->
first
].
end
();
++
iit
)
if
(
v
ertexDof
[
*
iit
])
if
(
oldV
ertexDof
[
*
iit
])
dofsToSend
.
push_back
(
*
iit
);
DofContainer
dofs
;
...
...
@@ -1265,7 +1277,7 @@ namespace AMDiS {
for
(
DofContainer
::
iterator
iit
=
oldRecvDofs
[
it
->
first
].
begin
();
iit
!=
oldRecvDofs
[
it
->
first
].
end
();
++
iit
)
if
(
v
ertexDof
[
*
iit
])
{
if
(
oldV
ertexDof
[
*
iit
])
{
dofsToRecv
.
push_back
(
*
iit
);
DofContainer
::
iterator
eraseIt
=
find
(
rankDofs
.
begin
(),
rankDofs
.
end
(),
*
iit
);
...
...
@@ -1334,7 +1346,6 @@ namespace AMDiS {
i
++
;
}
// === Send new DOF indices. ===
std
::
vector
<
int
*>
sendBuffers
(
sendDofs
.
size
());
...
...
@@ -1350,12 +1361,11 @@ namespace AMDiS {
sendBuffers
[
i
]
=
new
int
[
nSendDofs
];
int
c
=
0
;
for
(
DofContainer
::
iterator
dofIt
=
sendIt
->
second
.
begin
();
dofIt
!=
sendIt
->
second
.
end
();
++
dofIt
)
{
dofIt
!=
sendIt
->
second
.
end
();
++
dofIt
)
sendBuffers
[
i
][
c
++
]
=
rankDofsNewGlobalIndex
[
*
dofIt
];
}
request
[
requestCounter
++
]
=
mpiComm
.
Isend
(
sendBuffers
[
i
],
nSendDofs
,
MPI_INT
,
sendIt
->
first
,
0
);
mpiComm
.
Isend
(
sendBuffers
[
i
],
nSendDofs
,
MPI_INT
,
sendIt
->
first
,
0
);
}
i
=
0
;
...
...
@@ -1363,9 +1373,9 @@ namespace AMDiS {
recvIt
!=
recvDofs
.
end
();
++
recvIt
,
i
++
)
{
int
nRecvDofs
=
recvIt
->
second
.
size
();
recvBuffers
[
i
]
=
new
int
[
nRecvDofs
];
request
[
requestCounter
++
]
=
mpiComm
.
Irecv
(
recvBuffers
[
i
],
nRecvDofs
,
MPI_INT
,
recvIt
->
first
,
0
);
mpiComm
.
Irecv
(
recvBuffers
[
i
],
nRecvDofs
,
MPI_INT
,
recvIt
->
first
,
0
);
}
MPI
::
Request
::
Waitall
(
requestCounter
,
request
);
...
...
AMDiS/src/ParallelDomainBase.h
View file @
23da3114
...
...
@@ -254,7 +254,8 @@ namespace AMDiS {
void
setDofMatrix
(
DOFMatrix
*
mat
,
int
dispMult
=
1
,
int
dispAddRow
=
0
,
int
dispAddCol
=
0
);
void
setDofVector
(
DOFVector
<
double
>*
vec
,
int
disMult
=
1
,
int
dispAdd
=
0
);
void
setDofVector
(
Vec
&
petscVec
,
DOFVector
<
double
>*
vec
,
int
disMult
=
1
,
int
dispAdd
=
0
);
void
DbgCreateElementMap
(
ElementIdxToDofs
&
elMap
);
...
...
Write
Preview
Supports
Markdown
0%
Try again
or
attach a new file
.
Attach a file
Cancel
You are about to add
0
people
to the discussion. Proceed with caution.
Finish editing this message first!
Cancel
Please
register
or
sign in
to comment