Backofen, Rainer / amdis · Commits

Commit 2e22f957, authored Jan 03, 2012 by Thomas Witkowski
Parent: ac22232d
Changes: 9 files

PETSc global block solver should now work correctly.

AMDiS/src/parallel/MeshDistributor.cc
@@ -78,7 +78,7 @@ namespace AMDiS {
       partitioner(NULL),
       nRankDofs(0),
       nOverallDofs(0),
-      rstart(0),
+      rStartDofs(0),
       deserialized(false),
       writeSerializationFile(false),
       repartitioningAllowed(false),

@@ -1837,13 +1837,13 @@ namespace AMDiS {
     // Get displacement for global rank DOF ordering and global DOF number.
     nRankDofs = rankDofs.size();
-    mpi::getDofNumbering(mpiComm, nRankDofs, rstart, nOverallDofs);
+    mpi::getDofNumbering(mpiComm, nRankDofs, rStartDofs, nOverallDofs);

     // Stores for all rank owned DOFs a new global index.
     DofIndexMap rankDofsNewGlobalIndex;
     for (int i = 0; i < nRankDofs; i++)
-      rankDofsNewGlobalIndex[rankDofs[i]] = i + rstart;
+      rankDofsNewGlobalIndex[rankDofs[i]] = i + rStartDofs;

     // === Send and receive new DOF indices. ===

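The helper mpi::getDofNumbering is not part of this diff; conceptually it computes each rank's global DOF offset (rStartDofs) as an exclusive prefix sum of the per-rank DOF counts, plus the global total. A minimal sketch of that computation using the raw MPI C API; the names are illustrative, not AMDiS's implementation:

    #include <mpi.h>

    // Sketch: compute the first global DOF index owned by this rank
    // (exclusive prefix sum of nRankDofs over the ranks) and the total
    // DOF count. Illustrative only; not the AMDiS implementation.
    void getDofNumbering(MPI_Comm comm, int nRankDofs,
                         int &rStartDofs, int &nOverallDofs)
    {
      int inclusiveSum = 0;
      // Inclusive scan: sum of nRankDofs over ranks 0..myRank.
      MPI_Scan(&nRankDofs, &inclusiveSum, 1, MPI_INT, MPI_SUM, comm);

      // Drop the local contribution to get the exclusive prefix sum.
      rStartDofs = inclusiveSum - nRankDofs;

      // Total number of DOFs over all ranks.
      MPI_Allreduce(&nRankDofs, &nOverallDofs, 1, MPI_INT, MPI_SUM, comm);
    }
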
@@ -1906,7 +1906,7 @@ namespace AMDiS {
     MSG("------------- Debug information -------------\n");
     MSG("nRankDofs = %d\n", nRankDofs);
     MSG("nOverallDofs = %d\n", nOverallDofs);
-    MSG("rstart %d\n", rstart);
+    MSG("rStartDofs %d\n", rStartDofs);

     stringstream oss;
     oss << debugOutputDir << "elementIndex-" << mpiRank << ".vtu";

@@ -2170,7 +2170,7 @@ namespace AMDiS {
     serialize(out, periodicDof);
     serialize(out, periodicDofAssociations);

-    SerUtil::serialize(out, rstart);
+    SerUtil::serialize(out, rStartDofs);
     SerUtil::serialize(out, macroElementNeighbours);

     int nSize = allMacroElements.size();

@@ -2231,7 +2231,7 @@ namespace AMDiS {
     deserialize(in, periodicDof);
     deserialize(in, periodicDofAssociations);

-    SerUtil::deserialize(in, rstart);
+    SerUtil::deserialize(in, rStartDofs);
     SerUtil::deserialize(in, macroElementNeighbours);

     int nSize = 0;

AMDiS/src/parallel/MeshDistributor.h
@@ -136,6 +136,12 @@ namespace AMDiS {
       return nRankDofs;
     }

+    /// Returns \ref rStartDofs, the first global DOF index owned by rank.
+    inline int getStartDofs()
+    {
+      return rStartDofs;
+    }
+
     /// Returns \ref nOverallDofs, the global number of DOFs.
     inline int getNumberOverallDofs()
     {

@@ -225,11 +231,6 @@ namespace AMDiS {
       return lastMeshChangeIndex;
     }

-    inline int getRstart()
-    {
-      return rstart;
-    }
-
     inline int getMpiRank()
     {
       return mpiRank;

@@ -521,6 +522,9 @@ namespace AMDiS {
     /// Number of DOFs in the rank mesh.
     int nRankDofs;

+    /// Index of the first global DOF owned by the rank.
+    int rStartDofs;
+
     /// Number of DOFs in the whole domain.
     int nOverallDofs;

@@ -604,10 +608,6 @@ namespace AMDiS {
     /// repartitioned.
     vector<DOFVector<double>*> interchangeVectors;

-    /// Is the index of the first row of the linear system, which is owned by
-    /// the rank.
-    int rstart;
-
     /** \brief
      * If the problem definition has been read from a serialization file, this
      * variable is true, otherwise it is false. This variable is used to stop the

AMDiS/src/parallel/PetscProblemStat.cc
@@ -25,7 +25,8 @@ namespace AMDiS {
   PetscProblemStat::PetscProblemStat(string nameStr,
                                      ProblemIterationInterface *problemIteration)
-    : ParallelProblemStatBase(nameStr, problemIteration)
+    : ParallelProblemStatBase(nameStr, problemIteration),
+      deleteSolver(true)
   {
     FUNCNAME("PetscProblemStat::PetscProblemStat()");

@@ -54,6 +55,14 @@ namespace AMDiS {
   }

+  PetscProblemStat::PetscProblemStat(string nameStr, PetscSolver *solver)
+    : ParallelProblemStatBase(nameStr, NULL),
+      petscSolver(solver),
+      deleteSolver(false)
+  {}
+
   void PetscProblemStat::initialize(Flag initFlag,
                                     ProblemStatSeq *adoptProblem,
                                     Flag adoptFlag)

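The new two-argument constructor hands over an externally created solver and records deleteSolver = false, so the destructor (see PetscProblemStat.h below) only frees solvers the class allocated itself. A hedged usage sketch; the driver code and the default-constructibility of the solver class are assumptions:

    // Sketch: caller-owned solver, so ~PetscProblemStat() must not delete it.
    PetscSolverGlobalBlockMatrix blockSolver;           // assumed constructible
    PetscProblemStat prob("myProblem", &blockSolver);   // deleteSolver == false
    // With the one-argument constructor the object allocates its own solver
    // and deleteSolver == true, so the destructor frees it.
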
AMDiS/src/parallel/PetscProblemStat.h
@@ -40,8 +40,12 @@ namespace AMDiS {
     PetscProblemStat(std::string nameStr,
                      ProblemIterationInterface *problemIteration = NULL);

+    PetscProblemStat(std::string nameStr, PetscSolver *solver);
+
     ~PetscProblemStat()
     {
+      if (deleteSolver)
+        delete petscSolver;
     }

@@ -55,6 +59,8 @@ namespace AMDiS {
   protected:
     PetscSolver *petscSolver;

+    bool deleteSolver;
   };

   typedef PetscProblemStat ParallelProblemStat;

AMDiS/src/parallel/PetscSolver.cc
@@ -40,4 +40,35 @@ namespace AMDiS {
     }
   }

+  void PetscSolver::copyVec(Vec& originVec, Vec& destVec,
+                            vector<int>& originIndex, vector<int>& destIndex)
+  {
+    FUNCNAME("PetscSolver::copyVec()");
+
+    IS originIs, destIs;
+    ISCreateGeneral(PETSC_COMM_WORLD, originIndex.size(),
+                    &(originIndex[0]), PETSC_USE_POINTER, &originIs);
+    ISCreateGeneral(PETSC_COMM_WORLD, destIndex.size(),
+                    &(destIndex[0]), PETSC_USE_POINTER, &destIs);
+
+    VecScatter scatter;
+    VecScatterCreate(originVec, originIs, destVec, destIs, &scatter);
+    VecScatterBegin(scatter, originVec, destVec,
+                    INSERT_VALUES, SCATTER_FORWARD);
+    VecScatterEnd(scatter, originVec, destVec,
+                  INSERT_VALUES, SCATTER_FORWARD);
+
+    ISDestroy(&originIs);
+    ISDestroy(&destIs);
+    VecScatterDestroy(&scatter);
+  }
 }

AMDiS/src/parallel/PetscSolver.h
@@ -90,6 +90,20 @@ namespace AMDiS {
                            bool iterationCounter = true,
                            bool residual = true);

+    /** \brief
+     * Copies between two PETSc vectors by using different index sets for the
+     * origin and the destination vectors.
+     *
+     * \param[in]  originVec    The PETSc vector from which we copy.
+     * \param[out] destVec      The PETSc vector we copy to.
+     * \param[in]  originIndex  Set of global indices referring to the
+     *                          origin vector.
+     * \param[in]  destIndex    Set of global indices referring to the
+     *                          destination vector.
+     */
+    void copyVec(Vec& originVec, Vec& destVec,
+                 vector<int>& originIndex, vector<int>& destIndex);
+
   protected:
     MeshDistributor *meshDistributor;

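Given PETSc's VecScatter semantics, copyVec moves originVec[originIndex[k]] into destVec[destIndex[k]] for every k. A hedged usage sketch; solver stands for some PetscSolver instance, and the vectors and index values are invented for illustration:

    // Sketch: copy three entries between two distributed PETSc vectors.
    std::vector<int> from, to;
    from.push_back(0);  to.push_back(2);
    from.push_back(5);  to.push_back(3);
    from.push_back(9);  to.push_back(4);

    solver->copyVec(originVec, destVec, from, to);
    // Afterwards destVec[to[k]] == originVec[from[k]] for k = 0, 1, 2.
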
AMDiS/src/parallel/PetscSolverGlobalBlockMatrix.cc
@@ -25,22 +25,14 @@ namespace AMDiS {
     double wtime = MPI::Wtime();

     nComponents = mat->getNumRows();
-    int nRankRows = meshDistributor->getNumberRankDofs() * nComponents;
-    int nOverallRows = meshDistributor->getNumberOverallDofs() * nComponents;
-
-    // === Create PETSc vector (solution and a temporary vector). ===
-
-    VecCreateMPI(PETSC_COMM_WORLD, nRankRows, nOverallRows, &petscSolVec);
-    VecCreateMPI(PETSC_COMM_WORLD, nRankRows, nOverallRows, &petscTmpVec);
+    int nRankRows = meshDistributor->getNumberRankDofs();
+    int nOverallRows = meshDistributor->getNumberOverallDofs();

 #if (DEBUG != 0)
     MSG("Fill petsc matrix 1 needed %.5f seconds\n", MPI::Wtime() - wtime);
 #endif

-    nestMat = new Mat*[nComponents];
-    for (int i = 0; i < nComponents; i++)
-      nestMat[i] = new Mat[nComponents];
+    nestMat.resize(nComponents * nComponents);

     // === Transfer values from DOF matrices to the PETSc matrix. ===

@@ -51,16 +43,17 @@ namespace AMDiS {
                         nRankRows, nRankRows,
                         nOverallRows, nOverallRows,
                         30, PETSC_NULL, 30, PETSC_NULL,
-                        &(nestMat[i][j]));
-        setDofMatrix(nestMat[i][j], (*mat)[i][j]);
+                        &(nestMat[i * nComponents + j]));
+        setDofMatrix(nestMat[i * nComponents + j], (*mat)[i][j]);
+        MatAssemblyBegin(nestMat[i * nComponents + j], MAT_FINAL_ASSEMBLY);
+        MatAssemblyEnd(nestMat[i * nComponents + j], MAT_FINAL_ASSEMBLY);
       } else {
-        nestMat[i][j] = PETSC_NULL;
+        nestMat[i * nComponents + j] = PETSC_NULL;
       }

     MatCreateNest(PETSC_COMM_WORLD, nComponents, PETSC_NULL,
                   nComponents, PETSC_NULL,
-                  &(nestMat[0][0]), &petscMatrix);
+                  &(nestMat[0]), &petscMatrix);

 #if (DEBUG != 0)
     MSG("Fill petsc matrix 2 needed %.5f seconds\n", MPI::Wtime() - wtime);

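The switch from Mat** to a flat vector<Mat> stores block (i, j) at index i * nComponents + j, which is exactly the row-major submatrix array MatCreateNest expects, so &(nestMat[0]) can be handed over directly; passing PETSC_NULL for the index-set arguments lets PETSc derive the row and column layouts from the submatrices. A small sketch of the layout convention (the helper name is illustrative):

    // Row-major layout of an n x n block matrix in a flat array:
    // block (i, j) lives at flat index i * n + j.
    inline Mat& block(std::vector<Mat> &nestMat, int n, int i, int j)
    {
      return nestMat[i * n + j];
    }
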
@@ -71,12 +64,11 @@ namespace AMDiS {
     // === Init PETSc solver. ===

     KSPCreate(PETSC_COMM_WORLD, &solver);
-    KSPGetPC(solver, &pc);
     KSPSetOperators(solver, petscMatrix, petscMatrix, SAME_NONZERO_PATTERN);
     KSPSetTolerances(solver, 0.0, 1e-8, PETSC_DEFAULT, PETSC_DEFAULT);
     KSPSetType(solver, KSPBCGS);
     KSPSetFromOptions(solver);
-    PCSetFromOptions(pc);
+
+    KSPGetPC(solver, &pc);
+    setBlockPreconditioner(pc);

     MSG("Fill petsc matrix needed %.5f seconds\n", MPI::Wtime() - wtime);
   }

@@ -89,15 +81,22 @@ namespace AMDiS {
     TEST_EXIT_DBG(vec)("NO DOF vector defined!\n");

     nComponents = vec->getSize();
-    int nRankRows = meshDistributor->getNumberRankDofs() * nComponents;
-    int nOverallRows = meshDistributor->getNumberOverallDofs() * nComponents;
+    int nRankRows = meshDistributor->getNumberRankDofs();
+    int nOverallRows = meshDistributor->getNumberOverallDofs();

-    VecCreateMPI(PETSC_COMM_WORLD, nRankRows, nOverallRows, &petscRhsVec);
+    nestVec.resize(nComponents);

     // === Transfer values from DOF vector to the PETSc vector. ===
-    for (int i = 0; i < nComponents; i++)
-      setDofVector(petscRhsVec, vec->getDOFVector(i), nComponents, i);
+    for (int i = 0; i < nComponents; i++) {
+      VecCreateMPI(PETSC_COMM_WORLD, nRankRows, nOverallRows, &(nestVec[i]));
+
+      setDofVector(nestVec[i], vec->getDOFVector(i));
+
+      VecAssemblyBegin(nestVec[i]);
+      VecAssemblyEnd(nestVec[i]);
+    }
+
+    VecCreateNest(PETSC_COMM_WORLD, nComponents, PETSC_NULL,
+                  &(nestVec[0]), &petscRhsVec);

     VecAssemblyBegin(petscRhsVec);
     VecAssemblyEnd(petscRhsVec);

@@ -110,21 +109,24 @@ namespace AMDiS {
     FUNCNAME("PetscSolverGlobalBlockMatrix::solvePetscMatrix()");

     // PETSc.
-    KSPSolve(solver, petscRhsVec, petscSolVec);
+    KSPSolve(solver, petscRhsVec, petscRhsVec);

     // === Transfer values from PETSc's solution vectors to the DOF vectors. ===
-    int nRankDofs = meshDistributor->getNumberRankDofs();
-    PetscScalar *vecPointer;
-    VecGetArray(petscSolVec, &vecPointer);
-
-    for (int i = 0; i < nComponents; i++) {
-      DOFVector<double> &dofvec = *(vec.getDOFVector(i));
-      for (int j = 0; j < nRankDofs; j++)
-        dofvec[meshDistributor->mapLocalToDofIndex(j)] =
-          vecPointer[i * nComponents + j];
-    }
-
-    VecRestoreArray(petscSolVec, &vecPointer);
+    for (int i = 0; i < nComponents; i++) {
+      DOFVector<double> &dofvec = *(vec.getDOFVector(i));
+
+      Vec tmp;
+      VecNestGetSubVec(petscRhsVec, i, &tmp);
+      int nRankDofs = meshDistributor->getNumberRankDofs();
+      PetscScalar *vecPointer;
+      VecGetArray(tmp, &vecPointer);
+
+      for (int j = 0; j < nRankDofs; j++)
+        dofvec[meshDistributor->mapLocalToDofIndex(j)] = vecPointer[j];
+
+      VecRestoreArray(tmp, &vecPointer);
+    }

     // === Synchronize DOFs at common DOFs, i.e., DOFs that correspond to ===

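The right-hand side is now a nested vector, and KSPSolve writes the solution back into it in place; VecNestGetSubVec then exposes component i without copying. A minimal standalone sketch of that round trip (sizes and the global communicator are placeholders, not AMDiS code):

    // Sketch: build a 2-component nested vector and read one component back.
    Vec comp[2], nested, sub;
    VecCreateMPI(PETSC_COMM_WORLD, PETSC_DECIDE, 100, &comp[0]);
    VecCreateMPI(PETSC_COMM_WORLD, PETSC_DECIDE, 100, &comp[1]);
    VecCreateNest(PETSC_COMM_WORLD, 2, PETSC_NULL, comp, &nested);

    VecNestGetSubVec(nested, 1, &sub);  // sub aliases comp[1]; no copy made
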
@@ -132,12 +134,10 @@ namespace AMDiS {
     meshDistributor->synchVector(vec);

     // Print iteration counter and residual norm of the solution.
     printSolutionInfo(adaptInfo);

     // === Destroy PETSc's variables. ===
     VecDestroy(&petscRhsVec);
+    for (int i = 0; i < nComponents; i++)
+      VecDestroy(&(nestVec[i]));
   }

@@ -145,20 +145,12 @@ namespace AMDiS {
   {
     FUNCNAME("PetscSolverGlobalBlockMatrix::destroyMatrixData()");

+    for (unsigned int i = 0; i < nestMat.size(); i++)
+      if (nestMat[i] != PETSC_NULL)
+        MatDestroy(&(nestMat[i]));
+
     MatDestroy(&petscMatrix);
     KSPDestroy(&solver);
-    VecDestroy(&petscSolVec);
-    VecDestroy(&petscTmpVec);
-
-    for (int i = 0; i < nComponents; i++) {
-      for (int j = 0; j < nComponents; j++) {
-        if (nestMat[i][j] != PETSC_NULL)
-          MatDestroy(&(nestMat[i][j]));
-      }
-      delete [] nestMat[i];
-    }
-    delete [] nestMat;
   }

@@ -210,31 +202,21 @@ namespace AMDiS {
         values.push_back(value(*icursor));
       }

-      MatSetValues(petscMatrix, 1, &rowIndex, cols.size(),
+      MatSetValues(petscMat, 1, &rowIndex, cols.size(),
                    &(cols[0]), &(values[0]), ADD_VALUES);
     }
   }


   void PetscSolverGlobalBlockMatrix::setDofVector(Vec& petscVec,
-                                                  DOFVector<double>* vec,
-                                                  int nComponents,
-                                                  int row,
-                                                  bool rankOnly)
+                                                  DOFVector<double>* vec)
   {
     FUNCNAME("PetscSolverGlobalBlockMatrix::setDofVector()");

     // Traverse all used DOFs in the dof vector.
     DOFVector<double>::Iterator dofIt(vec, USED_DOFS);
     for (dofIt.reset(); !dofIt.end(); ++dofIt) {
-      if (rankOnly && !meshDistributor->getIsRankDof(dofIt.getDOFIndex()))
-        continue;
-
-      // Calculate global row index of the DOF.
-      DegreeOfFreedom globalRowDof =
-        meshDistributor->mapLocalToGlobal(dofIt.getDOFIndex());
-      // Calculate PETSc index of the row DOF.
-      int index = nComponents * row + globalRowDof;
+      int index = meshDistributor->mapLocalToGlobal(dofIt.getDOFIndex());

       double value = *dofIt;

       VecSetValues(petscVec, 1, &index, &value, ADD_VALUES);

AMDiS/src/parallel/PetscSolverGlobalBlockMatrix.h
@@ -52,11 +52,18 @@ namespace AMDiS {
     void setDofMatrix(Mat& petscMat, DOFMatrix* mat);

     /// Takes a DOF vector and sends its values to a given PETSc vector.
-    void setDofVector(Vec& petscVec, DOFVector<double>* vec,
-                      int nComponents, int row, bool rankOnly = false);
+    void setDofVector(Vec& petscVec, DOFVector<double>* vec);
+
+    virtual void setBlockPreconditioner(PC &pc)
+    {
+      PCSetFromOptions(pc);
+    }

   protected:
-    Mat **nestMat;
+    vector<Mat> nestMat;
+
+    vector<Vec> nestVec;

     int nComponents;
   };

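setBlockPreconditioner is the extension point this commit introduces: the base class just forwards to PCSetFromOptions, and a derived solver can install something block-aware instead. A hypothetical subclass (not part of this commit); PCFIELDSPLIT is one PETSc preconditioner that can exploit the nested block structure:

    // Hypothetical example subclass, not in this commit.
    class PetscSolverFieldSplit : public PetscSolverGlobalBlockMatrix
    {
    protected:
      virtual void setBlockPreconditioner(PC &pc)
      {
        PCSetType(pc, PCFIELDSPLIT);  // block-aware preconditioner
        PCSetFromOptions(pc);         // still allow command-line overrides
      }
    };
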
AMDiS/src/parallel/PetscSolverGlobalMatrix.cc
@@ -66,9 +66,9 @@ namespace AMDiS {
 #if (DEBUG != 0)
     int a, b;
     MatGetOwnershipRange(petscMatrix, &a, &b);
-    TEST_EXIT(a == meshDistributor->getRstart() * nComponents)
+    TEST_EXIT(a == meshDistributor->getStartDofs() * nComponents)
       ("Wrong matrix ownership range!\n");
-    TEST_EXIT(b == meshDistributor->getRstart() * nComponents + nRankRows)
+    TEST_EXIT(b == meshDistributor->getStartDofs() * nComponents + nRankRows)
       ("Wrong matrix ownership range!\n");
 #endif

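The assertion relies on PETSc's contiguous row-ownership contract: MatGetOwnershipRange returns the half-open range [first, last) of global rows stored on this rank, so first must equal the distributor's DOF offset scaled by the number of components. A sketch of the contract:

    // PETSc's ownership contract (sketch): rows first..last-1 live on this
    // rank, and last - first equals the local row count the matrix was
    // created with.
    PetscInt first, last;
    MatGetOwnershipRange(petscMatrix, &first, &last);
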
@@ -552,7 +552,7 @@ namespace AMDiS {
     // This is the local row index of the local PETSc matrix.
     int localPetscRowIdx =
-      petscRowIdx - meshDistributor->getRstart() * nComponents;
+      petscRowIdx - meshDistributor->getStartDofs() * nComponents;

     TEST_EXIT_DBG(localPetscRowIdx >= 0 && localPetscRowIdx < nRankRows)
       ("Should not happen!\n"
        "Debug info: localRowIdx = %d globalRowIdx = %d petscRowIdx = %d "
        "localPetscRowIdx = %d rStart = %d nComponents = %d nRankRows = %d\n",

@@ -560,7 +560,7 @@ namespace AMDiS {
        meshDistributor->mapLocalToGlobal(*cursor), petscRowIdx,
-       localPetscRowIdx, meshDistributor->getRstart(),
+       localPetscRowIdx, meshDistributor->getStartDofs(),
        nComponents, nRankRows);

@@ -574,8 +574,8 @@ namespace AMDiS {
     if (value(*icursor) != 0.0 || petscRowIdx == petscColIdx) {
       // The row DOF is a rank DOF; if the column is also a rank DOF,
       // increment the d_nnz value for this row, otherwise the o_nnz value.
-      if (petscColIdx >= meshDistributor->getRstart() * nComponents &&
-          petscColIdx < meshDistributor->getRstart() * nComponents + nRankRows)
+      if (petscColIdx >= meshDistributor->getStartDofs() * nComponents &&
+          petscColIdx < meshDistributor->getStartDofs() * nComponents + nRankRows)
         d_nnz[localPetscRowIdx]++;
       else
         o_nnz[localPetscRowIdx]++;

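For MPIAIJ matrices PETSc preallocates per row with two counts: d_nnz for columns that fall into the rank's own (diagonal-block) column range and o_nnz for all others, which is exactly the classification the comparison above performs. A hedged sketch of the rule and of how such counts are typically handed to PETSc (the call site is outside this hunk):

    // Sketch: a column belongs to the diagonal block iff its global index
    // lies in this rank's contiguous column range.
    bool isDiagonalBlock(int petscColIdx, int rankStart, int nRankRows)
    {
      return petscColIdx >= rankStart && petscColIdx < rankStart + nRankRows;
    }

    // Typical use of the counts (assumed context, not shown in this diff):
    // MatMPIAIJSetPreallocation(petscMatrix, 0, d_nnz, 0, o_nnz);
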
@@ -626,14 +626,14 @@ namespace AMDiS {
       int r = it->second[i].first;
       int c = it->second[i].second;

-      int localRowIdx = r - meshDistributor->getRstart() * nComponents;
+      int localRowIdx = r - meshDistributor->getStartDofs() * nComponents;

       TEST_EXIT_DBG(localRowIdx >= 0 && localRowIdx < nRankRows)
         ("Got row index %d/%d (nRankRows = %d) from rank %d. Should not happen!\n",
          r, localRowIdx, nRankRows, it->first);

-      if (c < meshDistributor->getRstart() * nComponents ||
-          c >= meshDistributor->getRstart() * nComponents + nRankRows)
+      if (c < meshDistributor->getStartDofs() * nComponents ||
+          c >= meshDistributor->getStartDofs() * nComponents + nRankRows)
         o_nnz[localRowIdx]++;
       else
         d_nnz[localRowIdx]++;