amdis (Backofen, Rainer) · Commits · 2e22f957

Commit 2e22f957, authored Jan 03, 2012 by Thomas Witkowski

PETSc global block solver should now work correctly.

parent ac22232d
9 changed files
AMDiS/src/parallel/MeshDistributor.cc

@@ -78,7 +78,7 @@ namespace AMDiS {
     partitioner(NULL),
     nRankDofs(0),
     nOverallDofs(0),
-    rstart(0),
+    rStartDofs(0),
     deserialized(false),
     writeSerializationFile(false),
     repartitioningAllowed(false),

@@ -1837,13 +1837,13 @@ namespace AMDiS {
     // Get displacment for global rank DOF ordering and global DOF number.
     nRankDofs = rankDofs.size();
-    mpi::getDofNumbering(mpiComm, nRankDofs, rstart, nOverallDofs);
+    mpi::getDofNumbering(mpiComm, nRankDofs, rStartDofs, nOverallDofs);

     // Stores for all rank owned DOFs a new global index.
     DofIndexMap rankDofsNewGlobalIndex;
     for (int i = 0; i < nRankDofs; i++)
-      rankDofsNewGlobalIndex[rankDofs[i]] = i + rstart;
+      rankDofsNewGlobalIndex[rankDofs[i]] = i + rStartDofs;

     // === Send and receive new DOF indices. ===

@@ -1906,7 +1906,7 @@ namespace AMDiS {
     MSG("------------- Debug information -------------\n");
     MSG("nRankDofs = %d\n", nRankDofs);
     MSG("nOverallDofs = %d\n", nOverallDofs);
-    MSG("rstart %d\n", rstart);
+    MSG("rStartDofs %d\n", rStartDofs);

     stringstream oss;
     oss << debugOutputDir << "elementIndex-" << mpiRank << ".vtu";

@@ -2170,7 +2170,7 @@ namespace AMDiS {
     serialize(out, periodicDof);
     serialize(out, periodicDofAssociations);

-    SerUtil::serialize(out, rstart);
+    SerUtil::serialize(out, rStartDofs);
     SerUtil::serialize(out, macroElementNeighbours);

     int nSize = allMacroElements.size();

@@ -2231,7 +2231,7 @@ namespace AMDiS {
     deserialize(in, periodicDof);
     deserialize(in, periodicDofAssociations);

-    SerUtil::deserialize(in, rstart);
+    SerUtil::deserialize(in, rStartDofs);
     SerUtil::deserialize(in, macroElementNeighbours);

     int nSize = 0;

AMDiS/src/parallel/MeshDistributor.h

@@ -136,6 +136,12 @@ namespace AMDiS {
       return nRankDofs;
     }

+    /// Returns \ref rStartDofs, the first global DOF index owned by rank.
+    inline int getStartDofs()
+    {
+      return rStartDofs;
+    }
+
     /// Returns \ref nOverallDofs, the global number of DOFs.
     inline int getNumberOverallDofs()
     {

@@ -225,11 +231,6 @@ namespace AMDiS {
       return lastMeshChangeIndex;
     }

-    inline int getRstart()
-    {
-      return rstart;
-    }
-
     inline int getMpiRank()
     {
       return mpiRank;

@@ -521,6 +522,9 @@ namespace AMDiS {
     /// Number of DOFs in the rank mesh.
     int nRankDofs;

+    /// Is the index of the first global DOF index, which is owned by the rank.
+    int rStartDofs;
+
     /// Number of DOFs in the whole domain.
     int nOverallDofs;

@@ -604,10 +608,6 @@ namespace AMDiS {
     /// repartitioned.
     vector<DOFVector<double>*> interchangeVectors;

-    /// Is the index of the first row of the linear system, which is owned by
-    /// the rank.
-    int rstart;
-
     /** \brief
      * If the problem definition has been read from a serialization file, this
      * variable is true, otherwise it is false. This variable is used to stop the

AMDiS/src/parallel/PetscProblemStat.cc

@@ -25,7 +25,8 @@ namespace AMDiS {
   PetscProblemStat::PetscProblemStat(string nameStr,
                                      ProblemIterationInterface *problemIteration)
-    : ParallelProblemStatBase(nameStr, problemIteration)
+    : ParallelProblemStatBase(nameStr, problemIteration),
+      deleteSolver(true)
   {
     FUNCNAME("PetscProblemStat::PetscProblemStat()");

@@ -54,6 +55,14 @@ namespace AMDiS {
   }


+  PetscProblemStat::PetscProblemStat(string nameStr, PetscSolver *solver)
+    : ParallelProblemStatBase(nameStr, NULL),
+      petscSolver(solver),
+      deleteSolver(false)
+  {}
+
+
   void PetscProblemStat::initialize(Flag initFlag,
                                     ProblemStatSeq *adoptProblem,
                                     Flag adoptFlag)

AMDiS/src/parallel/PetscProblemStat.h

@@ -40,9 +40,13 @@ namespace AMDiS {
     PetscProblemStat(std::string nameStr,
                      ProblemIterationInterface *problemIteration = NULL);

+    PetscProblemStat(std::string nameStr, PetscSolver *solver);
+
     ~PetscProblemStat()
     {
-      delete petscSolver;
+      if (deleteSolver)
+        delete petscSolver;
     }

     void initialize(Flag initFlag,

@@ -55,6 +59,8 @@ namespace AMDiS {
   protected:
     PetscSolver *petscSolver;
+
+    bool deleteSolver;
   };

   typedef PetscProblemStat ParallelProblemStat;

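
The new two-argument constructor lets a caller hand in a PetscSolver it created itself, and the deleteSolver flag then keeps the destructor from freeing an object the class does not own. A minimal usage sketch, assuming a concrete solver type that the caller can construct directly (this commit does not show such a constructor, so the first line is hypothetical):

  // Hypothetical caller-owned solver; construction details are assumed.
  PetscSolverGlobalBlockMatrix blockSolver;
  PetscProblemStat prob("userProblem", &blockSolver);
  // deleteSolver is false here, so ~PetscProblemStat() leaves blockSolver
  // alone and the caller stays responsible for its lifetime.

  // The original one-argument form still owns (and deletes) the solver it
  // creates internally:
  PetscProblemStat ownedProb("userProblem2");
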
AMDiS/src/parallel/PetscSolver.cc

@@ -40,4 +40,35 @@ namespace AMDiS {
     }
   }


+  void PetscSolver::copyVec(Vec& originVec, Vec& destVec,
+                            vector<int>& originIndex, vector<int>& destIndex)
+  {
+    FUNCNAME("PetscSolver::copyVec()");
+
+    IS originIs, destIs;
+    ISCreateGeneral(PETSC_COMM_WORLD,
+                    originIndex.size(),
+                    &(originIndex[0]),
+                    PETSC_USE_POINTER,
+                    &originIs);
+
+    ISCreateGeneral(PETSC_COMM_WORLD,
+                    destIndex.size(),
+                    &(destIndex[0]),
+                    PETSC_USE_POINTER,
+                    &destIs);
+
+    VecScatter scatter;
+    VecScatterCreate(originVec, originIs, destVec, destIs, &scatter);
+    VecScatterBegin(scatter, originVec, destVec,
+                    INSERT_VALUES, SCATTER_FORWARD);
+    VecScatterEnd(scatter, originVec, destVec,
+                  INSERT_VALUES, SCATTER_FORWARD);
+
+    ISDestroy(&originIs);
+    ISDestroy(&destIs);
+    VecScatterDestroy(&scatter);
+  }
+
 }
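
The new copyVec() packages the standard PETSc recipe for copying selected entries between two distributed vectors: one index set per vector, a VecScatter built from the pair, and a forward scatter. The following self-contained sketch runs the same call sequence outside of AMDiS; the vector size, the identity index mapping and the use of PETSC_DECIDE are assumptions made only for illustration.

// Standalone sketch of the index-set/scatter copy pattern used by copyVec().
#include <petscvec.h>
#include <petscis.h>
#include <vector>

int main(int argc, char **argv)
{
  PetscInitialize(&argc, &argv, PETSC_NULL, PETSC_NULL);

  // Two distributed vectors of global size 8 (size chosen arbitrarily).
  Vec origin, dest;
  VecCreateMPI(PETSC_COMM_WORLD, PETSC_DECIDE, 8, &origin);
  VecCreateMPI(PETSC_COMM_WORLD, PETSC_DECIDE, 8, &dest);
  VecSet(origin, 1.0);
  VecSet(dest, 0.0);

  // Each rank contributes the global indices it owns; here entry i of origin
  // is simply copied to entry i of dest, so both index lists are identical.
  PetscInt low, high;
  VecGetOwnershipRange(origin, &low, &high);
  std::vector<PetscInt> originIndex, destIndex;
  for (PetscInt i = low; i < high; i++) {
    originIndex.push_back(i);
    destIndex.push_back(i);
  }

  IS originIs, destIs;
  ISCreateGeneral(PETSC_COMM_WORLD, originIndex.size(), &(originIndex[0]),
                  PETSC_USE_POINTER, &originIs);
  ISCreateGeneral(PETSC_COMM_WORLD, destIndex.size(), &(destIndex[0]),
                  PETSC_USE_POINTER, &destIs);

  // Scatter origin[originIndex[k]] -> dest[destIndex[k]].
  VecScatter scatter;
  VecScatterCreate(origin, originIs, dest, destIs, &scatter);
  VecScatterBegin(scatter, origin, dest, INSERT_VALUES, SCATTER_FORWARD);
  VecScatterEnd(scatter, origin, dest, INSERT_VALUES, SCATTER_FORWARD);

  ISDestroy(&originIs);
  ISDestroy(&destIs);
  VecScatterDestroy(&scatter);

  VecView(dest, PETSC_VIEWER_STDOUT_WORLD);
  VecDestroy(&origin);
  VecDestroy(&dest);
  PetscFinalize();
  return 0;
}
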
AMDiS/src/parallel/PetscSolver.h

@@ -90,6 +90,20 @@ namespace AMDiS {
                      bool iterationCounter = true,
                      bool residual = true);

+    /** \brief
+     * Copies between to PETSc vectors by using different index sets for the
+     * origin and the destination vectors.
+     *
+     * \param[in]   originVec    The PETSc vector from which we copy from.
+     * \param[out]  destVec      The PETSc vector we copy too.
+     * \param[in]   originIndex  Set of global indices referring to the
+     *                           origin vector.
+     * \param[in]   destIndex    Set of global indices referring to the
+     *                           destination vector.
+     */
+    void copyVec(Vec& originVec, Vec& destVec,
+                 vector<int>& originIndex, vector<int>& destIndex);
+
   protected:
     MeshDistributor *meshDistributor;

AMDiS/src/parallel/PetscSolverGlobalBlockMatrix.cc

@@ -25,22 +25,14 @@ namespace AMDiS {
     double wtime = MPI::Wtime();

     nComponents = mat->getNumRows();
-    int nRankRows = meshDistributor->getNumberRankDofs() * nComponents;
-    int nOverallRows = meshDistributor->getNumberOverallDofs() * nComponents;
-
-    // === Create PETSc vector (solution and a temporary vector). ===
-
-    VecCreateMPI(PETSC_COMM_WORLD, nRankRows, nOverallRows, &petscSolVec);
-    VecCreateMPI(PETSC_COMM_WORLD, nRankRows, nOverallRows, &petscTmpVec);
+    int nRankRows = meshDistributor->getNumberRankDofs();
+    int nOverallRows = meshDistributor->getNumberOverallDofs();

 #if (DEBUG != 0)
     MSG("Fill petsc matrix 1 needed %.5f seconds\n", MPI::Wtime() - wtime);
 #endif

-    nestMat = new Mat*[nComponents];
-    for (int i = 0; i < nComponents; i++)
-      nestMat[i] = new Mat[nComponents];
+    nestMat.resize(nComponents * nComponents);

     // === Transfer values from DOF matrices to the PETSc matrix. ===

@@ -51,16 +43,17 @@ namespace AMDiS {
                         nRankRows, nRankRows,
                         nOverallRows, nOverallRows,
                         30, PETSC_NULL, 30, PETSC_NULL,
-                        &(nestMat[i][j]));
+                        &(nestMat[i * nComponents + j]));

-          setDofMatrix(nestMat[i][j], (*mat)[i][j]);
+          setDofMatrix(nestMat[i * nComponents + j], (*mat)[i][j]);
+          MatAssemblyBegin(nestMat[i * nComponents + j], MAT_FINAL_ASSEMBLY);
+          MatAssemblyEnd(nestMat[i * nComponents + j], MAT_FINAL_ASSEMBLY);
        } else {
-          nestMat[i][j] = PETSC_NULL;
+          nestMat[i * nComponents + j] = PETSC_NULL;
        }

     MatCreateNest(PETSC_COMM_WORLD,
                   nComponents, PETSC_NULL, nComponents, PETSC_NULL,
-                  &(nestMat[0][0]), &petscMatrix);
+                  &(nestMat[0]), &petscMatrix);

 #if (DEBUG != 0)
     MSG("Fill petsc matrix 2 needed %.5f seconds\n", MPI::Wtime() - wtime);

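
The hunk above switches the block solver to a flat vector<Mat> of nComponents * nComponents entries that is handed to MatCreateNest as one contiguous array, with PETSC_NULL marking empty blocks. A self-contained sketch of that pattern follows; the 2x2 layout, the diagonal-only fill and the matrix sizes are illustrative assumptions, and on PETSc releases older than 3.3 the creation routine is called MatCreateMPIAIJ instead of MatCreateAIJ.

// Standalone sketch: flat block storage plus MatCreateNest.
#include <petscmat.h>
#include <vector>

int main(int argc, char **argv)
{
  PetscInitialize(&argc, &argv, PETSC_NULL, PETSC_NULL);

  const PetscInt nComponents = 2;   // number of block rows/columns
  const PetscInt blockSize   = 4;   // global size of each block

  std::vector<Mat> nestMat(nComponents * nComponents, PETSC_NULL);

  // Fill only the diagonal blocks; off-diagonal entries remain PETSC_NULL,
  // which MatCreateNest treats as zero blocks.
  for (PetscInt i = 0; i < nComponents; i++) {
    Mat &block = nestMat[i * nComponents + i];
    MatCreateAIJ(PETSC_COMM_WORLD, PETSC_DECIDE, PETSC_DECIDE,
                 blockSize, blockSize, 1, PETSC_NULL, 0, PETSC_NULL, &block);
    for (PetscInt row = 0; row < blockSize; row++)
      MatSetValue(block, row, row, 2.0, INSERT_VALUES);
    MatAssemblyBegin(block, MAT_FINAL_ASSEMBLY);
    MatAssemblyEnd(block, MAT_FINAL_ASSEMBLY);
  }

  // The nest matrix only references the blocks stored in the flat array.
  Mat nested;
  MatCreateNest(PETSC_COMM_WORLD, nComponents, PETSC_NULL,
                nComponents, PETSC_NULL, &(nestMat[0]), &nested);

  MatView(nested, PETSC_VIEWER_STDOUT_WORLD);

  MatDestroy(&nested);
  for (size_t k = 0; k < nestMat.size(); k++)
    if (nestMat[k] != PETSC_NULL)
      MatDestroy(&nestMat[k]);

  PetscFinalize();
  return 0;
}
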
@@ -71,12 +64,11 @@ namespace AMDiS {
     // === Init PETSc solver. ===

     KSPCreate(PETSC_COMM_WORLD, &solver);
-    KSPGetPC(solver, &pc);
     KSPSetOperators(solver, petscMatrix, petscMatrix, SAME_NONZERO_PATTERN);
-    KSPSetTolerances(solver, 0.0, 1e-8, PETSC_DEFAULT, PETSC_DEFAULT);
-    KSPSetType(solver, KSPBCGS);
     KSPSetFromOptions(solver);
-    PCSetFromOptions(pc);
+
+    KSPGetPC(solver, &pc);
+    setBlockPreconditioner(pc);

     MSG("Fill petsc matrix needed %.5f seconds\n", MPI::Wtime() - wtime);
   }

@@ -89,15 +81,22 @@ namespace AMDiS {
     TEST_EXIT_DBG(vec)("NO DOF vector defined!\n");

     nComponents = vec->getSize();
-    int nRankRows = meshDistributor->getNumberRankDofs() * nComponents;
-    int nOverallRows = meshDistributor->getNumberOverallDofs() * nComponents;
+    int nRankRows = meshDistributor->getNumberRankDofs();
+    int nOverallRows = meshDistributor->getNumberOverallDofs();

-    VecCreateMPI(PETSC_COMM_WORLD, nRankRows, nOverallRows, &petscRhsVec);
+    nestVec.resize(nComponents);

-    // === Transfer values from DOF vector to the PETSc vector. ===
-    for (int i = 0; i < nComponents; i++)
-      setDofVector(petscRhsVec, vec->getDOFVector(i), nComponents, i);
+    for (int i = 0; i < nComponents; i++) {
+      VecCreateMPI(PETSC_COMM_WORLD, nRankRows, nOverallRows, &(nestVec[i]));
+
+      setDofVector(nestVec[i], vec->getDOFVector(i));
+
+      VecAssemblyBegin(nestVec[i]);
+      VecAssemblyEnd(nestVec[i]);
+    }
+
+    VecCreateNest(PETSC_COMM_WORLD, nComponents, PETSC_NULL,
+                  &(nestVec[0]), &petscRhsVec);

     VecAssemblyBegin(petscRhsVec);
     VecAssemblyEnd(petscRhsVec);

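
The right-hand side is built the same way on the vector side: one MPI vector per component, assembled individually, then wrapped by VecCreateNest, which keeps references to the sub-vectors (so both the nest vector and the components are destroyed later). A minimal sketch, with the component count and sizes chosen only for illustration:

// Standalone sketch: per-component vectors wrapped into a VecNest.
#include <petscvec.h>
#include <vector>

int main(int argc, char **argv)
{
  PetscInitialize(&argc, &argv, PETSC_NULL, PETSC_NULL);

  const PetscInt nComponents = 3;
  const PetscInt nOverallRows = 10;   // global size of each component vector

  std::vector<Vec> nestVec(nComponents);
  for (PetscInt i = 0; i < nComponents; i++) {
    VecCreateMPI(PETSC_COMM_WORLD, PETSC_DECIDE, nOverallRows, &(nestVec[i]));
    VecSet(nestVec[i], (PetscScalar)(i + 1));   // stand-in for setDofVector()
    VecAssemblyBegin(nestVec[i]);
    VecAssemblyEnd(nestVec[i]);
  }

  Vec rhs;
  VecCreateNest(PETSC_COMM_WORLD, nComponents, PETSC_NULL,
                &(nestVec[0]), &rhs);
  VecView(rhs, PETSC_VIEWER_STDOUT_WORLD);

  VecDestroy(&rhs);
  for (PetscInt i = 0; i < nComponents; i++)
    VecDestroy(&(nestVec[i]));

  PetscFinalize();
  return 0;
}
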
@@ -110,21 +109,24 @@ namespace AMDiS {
     FUNCNAME("PetscSolverGlobalBlockMatrix::solvePetscMatrix()");

     // PETSc.
-    KSPSolve(solver, petscRhsVec, petscSolVec);
+    KSPSolve(solver, petscRhsVec, petscRhsVec);

     // === Transfere values from PETSc's solution vectors to the DOF vectors. ===
-    int nRankDofs = meshDistributor->getNumberRankDofs();
-    PetscScalar *vecPointer;
-    VecGetArray(petscSolVec, &vecPointer);
-
     for (int i = 0; i < nComponents; i++) {
       DOFVector<double> &dofvec = *(vec.getDOFVector(i));
+
+      Vec tmp;
+      VecNestGetSubVec(petscRhsVec, i, &tmp);
+
+      int nRankDofs = meshDistributor->getNumberRankDofs();
+      PetscScalar *vecPointer;
+      VecGetArray(tmp, &vecPointer);
+
       for (int j = 0; j < nRankDofs; j++)
-        dofvec[meshDistributor->mapLocalToDofIndex(j)] =
-          vecPointer[i * nComponents + j];
+        dofvec[meshDistributor->mapLocalToDofIndex(j)] = vecPointer[j];
+
+      VecRestoreArray(tmp, &vecPointer);
     }

-    VecRestoreArray(petscSolVec, &vecPointer);
-
     // === Synchronize DOFs at common DOFs, i.e., DOFs that correspond to ===

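
After the solve, the new code walks the solution component by component: VecNestGetSubVec returns the i-th sub-vector and VecGetArray exposes its local entries, which are then written back into the matching DOFVector. The sketch below shows that readback loop in isolation; the sizes and the printed output are illustrative assumptions.

// Standalone sketch: per-component readback from a nested vector.
#include <petscvec.h>
#include <vector>

int main(int argc, char **argv)
{
  PetscInitialize(&argc, &argv, PETSC_NULL, PETSC_NULL);

  const PetscInt nComponents = 2;
  const PetscInt nOverallRows = 6;

  std::vector<Vec> nestVec(nComponents);
  for (PetscInt i = 0; i < nComponents; i++) {
    VecCreateMPI(PETSC_COMM_WORLD, PETSC_DECIDE, nOverallRows, &(nestVec[i]));
    VecSet(nestVec[i], 10.0 * (i + 1));
  }

  Vec sol;
  VecCreateNest(PETSC_COMM_WORLD, nComponents, PETSC_NULL,
                &(nestVec[0]), &sol);

  // Per-component readback: each sub-vector owns its local entries only.
  for (PetscInt i = 0; i < nComponents; i++) {
    Vec tmp;
    VecNestGetSubVec(sol, i, &tmp);

    PetscInt nLocal;
    VecGetLocalSize(tmp, &nLocal);

    PetscScalar *vecPointer;
    VecGetArray(tmp, &vecPointer);
    for (PetscInt j = 0; j < nLocal; j++)
      PetscPrintf(PETSC_COMM_SELF, "component %d, local dof %d: %g\n",
                  (int)i, (int)j, (double)PetscRealPart(vecPointer[j]));
    VecRestoreArray(tmp, &vecPointer);
  }

  VecDestroy(&sol);
  for (PetscInt i = 0; i < nComponents; i++)
    VecDestroy(&(nestVec[i]));

  PetscFinalize();
  return 0;
}
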
@@ -132,12 +134,10 @@ namespace AMDiS {
     meshDistributor->synchVector(vec);

-    // Print iteration counter and residual norm of the solution.
-    printSolutionInfo(adaptInfo);
-
     // === Destroy PETSc's variables. ===

     VecDestroy(&petscRhsVec);
+    for (int i = 0; i < nComponents; i++)
+      VecDestroy(&(nestVec[i]));
   }

@@ -145,20 +145,12 @@ namespace AMDiS {
   {
     FUNCNAME("PetscSolverGlobalBlockMatrix::destroyMatrixData()");

+    for (unsigned int i = 0; i < nestMat.size(); i++)
+      if (nestMat[i] != PETSC_NULL)
+        MatDestroy(&(nestMat[i]));
+
     MatDestroy(&petscMatrix);
     KSPDestroy(&solver);
-    VecDestroy(&petscSolVec);
-    VecDestroy(&petscTmpVec);
-
-    for (int i = 0; i < nComponents; i++) {
-      for (int j = 0; j < nComponents; j++) {
-        if (nestMat[i][j] != PETSC_NULL)
-          MatDestroy(&(nestMat[i][j]));
-      }
-
-      delete[] nestMat[i];
-    }
-
-    delete[] nestMat;
   }

@@ -209,32 +201,22 @@ namespace AMDiS {
          cols.push_back(colIndex);
          values.push_back(value(*icursor));
        }

-       MatSetValues(petscMatrix, 1, &rowIndex, cols.size(),
+       MatSetValues(petscMat, 1, &rowIndex, cols.size(),
                     &(cols[0]), &(values[0]), ADD_VALUES);
       }
   }


   void PetscSolverGlobalBlockMatrix::setDofVector(Vec& petscVec,
-                                                  DOFVector<double>* vec,
-                                                  int nComponents, int row,
-                                                  bool rankOnly)
+                                                  DOFVector<double>* vec)
   {
     FUNCNAME("PetscSolverGlobalBlockMatrix::setDofVector()");

     // Traverse all used DOFs in the dof vector.
     DOFVector<double>::Iterator dofIt(vec, USED_DOFS);
     for (dofIt.reset(); !dofIt.end(); ++dofIt) {
-      if (rankOnly && !meshDistributor->getIsRankDof(dofIt.getDOFIndex()))
-        continue;
-
-      // Calculate global row index of the DOF.
-      DegreeOfFreedom globalRowDof =
-        meshDistributor->mapLocalToGlobal(dofIt.getDOFIndex());
-      // Calculate PETSc index of the row DOF.
-      int index = nComponents * row + globalRowDof;
+      int index = meshDistributor->mapLocalToGlobal(dofIt.getDOFIndex());

       double value = *dofIt;

       VecSetValues(petscVec, 1, &index, &value, ADD_VALUES);

AMDiS/src/parallel/PetscSolverGlobalBlockMatrix.h

@@ -52,11 +52,18 @@ namespace AMDiS {
     void setDofMatrix(Mat& petscMat, DOFMatrix* mat);

     /// Takes a DOF vector and sends its values to a given PETSc vector.
-    void setDofVector(Vec& petscVec, DOFVector<double>* vec,
-                      int nComponents, int row, bool rankOnly = false);
+    void setDofVector(Vec& petscVec, DOFVector<double>* vec);
+
+    virtual void setBlockPreconditioner(PC &pc)
+    {
+      PCSetFromOptions(pc);
+    }

   protected:
-    Mat **nestMat;
+    vector<Mat> nestMat;
+
+    vector<Vec> nestVec;

     int nComponents;
   };

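
setBlockPreconditioner() is the new customization hook: fillPetscMatrix() calls it with the KSP's PC after KSPSetFromOptions(), and the default implementation only forwards to PCSetFromOptions(). A derived solver can install a genuine block preconditioner there. The following subclass is hypothetical; its name and the choice of PCFIELDSPLIT are assumptions for illustration, not part of this commit.

  // Hypothetical subclass, not part of the AMDiS sources.
  class PetscSolverMyBlockPrecon : public PetscSolverGlobalBlockMatrix
  {
  protected:
    // Called by fillPetscMatrix() after KSPSetFromOptions().
    virtual void setBlockPreconditioner(PC &pc)
    {
      PCSetType(pc, PCFIELDSPLIT);   // field-split preconditioning over the blocks
      PCSetFromOptions(pc);          // still honor -pc_fieldsplit_* runtime options
    }
  };
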
AMDiS/src/parallel/PetscSolverGlobalMatrix.cc

@@ -66,9 +66,9 @@ namespace AMDiS {
 #if (DEBUG != 0)
     int a, b;
     MatGetOwnershipRange(petscMatrix, &a, &b);
-    TEST_EXIT(a == meshDistributor->getRstart() * nComponents)
+    TEST_EXIT(a == meshDistributor->getStartDofs() * nComponents)
       ("Wrong matrix ownership range!\n");
-    TEST_EXIT(b == meshDistributor->getRstart() * nComponents + nRankRows)
+    TEST_EXIT(b == meshDistributor->getStartDofs() * nComponents + nRankRows)
       ("Wrong matrix ownership range!\n");
 #endif

@@ -552,7 +552,7 @@ namespace AMDiS {
     // This is the local row index of the local PETSc matrix.
     int localPetscRowIdx =
-      petscRowIdx - meshDistributor->getRstart() * nComponents;
+      petscRowIdx - meshDistributor->getStartDofs() * nComponents;

     TEST_EXIT_DBG(localPetscRowIdx >= 0 && localPetscRowIdx < nRankRows)
       ("Should not happen!\n"
        "Debug info: localRowIdx = %d globalRowIndx = %d petscRowIdx = %d localPetscRowIdx = %d rStart = %d nCompontens = %d nRankRows = %d\n",