Commit a125ff82
authored May 15, 2012 by Thomas Witkowski
Blub, go home.
parent 0e951559

Changes 2
AMDiS/src/parallel/ParallelDofMapping.h
...
...
@@ -313,6 +313,11 @@ namespace AMDiS {
      needMatIndexFromGlobal = global;
    }

    inline bool isMatIndexFromGlobal()
    {
      return needMatIndexFromGlobal;
    }

    /// Access the DOF mapping for a given FE space.
    inline FeSpaceDofMap& operator[](const FiniteElemSpace* feSpace)
    {
...
...
@@ -436,6 +441,11 @@ namespace AMDiS {
    /// mapping from DOF indices to matrix row indices is defined on local
    /// or global DOF indices. If true, the mapping is specified and used
    /// on global DOF indices, otherwise on local ones.
    /// In most scenarios the mapping stored on local DOF indices is what we
    /// want. Only when periodic boundary conditions are used together with
    /// some global matrix approach must the matrix indices also be stored
    /// for DOFs that are not part of the local subdomain. In that case, the
    /// mapping is stored on global DOF indices.
    bool needMatIndexFromGlobal;
/// Maps from FE space pointers to DOF mappings.
...
...
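To illustrate the intent of the new accessor, here is a minimal standalone sketch of how solver code is expected to query the flag. The DofMappingSketch type, its matIndex member, and the single-argument getMatIndex() are hypothetical stand-ins for ParallelDofMapping (the real getMatIndex() also takes the vector component nRowVec); this is not the AMDiS implementation. Compare the modified setDofVector() in PetscSolverGlobalMatrix.cc below.

    #include <iostream>
    #include <map>

    // Hypothetical, stripped-down stand-in for ParallelDofMapping: it only
    // models the flag and a toy DOF-to-matrix-row lookup.
    struct DofMappingSketch
    {
      bool needMatIndexFromGlobal = false;

      bool isMatIndexFromGlobal() { return needMatIndexFromGlobal; }

      std::map<int, int> matIndex;               // toy contents: DOF -> matrix row
      int getMatIndex(int dof) const { return matIndex.at(dof); }
    };

    int main()
    {
      DofMappingSketch map;
      map.needMatIndexFromGlobal = true;         // e.g. periodic BCs + global matrix
      map.matIndex = { {7, 42}, {3, 11} };

      int localDof = 3;
      int globalDof = 7;

      // Callers pick the DOF numbering that matches how the mapping was built.
      int index = map.isMatIndexFromGlobal() ? map.getMatIndex(globalDof)
                                             : map.getMatIndex(localDof);

      std::cout << "matrix row index: " << index << std::endl;   // prints 42
      return 0;
    }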
AMDiS/src/parallel/PetscSolverGlobalMatrix.cc
...
...
@@ -65,12 +65,6 @@ namespace AMDiS {
    VecCreateMPI(mpiCommGlobal, nRankRows, nOverallRows, &petscSolVec);

    int testddd = 1;
    Parameters::get("block size", testddd);
    if (testddd > 1)
      VecSetBlockSize(petscSolVec, testddd);

    // === Create PETSc matrix with the computed nnz data structure. ===
...
...
@@ -78,11 +72,6 @@ namespace AMDiS {
                   nOverallRows, nOverallRows,
                   0, d_nnz, 0, o_nnz, &matIntInt);

    if (testddd > 1) {
      MatSetBlockSize(matIntInt, testddd);
      MSG("MAT SET BLOCK SIZE: %d\n", testddd);
    }

#if (DEBUG != 0)
    MSG("Fill petsc matrix 1 needed %.5f seconds\n", MPI::Wtime() - wtime);
#endif
...
...
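The block-size handling touched by the two hunks above follows a common PETSc pattern: create the distributed object first, then set a block size if one was configured. The sketch below is a standalone illustration of that pattern, not AMDiS code: the toy sizes and the hard-coded blockSize replace AMDiS's Parameters::get("block size", ...), error checking is omitted, and the call order mirrors the code above (newer PETSc releases may be stricter about when the block size can be set).

    #include <petscvec.h>

    int main(int argc, char **argv)
    {
      PetscInitialize(&argc, &argv, NULL, NULL);

      // Local and global sizes of the distributed vector (toy values).
      PetscInt nRankRows = 8;
      PetscInt nOverallRows = PETSC_DECIDE;

      Vec v;
      VecCreateMPI(PETSC_COMM_WORLD, nRankRows, nOverallRows, &v);

      // Stand-in for Parameters::get("block size", testddd).
      PetscInt blockSize = 2;
      if (blockSize > 1)
        VecSetBlockSize(v, blockSize);   // local size must be divisible by blockSize

      VecDestroy(&v);
      PetscFinalize();
      return 0;
    }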
@@ -328,33 +317,40 @@ namespace AMDiS {
  {
    FUNCNAME("PetscSolverGlobalMatrix::fillPetscRhs()");

    if (coarseSpaceMap) {
      fillPetscRhsWithCoarseSpace(vec);
      return;
    }

    VecCreateMPI(mpiCommGlobal,
                 interiorMap->getRankDofs(), nGlobalOverallInterior,
                 &rhsInterior);

    if (coarseSpaceMap)
      VecCreateMPI(mpiCommGlobal,
                   coarseSpaceMap->getRankDofs(),
                   coarseSpaceMap->getOverallDofs(),
                   &rhsCoarseSpace);

    TEST_EXIT_DBG(vec)("No DOF vector defined!\n");
    TEST_EXIT_DBG(interiorMap)("No parallel DOF map defined!\n");

    int nRankRows = interiorMap->getRankDofs();
    int nOverallRows = interiorMap->getOverallDofs();

    VecCreateMPI(mpiCommGlobal, nRankRows, nOverallRows, &rhsInterior);

    int testddd = 1;
    Parameters::get("block size", testddd);
    if (testddd > 1)
      VecSetBlockSize(rhsInterior, testddd);

    // === Transfer values from DOF vector to the PETSc vector. ===
    for (int i = 0; i < vec->getSize(); i++)
      setDofVector(rhsInterior, vec->getDOFVector(i), i);

    if (coarseSpaceMap) {
      fillPetscRhsWithCoarseSpace(vec);
    } else {
      // === Transfer values from DOF vector to the PETSc vector. ===
      for (int i = 0; i < vec->getSize(); i++)
        setDofVector(rhsInterior, vec->getDOFVector(i), i);
    }

    VecAssemblyBegin(rhsInterior);
    VecAssemblyEnd(rhsInterior);

    if (coarseSpaceMap) {
      VecAssemblyBegin(rhsCoarseSpace);
      VecAssemblyEnd(rhsCoarseSpace);
    }

    if (removeRhsNullSpace) {
      TEST_EXIT_DBG(coarseSpaceMap == NULL)("Not supported!\n");

      MSG("Remove constant null space from the RHS!\n");
      MatNullSpace sp;
      MatNullSpaceCreate(mpiCommGlobal, PETSC_TRUE, 0, PETSC_NULL, &sp);
...
...
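The restructured fillPetscRhs() above follows the usual PETSc create/fill/assemble sequence for the right-hand side. Below is a compact, standalone sketch of just that sequence; it is not AMDiS code, the names rhs and nLocal are illustrative, and each rank simply writes a constant into the rows it owns where AMDiS would call setDofVector().

    #include <petscvec.h>

    int main(int argc, char **argv)
    {
      PetscInitialize(&argc, &argv, NULL, NULL);

      PetscInt nLocal = 4;
      Vec rhs;
      VecCreateMPI(PETSC_COMM_WORLD, nLocal, PETSC_DECIDE, &rhs);

      // Fill phase: analogous to the setDofVector() loop, every rank adds
      // values for the rows it owns, using ADD_VALUES to accumulate them.
      PetscInt rStart, rEnd;
      VecGetOwnershipRange(rhs, &rStart, &rEnd);
      for (PetscInt row = rStart; row < rEnd; row++)
        VecSetValue(rhs, row, 1.0, ADD_VALUES);

      // Assembly phase: required before the vector can be used by a solver.
      VecAssemblyBegin(rhs);
      VecAssemblyEnd(rhs);

      VecDestroy(&rhs);
      PetscFinalize();
      return 0;
    }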
@@ -368,18 +364,6 @@ namespace AMDiS {
  {
    FUNCNAME("SubDomainSolver::fillPetscRhs()");

    VecCreateMPI(mpiCommGlobal,
                 interiorMap->getRankDofs(), nGlobalOverallInterior,
                 &rhsInterior);

    if (coarseSpaceMap)
      VecCreateMPI(mpiCommGlobal,
                   coarseSpaceMap->getRankDofs(),
                   coarseSpaceMap->getOverallDofs(),
                   &rhsCoarseSpace);

    for (int i = 0; i < vec->getSize(); i++) {
      const FiniteElemSpace *feSpace = vec->getDOFVector(i)->getFeSpace();
      DOFVector<double>::Iterator dofIt(vec->getDOFVector(i), USED_DOFS);
...
...
@@ -394,14 +378,6 @@ namespace AMDiS {
        }
      }
    }

    VecAssemblyBegin(rhsInterior);
    VecAssemblyEnd(rhsInterior);

    if (coarseSpaceMap) {
      VecAssemblyBegin(rhsCoarseSpace);
      VecAssemblyEnd(rhsCoarseSpace);
    }
  }
...
...
@@ -748,23 +724,26 @@ namespace AMDiS {
      DegreeOfFreedom globalRowDof =
        (*interiorMap)[feSpace][dofIt.getDOFIndex()].global;

      // Get PETSc's mat index of the row DOF.
      int index = interiorMap->getMatIndex(nRowVec, globalRowDof);

      int index = 0;
      if (interiorMap->isMatIndexFromGlobal())
        index = interiorMap->getMatIndex(nRowVec, globalRowDof);
      else
        index = interiorMap->getMatIndex(nRowVec, dofIt.getDOFIndex());

      if (perMap.isPeriodic(feSpace, globalRowDof)) {
        std::set<int>& perAsc = perMap.getAssociations(feSpace, globalRowDof);
        double value = *dofIt / (perAsc.size() + 1.0);

        VecSetValues(petscVec, 1, &index, &value, ADD_VALUES);
        VecSetValue(petscVec, index, value, ADD_VALUES);

        for (std::set<int>::iterator perIt = perAsc.begin();
             perIt != perAsc.end(); ++perIt) {
          int mappedDof = perMap.map(feSpace, *perIt, globalRowDof);
          int mappedIndex = interiorMap->getMatIndex(nRowVec, mappedDof);

          VecSetValues(petscVec, 1, &mappedIndex, &value, ADD_VALUES);
          VecSetValue(petscVec, mappedIndex, value, ADD_VALUES);
        }
      } else {
        // The DOF index is not periodic.
        double value = *dofIt;

        VecSetValues(petscVec, 1, &index, &value, ADD_VALUES);
        VecSetValue(petscVec, index, *dofIt, ADD_VALUES);
      }
    }
  }
...
...
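The periodic branch of setDofVector() distributes each value evenly over the DOF itself and all of its periodic associations: with perAsc.size() == 3 the value is divided by 4 and added (ADD_VALUES) to four matrix rows, so the contributions again sum to the original value. The sketch below reproduces that bookkeeping with a std::map standing in for the PETSc vector; the names addValue, dofValue, and mappedIndices and the row numbers are hypothetical, not AMDiS code.

    #include <iostream>
    #include <map>
    #include <set>

    int main()
    {
      // Stand-in for the PETSc vector with ADD_VALUES semantics.
      std::map<int, double> vec;
      auto addValue = [&vec](int index, double value) { vec[index] += value; };

      double dofValue = 8.0;                        // value of the periodic row DOF
      int index = 42;                               // matrix row of the DOF itself
      std::set<int> mappedIndices = {43, 44, 45};   // rows of its periodic associations

      // Split the value evenly over the DOF and its associations.
      double value = dofValue / (mappedIndices.size() + 1.0);

      addValue(index, value);
      for (int mappedIndex : mappedIndices)
        addValue(mappedIndex, value);

      double sum = 0.0;
      for (const auto &entry : vec)
        sum += entry.second;
      std::cout << "per row: " << value << ", total: " << sum << std::endl;  // total 8
      return 0;
    }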