iwr / amdis · Commits

Commit a125ff82, authored 12 years ago by Thomas Witkowski
Parent: 0e951559

Blub, go home.
Showing 2 changed files with 43 additions and 54 deletions:

    AMDiS/src/parallel/ParallelDofMapping.h         10 additions, 0 deletions
    AMDiS/src/parallel/PetscSolverGlobalMatrix.cc   33 additions, 54 deletions
AMDiS/src/parallel/ParallelDofMapping.h  (+10 −0)
@@ -313,6 +313,11 @@ namespace AMDiS {
       needMatIndexFromGlobal = global;
     }
 
+    inline bool isMatIndexFromGlobal()
+    {
+      return needMatIndexFromGlobal;
+    }
+
     /// Access the DOF mapping for a given FE space.
     inline FeSpaceDofMap& operator[](const FiniteElemSpace* feSpace)
     {
@@ -436,6 +441,11 @@ namespace AMDiS {
     /// mapping from DOF indices to matrix row indices is defined on local
     /// or global DOF indices. If true, the mapping is to specify and to use
     /// on global ones, otherwise on local DOF indices.
+    /// In most scenarios the mapping stored on local DOF indices is what we
+    /// want to have. Only when periodic boundary conditions are used together
+    /// with some global matrix approach, the matrix indices must be stored
+    /// also for DOFs that are not part of the local subdomain. Thus, the
+    /// mapping will be stored on global DOF indices.
     bool needMatIndexFromGlobal;
 
     /// Maps from FE space pointers to DOF mappings.
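The new accessor is what lets index lookups branch on how the mapping is stored. A minimal usage sketch, shaped after the setDofVector() change in PetscSolverGlobalMatrix.cc further down (interiorMap, nRowVec, feSpace and dofIt are taken from that context):

    // Resolve the PETSc row index for the current DOF, respecting whether the
    // DOF-to-matrix-index mapping is keyed by global or by local DOF indices.
    int index = 0;
    if (interiorMap->isMatIndexFromGlobal()) {
      DegreeOfFreedom globalRowDof =
        (*interiorMap)[feSpace][dofIt.getDOFIndex()].global;
      index = interiorMap->getMatIndex(nRowVec, globalRowDof);
    } else {
      index = interiorMap->getMatIndex(nRowVec, dofIt.getDOFIndex());
    }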
AMDiS/src/parallel/PetscSolverGlobalMatrix.cc  (+33 −54)
@@ -65,12 +65,6 @@ namespace AMDiS {
 
     VecCreateMPI(mpiCommGlobal, nRankRows, nOverallRows, &petscSolVec);
 
-    int testddd = 1;
-    Parameters::get("block size", testddd);
-    if (testddd > 1)
-      VecSetBlockSize(petscSolVec, testddd);
-
     // === Create PETSc matrix with the computed nnz data structure. ===
@@ -78,11 +72,6 @@ namespace AMDiS {
                  nOverallRows, nOverallRows,
                  0, d_nnz, 0, o_nnz, &matIntInt);
 
-    if (testddd > 1) {
-      MatSetBlockSize(matIntInt, testddd);
-      MSG("MAT SET BLOCK SIZE: %d\n", testddd);
-    }
-
 #if (DEBUG != 0)
     MSG("Fill petsc matrix 1 needed %.5f seconds\n", MPI::Wtime() - wtime);
 #endif
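The deleted lines in these two hunks were an experiment: they read a "block size" parameter and, when it was larger than one, tagged the solution vector and the system matrix with it so that PETSc treats that many consecutive rows as one logical block. Stripped of the AMDiS parameter handling, the removed experiment amounted to the following sketch (bs stands in for the value that was read into testddd):

    if (bs > 1) {
      VecSetBlockSize(petscSolVec, bs); // blocked layout on the solution vector
      MatSetBlockSize(matIntInt, bs);   // matching block size on the matrix
    }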
@@ -328,33 +317,40 @@ namespace AMDiS {
   {
     FUNCNAME("PetscSolverGlobalMatrix::fillPetscRhs()");
 
-    if (coarseSpaceMap) {
-      fillPetscRhsWithCoarseSpace(vec);
-      return;
-    }
+    VecCreateMPI(mpiCommGlobal,
+                 interiorMap->getRankDofs(),
+                 nGlobalOverallInterior,
+                 &rhsInterior);
+
+    if (coarseSpaceMap)
+      VecCreateMPI(mpiCommGlobal,
+                   coarseSpaceMap->getRankDofs(),
+                   coarseSpaceMap->getOverallDofs(),
+                   &rhsCoarseSpace);
 
     TEST_EXIT_DBG(vec)("No DOF vector defined!\n");
     TEST_EXIT_DBG(interiorMap)("No parallel DOF map defined!\n");
 
-    int nRankRows = interiorMap->getRankDofs();
-    int nOverallRows = interiorMap->getOverallDofs();
-
-    VecCreateMPI(mpiCommGlobal, nRankRows, nOverallRows, &rhsInterior);
-
-    int testddd = 1;
-    Parameters::get("block size", testddd);
-    if (testddd > 1)
-      VecSetBlockSize(rhsInterior, testddd);
-
-    // === Transfer values from DOF vector to the PETSc vector. ===
-    for (int i = 0; i < vec->getSize(); i++)
-      setDofVector(rhsInterior, vec->getDOFVector(i), i);
+    if (coarseSpaceMap) {
+      fillPetscRhsWithCoarseSpace(vec);
+    } else {
+      // === Transfer values from DOF vector to the PETSc vector. ===
+      for (int i = 0; i < vec->getSize(); i++)
+        setDofVector(rhsInterior, vec->getDOFVector(i), i);
+    }
 
     VecAssemblyBegin(rhsInterior);
     VecAssemblyEnd(rhsInterior);
 
+    if (coarseSpaceMap) {
+      VecAssemblyBegin(rhsCoarseSpace);
+      VecAssemblyEnd(rhsCoarseSpace);
+    }
+
     if (removeRhsNullSpace) {
+      TEST_EXIT_DBG(coarseSpaceMap == NULL)("Not supported!\n");
+
       MSG("Remove constant null space from the RHS!\n");
       MatNullSpace sp;
       MatNullSpaceCreate(mpiCommGlobal, PETSC_TRUE, 0, PETSC_NULL, &sp);
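With this hunk, fillPetscRhs() owns the full life cycle of the right-hand side: it creates rhsInterior (and rhsCoarseSpace when a coarse space is used), lets either fillPetscRhsWithCoarseSpace() or the plain transfer loop insert values, and finishes the assembly itself; the two hunks below drop the now redundant creation and assembly code from the coarse-space path. A self-contained sketch of that PETSc create / insert / assemble cycle, using only illustrative names and values:

    #include <petscvec.h>

    // Sketch of the cycle fillPetscRhs() drives for rhsInterior.
    void assembleRhsSketch(MPI_Comm comm, PetscInt nLocalRows, PetscInt nGlobalRows)
    {
      Vec rhs;
      VecCreateMPI(comm, nLocalRows, nGlobalRows, &rhs); // local and global sizes

      // Any code path may add contributions, also to off-process rows.
      PetscInt row = 0;
      PetscScalar value = 1.0;
      VecSetValue(rhs, row, value, ADD_VALUES);

      // A single assembly pair at the end ships off-process contributions to
      // their owners; the vector must not be used before this has completed.
      VecAssemblyBegin(rhs);
      VecAssemblyEnd(rhs);

      VecDestroy(&rhs);
    }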
@@ -368,18 +364,6 @@ namespace AMDiS {
   {
     FUNCNAME("SubDomainSolver::fillPetscRhs()");
 
-    VecCreateMPI(mpiCommGlobal,
-                 interiorMap->getRankDofs(),
-                 nGlobalOverallInterior,
-                 &rhsInterior);
-
-    if (coarseSpaceMap)
-      VecCreateMPI(mpiCommGlobal,
-                   coarseSpaceMap->getRankDofs(),
-                   coarseSpaceMap->getOverallDofs(),
-                   &rhsCoarseSpace);
-
     for (int i = 0; i < vec->getSize(); i++) {
       const FiniteElemSpace* feSpace = vec->getDOFVector(i)->getFeSpace();
       DOFVector<double>::Iterator dofIt(vec->getDOFVector(i), USED_DOFS);
@@ -394,14 +378,6 @@ namespace AMDiS {
         }
       }
     }
-
-    VecAssemblyBegin(rhsInterior);
-    VecAssemblyEnd(rhsInterior);
-
-    if (coarseSpaceMap) {
-      VecAssemblyBegin(rhsCoarseSpace);
-      VecAssemblyEnd(rhsCoarseSpace);
-    }
   }
@@ -748,23 +724,26 @@ namespace AMDiS {
       DegreeOfFreedom globalRowDof =
         (*interiorMap)[feSpace][dofIt.getDOFIndex()].global;
       // Get PETSc's mat index of the row DOF.
-      int index = interiorMap->getMatIndex(nRowVec, globalRowDof);
+      int index = 0;
+      if (interiorMap->isMatIndexFromGlobal())
+        index = interiorMap->getMatIndex(nRowVec, globalRowDof);
+      else
+        index = interiorMap->getMatIndex(nRowVec, dofIt.getDOFIndex());
 
       if (perMap.isPeriodic(feSpace, globalRowDof)) {
         std::set<int>& perAsc = perMap.getAssociations(feSpace, globalRowDof);
         double value = *dofIt / (perAsc.size() + 1.0);
-        VecSetValues(petscVec, 1, &index, &value, ADD_VALUES);
+        VecSetValue(petscVec, index, value, ADD_VALUES);
 
         for (std::set<int>::iterator perIt = perAsc.begin();
              perIt != perAsc.end(); ++perIt) {
           int mappedDof = perMap.map(feSpace, *perIt, globalRowDof);
           int mappedIndex = interiorMap->getMatIndex(nRowVec, mappedDof);
-          VecSetValues(petscVec, 1, &mappedIndex, &value, ADD_VALUES);
+          VecSetValue(petscVec, mappedIndex, value, ADD_VALUES);
         }
       } else {
         // The DOF index is not periodic.
-        double value = *dofIt;
-        VecSetValues(petscVec, 1, &index, &value, ADD_VALUES);
+        VecSetValue(petscVec, index, *dofIt, ADD_VALUES);
       }
     }
   }
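Two things change in setDofVector(): the row index now honours isMatIndexFromGlobal(), and single entries go through VecSetValue instead of VecSetValues. The second change is purely cosmetic, since VecSetValue is PETSc's convenience form for inserting one entry; the two calls below add the same contribution. For periodic rows the value is still split evenly, *dofIt / (perAsc.size() + 1.0), over the row itself and each of its periodic associations, so the assembled right-hand side is unchanged.

    VecSetValues(petscVec, 1, &index, &value, ADD_VALUES); // old form: count plus pointers
    VecSetValue(petscVec, index, value, ADD_VALUES);        // new form: one entry directly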