Backofen, Rainer / amdis

Commit ef9e1c0f, authored Jan 18, 2012 by Thomas Witkowski

Work on usage of different FE spaces in parallel computations.

Parent: cfc99b53
Changes: 6 files
AMDiS/src/parallel/MeshDistributor.cc

@@ -1945,7 +1945,7 @@ namespace AMDiS {
       // Stores for all rank owned DOFs a new global index.
       DofIndexMap rankDofsNewGlobalIndex;
 
-      for (int i = 0; i < dofFeData[feSpace].nRankDofs; i++)
+      for (unsigned int i = 0; i < rankDofs.size(); i++)
        rankDofsNewGlobalIndex[rankDofs[i]] = i + dofFeData[feSpace].rStartDofs;
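The loop above gives each rank-owned DOF a contiguous global index starting at dofFeData[feSpace].rStartDofs, the rank's offset into the global numbering of that FE space. A minimal standalone sketch of this offset scheme (not AMDiS code; AMDiS computes rStartDofs itself, here it is derived with an MPI exclusive scan over an assumed per-rank count):

#include <mpi.h>
#include <vector>

int main(int argc, char** argv)
{
  MPI_Init(&argc, &argv);

  int rank;
  MPI_Comm_rank(MPI_COMM_WORLD, &rank);

  // Assumed number of DOFs owned by this rank.
  int nRankDofs = 100;

  // First global index owned by this rank: sum of the counts of all
  // lower ranks. MPI_Exscan leaves the result undefined on rank 0,
  // so reset it there.
  int rStartDofs = 0;
  MPI_Exscan(&nRankDofs, &rStartDofs, 1, MPI_INT, MPI_SUM, MPI_COMM_WORLD);
  if (rank == 0)
    rStartDofs = 0;

  // Mirrors rankDofsNewGlobalIndex[rankDofs[i]] = i + rStartDofs above.
  std::vector<int> newGlobalIndex(nRankDofs);
  for (int i = 0; i < nRankDofs; i++)
    newGlobalIndex[i] = i + rStartDofs;

  MPI_Finalize();
  return 0;
}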
@@ -1990,7 +1990,8 @@ namespace AMDiS {
       }
 
-      // === Create now the local to global index and local to DOF index mappings. ===
+      // === Create now the local to global index and local to DOF ===
+      // === index mappings.                                        ===
 
       dofFeData[feSpace].mapLocalGlobalDofs.clear();
       dofFeData[feSpace].mapLocalDofIndex.clear();
@@ -1999,11 +2000,11 @@ namespace AMDiS {
            dofIt != rankDofsNewGlobalIndex.end(); ++dofIt)
        dofFeData[feSpace].mapLocalGlobalDofs[*(dofIt->first)] = dofIt->second;
 
-      for (int i = 0; i < dofFeData[feSpace].nRankDofs; i++)
+      for (unsigned int i = 0; i < rankDofs.size(); i++)
        dofFeData[feSpace].mapLocalDofIndex[i] = *(rankDofs[i]);
 
-      // === Update dof admins due to new number of DOFs. ===
+      // === Update DOF admins due to new number of DOFs. ===
 
      lastMeshChangeIndex = mesh->getChangeIndex();
    }
AMDiS/src/parallel/MeshDistributor.h

@@ -167,7 +167,7 @@ namespace AMDiS {
      return feSpaces;
    }

-    /// Returns \ref nRankDOFs, the number of DOFs in the rank mesh.
+    /// Returns the number of DOFs in rank's domain for a given FE space.
    inline int getNumberRankDofs(const FiniteElemSpace *feSpace)
    {
      FUNCNAME("MeshDistributor::getNumberRankDofs()");
@@ -177,7 +177,21 @@ namespace AMDiS {
      return dofFeData[feSpace].nRankDofs;
    }

-    /// Returns \ref rStartDofs, the first global DOF index owned by rank.
+    /// Returns the number of DOFs in rank's domain for a set of FE spaces.
+    inline int getNumberRankDofs(vector<const FiniteElemSpace*>& feSpaces)
+    {
+      FUNCNAME("MeshDistributor::getNumberRankDofs()");
+
+      int result = 0;
+      for (unsigned int i = 0; i < feSpaces.size(); i++) {
+        TEST_EXIT_DBG(dofFeData.count(feSpaces[i]))("Should not happen!\n");
+        result += dofFeData[feSpaces[i]].nRankDofs;
+      }
+
+      return result;
+    }
+
+    /// Returns the first global DOF index of an FE space, owned by rank.
    inline int getStartDofs(const FiniteElemSpace *feSpace)
    {
      FUNCNAME("MeshDistributor::getStartDofs()");
@@ -187,7 +201,22 @@ namespace AMDiS {
      return dofFeData[feSpace].rStartDofs;
    }

-    /// Returns \ref nOverallDofs, the global number of DOFs.
+    /// Returns the first global DOF index for a set of FE spaces, owned by rank.
+    inline int getStartDofs(vector<const FiniteElemSpace*>& feSpaces)
+    {
+      FUNCNAME("MeshDistributor::getStartDofs()");
+
+      int result = 0;
+      for (unsigned int i = 0; i < feSpaces.size(); i++) {
+        TEST_EXIT_DBG(dofFeData.count(feSpaces[i]))("Should not happen!\n");
+        result += dofFeData[feSpaces[i]].rStartDofs;
+      }
+
+      return result;
+    }
+
+    /// Returns the global number of DOFs for a given FE space.
    inline int getNumberOverallDofs(const FiniteElemSpace *feSpace)
    {
      FUNCNAME("MeshDistributor::getNumberOverallDofs()");
@@ -197,6 +226,21 @@ namespace AMDiS {
      return dofFeData[feSpace].nOverallDofs;
    }

+    /// Returns the global number of DOFs for a set of FE spaces.
+    inline int getNumberOverallDofs(vector<const FiniteElemSpace*>& feSpaces)
+    {
+      FUNCNAME("MeshDistributor::getNumberOverallDofs()");
+
+      int result = 0;
+      for (unsigned int i = 0; i < feSpaces.size(); i++) {
+        TEST_EXIT_DBG(dofFeData.count(feSpaces[i]))("Should not happen!\n");
+        result += dofFeData[feSpaces[i]].nOverallDofs;
+      }
+
+      return result;
+    }
+
    inline DofMapping& getMapLocalGlobalDofs(const FiniteElemSpace *feSpace)
    {
      FUNCNAME("MeshDistributor::getMapLocalGlobalDofs()");
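With these vector-valued accessors, the size of a coupled system whose components use different FE spaces is obtained by summation, instead of multiplying one space's DOF count by the number of components. A hypothetical call site (velocityFeSpace and pressureFeSpace are assumed names, e.g. a Taylor-Hood discretization with two quadratic velocity components and one linear pressure):

vector<const FiniteElemSpace*> feSpaces;
feSpaces.push_back(velocityFeSpace);
feSpaces.push_back(velocityFeSpace);
feSpaces.push_back(pressureFeSpace);

int nRankRows    = meshDistributor->getNumberRankDofs(feSpaces);    // sum over all three
int nOverallRows = meshDistributor->getNumberOverallDofs(feSpaces);
int firstRow     = meshDistributor->getStartDofs(feSpaces);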
AMDiS/src/parallel/PetscSolver.cc

@@ -71,4 +71,64 @@ namespace AMDiS {
     VecScatterDestroy(&scatter);
   }
 
+  vector<const FiniteElemSpace*> PetscSolver::getFeSpaces(Matrix<DOFMatrix*> *mat)
+  {
+    FUNCNAME("PetscSolver::getFeSpaces()");
+
+    int nComponents = mat->getNumRows();
+    vector<const FiniteElemSpace*> result(nComponents);
+
+    for (int i = 0; i < nComponents; i++)
+      for (int j = 0; j < nComponents; j++)
+        if ((*mat)[i][j]) {
+          result[i] = (*mat)[i][j]->getRowFeSpace();
+          break;
+        }
+
+#if (DEBUG != 0)
+    // === In debug mode, we test if all FE spaces of the matrix are also ===
+    // === considered by the mesh distributor.                            ===
+    checkFeSpaces(result);
+#endif
+
+    return result;
+  }
+
+  vector<const FiniteElemSpace*> PetscSolver::getFeSpaces(SystemVector *vec)
+  {
+    FUNCNAME("PetscSolver::getFeSpaces()");
+
+    int nComponents = vec->getSize();
+    vector<const FiniteElemSpace*> result(nComponents);
+
+    for (int i = 0; i < nComponents; i++)
+      result[i] = vec->getFeSpace(i);
+
+#if (DEBUG != 0)
+    // === In debug mode, we test if all FE spaces of the matrix are also ===
+    // === considered by the mesh distributor.                            ===
+    checkFeSpaces(result);
+#endif
+
+    return result;
+  }
+
+  void PetscSolver::checkFeSpaces(vector<const FiniteElemSpace*>& feSpaces)
+  {
+    FUNCNAME("PetscSolver::checkFeSpaces()");
+
+    for (unsigned int i = 0; i < feSpaces.size(); i++) {
+      bool found = false;
+      for (unsigned int j = 0; j < meshDistributor->getFeSpaces().size(); j++)
+        if (feSpaces[i] == meshDistributor->getFeSpace(j)) {
+          found = true;
+          break;
+        }
+      TEST_EXIT(found)("FE spaces not found in mesh distributor!\n");
+    }
+  }
+
 }
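Note that getFeSpaces(Matrix<DOFMatrix*>*) takes the row FE space of the first non-null block in each block row, so a component whose entire block row is empty would keep a default-initialized (null) entry; the call sites assume at least one non-null block per row. A usage sketch (illustrative, mirroring the call sites added below in PetscSolverGlobalMatrix.cc):

vector<const FiniteElemSpace*> feSpaces = getFeSpaces(mat);
// In debug builds getFeSpaces() already ran checkFeSpaces(), so every
// entry is known to the mesh distributor.
int nRankRows    = meshDistributor->getNumberRankDofs(feSpaces);
int nOverallRows = meshDistributor->getNumberOverallDofs(feSpaces);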
AMDiS/src/parallel/PetscSolver.h

@@ -108,6 +108,16 @@ namespace AMDiS {
    void copyVec(Vec& originVec, Vec& destVec,
                 vector<int>& originIndex, vector<int>& destIndex);

+    /// Checks if all given FE spaces are handled by the mesh distributor.
+    void checkFeSpaces(vector<const FiniteElemSpace*>& feSpaces);
+
+    /// Returns a vector with the FE spaces of each row in the matrix. Thus, the
+    /// resulting vector may contain the same FE space multiple times.
+    vector<const FiniteElemSpace*> getFeSpaces(Matrix<DOFMatrix*> *mat);
+
+    /// Returns a vector with the FE spaces of all components of a system vector.
+    vector<const FiniteElemSpace*> getFeSpaces(SystemVector *vec);
+
  protected:
    MeshDistributor *meshDistributor;
AMDiS/src/parallel/PetscSolverGlobalMatrix.cc

@@ -20,16 +20,14 @@ namespace AMDiS {
   {
     FUNCNAME("PetscSolverGlobalMatrix::fillPetscMatrix()");
 
-    const FiniteElemSpace *feSpace = meshDistributor->getFeSpace(0);
-
     TEST_EXIT_DBG(meshDistributor)("No mesh distributor object defined!\n");
     TEST_EXIT_DBG(mat)("No DOF matrix defined!\n");
 
     double wtime = MPI::Wtime();
 
-    int nComponents = mat->getNumRows();
-    int nRankRows = meshDistributor->getNumberRankDofs(feSpace) * nComponents;
-    int nOverallRows = meshDistributor->getNumberOverallDofs(feSpace) * nComponents;
+    vector<const FiniteElemSpace*> feSpaces = getFeSpaces(mat);
+    createGlobalDofMapping(feSpaces);
+
+    int nRankRows = meshDistributor->getNumberRankDofs(feSpaces);
+    int nOverallRows = meshDistributor->getNumberOverallDofs(feSpaces);
 
     // === Create PETSc vector (solution and a temporary vector). ===
@@ -59,8 +57,13 @@ namespace AMDiS {
     // === Create PETSc matrix with the computed nnz data structure. ===
 
-    MatCreateMPIAIJ(PETSC_COMM_WORLD, nRankRows, nRankRows,
-                    nOverallRows, nOverallRows,
-                    0, d_nnz, 0, o_nnz, &petscMatrix);
+    //     MatCreateMPIAIJ(PETSC_COMM_WORLD, nRankRows, nRankRows,
+    //                     nOverallRows, nOverallRows,
+    //                     0, d_nnz, 0, o_nnz, &petscMatrix);
+
+    MatCreateMPIAIJ(PETSC_COMM_WORLD, nRankRows, nRankRows,
+                    nOverallRows, nOverallRows,
+                    30, PETSC_NULL, 30, PETSC_NULL, &petscMatrix);
 
 #if (DEBUG != 0)
     MSG("Fill petsc matrix 1 needed %.5f seconds\n", MPI::Wtime() - wtime);
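This hunk trades the exact preallocation from the computed d_nnz/o_nnz arrays for a fixed estimate of 30 nonzeros per row in both the diagonal and off-diagonal block, presumably because the nnz computation still assumes a single FE space. Rows exceeding the estimate trigger mallocs inside MatSetValues() during assembly. A sketch for measuring that cost with the PETSc API of this era (MatGetInfo() is a standard PETSc call; the exact message text is an assumption):

MatInfo info;
MatGetInfo(petscMatrix, MAT_LOCAL, &info);
MSG("Matrix assembly caused %d additional mallocs\n", (int) info.mallocs);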
@@ -69,15 +72,16 @@ namespace AMDiS {
 #if (DEBUG != 0)
     int a, b;
     MatGetOwnershipRange(petscMatrix, &a, &b);
-    TEST_EXIT(a == meshDistributor->getStartDofs(feSpace) * nComponents)
+    TEST_EXIT(a == meshDistributor->getStartDofs(feSpaces))
       ("Wrong matrix ownership range!\n");
-    TEST_EXIT(b == meshDistributor->getStartDofs(feSpace) * nComponents + nRankRows)
+    TEST_EXIT(b == meshDistributor->getStartDofs(feSpaces) + nRankRows)
       ("Wrong matrix ownership range!\n");
 #endif
 
     // === Transfer values from DOF matrices to the PETSc matrix. ===
 
+    int nComponents = mat->getNumRows();
     for (int i = 0; i < nComponents; i++)
       for (int j = 0; j < nComponents; j++)
        if ((*mat)[i][j])
@@ -110,22 +114,19 @@ namespace AMDiS {
   void PetscSolverGlobalMatrix::fillPetscRhs(SystemVector *vec)
   {
     FUNCNAME("PetscSolverGlobalMatrix::fillPetscRhs()");
 
-    const FiniteElemSpace *feSpace = meshDistributor->getFeSpace(0);
-
     TEST_EXIT_DBG(vec)("No DOF vector defined!\n");
     TEST_EXIT_DBG(meshDistributor)("No mesh distributor defined!\n");
 
-    int nComponents = vec->getSize();
-    int nRankRows = meshDistributor->getNumberRankDofs(feSpace) * nComponents;
-    int nOverallRows = meshDistributor->getNumberOverallDofs(feSpace) * nComponents;
+    vector<const FiniteElemSpace*> feSpaces = getFeSpaces(vec);
+    int nRankRows = meshDistributor->getNumberRankDofs(feSpaces);
+    int nOverallRows = meshDistributor->getNumberOverallDofs(feSpaces);
 
     VecCreateMPI(PETSC_COMM_WORLD, nRankRows, nOverallRows, &petscRhsVec);
 
     // === Transfer values from DOF vector to the PETSc vector. ===
-    for (int i = 0; i < nComponents; i++)
-      setDofVector(petscRhsVec, vec->getDOFVector(i), nComponents, i);
+    for (int i = 0; i < vec->getSize(); i++)
+      setDofVector(petscRhsVec, vec->getDOFVector(i), vec->getSize(), i);
 
     VecAssemblyBegin(petscRhsVec);
     VecAssemblyEnd(petscRhsVec);
@@ -155,15 +156,16 @@ namespace AMDiS {
     KSPSolve(solver, petscRhsVec, petscSolVec);
 
     // === Transfere values from PETSc's solution vectors to the DOF vectors. ===
-    int nRankDofs = meshDistributor->getNumberRankDofs(feSpace);
     PetscScalar *vecPointer;
     VecGetArray(petscSolVec, &vecPointer);
 
+    int c = 0;
     for (int i = 0; i < nComponents; i++) {
-      DOFVector<double> &dofvec = *(vec.getDOFVector(i));
-      for (int j = 0; j < nRankDofs; j++)
-        dofvec[meshDistributor->mapLocalToDofIndex(feSpace, j)] =
-          vecPointer[j * nComponents + i];
+      DOFVector<double> &dv = *(vec.getDOFVector(i));
+      int nRankDofs = meshDistributor->getNumberRankDofs(dv.getFeSpace());
+      for (int j = 0; j < nRankDofs; j++)
+        dv[meshDistributor->mapLocalToDofIndex(feSpace, j)] = vecPointer[c++];
     }
 
     VecRestoreArray(petscSolVec, &vecPointer);
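The copy-back changes the solution layout: with a single FE space the PETSc vector was read interleaved (index j * nComponents + i), which presumes every component has the same number of rank DOFs. With per-component DOF counts the entries are now consumed consecutively, component block by component block, via the running counter c. An illustration with assumed counts (not AMDiS code):

#include <cstdio>
#include <vector>

int main()
{
  // Blocked layout for two components u (3 rank DOFs) and p (2 rank DOFs):
  // [ u0 u1 u2 p0 p1 ]. The old interleaved formula j * nComponents + i
  // would have required [ u0 p0 u1 p1 ... ], i.e. equal counts.
  double petscSol[] = {1.0, 2.0, 3.0, 10.0, 20.0};
  std::vector<int> nRankDofs;
  nRankDofs.push_back(3);
  nRankDofs.push_back(2);

  int c = 0;
  for (unsigned int i = 0; i < nRankDofs.size(); i++)
    for (int j = 0; j < nRankDofs[i]; j++)
      printf("component %u, local dof %d = %f\n", i, j, petscSol[c++]);

  return 0;
}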
@@ -200,8 +202,6 @@ namespace AMDiS {
   {
     FUNCNAME("PetscSolverGlobalMatrix::setDofMatrix()");
 
-    const FiniteElemSpace *feSpace = meshDistributor->getFeSpace(0);
-
     TEST_EXIT(mat)("No DOFMatrix!\n");
 
     using mtl::tag::row;
     using mtl::tag::nz;
     using mtl::begin;
     using mtl::end;
@@ -229,7 +229,8 @@ namespace AMDiS {
         cend = end<row>(mat->getBaseMatrix()); cursor != cend; ++cursor) {
       // Global index of the current row DOF.
-      int globalRowDof = meshDistributor->mapLocalToGlobal(feSpace, *cursor);
+      int globalRowDof =
+        meshDistributor->mapLocalToGlobal(mat->getRowFeSpace(), *cursor);
       // Test if the current row DOF is a periodic DOF.
       bool periodicRow = meshDistributor->isPeriodicDof(globalRowDof);
@@ -237,8 +238,9 @@ namespace AMDiS {
        // === Row DOF index is not periodic. ===
 
        // Calculate PETSc row index.
-       int rowIndex = globalRowDof * dispMult + dispAddRow;
+       TEST_EXIT_DBG(dofToGlobalIndex.count(make_pair(*cursor, dispAddRow)))
+         ("Should not happen!\n");
+       int rowIndex = dofToGlobalIndex[make_pair(*cursor, dispAddRow)];
 
        cols.clear();
        values.clear();
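The PETSc row index is no longer computed arithmetically: the formula globalRowDof * dispMult + dispAddRow interleaves components and is only valid when all components share one FE space. Instead, the index is looked up in dofToGlobalIndex, which createGlobalDofMapping() fills per (local DOF index, component) pair; here dispAddRow plays the role of the component identifier. A side-by-side sketch of the two schemes (illustrative types, not AMDiS code):

#include <map>
#include <utility>

// Old scheme, single FE space: component blocks interleave arithmetically.
int oldPetscIndex(int globalDof, int nComponents, int component)
{
  return globalDof * nComponents + component;
}

// New scheme, mixed FE spaces: indices precomputed per (local DOF, component).
typedef std::map<std::pair<int, int>, int> DofIndexMap;

int newPetscIndex(const DofIndexMap& dofToGlobalIndex, int localDof, int component)
{
  return dofToGlobalIndex.find(std::make_pair(localDof, component))->second;
}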
@@ -248,11 +250,13 @@ namespace AMDiS {
          // Global index of the current column index.
          int globalColDof =
-           meshDistributor->mapLocalToGlobal(feSpace, col(*icursor));
+           meshDistributor->mapLocalToGlobal(mat->getColFeSpace(), col(*icursor));
          // Test if the current col dof is a periodic dof.
          bool periodicCol = meshDistributor->isPeriodicDof(globalColDof);
 
          // Calculate PETSc col index.
-         int colIndex = globalColDof * dispMult + dispAddCol;
+         TEST_EXIT_DBG(dofToGlobalIndex.count(make_pair(col(*icursor), dispAddCol)))
+           ("Should not happen!\n");
+         int colIndex = dofToGlobalIndex[make_pair(col(*icursor), dispAddCol)];
 
          // Ignore all zero entries, expect it is a diagonal entry.
          if (value(*icursor) == 0.0 && rowIndex != colIndex)
@@ -325,7 +329,8 @@ namespace AMDiS {
           icursor != icend; ++icursor) {
          // Global index of the current column index.
-         int globalColDof = meshDistributor->mapLocalToGlobal(feSpace, col(*icursor));
+         int globalColDof =
+           meshDistributor->mapLocalToGlobal(mat->getColFeSpace(), col(*icursor));
 
          // Ignore all zero entries, expect it is a diagonal entry.
          if (value(*icursor) == 0.0 && globalRowDof != globalColDof)
@@ -427,7 +432,7 @@ namespace AMDiS {
   {
     FUNCNAME("PetscSolverGlobalMatrix::setDofVector()");
 
-    const FiniteElemSpace *feSpace = meshDistributor->getFeSpace(0);
+    const FiniteElemSpace *feSpace = vec->getFeSpace();
 
     // Traverse all used DOFs in the dof vector.
     DOFVector<double>::Iterator dofIt(vec, USED_DOFS);
@@ -439,7 +444,9 @@ namespace AMDiS {
       DegreeOfFreedom globalRowDof =
        meshDistributor->mapLocalToGlobal(feSpace, dofIt.getDOFIndex());
       // Calculate PETSc index of the row DOF.
-      int index = globalRowDof * dispMult + dispAdd;
+      TEST_EXIT_DBG(dofToGlobalIndex.count(make_pair(dofIt.getDOFIndex(), dispAdd)))
+       ("Should not happen!\n");
+      int index = dofToGlobalIndex[make_pair(dofIt.getDOFIndex(), dispAdd)];
 
       if (meshDistributor->isPeriodicDof(globalRowDof)) {
        std::set<int>& perAsc = meshDistributor->getPerDofAssociations(globalRowDof);
@@ -652,4 +659,66 @@ namespace AMDiS {
       d_nnz[i] = std::min(d_nnz[i], nRankRows);
   }
 
+  void PetscSolverGlobalMatrix::createGlobalDofMapping(vector<const FiniteElemSpace*> &feSpaces)
+  {
+    FUNCNAME("PetscSolverGlobalMatrix::createGlobalDofMapping()");
+
+    int offset = meshDistributor->getStartDofs(feSpaces);
+    Mesh *mesh = meshDistributor->getMesh();
+
+    for (unsigned int i = 0; i < feSpaces.size(); i++) {
+      // === Create indices for all DOFs in rank' domain. ===
+      std::set<const DegreeOfFreedom*> rankDofSet;
+      mesh->getAllDofs(feSpaces[i], rankDofSet);
+      for (std::set<const DegreeOfFreedom*>::iterator it = rankDofSet.begin();
+           it != rankDofSet.end(); ++it)
+        if (meshDistributor->getIsRankDof(feSpaces[i], **it)) {
+          int localIndex = **it;
+          int globalIndex = meshDistributor->mapLocalToGlobal(feSpaces[i], localIndex) -
+            meshDistributor->getStartDofs(feSpaces[i]) + offset;
+          dofToGlobalIndex[make_pair(localIndex, i)] = globalIndex;
+        }
+
+      // === Communicated interior boundary DOFs between domains. ===
+
+      StdMpi<vector<int> > stdMpi(meshDistributor->getMpiComm());
+
+      for (DofComm::Iterator it(meshDistributor->getSendDofs(), feSpaces[i]);
+           !it.end(); it.nextRank()) {
+        vector<DegreeOfFreedom> sendGlobalDofs;
+        for (; !it.endDofIter(); it.nextDof()) {
+          int globalIndex = dofToGlobalIndex[make_pair(it.getDofIndex(), i)];
+          sendGlobalDofs.push_back(globalIndex);
+        }
+        stdMpi.send(it.getRank(), sendGlobalDofs);
+      }
+
+      for (DofComm::Iterator it(meshDistributor->getRecvDofs(), feSpaces[i]);
+           !it.end(); it.nextRank())
+        stdMpi.recv(it.getRank());
+
+      stdMpi.startCommunication();
+
+      for (DofComm::Iterator it(meshDistributor->getRecvDofs(), feSpaces[i]);
+           !it.end(); it.nextRank())
+        for (; !it.endDofIter(); it.nextDof()) {
+          DegreeOfFreedom localIndex = it.getDofIndex();
+          int globalIndex = stdMpi.getRecvData(it.getRank())[it.getDofCounter()];
+          dofToGlobalIndex[make_pair(localIndex, i)] = globalIndex;
+        }
+
+      // === Update offset. ===
+      offset += meshDistributor->getNumberRankDofs(feSpaces[i]);
+    }
+  }
+
 }
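createGlobalDofMapping() merges the per-space numberings into one combined numbering in which each rank owns a contiguous index range, as PETSc's row distribution requires: for FE space i, an owned DOF maps to mapLocalToGlobal(feSpaces[i], dof) - getStartDofs(feSpaces[i]) + offset, where offset starts at the sum of the per-space rank offsets and advances by the rank's DOF count of each processed space. A self-contained numerical sketch with assumed counts (rank 1 of two ranks; space 0 owns 3 DOFs starting at per-space index 4, space 1 owns 2 starting at 2):

#include <cstdio>

int main()
{
  const int nSpaces = 2;
  int rStartDofs[nSpaces] = {4, 2}; // per-space start of this rank (assumed)
  int nRankDofs[nSpaces]  = {3, 2}; // per-space count of this rank (assumed)

  // offset = getStartDofs(feSpaces): sum of the per-space starts.
  int offset = 0;
  for (int i = 0; i < nSpaces; i++)
    offset += rStartDofs[i];

  for (int i = 0; i < nSpaces; i++) {
    for (int d = 0; d < nRankDofs[i]; d++) {
      int perSpaceGlobal = rStartDofs[i] + d;
      int combined = perSpaceGlobal - rStartDofs[i] + offset;
      printf("space %d, dof %d -> combined index %d\n", i, d, combined);
    }
    offset += nRankDofs[i]; // as in the "Update offset" step above
  }
  // Prints combined indices 6..8 for space 0 and 9..10 for space 1, i.e.
  // this rank owns the contiguous range 6..10.
  return 0;
}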
AMDiS/src/parallel/PetscSolverGlobalMatrix.h

@@ -67,6 +67,8 @@ namespace AMDiS {
    void setDofVector(Vec& petscVec, DOFVector<double>* vec,
                      int disMult = 1, int dispAdd = 0, bool rankOnly = false);

+    void createGlobalDofMapping(vector<const FiniteElemSpace*> &feSpaces);
+
  protected:
    /// Arrays definig the non zero pattern of Petsc's matrix.
    int *d_nnz, *o_nnz;

@@ -88,6 +90,8 @@ namespace AMDiS {
    /// operators using DOFVectors from old timestep containing many zeros due to
    /// some phase fields.
    bool alwaysCreateNnzStructure;

+    map<pair<DegreeOfFreedom, int>, int> dofToGlobalIndex;
+
  };