Aland, Sebastian / amdis

Commit 83c8b38d
authored Aug 20, 2012 by Thomas Witkowski
parent ae15b32c

    On the way to a better world and I am not payed for greate commit comments.

Changes: 10 files
AMDiS/src/AMDiS_fwd.h

@@ -100,6 +100,7 @@ namespace AMDiS {
 #ifdef HAVE_PARALLEL_DOMAIN_AMDIS
   class ElementObjectDatabase;
   class FeSpaceDofMap;
+  class MatrixNnzStructure;
   class MeshLevelData;
 #endif
...
AMDiS/src/parallel/MatrixNnzStructure.cc

@@ -32,7 +32,7 @@ namespace AMDiS {
   }

-  void MatrixNnzStructure::create(Matrix<DOFMatrix*> *mat,
+  void MatrixNnzStructure::create(Matrix<DOFMatrix*>& mat,
                                   MPI::Intracomm mpiComm,
                                   ParallelDofMapping &rowDofMap,
                                   ParallelDofMapping &colDofMap,
...
@@ -69,15 +69,15 @@ namespace AMDiS {
     // receiving rank.
     map<pair<DegreeOfFreedom, int>, int> sendDofToRank;

-    int nComponents = mat->getNumRows();
+    int nComponents = mat.getNumRows();

     // First, create for all ranks, to which we send data to, MatrixNnzEntry
     // object with 0 entries.
     for (int i = 0; i < nComponents; i++) {
       const FiniteElemSpace* feSpace = NULL;
       for (int j = 0; j < nComponents; j++)
-        if ((*mat)[i][j])
-          feSpace = (*mat)[i][j]->getRowFeSpace();
+        if (mat[i][j])
+          feSpace = mat[i][j]->getRowFeSpace();

       TEST_EXIT_DBG(feSpace)("No FE space found!\n");
...
@@ -95,8 +95,8 @@ namespace AMDiS {
     for (int i = 0; i < nComponents; i++) {
       const FiniteElemSpace* feSpace = NULL;
       for (int j = 0; j < nComponents; j++)
-        if ((*mat)[i][j])
-          feSpace = (*mat)[i][j]->getRowFeSpace();
+        if (mat[i][j])
+          feSpace = mat[i][j]->getRowFeSpace();

       for (DofComm::Iterator it(rowDofMap.getDofComm().getSendDofs(), feSpace);
            !it.end(); it.nextRank())
...
@@ -108,7 +108,7 @@ namespace AMDiS {
     for (int rowComp = 0; rowComp < nComponents; rowComp++) {
       for (int colComp = 0; colComp < nComponents; colComp++) {
-        DOFMatrix* dofMat = (*mat)[rowComp][colComp];
+        DOFMatrix* dofMat = mat[rowComp][colComp];

         if (!dofMat)
           continue;
...
AMDiS/src/parallel/MatrixNnzStructure.h

@@ -40,7 +40,7 @@ namespace AMDiS {
     void clear();

-    void create(Matrix<DOFMatrix*> *mat,
+    void create(Matrix<DOFMatrix*>& mat,
                 MPI::Intracomm mpiComm,
                 ParallelDofMapping &rowDofMap,
                 ParallelDofMapping &colDofMap,
...
@@ -48,7 +48,7 @@ namespace AMDiS {
                 const ElementObjectDatabase& elObjDb,
                 bool localMatrix = false);

-    void create(Matrix<DOFMatrix*> *mat,
+    void create(Matrix<DOFMatrix*>& mat,
                 MPI::Intracomm mpiComm,
                 ParallelDofMapping &dofMap,
                 PeriodicMap *perMap,
...
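The only change to these declarations is Matrix<DOFMatrix*>* becoming Matrix<DOFMatrix*>&, so callers now pass the block matrix itself instead of its address, and the implementation indexes it as mat[i][j] rather than (*mat)[i][j]. A minimal sketch of a hypothetical call site, mirroring the single-mapping overload the way it is used later in this commit (the helper name buildNnzSketch is not part of AMDiS):

    // Sketch only -- a hypothetical caller adapting to the new reference signature.
    void buildNnzSketch(Matrix<DOFMatrix*>& seqMat,
                        MPI::Intracomm mpiComm,
                        ParallelDofMapping& dofMap,
                        PeriodicMap* perMap,
                        const ElementObjectDatabase& elObjDb,
                        MatrixNnzStructure& nnz)
    {
      // Before this commit the matrix went in by pointer:
      //   nnz.create(&seqMat, mpiComm, dofMap, perMap, elObjDb);
      // Now it goes in by reference and is indexed directly as seqMat[i][j]:
      nnz.create(seqMat, mpiComm, dofMap, perMap, elObjDb);
    }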
AMDiS/src/parallel/ParallelCoarseSpaceMatVec.cc

@@ -12,24 +12,39 @@
 #include "AMDiS.h"
 #include "parallel/ParallelCoarseSpaceMatVec.h"
+#include "parallel/MatrixNnzStructure.h"

 namespace AMDiS {

   using namespace std;

-  void ParallelCoarseSpaceMatVec::create(ParallelDofMapping *iMap,
-                                         map<int, ParallelDofMapping*> cMap,
-                                         int subdomainLevel,
-                                         MPI::Intracomm mpiCommLocal,
-                                         MPI::Intracomm mpiCommGlobal)
+  ParallelCoarseSpaceMatVec::ParallelCoarseSpaceMatVec()
+    : lastMeshNnz(0),
+      alwaysCreateNnzStructure(false)
   {
     FUNCNAME("ParallelCoarseSpaceMatVec::update()");
+
+    Parameters::get("parallel->always create nnz structure",
+                    alwaysCreateNnzStructure);
+  }
+
+  void ParallelCoarseSpaceMatVec::init(ParallelDofMapping *iMap,
+                                       map<int, ParallelDofMapping*> cMap,
+                                       int sdLevel,
+                                       MPI::Intracomm mcLocal,
+                                       MPI::Intracomm mcGlobal,
+                                       MeshDistributor *meshDist)
+  {
+    FUNCNAME("ParallelCoarseSpaceMatVec:init()");

     interiorMap = iMap;
     coarseSpaceMap = cMap;
     subdomainLevel = sdLevel;
     mpiCommLocal = mcLocal;
     mpiCommGlobal = mcGlobal;
     meshDistributor = meshDist;

-    vector<ParallelDofMapping*> uniqueCoarseMap;
+    uniqueCoarseMap.clear();
     if (coarseSpaceMap.size()) {
       std::set<ParallelDofMapping*> tmp;
       for (map<int, ParallelDofMapping*>::iterator it = coarseSpaceMap.begin();
...
@@ -59,30 +74,80 @@ namespace AMDiS {
       TEST_EXIT_DBG(found)("Should not happen!\n");
     }
   }

+  void ParallelCoarseSpaceMatVec::create(Matrix<DOFMatrix*>& seqMat)
+  {
+    FUNCNAME("ParallelCoarseSpaceMatVec::create()");
+
+    // === If required, recompute non zero structure of the matrix. ===
+
+    bool localMatrix = (coarseSpaceMap.size() && subdomainLevel == 0);
+    if (checkMeshChange(seqMat, localMatrix)) {
+      int nMat = uniqueCoarseMap.size() + 1;
+      nnz.resize(nMat);
+      for (int i = 0; i < nMat; i++) {
+        nnz.resize(nMat);
+        for (int j = 0; j < nMat; j++)
+          nnz[i][j].clear();
+      }
+
+      nnz[0][0].create(seqMat, mpiCommGlobal, *interiorMap,
+                       (coarseSpaceMap.size() == 0 ? &(meshDistributor->getPeriodicMap()) : NULL),
+                       meshDistributor->getElementObjectDb(),
+                       localMatrix);
+
+      for (int i = 0; i < nMat; i++) {
+        for (int j = 0; j < nMat; j++) {
+          if (i == 0 && j == 0)
+            continue;
+
+          if (i == j) {
+            nnz[i][j].create(seqMat, mpiCommGlobal, coarseSpaceMap, NULL,
+                             meshDistributor->getElementObjectDb());
+          } else {
+            ParallelDofMapping *rowMap = NULL;
+            ParallelDofMapping *colMap = NULL;
+
+            nnz[i][j].create(seqMat, mpiCommGlobal, *rowMap, *colMap, NULL,
+                             meshDistributor->getElementObjectDb());
+            /*
+            nnzCoarse.create(mat, mpiCommGlobal, *coarseSpaceMap, NULL, meshDistributor->getElementObjectDb());
+            nnzCoarseInt.create(mat, mpiCommGlobal, *coarseSpaceMap, *interiorMap, NULL, meshDistributor->getElementObjectDb());
+            nnzIntCoarse.create(mat, mpiCommGlobal, *interiorMap, *coarseSpaceMap, NULL, meshDistributor->getElementObjectDb());
+            */
+          }
+        }
+      }
+    }

     // === Create PETSc matrix with the computed nnz data structure. ===

     int nRankRows = interiorMap->getRankDofs();
     int nOverallRows = interiorMap->getOverallDofs();

-    bool localMatrix = (coarseSpaceMap.size() && subdomainLevel == 0);
     if (localMatrix) {
-      MatCreateSeqAIJ(mpiCommLocal, nRankRows, nOverallRows, 0, PETSC_NULL,
+      MatCreateSeqAIJ(mpiCommLocal, nRankRows, nRankRows, 100, PETSC_NULL,
                       &mat[0][0]);
       MatSetOption(mat[0][0], MAT_NEW_NONZERO_ALLOCATION_ERR, PETSC_FALSE);
     } else {
       MatCreateAIJ(mpiCommGlobal,
                    nRankRows, nRankRows,
                    nOverallRows, nOverallRows,
-                   0, PETSC_NULL, 0, PETSC_NULL,
+                   100, PETSC_NULL, 100, PETSC_NULL,
                    &mat[0][0]);
       MatSetOption(mat[0][0], MAT_NEW_NONZERO_ALLOCATION_ERR, PETSC_FALSE);
     }

     if (coarseSpaceMap.size()) {
       int nCoarseMap = uniqueCoarseMap.size();
       for (int i = 0; i < nCoarseMap; i++) {
         ParallelDofMapping* cMap = uniqueCoarseMap[i];
...
@@ -92,11 +157,11 @@ namespace AMDiS {
         MatCreateAIJ(mpiCommGlobal,
                      nRowsRankCoarse, nRowsRankCoarse,
                      nRowsOverallCoarse, nRowsOverallCoarse,
-                     0, PETSC_NULL, 0, PETSC_NULL,
-                     &mat[i][i]);
+                     100, PETSC_NULL, 100, PETSC_NULL,
+                     &mat[i + 1][i + 1]);
+        MSG("REMOVE THIS LINE WHEN FINISHED!\n");
-        MatSetOption(mat[i][i], MAT_NEW_NONZERO_ALLOCATION_ERR, PETSC_FALSE);
+        MatSetOption(mat[i + 1][i + 1], MAT_NEW_NONZERO_ALLOCATION_ERR, PETSC_FALSE);

         for (int j = 0; j < nCoarseMap + 1; j++) {
           int nRowsRankMat = (j == 0 ? nRankRows : uniqueCoarseMap[j - 1]->getRankDofs());
...
@@ -113,7 +178,7 @@ namespace AMDiS {
           MatCreateAIJ(mpiCommGlobal,
                        nRowsRankMat, nRowsRankCoarse,
                        nRowsOverallMat, nRowsOverallCoarse,
-                       0, PETSC_NULL, 0, PETSC_NULL,
+                       100, PETSC_NULL, 100, PETSC_NULL,
                        &mat[j][i + 1]);
           MSG("REMOVE THIS LINE WHEN FINISHED!\n");
           MatSetOption(mat[j][i + 1], MAT_NEW_NONZERO_ALLOCATION_ERR, PETSC_FALSE);
...
@@ -147,4 +212,30 @@ namespace AMDiS {
     }
   }

+  bool ParallelCoarseSpaceMatVec::checkMeshChange(Matrix<DOFMatrix*> &seqMat,
+                                                  bool localMatrix)
+  {
+    FUNCNAME("ParallelCoarseSpaceMatVec::checkMeshChange()");
+
+    int recvAllValues = 0;
+    int sendValue =
+      static_cast<int>(meshDistributor->getLastMeshChangeIndex() != lastMeshNnz);
+    mpiCommGlobal.Allreduce(&sendValue, &recvAllValues, 1, MPI_INT, MPI_SUM);
+
+    if (recvAllValues != 0 || alwaysCreateNnzStructure) {
+      vector<const FiniteElemSpace*> feSpaces = getFeSpaces(seqMat);
+
+      interiorMap->setComputeMatIndex(!localMatrix);
+      interiorMap->update(feSpaces);
+      // updateSubdomainData();
+
+      lastMeshNnz = meshDistributor->getLastMeshChangeIndex();
+
+      return true;
+    }
+
+    return false;
+  }
 }
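The matrix-creation changes above raise the fixed PETSc preallocation from 0 to 100 nonzeros per row while the per-row count arrays are still passed as PETSC_NULL, and they keep MAT_NEW_NONZERO_ALLOCATION_ERR disabled so that rows needing more than the reserved space do not abort the run. A minimal standalone sketch of that preallocation pattern outside of AMDiS, assuming PETSc has been initialized (the value 100 simply mirrors the commit, it is not a general recommendation, and error codes are ignored for brevity):

    // Sketch only: the preallocation pattern used in the diff above.
    #include <petscmat.h>

    static Mat createPreallocatedMat(MPI_Comm comm, PetscInt nRankRows, PetscInt nOverallRows)
    {
      Mat A;
      // d_nz = o_nz = 100: reserve 100 nonzeros per row in the diagonal and
      // off-diagonal blocks; PETSC_NULL means no exact per-row counts are given.
      MatCreateAIJ(comm, nRankRows, nRankRows, nOverallRows, nOverallRows,
                   100, PETSC_NULL, 100, PETSC_NULL, &A);
      // Do not treat additional nonzero allocations during assembly as an
      // error (they only cost time), matching the MatSetOption call above.
      MatSetOption(A, MAT_NEW_NONZERO_ALLOCATION_ERR, PETSC_FALSE);
      return A;
    }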
AMDiS/src/parallel/ParallelCoarseSpaceMatVec.h

@@ -44,21 +44,26 @@ namespace AMDiS {
   */
  class ParallelCoarseSpaceMatVec {
  public:
-    ParallelCoarseSpaceMatVec() {}
+    ParallelCoarseSpaceMatVec();

    /// Creates matrices and vectors with respect to the coarse space.
-    void create(ParallelDofMapping *interiorMap,
-                map<int, ParallelDofMapping*> coarseSpaceMap,
-                int subdomainLevel,
-                MPI::Intracomm mpiCommLocal,
-                MPI::Intracomm mpiCommGlobal);
+    void init(ParallelDofMapping *interiorMap,
+              map<int, ParallelDofMapping*> coarseSpaceMap,
+              int subdomainLevel,
+              MPI::Intracomm mpiCommLocal,
+              MPI::Intracomm mpiCommGlobal,
+              MeshDistributor *meshDistributor);
+
+    void create(Matrix<DOFMatrix*>& seqMat);

    /// Run PETSc's assembly routines.
    void assembly();

    void destroy();

+    bool checkMeshChange(Matrix<DOFMatrix*> &mat, bool localMatrix = false);
+
    inline Mat& getInteriorMat()
    {
      TEST_EXIT_DBG(mat.size() > 0)("No matrix data!\n");
...
@@ -105,6 +110,8 @@ namespace AMDiS {
  private:
    vector<vector<Mat> > mat;

+    vector<vector<MatrixNnzStructure> > nnz;
+
    ParallelDofMapping *interiorMap;

    /// Parallel DOF mapping of the (optional) coarse space. Allows to define
...
@@ -112,6 +119,30 @@ namespace AMDiS {
    map<int, ParallelDofMapping*> coarseSpaceMap;

    vector<int> componentIthCoarseMap;

+    vector<ParallelDofMapping*> uniqueCoarseMap;
+
+    MeshDistributor *meshDistributor;
+
    int subdomainLevel;

    MPI::Intracomm mpiCommLocal;

    MPI::Intracomm mpiCommGlobal;

+    /// Stores the mesh change index of the mesh the nnz structure was created for.
+    /// Therefore, if the mesh change index is higher than this value, we have to create
+    /// a new nnz structure for PETSc matrices, because the mesh has been changed and
+    /// therefore also the assembled matrix structure.
+    int lastMeshNnz;
+
+    /// If this variable is set to true, the non-zero matrix structure is
+    /// created each time from scratch by calling \ref createPetscNnzStrcuture.
+    /// This can be necessary if the number of non-zeros in the matrix varies
+    /// though the mesh does not change. This may happen if there are many
+    /// operators using DOFVectors from old timestep containing many zeros due to
+    /// some phase fields.
+    bool alwaysCreateNnzStructure;
  };
}
...
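After this header change, ParallelCoarseSpaceMatVec is no longer driven by a single create() call: the default constructor reads "parallel->always create nnz structure", init(...) stores the mappings, communicators and mesh distributor, and create(seqMat) recomputes the nnz structure whenever checkMeshChange() reports a change before building the preallocated PETSc matrices. A sketch of how a solver is expected to drive it, modeled on the petscData calls in the solver files below; the function and its parameters are illustrative, not AMDiS code:

    // Sketch only -- hypothetical driver mirroring the new call sequence
    // used by the PETSc solvers in this commit.
    void fillPetscMatrixSketch(ParallelCoarseSpaceMatVec& petscData,
                               Matrix<DOFMatrix*>& seqMat,
                               ParallelDofMapping* interiorMap,
                               map<int, ParallelDofMapping*> coarseSpaceMap,
                               int subdomainLevel,
                               MPI::Intracomm mpiCommLocal,
                               MPI::Intracomm mpiCommGlobal,
                               MeshDistributor* meshDistributor)
    {
      // 1. Hand over mappings, communicators and the mesh distributor once.
      petscData.init(interiorMap, coarseSpaceMap, subdomainLevel,
                     mpiCommLocal, mpiCommGlobal, meshDistributor);

      // 2. Recompute the nnz structure if the mesh changed on any rank,
      //    then create the preallocated PETSc matrices.
      petscData.create(seqMat);

      // ... copy entries of seqMat into the PETSc matrices here ...

      // 3. Finish with PETSc's assembly routines.
      petscData.assembly();
    }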
AMDiS/src/parallel/PetscSolver.cc

@@ -62,6 +62,8 @@ namespace AMDiS {
   void PetscSolver::updateSubdomainData()
   {
     FUNCNAME("PetscSolver::updateSubdomainData()");

     if (mpiCommLocal.Get_size() == 1) {
       rStartInterior = 0;
       nGlobalOverallInterior = interiorMap->getOverallDofs();
...
AMDiS/src/parallel/PetscSolverGlobalBlockMatrix.cc

@@ -26,8 +26,9 @@ namespace AMDiS {
     double wtime = MPI::Wtime();

-    petscData.create(interiorMap, coarseSpaceMap, subdomainLevel,
-                     mpiCommLocal, mpiCommGlobal);
+    petscData.init(interiorMap, coarseSpaceMap, subdomainLevel,
+                   mpiCommLocal, mpiCommGlobal, meshDistributor);

     const FiniteElemSpace *feSpace = meshDistributor->getFeSpace(0);
     nComponents = seqMat->getNumRows();
...
AMDiS/src/parallel/PetscSolverGlobalMatrix.cc

@@ -21,8 +21,10 @@ namespace AMDiS {
   {
     FUNCNAME("PetscSolverGlobalMatrix::fillPetscMatrix()");

-    petscData.create(interiorMap, coarseSpaceMap, subdomainLevel,
-                     mpiCommLocal, mpiCommGlobal);
+    petscData.init(interiorMap, coarseSpaceMap, subdomainLevel,
+                   mpiCommLocal, mpiCommGlobal, meshDistributor);
+
+    petscData.create();

     if (coarseSpaceMap.size()) {
       updateSubdomainData();
...
@@ -36,13 +38,6 @@ namespace AMDiS {
     double wtime = MPI::Wtime();

-    // === If required, recompute non zero structure of the matrix. ===
-
-    if (checkMeshChange(seqMat))
-      nnzInterior.create(seqMat, mpiCommGlobal, *interiorMap,
-                         &(meshDistributor->getPeriodicMap()),
-                         meshDistributor->getElementObjectDb());

     // === Create PETSc vector (solution and a temporary vector). ===

     int nRankRows = interiorMap->getRankDofs();
...
@@ -140,24 +135,6 @@ namespace AMDiS {
     int nRowsRankInterior = interiorMap->getRankDofs();
     int nRowsOverallInterior = interiorMap->getOverallDofs();

-    // === If required, recompute non zero structure of the matrix. ===
-
-    bool localMatrix = (subdomainLevel == 0);
-    if (checkMeshChange(seqMat, localMatrix)) {
-      nnzInterior.create(seqMat, mpiCommGlobal, *interiorMap, NULL,
-                         meshDistributor->getElementObjectDb(), localMatrix);
-
-      if (coarseSpaceMap.size()) {
-        MSG("NO COARSE SPACE NNZ!\n");
-        /*
-        nnzCoarse.create(mat, mpiCommGlobal, *coarseSpaceMap, NULL, meshDistributor->getElementObjectDb());
-        nnzCoarseInt.create(mat, mpiCommGlobal, *coarseSpaceMap, *interiorMap, NULL, meshDistributor->getElementObjectDb());
-        nnzIntCoarse.create(mat, mpiCommGlobal, *interiorMap, *coarseSpaceMap, NULL, meshDistributor->getElementObjectDb());
-        */
-      }
-    }

     // === Prepare traverse of sequentially created matrices. ===
...
@@ -416,6 +393,8 @@ namespace AMDiS {
     if (removeRhsNullspace) {
       TEST_EXIT_DBG(coarseSpaceMap.empty())("Not supported!\n");

+      MSG("Remove nullspace from rhs vector.\n");
       MatNullSpaceRemove(matNullspace, rhsInterior, PETSC_NULL);
     }
...
@@ -534,40 +513,6 @@ namespace AMDiS {
   }

-  bool PetscSolverGlobalMatrix::checkMeshChange(Matrix<DOFMatrix*> *mat,
-                                                bool localMatrix)
-  {
-    FUNCNAME("PetscSolverGlobalMatrix::checkMeshChange()");
-
-    int recvAllValues = 0;
-    int sendValue =
-      static_cast<int>(meshDistributor->getLastMeshChangeIndex() != lastMeshNnz);
-    mpiCommGlobal.Allreduce(&sendValue, &recvAllValues, 1, MPI_INT, MPI_SUM);
-
-    if (!nnzInterior.dnnz || recvAllValues != 0 || alwaysCreateNnzStructure) {
-      vector<const FiniteElemSpace*> feSpaces = getFeSpaces(mat);
-
-      interiorMap->setComputeMatIndex(!localMatrix);
-      interiorMap->update(feSpaces);
-
-      nnzInterior.clear();
-      if (coarseSpaceMap.size()) {
-        nnzCoarse.clear();
-        nnzCoarseInt.clear();
-        nnzIntCoarse.clear();
-      }
-
-      updateSubdomainData();
-      lastMeshNnz = meshDistributor->getLastMeshChangeIndex();
-
-      return true;
-    }
-
-    return false;
-  }

   void PetscSolverGlobalMatrix::createFieldSplit(PC pc)
   {
     FUNCNAME("PetscSolverGlobalMatrix::createFieldSplit()");
...
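The removed PetscSolverGlobalMatrix::checkMeshChange() and its replacement in ParallelCoarseSpaceMatVec rely on the same idea: each rank compares the current mesh-change index with the index the nnz structure was built for, and an MPI_SUM Allreduce turns the per-rank flags into a global "did any rank change" decision, so all ranks rebuild the matrix structure together. A minimal standalone sketch of that collective check, using the same MPI C++ bindings as the code above (the function and parameter names are illustrative only):

    // Sketch only: collective "rebuild the nnz structure?" decision, as done
    // by checkMeshChange() above. Assumes MPI::Init() has been called.
    #include <mpi.h>

    bool meshChangedOnAnyRank(MPI::Intracomm comm, int lastMeshChangeIndex, int lastMeshNnz)
    {
      // 1 if this rank's mesh changed since the nnz structure was built, else 0.
      int sendValue = static_cast<int>(lastMeshChangeIndex != lastMeshNnz);
      int recvAllValues = 0;

      // Sum over all ranks: a nonzero result means at least one rank changed,
      // so every rank must recreate its PETSc matrix structure.
      comm.Allreduce(&sendValue, &recvAllValues, 1, MPI_INT, MPI_SUM);

      return recvAllValues != 0;
    }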
AMDiS/src/parallel/PetscSolverGlobalMatrix.h

@@ -39,14 +39,10 @@ namespace AMDiS {
     PetscSolverGlobalMatrix()
       : PetscSolver(),
         petscSolVec(PETSC_NULL),
-        lastMeshNnz(0),
         zeroStartVector(false),
-        alwaysCreateNnzStructure(false),
         printMatInfo(false)
     {
       Parameters::get("parallel->use zero start vector", zeroStartVector);
-      Parameters::get("parallel->always create nnz structure", alwaysCreateNnzStructure);
       Parameters::get("parallel->print matrix info", printMatInfo);
     }
...
@@ -69,8 +65,6 @@ namespace AMDiS {
     void destroyVectorData();

   protected:
-    bool checkMeshChange(Matrix<DOFMatrix*> *mat, bool localMatrix = false);
-
     void createFieldSplit(PC pc);

     virtual void initPreconditioner(PC pc);
...
@@ -99,28 +93,10 @@ namespace AMDiS {
   protected:
-    MatrixNnzStructure nnzInterior, nnzCoarse;
-    MatrixNnzStructure nnzIntCoarse, nnzCoarseInt;

     Vec petscSolVec;

-    /// Stores the mesh change index of the mesh the nnz structure was created for.
-    /// Therefore, if the mesh change index is higher than this value, we have to create
-    /// a new nnz structure for PETSc matrices, because the mesh has been changed and
-    /// therefore also the assembled matrix structure.
-    int lastMeshNnz;

     bool zeroStartVector;

-    /// If this variable is set to true, the non-zero matrix structure is
-    /// created each time from scratch by calling \ref createPetscNnzStrcuture.
-    /// This can be necessary if the number of non-zeros in the matrix varies
-    /// though the mesh does not change. This may happen if there are many
-    /// operators using DOFVectors from old timestep containing many zeros due to
-    /// some phase fields.
-    bool alwaysCreateNnzStructure;

     /// If true, after parallel assembling, information about the matrix
     /// are printed.
     bool printMatInfo;
...
AMDiS/src/parallel/PetscSolverSchur.cc

@@ -180,8 +180,9 @@ namespace AMDiS {
   {
     FUNCNAME("PetscSolverSchur::fillPetscMatrix()");

-    petscData.create(interiorMap, coarseSpaceMap, subdomainLevel,
-                     mpiCommLocal, mpiCommGlobal);
+    petscData.init(interiorMap, coarseSpaceMap, subdomainLevel,
+                   mpiCommLocal, mpiCommGlobal, meshDistributor);

     const FiniteElemSpace *feSpace = meshDistributor->getFeSpace(0);
     int nComponents = seqMat->getNumRows();
...