Commit 6758eb3d
authored Nov 30, 2012 by Thomas Witkowski

    Before doing the last big change.

parent 531bf547
Changes: 8 files
AMDiS/src/ProblemStat.cc

@@ -513,7 +513,8 @@ namespace AMDiS {
     Parameters::get(initFileStr, solverType);
     OEMSolverCreator *solverCreator =
       dynamic_cast<OEMSolverCreator*>(CreatorMap<OEMSolver>::getCreator(solverType, initFileStr));
-    TEST_EXIT(solverCreator)("no solver type\n");
+    TEST_EXIT(solverCreator)
+      ("No valid solver type found in parameter \"%s\"\n", initFileStr.c_str());
     solverCreator->setName(initFileStr);
     solver = solverCreator->create();
     solver->initParameters();
...
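Note on the hunk above: the only functional change is the TEST_EXIT message, which now names the init-file parameter that failed to resolve. A condensed sketch of the same creator-map lookup, using a hypothetical parameter key (the key is not taken from the commit; all calls are the ones shown in the hunk):

  // Hypothetical key "myProblem->solver"; everything else mirrors the hunk above.
  std::string initFileStr = "myProblem->solver";
  std::string solverType;
  Parameters::get(initFileStr, solverType);   // e.g. "cg", "gmres", ...

  OEMSolverCreator *solverCreator =
    dynamic_cast<OEMSolverCreator*>(CreatorMap<OEMSolver>::getCreator(solverType, initFileStr));

  // A typo in the init file now reports the offending parameter name
  // instead of the generic "no solver type".
  TEST_EXIT(solverCreator)
    ("No valid solver type found in parameter \"%s\"\n", initFileStr.c_str());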
AMDiS/src/parallel/MeshDistributor.cc

@@ -228,7 +228,7 @@ namespace AMDiS {
     if (writePartMesh > 0) {
       debug::writeElementIndexMesh(mesh, debugOutputDir + "elementIndex");
-      ParallelDebug::writePartitioning(*this, debugOutputDir + "part.vtu");
+      ParallelDebug::writePartitioning(*this, debugOutputDir + "part");
     }
   }
...
@@ -901,7 +901,7 @@ namespace AMDiS {
   void MeshDistributor::createMeshLevelStructure()
   {
     FUNCNAME("MeshDistributor::createMeshLevelStructure()");

     if (mpiSize != 16)
       return;
...
@@ -1663,10 +1663,14 @@ namespace AMDiS {
 //    MSG("Memory usage of element object database = %5.f KByte\n",
 //        static_cast<double>(memsize / 1024.0));

+    MSG("MAKE INT BOUND!\n");
     intBoundary.create(levelData, 0, elObjDb);
     ParallelDebug::printBoundaryInfo(intBoundary);

+    MSG("TEST = %d\n", levelData.getLevelNumber());
     if (levelData.getLevelNumber() > 1) {
+      MSG("MAKE INT SD BOUND!\n");
       intBoundarySd.create(levelData, 1, elObjDb);
       ParallelDebug::printBoundaryInfo(intBoundarySd, 0, true);
     }
...
@@ -1847,18 +1851,11 @@ namespace AMDiS {
     int test = 0;
     Parameters::get("parallel->remove periodic boundary", test);

-    if (test == 0) {
-      ParallelDebug::testCommonDofs(*this, true);
-      ParallelDebug::testGlobalIndexByCoords(*this);
-      if (true){
-        //write latest mesh properties to vtu file
-        debug::writeElementIndexMesh(mesh, debugOutputDir + "elementIndex");
-        ParallelDebug::writePartitioning(*this, debugOutputDir + "part.vtu");
-        ParallelDebug::testCommonDofs(*this, true);
-        ParallelDebug::testGlobalIndexByCoords(*this);
-      }
-    }
+//  if (test == 0) {
+    //121106 replaced if statement
 #else
     for (int i = 0; i < static_cast<int>(dofMaps.size()); i++) {
...
AMDiS/src/parallel/MeshDistributor.h

@@ -191,6 +191,16 @@ namespace AMDiS {
       return mpiComm;
     }

+    inline MPI::Intracomm getLocalMpiComm()
+    {
+      if (levelData.getLevelNumber() == 1)
+        return PETSC_COMM_SELF;
+
+      TEST_EXIT(levelData.getLevelNumber() == 2)("Not yet implemented!\n");
+
+      return levelData.getMpiComm(1);
+    }
+
     inline bool isInitialized()
     {
       return initialized;
...
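The new getLocalMpiComm() accessor is what lets the one-argument setMeshDistributor() overload added in ParallelCoarseSpaceSolver.cc below work without being handed communicators explicitly. A minimal sketch of that consumption pattern (the helper name is illustrative and not part of the commit; the two accessor calls are):

  // Illustrative helper; mirrors the body of the new one-argument
  // ParallelCoarseSpaceSolver::setMeshDistributor(MeshDistributor*).
  void attachCommunicators(MeshDistributor *md,
                           MPI::Intracomm &mpiCommGlobal,
                           MPI::Intracomm &mpiCommLocal)
  {
    mpiCommGlobal = md->getMpiComm();       // communicator of the whole interior problem
    mpiCommLocal  = md->getLocalMpiComm();  // PETSC_COMM_SELF for one mesh level,
                                            // level-1 communicator for a two-level hierarchy
  }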
AMDiS/src/parallel/ParallelCoarseSpaceSolver.cc

@@ -61,6 +61,28 @@ namespace AMDiS {
   }

+  void ParallelCoarseSpaceSolver::setMeshDistributor(MeshDistributor *md)
+  {
+    FUNCNAME("ParallelCoarseSpaceSolver::setMeshDistributor()");
+
+    meshDistributor = md;
+    mpiCommGlobal = meshDistributor->getMpiComm();
+    mpiCommLocal = meshDistributor->getLocalMpiComm();
+  }
+
+
+  void ParallelCoarseSpaceSolver::setMeshDistributor(MeshDistributor *md,
+                                                     MPI::Intracomm mpiComm0,
+                                                     MPI::Intracomm mpiComm1)
+  {
+    FUNCNAME("ParallelCoarseSpaceSolver::setMeshDistributor()");
+
+    meshDistributor = md;
+    mpiCommGlobal = mpiComm0;
+    mpiCommLocal = mpiComm1;
+  }
+
   void ParallelCoarseSpaceSolver::prepare()
   {
     FUNCNAME("ParallelCoarseSpaceSolver:prepare()");
...
@@ -116,7 +138,7 @@ namespace AMDiS {
     bool localMatrix = (coarseSpaceMap.size() && subdomainLevel == 0);

     if (checkMeshChange()) {
       int nMat = uniqueCoarseMap.size() + 1;
       nnz.resize(nMat);
       for (int i = 0; i < nMat; i++) {
         nnz[i].resize(nMat);
...
@@ -151,21 +173,52 @@ namespace AMDiS {
     int nRankInteriorRows = interiorMap->getRankDofs();
     int nOverallInteriorRows = interiorMap->getOverallDofs();

+    MPI::Intracomm mpiGlobal =
+      meshDistributor->getMeshLevelData().getMpiComm(subdomainLevel);
+
     if (localMatrix) {
       MatCreateSeqAIJ(mpiCommLocal, nRankInteriorRows, nRankInteriorRows,
                       0, nnz[0][0].dnnz,
                       &mat[0][0]);
     } else {
-      MatCreateAIJ(mpiCommGlobal, nRankInteriorRows, nRankInteriorRows,
-                   nOverallInteriorRows, nOverallInteriorRows,
-                   0, nnz[0][0].dnnz, 0, nnz[0][0].onnz,
-                   &mat[0][0]);
+      if (subdomainLevel == 0) {
+        MatCreateAIJ(mpiGlobal, nRankInteriorRows, nRankInteriorRows,
+                     nOverallInteriorRows, nOverallInteriorRows,
+                     0, nnz[0][0].dnnz, 0, nnz[0][0].onnz,
+                     &mat[0][0]);
+      } else {
+        MSG("WARNING: USE MULTILEVEL, MAKE THIS GENERAL!\n");
+        // TODO: MAKE NNZ CALCULATION GENERAL (see also creation of coarse space
+        //       matrices some lines below)
+        MatCreateAIJ(mpiGlobal, nRankInteriorRows, nRankInteriorRows,
+                     nOverallInteriorRows, nOverallInteriorRows,
+                     300, PETSC_NULL, 300, PETSC_NULL,
+                     &mat[0][0]);
+      }
     }

     MatSetOption(mat[0][0], MAT_NEW_NONZERO_ALLOCATION_ERR, PETSC_FALSE);

-    VecCreateMPI(mpiCommGlobal, nRankInteriorRows, nOverallInteriorRows, &vecSol[0]);
-    VecCreateMPI(mpiCommGlobal, nRankInteriorRows, nOverallInteriorRows, &vecRhs[0]);
+    if (subdomainLevel == 0) {
+      VecCreateMPI(mpiGlobal, nRankInteriorRows, nOverallInteriorRows, &vecSol[0]);
+      VecCreateMPI(mpiGlobal, nRankInteriorRows, nOverallInteriorRows, &vecRhs[0]);
+    } else {
+      MSG("WARNING: USE MULTILEVEL AND THIS IS A VERY BAD HACK, MAKE GENERAL!!\n");
+
+      MPI::Intracomm mpiComm =
+        meshDistributor->getMeshLevelData().getMpiComm(subdomainLevel - 1);
+      int nAllRows = nRankInteriorRows;
+      mpi::globalAdd(nAllRows);
+      VecCreateMPI(mpiComm, nRankInteriorRows, nAllRows, &vecSol[0]);
+      VecCreateMPI(mpiComm, nRankInteriorRows, nAllRows, &vecRhs[0]);
+    }

     int nCoarseMap = uniqueCoarseMap.size();
     for (int i = 0; i < nCoarseMap; i++) {
...
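The branches in the hunk above differ mainly in how the PETSc matrix is preallocated: the level-0 path passes exact per-row nonzero counts (dnnz/onnz), while the multilevel fallback uses a flat bound of 300 entries per row and then relies on MAT_NEW_NONZERO_ALLOCATION_ERR being disabled. A standalone sketch of the two strategies with made-up sizes, intended to be run on a single rank (this is an illustration, not code from the commit):

  #include <petscmat.h>
  #include <vector>

  int main(int argc, char **argv)
  {
    PetscInitialize(&argc, &argv, PETSC_NULL, PETSC_NULL);

    PetscInt nRankRows = 1000, nOverallRows = 1000;                 // single rank: local == global
    std::vector<PetscInt> dnnz(nRankRows, 27), onnz(nRankRows, 9);  // assumed per-row estimates

    Mat A;
    // Exact preallocation (the subdomainLevel == 0 branch): per-row counts
    // for the diagonal and off-diagonal blocks.
    MatCreateAIJ(PETSC_COMM_WORLD, nRankRows, nRankRows, nOverallRows, nOverallRows,
                 0, &dnnz[0], 0, &onnz[0], &A);

    // Multilevel fallback (the else branch): a fixed bound per row, e.g.
    //   MatCreateAIJ(comm, m, n, M, N, 300, PETSC_NULL, 300, PETSC_NULL, &A);

    // Tolerate entries outside the preallocated pattern instead of aborting.
    MatSetOption(A, MAT_NEW_NONZERO_ALLOCATION_ERR, PETSC_FALSE);

    MatDestroy(&A);
    PetscFinalize();
    return 0;
  }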
@@ -174,16 +227,30 @@ namespace AMDiS {
       int nRankCoarseRows = cMap->getRankDofs();
       int nOverallCoarseRows = cMap->getOverallDofs();

-      MatCreateAIJ(mpiCommGlobal,
-                   nRankCoarseRows, nRankCoarseRows,
-                   nOverallCoarseRows, nOverallCoarseRows,
-                   0, nnz[i + 1][i + 1].dnnz, 0, nnz[i + 1][i + 1].onnz,
-                   &mat[i + 1][i + 1]);
+      if (subdomainLevel == 0) {
+        MatCreateAIJ(mpiCommGlobal,
+                     nRankCoarseRows, nRankCoarseRows,
+                     nOverallCoarseRows, nOverallCoarseRows,
+                     0, nnz[i + 1][i + 1].dnnz, 0, nnz[i + 1][i + 1].onnz,
+                     &mat[i + 1][i + 1]);
+      } else {
+        MSG("WARNING: USE MULTILEVEL AND THIS IS A VERY BAD HACK, MAKE GENERAL!!\n");
+
+        MPI::Intracomm mpiComm =
+          meshDistributor->getMeshLevelData().getMpiComm(subdomainLevel - 1);
+        MatCreateAIJ(mpiComm, nRankCoarseRows, nRankCoarseRows,
+                     nOverallCoarseRows, nOverallCoarseRows,
+                     300, PETSC_NULL, 300, PETSC_NULL, &mat[i + 1][i + 1]);
+      }
+
       MatSetOption(mat[i + 1][i + 1], MAT_NEW_NONZERO_ALLOCATION_ERR, PETSC_FALSE);

       cMap->createVec(vecSol[i + 1]);
       cMap->createVec(vecRhs[i + 1]);
     }

     for (int i = 0; i < nCoarseMap + 1; i++) {
       for (int j = 0; j < nCoarseMap + 1; j++) {
         if (i == j)
...
@@ -195,18 +262,43 @@ namespace AMDiS {
         int nColsRankMat = (j == 0 ? nRankInteriorRows : uniqueCoarseMap[j - 1]->getRankDofs());
         int nColsOverallMat = (j == 0 ? nOverallInteriorRows : uniqueCoarseMap[j - 1]->getOverallDofs());

-        MatCreateAIJ(mpiCommGlobal,
-                     nRowsRankMat, nColsRankMat,
-                     nRowsOverallMat, nColsOverallMat,
-                     0, nnz[i][j].dnnz, 0, nnz[i][j].onnz,
-                     &mat[i][j]);
-        MatCreateAIJ(mpiCommGlobal,
-                     nColsRankMat, nRowsRankMat,
-                     nColsOverallMat, nRowsOverallMat,
-                     0, nnz[j][i].dnnz, 0, nnz[j][i].onnz,
-                     &mat[j][i]);
+        if (subdomainLevel == 0) {
+          MatCreateAIJ(mpiCommGlobal,
+                       nRowsRankMat, nColsRankMat,
+                       nRowsOverallMat, nColsOverallMat,
+                       0, nnz[i][j].dnnz, 0, nnz[i][j].onnz,
+                       &mat[i][j]);
+          MatCreateAIJ(mpiCommGlobal,
+                       nColsRankMat, nRowsRankMat,
+                       nColsOverallMat, nRowsOverallMat,
+                       0, nnz[j][i].dnnz, 0, nnz[j][i].onnz,
+                       &mat[j][i]);
+        } else {
+          MSG("WARNING: USE MULTILEVEL AND THIS IS A VERY BAD HACK, MAKE GENERAL!!\n");
+
+          if (i == 0) {
+            nRowsOverallMat = nRowsRankMat;
+            mpi::globalAdd(nRowsOverallMat);
+          }
+          if (j == 0) {
+            nColsOverallMat = nColsRankMat;
+            mpi::globalAdd(nColsOverallMat);
+          }
+
+          MatCreateAIJ(mpiCommGlobal, nRowsRankMat, nColsRankMat,
+                       nRowsOverallMat, nColsOverallMat,
+                       300, PETSC_NULL, 300, PETSC_NULL, &mat[i][j]);
+          MatCreateAIJ(mpiCommGlobal, nColsRankMat, nRowsRankMat,
+                       nColsOverallMat, nRowsOverallMat,
+                       300, PETSC_NULL, 300, PETSC_NULL, &mat[j][i]);
+        }
       }
     }
   }
...
AMDiS/src/parallel/ParallelCoarseSpaceSolver.h

@@ -78,15 +78,13 @@ namespace AMDiS {
     void setCoarseSpaceDofMapping(ParallelDofMapping *coarseDofs,
                                   int component = -1);

-    /// Set mesh distributor object and MPI communicators.
-    void setMeshDistributor(MeshDistributor *m,
-                            MPI::Intracomm mpiComm0,
-                            MPI::Intracomm mpiComm1)
-    {
-      meshDistributor = m;
-      mpiCommGlobal = mpiComm0;
-      mpiCommLocal = mpiComm1;
-    }
+    /// Set mesh distributor object
+    void setMeshDistributor(MeshDistributor *m);
+
+    /// Set mesh distributor object
+    void setMeshDistributor(MeshDistributor *md,
+                            MPI::Intracomm mpiComm0,
+                            MPI::Intracomm mpiComm1);

     /// Set level of the interior discretization. Is only used for
     /// multi-level methods.
...
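For reference, the two call sites this split enables elsewhere in the commit (quoted from PetscProblemStat.cc and PetscSolverFeti.cc):

  // Standard case: communicators are derived from the mesh distributor itself.
  petscSolver->setMeshDistributor(meshDistributor);

  // Multilevel case: the communicators of the two hierarchy levels are passed explicitly.
  subdomain->setMeshDistributor(meshDistributor,
                                levelData.getMpiComm(meshLevel - 1),
                                levelData.getMpiComm(meshLevel));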
AMDiS/src/parallel/PetscProblemStat.cc

@@ -55,9 +55,7 @@ namespace AMDiS {
     meshDistributor->setBoundaryDofRequirement(petscSolver->getBoundaryDofRequirement());

-    petscSolver->setMeshDistributor(meshDistributor,
-                                    meshDistributor->getMpiComm(), PETSC_COMM_SELF);
+    petscSolver->setMeshDistributor(meshDistributor);

     petscSolver->init(this->getComponentSpaces(), this->getFeSpaces());
   }
...
AMDiS/src/parallel/PetscSolverFeti.cc

@@ -101,13 +101,25 @@ namespace AMDiS {
   {
     FUNCNAME("PetscSolverFeti::initialize()");

+    MSG("INIT WITH MESH-LEVEL: %d\n", meshLevel);
+
     TEST_EXIT_DBG(meshLevel + 1 == meshDistributor->getMeshLevelData().getLevelNumber())
       ("Mesh hierarchy does not contain %d levels!\n", meshLevel + 1);

     MeshLevelData& levelData = meshDistributor->getMeshLevelData();

     if (subdomain == NULL) {
-      subdomain = new PetscSolverGlobalMatrix("");
+      string subSolverInitStr = initFileStr + "->subsolver";
+      string solverType = "petsc";
+      Parameters::get(subSolverInitStr, solverType);
+      OEMSolverCreator *solverCreator =
+        dynamic_cast<OEMSolverCreator*>(CreatorMap<OEMSolver>::getCreator(solverType, initFileStr));
+      TEST_EXIT(solverCreator)
+        ("No valid solver type found in parameter \"%s\"\n", initFileStr.c_str());
+      solverCreator->setName(subSolverInitStr);
+      subdomain = dynamic_cast<PetscSolver*>(solverCreator->create());
+      subdomain->initParameters();

       subdomain->setSymmetric(isSymmetric);
       if (dirichletMode == 0)
         subdomain->setHandleDirichletRows(true);
...
@@ -115,14 +127,15 @@ namespace AMDiS {
         subdomain->setHandleDirichletRows(false);

       if (meshLevel == 0) {
-        subdomain->setMeshDistributor(meshDistributor,
-                                      mpiCommGlobal, mpiCommLocal);
+        subdomain->setMeshDistributor(meshDistributor);
       } else {
         subdomain->setMeshDistributor(meshDistributor,
                                       levelData.getMpiComm(meshLevel - 1),
                                       levelData.getMpiComm(meshLevel));
         subdomain->setLevel(meshLevel);
       }
+
+      delete solverCreator;
     }

     primalDofMap.init(levelData, componentSpaces, feSpaces);
...
@@ -318,8 +331,10 @@ namespace AMDiS {
         WorldVector<double> c;
         feSpace->getMesh()->getDofIndexCoords(*it, feSpace, c);

-        if (fabs(c[0]) < e || fabs(c[1]) < e ||
-            fabs(c[0] - 25.0) < e || fabs(c[1] - 25.0) < e ||
+        if ((fabs(c[0]) < e && fabs(c[1] - 12.5) < e) ||
+            (fabs(c[0] - 25.0) < e && fabs(c[1] - 12.5) < e) ||
+            (fabs(c[0] - 12.5) < e && fabs(c[1]) < e) ||
+            (fabs(c[0] - 12.5) < e && fabs(c[1] - 25.0) < e) ||
             (fabs(c[0] - 12.5) < e && fabs(c[1] - 12.5) < e)) {
           MSG("PRIMAL COORD %f %f\n", c[0], c[1]);
           primals.insert(**it);
...
@@ -552,7 +567,6 @@ namespace AMDiS {
   {
     FUNCNAME("PetscSolverFeti::createIndexB()");

     const FiniteElemSpace *feSpace = componentSpaces[component];
     DOFAdmin* admin = feSpace->getAdmin();
...
@@ -937,8 +951,17 @@ namespace AMDiS {
     if (augmentedLagrange == false) {
       schurPrimalData.subSolver = subdomain;

-      localDofMap.createVec(schurPrimalData.tmp_vec_b, nGlobalOverallInterior);
+      if (meshLevel == 0) {
+        localDofMap.createVec(schurPrimalData.tmp_vec_b, nGlobalOverallInterior);
+      } else {
+        MSG("WARNING: MULTILEVEL SUPER SHIT, MAKE GENERAL!\n");
+        VecCreateMPI(meshDistributor->getMeshLevelData().getMpiComm(meshLevel - 1),
+                     localDofMap.getRankDofs(), nGlobalOverallInterior,
+                     &(schurPrimalData.tmp_vec_b));
+      }
+
       primalDofMap.createVec(schurPrimalData.tmp_vec_primal);

       MatCreateShell(mpiCommGlobal,
                      primalDofMap.getRankDofs(),
...
@@ -1221,7 +1244,15 @@ namespace AMDiS {
     fetiData.subSolver = subdomain;
     fetiData.ksp_schur_primal = &ksp_schur_primal;

-    localDofMap.createVec(fetiData.tmp_vec_b0, nGlobalOverallInterior);
+    if (meshLevel == 0) {
+      localDofMap.createVec(fetiData.tmp_vec_b0, nGlobalOverallInterior);
+    } else {
+      MSG("WARNING: MULTILEVEL SUPER SHIT, MAKE GENERAL!\n");
+      VecCreateMPI(meshDistributor->getMeshLevelData().getMpiComm(meshLevel - 1),
+                   localDofMap.getRankDofs(), nGlobalOverallInterior,
+                   &(fetiData.tmp_vec_b0));
+    }
     lagrangeMap.createVec(fetiData.tmp_vec_lagrange);
     primalDofMap.createVec(fetiData.tmp_vec_primal0);
...
@@ -1596,7 +1627,7 @@ namespace AMDiS {
 #if (DEBUG != 0)
     PetscInt nRow, nCol;
-    MatGetSize(subdomain->getMatInterior(), &nRow, &nCol);
+    MatGetLocalSize(subdomain->getMatInterior(), &nRow, &nCol);
     mpi::globalAdd(nRow);
    mpi::globalAdd(nCol);
...
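The MatGetSize to MatGetLocalSize switch matters presumably because the values are summed over all ranks right afterwards via mpi::globalAdd(), which only yields a meaningful global count if each rank contributes its own share. Schematic fragment (assumes an assembled matrix A distributed over 16 ranks; sizes are invented):

  PetscInt nRow, nCol;
  MatGetSize(A, &nRow, &nCol);        // global size, identical on every rank (e.g. 16000)
  MatGetLocalSize(A, &nRow, &nCol);   // this rank's share only (e.g. 1000)
  mpi::globalAdd(nRow);               // summing the local sizes recovers the global count
  mpi::globalAdd(nCol);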
@@ -1893,7 +1924,9 @@ namespace AMDiS {
     createDirichletData(*mat);
     createFetiData();

+    TEST_EXIT(coarseSpaceMap.size() == 0)("This is not yet supported!\n");
+
     // === Create matrices for the FETI-DP method. ===

     if (printTimings)
...
@@ -1944,6 +1977,8 @@ namespace AMDiS {
   {
     FUNCNAME("PetscSolverFeti::fillPetscRhs()");

+    TEST_EXIT(coarseSpaceMap.size() == 0)("This is not yet supported!\n");
+
     subdomain->fillPetscRhs(vec);
   }
...
@@ -2193,8 +2228,18 @@ namespace AMDiS {
     // === Some temporary vectors. ===

     Vec tmp_b0, tmp_b1;
-    localDofMap.createVec(tmp_b0, nGlobalOverallInterior);
-    localDofMap.createVec(tmp_b1, nGlobalOverallInterior);
+    if (meshLevel == 0) {
+      localDofMap.createVec(tmp_b0, nGlobalOverallInterior);
+      localDofMap.createVec(tmp_b1, nGlobalOverallInterior);
+    } else {
+      MSG("WARNING: MULTILEVEL SUPER SHIT, MAKE GENERAL!\n");
+      VecCreateMPI(meshDistributor->getMeshLevelData().getMpiComm(meshLevel - 1),
+                   localDofMap.getRankDofs(), nGlobalOverallInterior, &tmp_b0);
+      VecCreateMPI(meshDistributor->getMeshLevelData().getMpiComm(meshLevel - 1),
+                   localDofMap.getRankDofs(), nGlobalOverallInterior, &tmp_b1);
+    }

     Vec tmp_primal0, tmp_primal1;
     primalDofMap.createVec(tmp_primal0);
...
AMDiS/src/parallel/PetscSolverGlobalMatrix.cc

@@ -825,9 +825,7 @@ namespace AMDiS {
     PetscSolver* subSolver = new PetscSolverGlobalMatrix("");
     subSolver->setKspPrefix(kspPrefix);
-    subSolver->setMeshDistributor(meshDistributor,
-                                  mpiCommGlobal, mpiCommLocal);
+    subSolver->setMeshDistributor(meshDistributor);