Commit 5fba743d, authored May 11, 2012 by Thomas Witkowski

    Blub, part 2

Parent: 2c96156e
Changes: 2 files
AMDiS/src/parallel/PetscSolverFeti.cc
@@ -692,6 +692,7 @@ namespace AMDiS {
          localDofMap.getMatIndex(k, it->first) + rStartInterior;
        double value = (W[i] == mpiRank ? 1.0 : -1.0);
        MSG("SET VALUE: %f\n", value);
        MatSetValue(mat_lagrange, index, colIndex, value, INSERT_VALUES);
      }
      index++;
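The hunk above fills mat_lagrange one entry at a time: each rank inserts +1.0 into a constraint row for an interface DOF copy it owns and -1.0 otherwise, which is the usual signed jump-operator assembly in FETI-type methods. A minimal self-contained sketch of the same pattern (not AMDiS code; buildJumpMatrix, ownerCol, and otherCol are hypothetical names, and a recent PETSc providing MatSetUp is assumed):

    #include <petscmat.h>

    /* Assemble a constraint matrix B with one +1/-1 pair per row,
     * mirroring the (W[i] == mpiRank ? 1.0 : -1.0) insertion above. */
    static PetscErrorCode buildJumpMatrix(MPI_Comm comm, PetscInt nConstraints,
                                          PetscInt nDofs, const PetscInt *ownerCol,
                                          const PetscInt *otherCol, Mat *B)
    {
      MatCreate(comm, B);
      MatSetSizes(*B, PETSC_DECIDE, PETSC_DECIDE, nConstraints, nDofs);
      MatSetType(*B, MATAIJ);
      MatSetUp(*B);

      for (PetscInt row = 0; row < nConstraints; row++) {
        /* +1 for the locally owned DOF copy, -1 for its remote duplicate. */
        MatSetValue(*B, row, ownerCol[row],  1.0, INSERT_VALUES);
        MatSetValue(*B, row, otherCol[row], -1.0, INSERT_VALUES);
      }

      /* Entries may target off-process rows; assembly communicates them. */
      MatAssemblyBegin(*B, MAT_FINAL_ASSEMBLY);
      MatAssemblyEnd(*B, MAT_FINAL_ASSEMBLY);
      return 0;
    }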
@@ -1383,18 +1384,57 @@ namespace AMDiS {
    subDomainSolver->solveGlobal(subDomainSolver->getRhsInterior(), tmp_b0);
    MatMult(mat_lagrange, tmp_b0, vec_rhs);

-#if 0
+#if 1
    PetscViewer matview;
    PetscViewerBinaryOpen(mpiComm, "vec_lag.dat", FILE_MODE_WRITE, &matview);
    VecView(vec_rhs, matview);
    PetscViewerBinaryOpen(mpiComm, "mat_lag.dat", FILE_MODE_WRITE, &matview);
    MatView(mat_lagrange, matview);
    PetscViewerDestroy(&matview);

    ParallelDebug::writeDebugFile(vec.getDOFVector(0)->getFeSpace(),
                                  lagrangeMap, "lag", "dat");
-    MPI::Finalize();

    if (meshLevel == 1) {
      MeshLevelData& levelData = meshDistributor->getMeshLevelData();
      DofMap& m = localDofMap[vec.getDOFVector(0)->getFeSpace()].getMap();

      int groupRowsInterior = 0;
      if (levelData.getMpiComm(meshLevel).Get_rank() == 0)
        groupRowsInterior =
          localDofMap[vec.getDOFVector(0)->getFeSpace()].nOverallDofs;

      int rStart, nGlobal;
      mpi::getDofNumbering(mpiComm, groupRowsInterior, rStart, nGlobal);

      int tmp = 0;
      if (levelData.getMpiComm(meshLevel).Get_rank() == 0)
        tmp = rStart;

      int a = 0;
      levelData.getMpiComm(meshLevel).Allreduce(&tmp, &a, 1, MPI_INT, MPI_SUM);
      MSG("RSTART %d\n", a);

      for (DofMap::iterator it = m.begin(); it != m.end(); it++)
        it->second.global += a;
    } else {
      int groupRowsInterior =
        localDofMap[vec.getDOFVector(0)->getFeSpace()].nRankDofs;
      MSG("DA TEST: %d\n", groupRowsInterior);

      int rStart, nGlobal;
      mpi::getDofNumbering(mpiComm, groupRowsInterior, rStart, nGlobal);

      DofMap& m = localDofMap[vec.getDOFVector(0)->getFeSpace()].getMap();
      MSG("RSTART %d\n", rStart);

      for (DofMap::iterator it = m.begin(); it != m.end(); it++)
        it->second.global = it->second.local + rStart;
    }

    ParallelDebug::writeDebugFile(vec.getDOFVector(0)->getFeSpace(),
                                  localDofMap, "interior", "dat");
    MPI::Finalize();
    exit(0);
#endif
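The #if 1 block above reuses a single PetscViewer handle for two PetscViewerBinaryOpen calls before destroying it once, leaving the first viewer unreleased. A sketch of the same dump-to-file pattern with one open/destroy pair per file (function name is a placeholder, not the AMDiS API):

    #include <petscmat.h>

    static PetscErrorCode dumpForInspection(MPI_Comm comm, Vec rhs, Mat B)
    {
      PetscViewer viewer;

      PetscViewerBinaryOpen(comm, "vec_lag.dat", FILE_MODE_WRITE, &viewer);
      VecView(rhs, viewer);
      PetscViewerDestroy(&viewer);

      PetscViewerBinaryOpen(comm, "mat_lag.dat", FILE_MODE_WRITE, &viewer);
      MatView(B, viewer);
      PetscViewerDestroy(&viewer);
      return 0;
    }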
AMDiS/src/parallel/SubDomainSolver.cc
@@ -73,21 +73,12 @@ namespace AMDiS {
    } else {
      multilevel = true;

      MSG("CREATE SUBDOMAIN WITH %d Ranks\n", mpiCommInterior.Get_size());
      MatCreateMPIAIJ(mpiCommInterior,
                      nRowsRankInterior, nRowsRankInterior,
                      nRowsOverallInterior, nRowsOverallInterior,
                      60, PETSC_NULL, 60, PETSC_NULL, &matIntInt);
    }

    MSG("INTERIOR SPACE SIZE: %d %d %d\n",
        nRowsRankInterior, nRowsOverallInterior, nGlobalOverallInterior);
    MSG("COARSE SPACE SIZE: %d %d\n", nRowsRankCoarse, nRowsOverallCoarse);

    MatCreateMPIAIJ(mpiCommCoarseSpace,
                    nRowsRankCoarse, nRowsRankCoarse,
                    nRowsOverallCoarse, nRowsOverallCoarse,
    ...
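The MatCreateMPIAIJ calls above preallocate 60 nonzeros per row in both the diagonal and off-diagonal blocks (the PETSC_NULL arguments mean no per-row counts are supplied). In PETSc 3.3 and later the same constructor is named MatCreateAIJ with an unchanged argument list; a sketch with placeholder sizes:

    #include <petscmat.h>

    static PetscErrorCode createInteriorMatrix(MPI_Comm comm, PetscInt nLocal,
                                               PetscInt nGlobal, Mat *A)
    {
      return MatCreateAIJ(comm, nLocal, nLocal, nGlobal, nGlobal,
                          60, NULL,  /* diagonal block: 60 nz per row */
                          60, NULL,  /* off-diagonal block: 60 nz per row */
                          A);
    }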
@@ -260,13 +251,17 @@ namespace AMDiS {
                          FILE_MODE_WRITE, &matview);
    MatView(matCoarseCoarse, matview);
    PetscViewerDestroy(&matview);

    ParallelDebug::writeDebugFile(feSpaces[0], *coarseSpaceMap,
                                  "coarsespace", "dat");
#endif

#if 0
-    if (MPI::COMM_WORLD.Get_rank() == 0 || MPI::COMM_WORLD.Get_rank() == 1 ||
-        MPI::COMM_WORLD.Get_rank() == 2 || MPI::COMM_WORLD.Get_rank() == 3) {
+    if (MPI::COMM_WORLD.Get_rank() == 4 || MPI::COMM_WORLD.Get_rank() == 5 ||
+        MPI::COMM_WORLD.Get_rank() == 6 || MPI::COMM_WORLD.Get_rank() == 7) {
      PetscViewerBinaryOpen(mpiCommInterior, "mat_interior.dat",
                            FILE_MODE_WRITE, &matview);
      MatView(matIntInt, matview);
@@ -279,7 +274,7 @@ namespace AMDiS {
#endif

#if 0
-    if (MPI::COMM_WORLD.Get_rank() == 0) {
+    if (MPI::COMM_WORLD.Get_rank() == 1) {
      PetscViewerBinaryOpen(mpiCommInterior, "mat_interior.dat",
                            FILE_MODE_WRITE, &matview);
      MatView(matIntInt, matview);
@@ -325,12 +320,6 @@ namespace AMDiS {
          index = interiorMap->getMatIndex(i, index) + rStartInterior;
-          int t = interiorMap->getLocalMatIndex(i, d);
          VecSetValue(rhsInterior, index, *dofIt, INSERT_VALUES);

-          // if (index == 12474) {
-          //   MSG("TEST INDEX %d AT COMPONENT %d with VAL %f\n", d, i, *dofIt);
-          // }
-          // MSG("FILL AT COMP %d DOF %d WITH PETSC INDEX %d/%d to VAL %f\n",
-          //     i, d, index, t, *dofIt);
        }
      }
    }
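The hunk above sets right-hand-side entries through global PETSc indices (getMatIndex plus the rStartInterior offset); with INSERT_VALUES such entries may belong to other ranks and only become usable after the assembly calls shown in the next hunk. The two steps as one self-contained sketch (hypothetical names):

    #include <petscvec.h>

    static PetscErrorCode fillRhs(Vec rhs, PetscInt n,
                                  const PetscInt *globalIndex,
                                  const PetscScalar *values)
    {
      /* Stash values, possibly destined for off-process rows. */
      for (PetscInt i = 0; i < n; i++)
        VecSetValue(rhs, globalIndex[i], values[i], INSERT_VALUES);

      /* No entry is guaranteed visible until assembly completes. */
      VecAssemblyBegin(rhs);
      VecAssemblyEnd(rhs);
      return 0;
    }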
@@ -342,26 +331,6 @@ namespace AMDiS {
    VecAssemblyBegin(rhsInterior);
    VecAssemblyEnd(rhsInterior);

-    /*
-    PetscScalar *localSolB;
-    VecGetArray(rhsInterior, &localSolB);
-    MSG("TEST A: %f\n", localSolB[0]);
-    VecRestoreArray(rhsInterior, &localSolB);
-    VecGetArray(rhsInterior, &localSolB);
-    MSG("TEST B: %f\n", localSolB[0]);
-    VecRestoreArray(rhsInterior, &localSolB);
-    MSG("FILL RHS %p\n", rhsInterior);
-    PetscInt a, b;
-    VecGetOwnershipRange(rhsInterior, &a, &b);
-    MSG("RANGE OF ITERIOR: %d %d\n", a, b);
-    */
-    // MPI::Finalize();
-    // exit(0);

#if 0
    PetscViewer matview;
    PetscViewerBinaryOpen(mpiCommCoarseSpace, "vec_interior.dat",
    ...
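The binary files written throughout this commit (vec_lag.dat, mat_lag.dat, mat_interior.dat, vec_interior.dat) can be loaded back for offline inspection. A sketch for the matrix dumps, assuming a recent PETSc where MatLoad takes the matrix and the viewer:

    #include <petscmat.h>

    static PetscErrorCode loadDumpedMatrix(MPI_Comm comm, const char *path, Mat *B)
    {
      PetscViewer viewer;

      MatCreate(comm, B);
      MatSetType(*B, MATAIJ);
      PetscViewerBinaryOpen(comm, path, FILE_MODE_READ, &viewer);
      MatLoad(*B, viewer);
      PetscViewerDestroy(&viewer);
      return 0;
    }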