Commit 2e22f957
Authored Jan 03, 2012 by Thomas Witkowski
PETSc global block solver should now work correctly.
Parent: ac22232d

Showing 9 changed files with 144 additions and 95 deletions (+144 -95):
AMDiS/src/parallel/MeshDistributor.cc                +6   -6
AMDiS/src/parallel/MeshDistributor.h                 +9   -9
AMDiS/src/parallel/PetscProblemStat.cc               +10  -1
AMDiS/src/parallel/PetscProblemStat.h                +7   -1
AMDiS/src/parallel/PetscSolver.cc                    +31  -0
AMDiS/src/parallel/PetscSolver.h                     +14  -0
AMDiS/src/parallel/PetscSolverGlobalBlockMatrix.cc   +48  -66
AMDiS/src/parallel/PetscSolverGlobalBlockMatrix.h    +10  -3
AMDiS/src/parallel/PetscSolverGlobalMatrix.cc        +9   -9
AMDiS/src/parallel/MeshDistributor.cc

```diff
@@ -78,7 +78,7 @@ namespace AMDiS {
       partitioner(NULL),
       nRankDofs(0),
       nOverallDofs(0),
-      rstart(0),
+      rStartDofs(0),
       deserialized(false),
       writeSerializationFile(false),
       repartitioningAllowed(false),
@@ -1837,13 +1837,13 @@ namespace AMDiS {
     // Get displacment for global rank DOF ordering and global DOF number.
     nRankDofs = rankDofs.size();
-    mpi::getDofNumbering(mpiComm, nRankDofs, rstart, nOverallDofs);
+    mpi::getDofNumbering(mpiComm, nRankDofs, rStartDofs, nOverallDofs);
 
     // Stores for all rank owned DOFs a new global index.
     DofIndexMap rankDofsNewGlobalIndex;
     for (int i = 0; i < nRankDofs; i++)
-      rankDofsNewGlobalIndex[rankDofs[i]] = i + rstart;
+      rankDofsNewGlobalIndex[rankDofs[i]] = i + rStartDofs;
 
     // === Send and receive new DOF indices. ===
@@ -1906,7 +1906,7 @@ namespace AMDiS {
     MSG("------------- Debug information -------------\n");
     MSG("nRankDofs = %d\n", nRankDofs);
     MSG("nOverallDofs = %d\n", nOverallDofs);
-    MSG("rstart %d\n", rstart);
+    MSG("rStartDofs %d\n", rStartDofs);
     stringstream oss;
     oss << debugOutputDir << "elementIndex-" << mpiRank << ".vtu";
@@ -2170,7 +2170,7 @@ namespace AMDiS {
     serialize(out, periodicDof);
     serialize(out, periodicDofAssociations);
-    SerUtil::serialize(out, rstart);
+    SerUtil::serialize(out, rStartDofs);
     SerUtil::serialize(out, macroElementNeighbours);
 
     int nSize = allMacroElements.size();
@@ -2231,7 +2231,7 @@ namespace AMDiS {
     deserialize(in, periodicDof);
     deserialize(in, periodicDofAssociations);
-    SerUtil::deserialize(in, rstart);
+    SerUtil::deserialize(in, rStartDofs);
     SerUtil::deserialize(in, macroElementNeighbours);
 
     int nSize = 0;
```
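The renamed rStartDofs is the displacement of the rank's first owned DOF in the global numbering; mpi::getDofNumbering delivers it together with the global DOF count. A minimal sketch of how such a numbering is typically computed with a prefix sum (assumed semantics only, not the actual AMDiS implementation):

```cpp
// Sketch of a prefix-sum DOF numbering (assumed semantics of
// mpi::getDofNumbering; not AMDiS source).
#include <mpi.h>

void getDofNumbering(MPI_Comm comm, int nRankDofs,
                     int &rStartDofs, int &nOverallDofs)
{
  int rank;
  MPI_Comm_rank(comm, &rank);

  // Exclusive prefix sum over ranks: index of this rank's first owned DOF.
  MPI_Exscan(&nRankDofs, &rStartDofs, 1, MPI_INT, MPI_SUM, comm);
  if (rank == 0)
    rStartDofs = 0;  // MPI_Exscan leaves rank 0's result undefined

  // Global number of DOFs across all ranks.
  MPI_Allreduce(&nRankDofs, &nOverallDofs, 1, MPI_INT, MPI_SUM, comm);
}
```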
AMDiS/src/parallel/MeshDistributor.h

```diff
@@ -136,6 +136,12 @@ namespace AMDiS {
       return nRankDofs;
     }
 
+    /// Returns \ref rStartDofs, the first global DOF index owned by rank.
+    inline int getStartDofs()
+    {
+      return rStartDofs;
+    }
+
     /// Returns \ref nOverallDofs, the global number of DOFs.
     inline int getNumberOverallDofs()
     {
@@ -225,11 +231,6 @@ namespace AMDiS {
       return lastMeshChangeIndex;
     }
 
-    inline int getRstart()
-    {
-      return rstart;
-    }
-
     inline int getMpiRank()
     {
       return mpiRank;
@@ -521,6 +522,9 @@ namespace AMDiS {
     /// Number of DOFs in the rank mesh.
     int nRankDofs;
 
+    /// Is the index of the first global DOF index, which is owned by the rank.
+    int rStartDofs;
+
     /// Number of DOFs in the whole domain.
     int nOverallDofs;
@@ -604,10 +608,6 @@ namespace AMDiS {
     /// repartitioned.
     vector<DOFVector<double>*> interchangeVectors;
 
-    /// Is the index of the first row of the linear system, which is owned by
-    /// the rank.
-    int rstart;
-
     /** \brief
      * If the problem definition has been read from a serialization file, this
      * variable is true, otherwise it is false. This variable is used to stop the
```
AMDiS/src/parallel/PetscProblemStat.cc

```diff
@@ -25,7 +25,8 @@ namespace AMDiS {
   PetscProblemStat::PetscProblemStat(string nameStr,
                                      ProblemIterationInterface *problemIteration)
-    : ParallelProblemStatBase(nameStr, problemIteration)
+    : ParallelProblemStatBase(nameStr, problemIteration),
+      deleteSolver(true)
   {
     FUNCNAME("PetscProblemStat::PetscProblemStat()");
@@ -54,6 +55,14 @@ namespace AMDiS {
   }
 
+  PetscProblemStat::PetscProblemStat(string nameStr, PetscSolver *solver)
+    : ParallelProblemStatBase(nameStr, NULL),
+      petscSolver(solver),
+      deleteSolver(false)
+  {}
+
   void PetscProblemStat::initialize(Flag initFlag,
                                     ProblemStatSeq *adoptProblem,
                                     Flag adoptFlag)
```
AMDiS/src/parallel/PetscProblemStat.h

```diff
@@ -40,9 +40,13 @@ namespace AMDiS {
     PetscProblemStat(std::string nameStr,
                      ProblemIterationInterface *problemIteration = NULL);
 
+    PetscProblemStat(std::string nameStr, PetscSolver *solver);
+
     ~PetscProblemStat()
     {
-      delete petscSolver;
+      if (deleteSolver)
+        delete petscSolver;
     }
 
     void initialize(Flag initFlag,
@@ -55,6 +59,8 @@ namespace AMDiS {
   protected:
     PetscSolver *petscSolver;
+
+    bool deleteSolver;
   };
 
   typedef PetscProblemStat ParallelProblemStat;
```
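The deleteSolver flag makes solver ownership explicit: the original constructor allocates and frees its own PetscSolver, while the new one borrows a caller-supplied instance. A hypothetical usage sketch (the subclass default constructor and the INIT_ALL flag are assumptions, not shown in this commit):

```cpp
// Hypothetical usage sketch; surrounding AMDiS setup and the
// adaption loop are omitted.
#include "AMDiS.h"

using namespace AMDiS;

void setupProblem()
{
  // The caller owns the solver: this path sets deleteSolver == false,
  // so ~PetscProblemStat() leaves blockSolver alone.
  PetscSolverGlobalBlockMatrix blockSolver;  // default ctor assumed
  PetscProblemStat prob("myProblem", &blockSolver);
  prob.initialize(INIT_ALL);
  // blockSolver must outlive prob.
}
```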
AMDiS/src/parallel/PetscSolver.cc

```diff
@@ -40,4 +40,35 @@ namespace AMDiS {
     }
   }
 
+
+  void PetscSolver::copyVec(Vec& originVec, Vec& destVec,
+                            vector<int>& originIndex, vector<int>& destIndex)
+  {
+    FUNCNAME("PetscSolver::copyVec()");
+
+    IS originIs, destIs;
+    ISCreateGeneral(PETSC_COMM_WORLD,
+                    originIndex.size(),
+                    &(originIndex[0]),
+                    PETSC_USE_POINTER,
+                    &originIs);
+
+    ISCreateGeneral(PETSC_COMM_WORLD,
+                    destIndex.size(),
+                    &(destIndex[0]),
+                    PETSC_USE_POINTER,
+                    &destIs);
+
+    VecScatter scatter;
+    VecScatterCreate(originVec, originIs, destVec, destIs, &scatter);
+    VecScatterBegin(scatter, originVec, destVec, INSERT_VALUES, SCATTER_FORWARD);
+    VecScatterEnd(scatter, originVec, destVec, INSERT_VALUES, SCATTER_FORWARD);
+
+    ISDestroy(&originIs);
+    ISDestroy(&destIs);
+    VecScatterDestroy(&scatter);
+  }
+
 }
```
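copyVec() is the standard PETSc index-set scatter pattern. Below is a self-contained sketch of the same pattern outside AMDiS; the vector sizes and index values are invented for illustration:

```cpp
// Self-contained sketch of the index-set scatter used by copyVec().
#include <petscvec.h>
#include <petscis.h>
#include <vector>

int main(int argc, char **argv)
{
  PetscInitialize(&argc, &argv, NULL, NULL);

  Vec origin, dest;
  VecCreateMPI(PETSC_COMM_WORLD, PETSC_DECIDE, 8, &origin);
  VecCreateMPI(PETSC_COMM_WORLD, PETSC_DECIDE, 8, &dest);
  VecSet(origin, 1.0);
  VecSet(dest, 0.0);

  int rank;
  MPI_Comm_rank(PETSC_COMM_WORLD, &rank);

  // Copy origin[0..3] to dest[4..7]; the indices are listed on rank 0
  // only, the other ranks contribute empty index sets.
  std::vector<PetscInt> originIndex, destIndex;
  if (rank == 0)
    for (PetscInt k = 0; k < 4; k++) {
      originIndex.push_back(k);
      destIndex.push_back(k + 4);
    }

  IS originIs, destIs;
  ISCreateGeneral(PETSC_COMM_WORLD, originIndex.size(), originIndex.data(),
                  PETSC_USE_POINTER, &originIs);
  ISCreateGeneral(PETSC_COMM_WORLD, destIndex.size(), destIndex.data(),
                  PETSC_USE_POINTER, &destIs);

  VecScatter scatter;
  VecScatterCreate(origin, originIs, dest, destIs, &scatter);
  VecScatterBegin(scatter, origin, dest, INSERT_VALUES, SCATTER_FORWARD);
  VecScatterEnd(scatter, origin, dest, INSERT_VALUES, SCATTER_FORWARD);

  ISDestroy(&originIs);
  ISDestroy(&destIs);
  VecScatterDestroy(&scatter);
  VecDestroy(&origin);
  VecDestroy(&dest);
  PetscFinalize();
  return 0;
}
```

Note that PETSC_USE_POINTER avoids copying the index arrays, so they must stay alive until the index sets are destroyed, exactly as in copyVec().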
AMDiS/src/parallel/PetscSolver.h

```diff
@@ -90,6 +90,20 @@ namespace AMDiS {
                   bool iterationCounter = true,
                   bool residual = true);
 
+    /** \brief
+     * Copies between to PETSc vectors by using different index sets for the
+     * origin and the destination vectors.
+     *
+     * \param[in]  originVec    The PETSc vector from which we copy from.
+     * \param[out] destVec      The PETSc vector we copy too.
+     * \param[in]  originIndex  Set of global indices referring to the
+     *                          origin vector.
+     * \param[in]  destIndex    Set of global indices referring to the
+     *                          destination vector.
+     */
+    void copyVec(Vec& originVec, Vec& destVec,
+                 vector<int>& originIndex, vector<int>& destIndex);
+
   protected:
     MeshDistributor *meshDistributor;
```
AMDiS/src/parallel/PetscSolverGlobalBlockMatrix.cc

```diff
@@ -25,22 +25,14 @@ namespace AMDiS {
     double wtime = MPI::Wtime();
 
     nComponents = mat->getNumRows();
-    int nRankRows = meshDistributor->getNumberRankDofs() * nComponents;
-    int nOverallRows = meshDistributor->getNumberOverallDofs() * nComponents;
-
-    // === Create PETSc vector (solution and a temporary vector). ===
-
-    VecCreateMPI(PETSC_COMM_WORLD, nRankRows, nOverallRows, &petscSolVec);
-    VecCreateMPI(PETSC_COMM_WORLD, nRankRows, nOverallRows, &petscTmpVec);
+    int nRankRows = meshDistributor->getNumberRankDofs();
+    int nOverallRows = meshDistributor->getNumberOverallDofs();
 
 #if (DEBUG != 0)
     MSG("Fill petsc matrix 1 needed %.5f seconds\n", MPI::Wtime() - wtime);
 #endif
 
-    nestMat = new Mat*[nComponents];
-    for (int i = 0; i < nComponents; i++)
-      nestMat[i] = new Mat[nComponents];
+    nestMat.resize(nComponents * nComponents);
 
     // === Transfer values from DOF matrices to the PETSc matrix. ===
```
```diff
@@ -51,16 +43,17 @@ namespace AMDiS {
                        nRankRows, nRankRows,
                        nOverallRows, nOverallRows,
                        30, PETSC_NULL, 30, PETSC_NULL,
-                       &(nestMat[i][j]));
-        setDofMatrix(nestMat[i][j], (*mat)[i][j]);
+                       &(nestMat[i * nComponents + j]));
+        setDofMatrix(nestMat[i * nComponents + j], (*mat)[i][j]);
+        MatAssemblyBegin(nestMat[i * nComponents + j], MAT_FINAL_ASSEMBLY);
+        MatAssemblyEnd(nestMat[i * nComponents + j], MAT_FINAL_ASSEMBLY);
       } else {
-        nestMat[i][j] = PETSC_NULL;
+        nestMat[i * nComponents + j] = PETSC_NULL;
       }
 
     MatCreateNest(PETSC_COMM_WORLD,
                   nComponents, PETSC_NULL, nComponents, PETSC_NULL,
-                  &(nestMat[0][0]), &petscMatrix);
+                  &(nestMat[0]), &petscMatrix);
 
 #if (DEBUG != 0)
     MSG("Fill petsc matrix 2 needed %.5f seconds\n", MPI::Wtime() - wtime);
```
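MatCreateNest takes the sub-matrix handles as one flat, row-major array, which is why nestMat becomes a vector<Mat> indexed by i * nComponents + j. A minimal sketch of the call pattern (sequential PETSc; block sizes and values invented):

```cpp
// Sketch: assemble the diagonal blocks of a 2x2 MatNest; null entries
// in the flat array are treated as zero blocks.
#include <petscmat.h>
#include <vector>

int main(int argc, char **argv)
{
  PetscInitialize(&argc, &argv, NULL, NULL);

  const PetscInt n = 4;           // rows/cols per block (invented)
  const PetscInt nComponents = 2;
  std::vector<Mat> nestMat(nComponents * nComponents);  // null = zero block

  for (PetscInt i = 0; i < nComponents; i++) {
    Mat &block = nestMat[i * nComponents + i];  // row-major flattening
    MatCreateSeqAIJ(PETSC_COMM_SELF, n, n, 1, NULL, &block);
    for (PetscInt row = 0; row < n; row++) {
      PetscScalar v = 1.0 + i;
      MatSetValues(block, 1, &row, 1, &row, &v, INSERT_VALUES);
    }
    // Each block must be fully assembled before MatCreateNest, which is
    // the point of the MatAssemblyBegin/End calls added by this commit.
    MatAssemblyBegin(block, MAT_FINAL_ASSEMBLY);
    MatAssemblyEnd(block, MAT_FINAL_ASSEMBLY);
  }

  Mat nested;
  MatCreateNest(PETSC_COMM_SELF, nComponents, NULL, nComponents, NULL,
                &nestMat[0], &nested);

  MatDestroy(&nested);
  for (size_t k = 0; k < nestMat.size(); k++)
    if (nestMat[k])
      MatDestroy(&nestMat[k]);
  PetscFinalize();
  return 0;
}
```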
```diff
@@ -71,12 +64,11 @@ namespace AMDiS {
     // === Init PETSc solver. ===
 
     KSPCreate(PETSC_COMM_WORLD, &solver);
-    KSPGetPC(solver, &pc);
     KSPSetOperators(solver, petscMatrix, petscMatrix, SAME_NONZERO_PATTERN);
     KSPSetTolerances(solver, 0.0, 1e-8, PETSC_DEFAULT, PETSC_DEFAULT);
     KSPSetType(solver, KSPBCGS);
     KSPSetFromOptions(solver);
-    PCSetFromOptions(pc);
+
+    KSPGetPC(solver, &pc);
+    setBlockPreconditioner(pc);
 
     MSG("Fill petsc matrix needed %.5f seconds\n", MPI::Wtime() - wtime);
   }
```
```diff
@@ -89,15 +81,22 @@ namespace AMDiS {
     TEST_EXIT_DBG(vec)("NO DOF vector defined!\n");
 
     nComponents = vec->getSize();
-    int nRankRows = meshDistributor->getNumberRankDofs() * nComponents;
-    int nOverallRows = meshDistributor->getNumberOverallDofs() * nComponents;
+    int nRankRows = meshDistributor->getNumberRankDofs();
+    int nOverallRows = meshDistributor->getNumberOverallDofs();
 
-    VecCreateMPI(PETSC_COMM_WORLD, nRankRows, nOverallRows, &petscRhsVec);
+    nestVec.resize(nComponents);
 
     // === Transfer values from DOF vector to the PETSc vector. ===
-    for (int i = 0; i < nComponents; i++)
-      setDofVector(petscRhsVec, vec->getDOFVector(i), nComponents, i);
+    for (int i = 0; i < nComponents; i++) {
+      VecCreateMPI(PETSC_COMM_WORLD, nRankRows, nOverallRows, &(nestVec[i]));
+
+      setDofVector(nestVec[i], vec->getDOFVector(i));
+
+      VecAssemblyBegin(nestVec[i]);
+      VecAssemblyEnd(nestVec[i]);
+    }
+
+    VecCreateNest(PETSC_COMM_WORLD, nComponents, PETSC_NULL,
+                  &(nestVec[0]), &petscRhsVec);
 
     VecAssemblyBegin(petscRhsVec);
     VecAssemblyEnd(petscRhsVec);
```
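The right-hand side is now assembled as a VecNest over per-component MPI vectors. A short sketch of that call pattern (sizes invented; setDofVector replaced by a stand-in):

```cpp
// Sketch: build a nested vector from nComponents sub-vectors.
#include <petscvec.h>
#include <vector>

void buildRhs(Vec &rhs, PetscInt nOverallRows, int nComponents)
{
  std::vector<Vec> nestVec(nComponents);
  for (int i = 0; i < nComponents; i++) {
    VecCreateMPI(PETSC_COMM_WORLD, PETSC_DECIDE, nOverallRows, &nestVec[i]);
    VecSet(nestVec[i], (PetscScalar)i);  // stand-in for setDofVector()
    VecAssemblyBegin(nestVec[i]);
    VecAssemblyEnd(nestVec[i]);
  }
  // NULL index sets let PETSc derive the nest layout automatically.
  VecCreateNest(PETSC_COMM_WORLD, nComponents, NULL, &nestVec[0], &rhs);
}
```

VecCreateNest keeps references to the sub-vectors rather than copying them, which is why solvePetscMatrix() below destroys both petscRhsVec and every nestVec[i].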
```diff
@@ -110,21 +109,24 @@ namespace AMDiS {
     FUNCNAME("PetscSolverGlobalBlockMatrix::solvePetscMatrix()");
 
     // PETSc.
-    KSPSolve(solver, petscRhsVec, petscSolVec);
+    KSPSolve(solver, petscRhsVec, petscRhsVec);
 
     // === Transfere values from PETSc's solution vectors to the DOF vectors. ===
-    int nRankDofs = meshDistributor->getNumberRankDofs();
-    PetscScalar *vecPointer;
-    VecGetArray(petscSolVec, &vecPointer);
-
     for (int i = 0; i < nComponents; i++) {
       DOFVector<double> &dofvec = *(vec.getDOFVector(i));
-      for (int j = 0; j < nRankDofs; j++)
-        dofvec[meshDistributor->mapLocalToDofIndex(j)] = vecPointer[i * nComponents + j];
-    }
+
+      Vec tmp;
+      VecNestGetSubVec(petscRhsVec, i, &tmp);
+
+      int nRankDofs = meshDistributor->getNumberRankDofs();
+      PetscScalar *vecPointer;
+      VecGetArray(tmp, &vecPointer);
 
-    VecRestoreArray(petscSolVec, &vecPointer);
+      for (int j = 0; j < nRankDofs; j++)
+        dofvec[meshDistributor->mapLocalToDofIndex(j)] = vecPointer[j];
+
+      VecRestoreArray(tmp, &vecPointer);
+    }
 
     // === Synchronize DOFs at common DOFs, i.e., DOFs that correspond to ===
```
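Each solution component is then read back through VecNestGetSubVec plus a raw array pointer. A sketch of that read-back step in isolation (real-valued PETSc build assumed):

```cpp
// Sketch: copy one component of a nested vector into a plain array.
// Assumes a real-valued PETSc build (PetscScalar == double).
#include <petscvec.h>

void readComponent(Vec nest, int component, int nRankDofs, double *out)
{
  Vec sub;
  VecNestGetSubVec(nest, component, &sub);  // borrowed reference, do not destroy

  PetscScalar *vecPointer;
  VecGetArray(sub, &vecPointer);
  for (int j = 0; j < nRankDofs; j++)
    out[j] = vecPointer[j];                 // j-th locally owned entry
  VecRestoreArray(sub, &vecPointer);
}
```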
```diff
@@ -132,12 +134,10 @@ namespace AMDiS {
     meshDistributor->synchVector(vec);
 
     // Print iteration counter and residual norm of the solution.
     printSolutionInfo(adaptInfo);
 
     // === Destroy PETSc's variables. ===
 
     VecDestroy(&petscRhsVec);
+    for (int i = 0; i < nComponents; i++)
+      VecDestroy(&(nestVec[i]));
   }
```
```diff
@@ -145,20 +145,12 @@ namespace AMDiS {
   {
     FUNCNAME("PetscSolverGlobalBlockMatrix::destroyMatrixData()");
 
+    for (unsigned int i = 0; i < nestMat.size(); i++)
+      if (nestMat[i] != PETSC_NULL)
+        MatDestroy(&(nestMat[i]));
+
     MatDestroy(&petscMatrix);
     KSPDestroy(&solver);
-    VecDestroy(&petscSolVec);
-    VecDestroy(&petscTmpVec);
-
-    for (int i = 0; i < nComponents; i++) {
-      for (int j = 0; j < nComponents; j++) {
-        if (nestMat[i][j] != PETSC_NULL)
-          MatDestroy(&(nestMat[i][j]));
-      }
-      delete [] nestMat[i];
-    }
-    delete [] nestMat;
   }
```
```diff
@@ -209,32 +201,22 @@ namespace AMDiS {
         cols.push_back(colIndex);
         values.push_back(value(*icursor));
       }
 
-      MatSetValues(petscMatrix, 1, &rowIndex, cols.size(),
+      MatSetValues(petscMat, 1, &rowIndex, cols.size(),
                    &(cols[0]), &(values[0]), ADD_VALUES);
     }
   }
 
 
   void PetscSolverGlobalBlockMatrix::setDofVector(Vec &petscVec,
-                                                  DOFVector<double>* vec,
-                                                  int nComponents,
-                                                  int row,
-                                                  bool rankOnly)
+                                                  DOFVector<double>* vec)
   {
     FUNCNAME("PetscSolverGlobalBlockMatrix::setDofVector()");
 
     // Traverse all used DOFs in the dof vector.
     DOFVector<double>::Iterator dofIt(vec, USED_DOFS);
     for (dofIt.reset(); !dofIt.end(); ++dofIt) {
-      if (rankOnly && !meshDistributor->getIsRankDof(dofIt.getDOFIndex()))
-        continue;
-
-      // Calculate global row index of the DOF.
-      DegreeOfFreedom globalRowDof =
-        meshDistributor->mapLocalToGlobal(dofIt.getDOFIndex());
-      // Calculate PETSc index of the row DOF.
-      int index = nComponents * row + globalRowDof;
+      int index = meshDistributor->mapLocalToGlobal(dofIt.getDOFIndex());
 
       double value = *dofIt;
       VecSetValues(petscVec, 1, &index, &value, ADD_VALUES);
```
AMDiS/src/parallel/PetscSolverGlobalBlockMatrix.h

```diff
@@ -52,11 +52,18 @@ namespace AMDiS {
     void setDofMatrix(Mat& petscMat, DOFMatrix* mat);
 
     /// Takes a DOF vector and sends its values to a given PETSc vector.
-    void setDofVector(Vec& petscVec, DOFVector<double>* vec,
-                      int nComponents, int row, bool rankOnly = false);
+    void setDofVector(Vec& petscVec, DOFVector<double>* vec);
+
+    virtual void setBlockPreconditioner(PC &pc)
+    {
+      PCSetFromOptions(pc);
+    }
 
   protected:
-    Mat **nestMat;
+    vector<Mat> nestMat;
+
+    vector<Vec> nestVec;
 
     int nComponents;
   };
```
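The new virtual setBlockPreconditioner() hook defaults to PCSetFromOptions, leaving room for subclasses to install a block-aware preconditioner. A hypothetical override (PCFIELDSPLIT is a natural fit for a MatNest system, but nothing in this commit sets it up):

```cpp
// Hypothetical subclass of the class in this commit; not AMDiS code.
class MyBlockSolver : public PetscSolverGlobalBlockMatrix
{
protected:
  virtual void setBlockPreconditioner(PC &pc)
  {
    PCSetType(pc, PCFIELDSPLIT);  // one split per solution component
    PCSetFromOptions(pc);         // keep -pc_fieldsplit_* tuning available
  }
};
```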
AMDiS/src/parallel/PetscSolverGlobalMatrix.cc

```diff
@@ -66,9 +66,9 @@ namespace AMDiS {
 #if (DEBUG != 0)
     int a, b;
     MatGetOwnershipRange(petscMatrix, &a, &b);
-    TEST_EXIT(a == meshDistributor->getRstart() * nComponents)
+    TEST_EXIT(a == meshDistributor->getStartDofs() * nComponents)
       ("Wrong matrix ownership range!\n");
-    TEST_EXIT(b == meshDistributor->getRstart() * nComponents + nRankRows)
+    TEST_EXIT(b == meshDistributor->getStartDofs() * nComponents + nRankRows)
       ("Wrong matrix ownership range!\n");
 #endif
@@ -552,7 +552,7 @@ namespace AMDiS {
     // This is the local row index of the local PETSc matrix.
     int localPetscRowIdx =
-      petscRowIdx - meshDistributor->getRstart() * nComponents;
+      petscRowIdx - meshDistributor->getStartDofs() * nComponents;
 
     TEST_EXIT_DBG(localPetscRowIdx >= 0 && localPetscRowIdx < nRankRows)
       ("Should not happen!\n"
        "Debug info: localRowIdx = %d globalRowIndx = %d petscRowIdx = %d localPetscRowIdx = %d rStart = %d nCompontens = %d nRankRows = %d\n",
@@ -560,7 +560,7 @@ namespace AMDiS {
        meshDistributor->mapLocalToGlobal(*cursor),
        petscRowIdx,
        localPetscRowIdx,
-       meshDistributor->getRstart(),
+       meshDistributor->getStartDofs(),
        nComponents,
        nRankRows);
@@ -574,8 +574,8 @@ namespace AMDiS {
       if (value(*icursor) != 0.0 || petscRowIdx == petscColIdx) {
         // The row DOF is a rank DOF, if also the column is a rank DOF,
         // increment the d_nnz values for this row, otherwise the o_nnz value.
-        if (petscColIdx >= meshDistributor->getRstart() * nComponents &&
-            petscColIdx < meshDistributor->getRstart() * nComponents + nRankRows)
+        if (petscColIdx >= meshDistributor->getStartDofs() * nComponents &&
+            petscColIdx < meshDistributor->getStartDofs() * nComponents + nRankRows)
           d_nnz[localPetscRowIdx]++;
         else
           o_nnz[localPetscRowIdx]++;
@@ -626,14 +626,14 @@ namespace AMDiS {
         int r = it->second[i].first;
         int c = it->second[i].second;
 
-        int localRowIdx = r - meshDistributor->getRstart() * nComponents;
+        int localRowIdx = r - meshDistributor->getStartDofs() * nComponents;
 
         TEST_EXIT_DBG(localRowIdx >= 0 && localRowIdx < nRankRows)
           ("Got row index %d/%d (nRankRows = %d) from rank %d. Should not happen!\n",
            r, localRowIdx, nRankRows, it->first);
 
-        if (c < meshDistributor->getRstart() * nComponents ||
-            c >= meshDistributor->getRstart() * nComponents + nRankRows)
+        if (c < meshDistributor->getStartDofs() * nComponents ||
+            c >= meshDistributor->getStartDofs() * nComponents + nRankRows)
           o_nnz[localRowIdx]++;
         else
           d_nnz[localRowIdx]++;
```
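The d_nnz/o_nnz arrays computed here drive PETSc's MPIAIJ preallocation: for each locally owned row, a column inside the rank's ownership range counts toward the diagonal block, everything else toward the off-diagonal block. A sketch of how such counts are handed over to PETSc (the per-row values are invented):

```cpp
// Sketch: per-row preallocation of an MPIAIJ matrix from d_nnz/o_nnz
// counts like the ones computed above (values invented here).
#include <petscmat.h>
#include <vector>

void preallocate(Mat &mat, PetscInt nRankRows, PetscInt nOverallRows)
{
  std::vector<PetscInt> d_nnz(nRankRows, 5);  // nonzeros in the local block
  std::vector<PetscInt> o_nnz(nRankRows, 2);  // nonzeros coupling to other ranks

  MatCreate(PETSC_COMM_WORLD, &mat);
  MatSetSizes(mat, nRankRows, nRankRows, nOverallRows, nOverallRows);
  MatSetType(mat, MATMPIAIJ);
  MatMPIAIJSetPreallocation(mat, 0, &d_nnz[0], 0, &o_nnz[0]);
}
```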