amdis (Aland, Sebastian) · Commits · 2eaf89db

Commit 2eaf89db, authored Oct 10, 2012 by Thomas Witkowski
Parent: fc3bdbaa

    Added PETSc helper namespace, simplifies explicit Schur primal computations in FETI-DP code.
Showing 4 changed files with 195 additions and 54 deletions.
AMDiS/CMakeLists.txt                   +1   -0
AMDiS/src/parallel/PetscHelper.cc      +75  -0
AMDiS/src/parallel/PetscHelper.h       +76  -0
AMDiS/src/parallel/PetscSolverFeti.cc  +43  -54
AMDiS/CMakeLists.txt

@@ -257,6 +257,7 @@ if(ENABLE_PARALLEL_DOMAIN)
   list(APPEND PARALLEL_DOMAIN_AMDIS_SRC
     ${SOURCE_DIR}/parallel/BddcMlSolver.cc
     ${SOURCE_DIR}/parallel/ParallelCoarseSpaceMatVec.cc
+    ${SOURCE_DIR}/parallel/PetscHelper.cc
     ${SOURCE_DIR}/parallel/PetscMultigridPrecon.cc
     ${SOURCE_DIR}/parallel/PetscSolver.cc
     ${SOURCE_DIR}/parallel/PetscProblemStat.cc
AMDiS/src/parallel/PetscHelper.cc (new file, mode 100644)
//
// Software License for AMDiS
//
// Copyright (c) 2010 Dresden University of Technology
// All rights reserved.
// Authors: Simon Vey, Thomas Witkowski et al.
//
// This file is part of AMDiS
//
// See also license.opensource.txt in the distribution.

#include "parallel/PetscHelper.h"

namespace AMDiS {

  namespace petsc_helper {

    using namespace std;

    void getMatLocalColumn(Mat mat, PetscMatCol &matCol)
    {
      PetscInt firstIndex, lastIndex;
      MatGetOwnershipRange(mat, &firstIndex, &lastIndex);

      PetscInt nCols;
      const PetscInt *cols;
      const PetscScalar *values;

      for (int row = firstIndex; row < lastIndex; row++) {
        MatGetRow(mat, row, &nCols, &cols, &values);

        for (int i = 0; i < nCols; i++) {
          if (values[i] != 0.0) {
            matCol[cols[i]].first.push_back(row - firstIndex);
            matCol[cols[i]].second.push_back(values[i]);
          }
        }

        MatRestoreRow(mat, row, &nCols, &cols, &values);
      }
    }

    void setMatLocalColumn(Mat mat, int column, Vec vec)
    {
      PetscInt firstIndex;
      MatGetOwnershipRange(mat, &firstIndex, PETSC_NULL);

      PetscInt vecSize;
      VecGetLocalSize(vec, &vecSize);

      PetscScalar *tmpValues;
      VecGetArray(vec, &tmpValues);

      for (int i = 0; i < vecSize; i++)
        MatSetValue(mat, firstIndex + i, column, tmpValues[i], ADD_VALUES);

      VecRestoreArray(vec, &tmpValues);
    }

    void getColumnVec(const SparseCol &matCol, Vec vec)
    {
      VecSet(vec, 0.0);
      VecSetValues(vec, matCol.first.size(),
                   &(matCol.first[0]), &(matCol.second[0]), INSERT_VALUES);
      VecAssemblyBegin(vec);
      VecAssemblyEnd(vec);
    }

  }

}
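The three helpers are meant to be composed: extract the rank-local columns of a distributed matrix, apply some operation to each column vector, and write the results back column by column. The following is a minimal usage sketch, not part of the commit itself; matA, matB, and nLocalRows are illustrative placeholders. The PetscSolverFeti.cc hunk further down applies exactly this pattern, with subdomain->solve() as the per-column operation.

#include <petsc.h>
#include "parallel/PetscHelper.h"

using namespace AMDiS;

// Sketch: transform matA column by column into matB.
void transformByColumns(Mat matA, Mat matB, int nLocalRows)
{
  // Sparse column view of the rank-local part of matA.
  petsc_helper::PetscMatCol cols;
  petsc_helper::getMatLocalColumn(matA, cols);

  // One sequential work vector, reused for every column.
  Vec tmpVec;
  VecCreateSeq(PETSC_COMM_SELF, nLocalRows, &tmpVec);

  for (petsc_helper::PetscMatCol::iterator it = cols.begin();
       it != cols.end(); ++it) {
    // Densify column it->first into tmpVec ...
    petsc_helper::getColumnVec(it->second, tmpVec);
    // ... apply a solver or any other in-place operation to tmpVec here ...
    // ... and store the result as column it->first of matB.
    petsc_helper::setMatLocalColumn(matB, it->first, tmpVec);
  }

  VecDestroy(&tmpVec);

  // setMatLocalColumn() only calls MatSetValue(), so the caller still has
  // to assemble the target matrix.
  MatAssemblyBegin(matB, MAT_FINAL_ASSEMBLY);
  MatAssemblyEnd(matB, MAT_FINAL_ASSEMBLY);
}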
AMDiS/src/parallel/PetscHelper.h (new file, mode 100644)
// ============================================================================
// ==                                                                        ==
// ==  AMDiS - Adaptive multidimensional simulations                         ==
// ==                                                                        ==
// ==  http://www.amdis-fem.org                                              ==
// ==                                                                        ==
// ============================================================================
//
// Software License for AMDiS
//
// Copyright (c) 2010 Dresden University of Technology
// All rights reserved.
// Authors: Simon Vey, Thomas Witkowski et al.
//
// This file is part of AMDiS
//
// See also license.opensource.txt in the distribution.

/** \file PetscHelper.h */

#ifndef AMDIS_PETSCHELPER_H
#define AMDIS_PETSCHELPER_H

#include <mpi.h>
#include <map>
#include <vector>
#include <petsc.h>

namespace AMDiS {

  using namespace std;

  /** \brief
   * In this namespace, we collect several auxiliary functions for using
   * PETSc in AMDiS. Many of these functions may be replaced by new PETSc
   * functions in upcoming versions.
   */
  namespace petsc_helper {

    /// Defines a PETSc matrix column wise.
    typedef pair<vector<int>, vector<double> > SparseCol;
    typedef map<int, SparseCol> PetscMatCol;

    /** \brief
     * Returns, for a distributed matrix, on each rank the local matrix in a
     * sparse column format.
     *
     * \param[in]   mat     PETSc distributed matrix.
     * \param[out]  matCol  The sparse column representation of the local matrix.
     */
    void getMatLocalColumn(Mat mat, PetscMatCol &matCol);

    /** \brief
     * Set a local column vector in a distributed matrix.
     *
     * \param[out]  mat     Distributed matrix.
     * \param[in]   column  Column index.
     * \param[in]   vec     Column vector.
     */
    void setMatLocalColumn(Mat mat, int column, Vec vec);

    /** \brief
     * Create a local PETSc vector representing one column of a matrix
     * stored in \ref PetscMatCol format.
     *
     * \param[in]   matCol  Sparse column representation of the column.
     * \param[out]  vec     Vector representing one column of the matrix.
     */
    void getColumnVec(const SparseCol &matCol, Vec vec);

  }

}

#endif
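To make the typedefs concrete: a SparseCol stores the nonzero entries of one column as two parallel arrays (local row indices and values), and a PetscMatCol keys these columns by global column index. A small illustrative example with invented numbers, written in the same C++03 style as the helpers:

// Illustrative only: the rank-local 3x2 block
//   [  1.5  0.0 ]
//   [  0.0  0.0 ]
//   [ -2.0  4.0 ]
// would be stored column-wise as:
petsc_helper::PetscMatCol matCol;
matCol[0].first.push_back(0);  matCol[0].second.push_back(1.5);   // row 0 of column 0
matCol[0].first.push_back(2);  matCol[0].second.push_back(-2.0);  // row 2 of column 0
matCol[1].first.push_back(2);  matCol[1].second.push_back(4.0);   // row 2 of column 1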
AMDiS/src/parallel/PetscSolverFeti.cc
@@ -12,6 +12,7 @@
 #include "AMDiS.h"
 #include "MatrixVector.h"
+#include "parallel/PetscHelper.h"
 #include "parallel/PetscSolverFeti.h"
 #include "parallel/PetscSolverFetiDebug.h"
 #include "parallel/PetscSolverFetiMonitor.h"
@@ -745,6 +746,9 @@ namespace AMDiS {
       TEST_EXIT_DBG(isPrimal(feSpaces[i], **it) == false)
         ("Should not be primal!\n");
 
+      if (dirichletRows[feSpaces[i]].count(**it))
+        continue;
+
       int col = lagrangeMap.getMatIndex(i, **it);
       double value = 1.0;
       MatSetValue(mat_augmented_lagrange, rowCounter, col, value, INSERT_VALUES);
@@ -818,86 +822,68 @@ namespace AMDiS {
     } else {
       MSG("Create direct schur primal solver!\n");
 
-      TEST_EXIT(!augmentedLagrange)("Not yet supported!\n");
-
       double wtime = MPI::Wtime();
 
+      TEST_EXIT(!augmentedLagrange)("Not yet supported!\n");
+
       TEST_EXIT_DBG(meshLevel == 0)
         ("Does not support for multilevel, check usage of localDofMap.\n");
 
+      // === First, calculate matK = inv(A_BB) A_BPi:                      ===
+      // ===  - get all local column vectors from A_BPi                    ===
+      // ===  - solve with A_BB for this column vector as the rhs vector   ===
+      // ===  - set the result to the corresponding column vector of       ===
+      // ===    matrix matK                                                ===
+
       int nRowsRankPrimal = primalDofMap.getRankDofs();
       int nRowsOverallPrimal = primalDofMap.getOverallDofs();
       int nRowsRankB = localDofMap.getRankDofs();
 
-      Mat matBPi;
-      MatCreateAIJ(mpiCommGlobal,
-                   nRowsRankB, nRowsRankPrimal,
-                   nGlobalOverallInterior, nRowsOverallPrimal,
-                   150, PETSC_NULL, 150, PETSC_NULL, &matBPi);
-      MatSetOption(matBPi, MAT_NEW_NONZERO_ALLOCATION_ERR, PETSC_FALSE);
-
-      PetscInt nCols;
-      const PetscInt *cols;
-      const PetscScalar *values;
-
-      map<int, vector<pair<int, double> > > mat_b_primal_cols;
-
-      for (int i = 0; i < nRowsRankB; i++) {
-        PetscInt row = localDofMap.getStartDofs() + i;
-        MatGetRow(subdomain->getMatInteriorCoarse(), row, &nCols, &cols, &values);
-
-        for (int j = 0; j < nCols; j++)
-          if (values[j] != 0.0)
-            mat_b_primal_cols[cols[j]].push_back(make_pair(i, values[j]));
-
-        MatRestoreRow(subdomain->getMatInteriorCoarse(), row, &nCols, &cols, &values);
-      }
+      // Transform matrix A_BPi into a sparse column format.
+      petsc_helper::PetscMatCol mat_b_primal_cols;
+      petsc_helper::getMatLocalColumn(subdomain->getMatInteriorCoarse(),
+                                      mat_b_primal_cols);
 
       TEST_EXIT(static_cast<int>(mat_b_primal_cols.size()) ==
                 primalDofMap.getLocalDofs())
         ("Should not happen!\n");
 
-      for (map<int, vector<pair<int, double> > >::iterator it = mat_b_primal_cols.begin();
-           it != mat_b_primal_cols.end(); ++it) {
-        Vec tmpVec;
-        VecCreateSeq(PETSC_COMM_SELF, nRowsRankB, &tmpVec);
-
-        for (unsigned int i = 0; i < it->second.size(); i++)
-          VecSetValue(tmpVec,
-                      it->second[i].first, it->second[i].second, INSERT_VALUES);
-
-        VecAssemblyBegin(tmpVec);
-        VecAssemblyEnd(tmpVec);
-
-        subdomain->solve(tmpVec, tmpVec);
-
-        PetscScalar *tmpValues;
-        VecGetArray(tmpVec, &tmpValues);
-        for (int i = 0; i < nRowsRankB; i++)
-          MatSetValue(matBPi,
-                      localDofMap.getStartDofs() + i,
-                      it->first,
-                      tmpValues[i],
-                      ADD_VALUES);
-        VecRestoreArray(tmpVec, &tmpValues);
-
-        VecDestroy(&tmpVec);
-      }
-
-      MatAssemblyBegin(matBPi, MAT_FINAL_ASSEMBLY);
-      MatAssemblyEnd(matBPi, MAT_FINAL_ASSEMBLY);
+      Vec tmpVec;
+      VecCreateSeq(PETSC_COMM_SELF, nRowsRankB, &tmpVec);
+
+      Mat matK;
+      MatCreateAIJ(mpiCommGlobal,
+                   nRowsRankB, nRowsRankPrimal,
+                   nGlobalOverallInterior, nRowsOverallPrimal,
+                   150, PETSC_NULL, 150, PETSC_NULL, &matK);
+      MatSetOption(matK, MAT_NEW_NONZERO_ALLOCATION_ERR, PETSC_FALSE);
+
+      // Solve for all column vectors of mat A_BPi and create matrix matK
+      for (petsc_helper::PetscMatCol::iterator it = mat_b_primal_cols.begin();
+           it != mat_b_primal_cols.end(); ++it) {
+        petsc_helper::getColumnVec(it->second, tmpVec);
+        subdomain->solve(tmpVec, tmpVec);
+        petsc_helper::setMatLocalColumn(matK, it->first, tmpVec);
+      }
+
+      VecDestroy(&tmpVec);
+
+      MatAssemblyBegin(matK, MAT_FINAL_ASSEMBLY);
+      MatAssemblyEnd(matK, MAT_FINAL_ASSEMBLY);
 
+      // Calculate: mat_schur_primal = A_PiPi - A_PiB inv(A_BB) ABPi
+      //                             = A_PiPi - A_PiB matK
       MatDuplicate(subdomain->getMatCoarse(), MAT_COPY_VALUES,
                    &mat_schur_primal);
+
       Mat matPrimal;
-      MatMatMult(subdomain->getMatCoarseInterior(), matBPi, MAT_INITIAL_MATRIX,
+      MatMatMult(subdomain->getMatCoarseInterior(), matK, MAT_INITIAL_MATRIX,
                  PETSC_DEFAULT, &matPrimal);
       MatAXPY(mat_schur_primal, -1.0, matPrimal, DIFFERENT_NONZERO_PATTERN);
       MatDestroy(&matPrimal);
-      MatDestroy(&matBPi);
+      MatDestroy(&matK);
 
+      // === Create KSP solver object and set appropriate solver options. ====
+
       KSPCreate(mpiCommGlobal, &ksp_schur_primal);
       KSPSetOperators(ksp_schur_primal, mat_schur_primal, mat_schur_primal,
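For reference, the matrix assembled in this hunk is the primal Schur complement of the FETI-DP coarse problem. Writing \(\Pi\) for the primal (coarse) unknowns and \(B\) for the remaining interior unknowns, as in the code comments, the computation is

\[
K = A_{BB}^{-1} A_{B\Pi}, \qquad
S_{\Pi\Pi} = A_{\Pi\Pi} - A_{\Pi B}\, K
           = A_{\Pi\Pi} - A_{\Pi B}\, A_{BB}^{-1} A_{B\Pi}.
\]

getMatLocalColumn() extracts the rank-local columns of \(A_{B\Pi}\), each subdomain->solve() applies \(A_{BB}^{-1}\) to one column, setMatLocalColumn() stores the result as the corresponding column of matK, and the final MatMatMult()/MatAXPY() pair forms \(A_{\Pi\Pi} - A_{\Pi B} K\) in mat_schur_primal.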
@@ -910,6 +896,9 @@ namespace AMDiS {
       PCFactorSetMatSolverPackage(pc_schur_primal, MATSOLVERMUMPS);
       KSPSetFromOptions(ksp_schur_primal);
 
+      // === And finally print timings, if required. ===
+
       if (printTimings) {
         MPI::COMM_WORLD.Barrier();
         MatInfo minfo;