iwr / amdis · Commits · 26dceb38

Commit 26dceb38, authored Dec 11, 2011 by Thomas Witkowski
Parent: 7c6dd8a4

    Fixed our FETI-DP bug...

3 changed files
AMDiS/src/AMDiS.cc

@@ -44,11 +44,7 @@ namespace AMDiS {
     TEST_EXIT(argc >= 2)("No init file!\n");
 
-    if (string(argv[0]) != "ddt")
-      Parameters::init(string(argv[1]));
-    else
-      Parameters::init(string(argv[1]));
+    Parameters::init(string(argv[1]));
 
     Parameters::readArgv(argc, argv);
   }
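The two branches of the removed conditional were identical, so the commit collapses them: argv[1] is now unconditionally the init file. For orientation, a minimal driver sketch built only on the calls visible in this hunk (the umbrella header name and the surrounding setup are assumptions, not taken from this commit):

    #include "AMDiS.h"   // assumed umbrella header

    using namespace AMDiS;

    int main(int argc, char *argv[])
    {
      TEST_EXIT(argc >= 2)("No init file!\n");

      // argv[1] names the init file; later command-line arguments can
      // still override parameters through readArgv().
      Parameters::init(std::string(argv[1]));
      Parameters::readArgv(argc, argv);

      // ... problem setup and solve would follow here ...

      return 0;
    }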
AMDiS/src/parallel/PetscSolverFeti.cc

@@ -55,24 +55,17 @@ namespace AMDiS {
     MatShellGetContext(mat, &ctx);
     FetiData *data = static_cast<FetiData*>(ctx);
 
-    // tmp_vec_b0 = inv(K_BB) trans(L) x
-    MatMultTranspose(*(data->mat_lagrange), x, data->tmp_vec_b0);
-    data->fetiSolver->solveLocalProblem(data->tmp_vec_b0, data->tmp_vec_b0);
-
-    // tmp_vec_b1 = inv(K_BB) K_BPi inv(S_PiPi) K_PiB tmp_vec_b0
-    MatMult(*(data->mat_primal_b), data->tmp_vec_b0, data->tmp_vec_primal);
-    double first = MPI::Wtime();
+    MatMultTranspose(*(data->mat_lagrange), x, data->tmp_vec_b);
+    data->fetiSolver->solveLocalProblem(data->tmp_vec_b, data->tmp_vec_b);
+    MatMult(*(data->mat_lagrange), data->tmp_vec_b, data->tmp_vec_lagrange);
+
+    MatMult(*(data->mat_primal_b), data->tmp_vec_b, data->tmp_vec_primal);
     KSPSolve(*(data->ksp_schur_primal), data->tmp_vec_primal, data->tmp_vec_primal);
-    PetscSolverFeti::fetiStatistics.nSchurPrimalSolve++;
-    PetscSolverFeti::fetiStatistics.timeSchurPrimalSolve +=
-      MPI::Wtime() - first;
-
-    MatMult(*(data->mat_b_primal), data->tmp_vec_primal, data->tmp_vec_b1);
-
-    // y = L (tmp_vec_b0 + tmp_vec_b1)
-    VecAXPBY(data->tmp_vec_b0, 1.0, 1.0, data->tmp_vec_b1);
-    MatMult(*(data->mat_lagrange), data->tmp_vec_b0, y);
+    MatMult(*(data->mat_b_primal), data->tmp_vec_primal, data->tmp_vec_b);
+    data->fetiSolver->solveLocalProblem(data->tmp_vec_b, data->tmp_vec_b);
+    MatMult(*(data->mat_lagrange), data->tmp_vec_b, y);
+
+    VecAXPBY(y, 1.0, 1.0, data->tmp_vec_lagrange);
 
     PetscSolverFeti::fetiStatistics.nFetiApply++;
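For orientation: petscMultMatFeti applies the reduced FETI-DP operator to a vector of Lagrange multipliers. In the notation of the removed comments (the standard FETI-DP identity, written out here for reference, not text from the repository):

\[ F\lambda \;=\; L\,K_{BB}^{-1}L^{T}\lambda \;+\; L\,K_{BB}^{-1}K_{B\Pi}\,S_{\Pi\Pi}^{-1}\,K_{\Pi B}\,K_{BB}^{-1}L^{T}\lambda . \]

Reading the two sides of the hunk against this formula suggests where the bug sat: the removed code stored K_BPi inv(S_PiPi) K_PiB inv(K_BB) trans(L) x into tmp_vec_b1 without a further solveLocalProblem, so the leading inv(K_BB) of the second term was never applied, despite what the removed comment claims. The replacement calls solveLocalProblem again after the mat_b_primal multiply and parks the first term in tmp_vec_lagrange until the final VecAXPBY. The Schur-primal timing instrumentation is dropped from the hot path at the same time.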
@@ -766,10 +759,10 @@ namespace AMDiS {
     VecCreateMPI(PETSC_COMM_WORLD,
                  nRankB * nComponents, nOverallB * nComponents,
-                 &(fetiData.tmp_vec_b0));
+                 &(fetiData.tmp_vec_b));
     VecCreateMPI(PETSC_COMM_WORLD,
-                 nRankB * nComponents, nOverallB * nComponents,
-                 &(fetiData.tmp_vec_b1));
+                 nRankLagrange * nComponents, nOverallLagrange * nComponents,
+                 &(fetiData.tmp_vec_lagrange));
     VecCreateMPI(PETSC_COMM_WORLD,
                  nRankPrimals * nComponents, nOverallPrimals * nComponents,
                  &(fetiData.tmp_vec_primal));
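The second temporary is now sized by the Lagrange-multiplier counts rather than the B counts, matching its new role. VecCreateMPI takes the per-rank local size and the global size explicitly; a minimal self-contained sketch of the same call pattern (the size is a hypothetical stand-in, not a value from the repository):

    #include <petscvec.h>

    int main(int argc, char **argv)
    {
      PetscInitialize(&argc, &argv, PETSC_NULL, PETSC_NULL);

      PetscInt nRankLagrange = 10;   // locally owned entries (hypothetical)
      Vec tmp_vec_lagrange;

      // Local size first, then global size; PETSC_DETERMINE lets PETSc
      // sum the local sizes over all ranks.
      VecCreateMPI(PETSC_COMM_WORLD, nRankLagrange, PETSC_DETERMINE,
                   &tmp_vec_lagrange);

      VecSet(tmp_vec_lagrange, 0.0);
      VecDestroy(&tmp_vec_lagrange);

      PetscFinalize();
      return 0;
    }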
@@ -857,8 +850,8 @@ namespace AMDiS {
     fetiData.fetiSolver = NULL;
     fetiData.ksp_schur_primal = PETSC_NULL;
 
-    VecDestroy(&fetiData.tmp_vec_b0);
-    VecDestroy(&fetiData.tmp_vec_b1);
+    VecDestroy(&fetiData.tmp_vec_b);
+    VecDestroy(&fetiData.tmp_vec_lagrange);
     VecDestroy(&fetiData.tmp_vec_primal);
     MatDestroy(&mat_feti);
     KSPDestroy(&ksp_feti);
@@ -904,12 +897,6 @@ namespace AMDiS {
   {
     FUNCNAME("PetscSolverFeti::recoverSolution()");
 
-    PetscScalar *ttt;
-    VecGetArray(vec_sol_primal, &ttt);
-    for (int i = 0; i < nRankPrimals; i++)
-      MSG("PRIM VAL = %f\n", ttt[i * 3 + 1]);
-    VecRestoreArray(vec_sol_primal, &ttt);
-
     // === Get local part of the solution for B variables. ===
     PetscScalar *localSolB;
@@ -928,8 +915,6 @@ namespace AMDiS {
     for (DofMapping::iterator it = globalPrimalIndex.begin();
          it != globalPrimalIndex.end(); ++it) {
       for (int i = 0; i < nComponents; i++) {
-        MSG("COPY GLOBAL PRIM %d of COMP %d TO LOCAL TMP %d\n",
-            it->second, i, counter);
         globalIsIndex.push_back(it->second * nComponents + i);
         localIsIndex.push_back(counter++);
       }
@@ -966,10 +951,6 @@ namespace AMDiS {
     PetscScalar *localSolPrimal;
     VecGetArray(local_sol_primal, &localSolPrimal);
 
-    for (int i = 0; i < globalPrimalIndex.size(); i++)
-      MSG("TEST VAL %f\n", localSolPrimal[i * 3 + 1]);
-
     // === And copy from PETSc local vectors to the DOF vectors. ===
     for (int i = 0; i < nComponents; i++) {
@@ -984,17 +965,11 @@ namespace AMDiS {
       int counter = 0;
       for (DofMapping::iterator it = globalPrimalIndex.begin();
            it != globalPrimalIndex.end(); ++it) {
-        if (i == 1)
-          MSG("AND SET THE VAL OF DOF %d TO VAL %f\n",
-              it->first, localSolPrimal[counter * nComponents + i]);
         dofVec[it->first] = localSolPrimal[counter * nComponents + i];
         counter++;
       }
     }
 
-    MSG("VAL BOUNDS: %f %f\n",
-        vec.getDOFVector(1)->min(), vec.getDOFVector(1)->max());
-
     VecRestoreArray(vec_sol_b, &localSolB);
     VecRestoreArray(local_sol_primal, &localSolPrimal);
     VecDestroy(&local_sol_primal);
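These four hunks only strip the MSG debug spam left over from the bug hunt; the surviving code keeps the VecGetArray/VecRestoreArray idiom for copying PETSc vector entries into the AMDiS DOF vectors. As a reminder of that idiom, a self-contained sketch (not code from the repository):

    #include <vector>
    #include <petscvec.h>

    // Copy the locally owned entries of a PETSc vector into a std::vector.
    // Every VecGetArray() must be paired with a VecRestoreArray() before
    // the vector is used elsewhere or destroyed.
    void copyLocalPart(Vec v, std::vector<PetscScalar> &out)
    {
      PetscInt nLocal;
      VecGetLocalSize(v, &nLocal);

      PetscScalar *data;
      VecGetArray(v, &data);
      out.assign(data, data + nLocal);
      VecRestoreArray(v, &data);
    }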
@@ -1409,6 +1384,204 @@ namespace AMDiS {
   }
 
+  void PetscSolverFeti::solveFetiMatrix(SystemVector &vec)
+  {
+    FUNCNAME("PetscSolverFeti::solveFetiMatrix()");
+
+    int nOverallNest =
+      (nOverallB + nOverallPrimals + nOverallLagrange) * nComponents;
+
+    Mat mat_lagrange_transpose;
+    MatTranspose(mat_lagrange, MAT_INITIAL_MATRIX, &mat_lagrange_transpose);
+
+    Mat A_global;
+    MatCreateMPIAIJ(PETSC_COMM_WORLD,
+                    PETSC_DECIDE, PETSC_DECIDE, nOverallNest, nOverallNest,
+                    30, PETSC_NULL, 30, PETSC_NULL, &A_global);
+
+    Vec A_global_rhs, A_global_sol;
+    MatGetVecs(A_global, &A_global_rhs, &A_global_sol);
+
+    PetscInt nCols;
+    const PetscInt *cols;
+    const PetscScalar *vals;
+
+    // === mat_b_b ===
+
+    for (int i = 0; i < nRankB * nComponents; i++) {
+      int rowIndex = rStartB * nComponents + i;
+      MatGetRow(mat_b_b, i, &nCols, &cols, &vals);
+      MatSetValues(A_global, 1, &rowIndex, nCols, cols, vals, ADD_VALUES);
+      MatRestoreRow(mat_b_b, rowIndex, &nCols, &cols, &vals);
+    }
+
+    PetscScalar *g_f_b;
+    VecGetArray(f_b, &g_f_b);
+    for (int i = 0; i < nRankB * nComponents; i++)
+      VecSetValue(A_global_rhs, rStartB * nComponents + i, g_f_b[i],
+                  INSERT_VALUES);
+    VecRestoreArray(f_b, &g_f_b);
+
+    // === mat_primal_primal ===
+
+    for (int i = 0; i < nRankPrimals * nComponents; i++) {
+      int rowIndex = rStartPrimals * nComponents + i;
+      MatGetRow(mat_primal_primal, rowIndex, &nCols, &cols, &vals);
+      int rowIndexA = nOverallB * nComponents + rowIndex;
+      vector<int> colsA(nCols);
+      for (int j = 0; j < nCols; j++)
+        colsA[j] = nOverallB * nComponents + cols[j];
+      MatSetValues(A_global, 1, &rowIndexA, nCols, &(colsA[0]), vals,
+                   ADD_VALUES);
+      MatRestoreRow(mat_primal_primal, rowIndex, &nCols, &cols, &vals);
+    }
+
+    PetscScalar *g_f_primal;
+    VecGetArray(f_primal, &g_f_primal);
+    for (int i = 0; i < nRankPrimals * nComponents; i++)
+      VecSetValue(A_global_rhs,
+                  (nOverallB + rStartPrimals) * nComponents + i,
+                  g_f_primal[i], INSERT_VALUES);
+    VecRestoreArray(f_primal, &g_f_primal);
+
+    // === mat_b_primal ===
+
+    for (int i = 0; i < nRankB * nComponents; i++) {
+      int rowIndex = rStartB * nComponents + i;
+      MatGetRow(mat_b_primal, rowIndex, &nCols, &cols, &vals);
+      vector<int> colsA(nCols);
+      for (int j = 0; j < nCols; j++)
+        colsA[j] = nOverallB * nComponents + cols[j];
+      MatSetValues(A_global, 1, &rowIndex, nCols, &(colsA[0]), vals,
+                   ADD_VALUES);
+      MatRestoreRow(mat_b_primal, rowIndex, &nCols, &cols, &vals);
+    }
+
+    // === mat_primal_b ===
+
+    for (int i = 0; i < nRankPrimals * nComponents; i++) {
+      int rowIndex = rStartPrimals * nComponents + i;
+      MatGetRow(mat_primal_b, rowIndex, &nCols, &cols, &vals);
+      int rowIndexA = nOverallB * nComponents + rowIndex;
+      MatSetValues(A_global, 1, &rowIndexA, nCols, cols, vals, ADD_VALUES);
+      MatRestoreRow(mat_primal_b, rowIndex, &nCols, &cols, &vals);
+    }
+
+    // === mat_lagrange ===
+
+    for (int i = 0; i < nRankLagrange * nComponents; i++) {
+      int rowIndex = rStartLagrange * nComponents + i;
+      MatGetRow(mat_lagrange, rowIndex, &nCols, &cols, &vals);
+      int rowIndexA =
+        (nOverallB + nOverallPrimals) * nComponents + rowIndex;
+      MatSetValues(A_global, 1, &rowIndexA, nCols, cols, vals, ADD_VALUES);
+      MatRestoreRow(mat_lagrange, rowIndex, &nCols, &cols, &vals);
+    }
+
+    // === mat_lagrange_transpose ===
+
+    for (int i = 0; i < nRankB * nComponents; i++) {
+      int rowIndex = rStartB * nComponents + i;
+      MatGetRow(mat_lagrange_transpose, rowIndex, &nCols, &cols, &vals);
+      vector<int> colsA(nCols);
+      for (int j = 0; j < nCols; j++)
+        colsA[j] = (nOverallB + nOverallPrimals) * nComponents + cols[j];
+      MatSetValues(A_global, 1, &rowIndex, nCols, &(colsA[0]), vals,
+                   ADD_VALUES);
+      MatRestoreRow(mat_lagrange_transpose, rowIndex, &nCols, &cols, &vals);
+    }
+
+    MatAssemblyBegin(A_global, MAT_FINAL_ASSEMBLY);
+    MatAssemblyEnd(A_global, MAT_FINAL_ASSEMBLY);
+    VecAssemblyBegin(A_global_rhs);
+    VecAssemblyEnd(A_global_rhs);
+
+    // === Create solver and solve the overall FETI system. ===
+
+    KSP ksp;
+    KSPCreate(PETSC_COMM_WORLD, &ksp);
+    KSPSetOperators(ksp, A_global, A_global, SAME_NONZERO_PATTERN);
+    KSPSetFromOptions(ksp);
+    KSPSolve(ksp, A_global_rhs, A_global_sol);
+
+    Vec u_b, u_primal;
+    VecDuplicate(f_b, &u_b);
+    VecDuplicate(f_primal, &u_primal);
+
+    vector<PetscInt> localBIndex, globalBIndex;
+    localBIndex.reserve(nRankB * nComponents);
+    globalBIndex.reserve(nRankB * nComponents);
+    for (int i = 0; i < nRankB * nComponents; i++) {
+      localBIndex.push_back(rStartB * nComponents + i);
+      globalBIndex.push_back(rStartB * nComponents + i);
+    }
+
+    IS localBIs, globalBIs;
+    ISCreateGeneral(PETSC_COMM_SELF, localBIndex.size(), &(localBIndex[0]),
+                    PETSC_USE_POINTER, &localBIs);
+    ISCreateGeneral(PETSC_COMM_SELF, globalBIndex.size(), &(globalBIndex[0]),
+                    PETSC_USE_POINTER, &globalBIs);
+
+    VecScatter vecscat;
+    VecScatterCreate(A_global_sol, globalBIs, u_b, localBIs, &vecscat);
+    VecScatterBegin(vecscat, A_global_sol, u_b, INSERT_VALUES,
+                    SCATTER_FORWARD);
+    VecScatterEnd(vecscat, A_global_sol, u_b, INSERT_VALUES,
+                  SCATTER_FORWARD);
+
+    ISDestroy(&localBIs);
+    ISDestroy(&globalBIs);
+    VecScatterDestroy(&vecscat);
+
+    localBIndex.resize(0);
+    globalBIndex.resize(0);
+    localBIndex.reserve(nRankPrimals * nComponents);
+    globalBIndex.reserve(nRankPrimals * nComponents);
+    for (int i = 0; i < nRankPrimals * nComponents; i++) {
+      localBIndex.push_back(rStartPrimals * nComponents + i);
+      globalBIndex.push_back(nOverallB * nComponents +
+                             rStartPrimals * nComponents + i);
+    }
+
+    ISCreateGeneral(PETSC_COMM_SELF, localBIndex.size(), &(localBIndex[0]),
+                    PETSC_USE_POINTER, &localBIs);
+    ISCreateGeneral(PETSC_COMM_SELF, globalBIndex.size(), &(globalBIndex[0]),
+                    PETSC_USE_POINTER, &globalBIs);
+
+    VecScatterCreate(A_global_sol, globalBIs, u_primal, localBIs, &vecscat);
+    VecScatterBegin(vecscat, A_global_sol, u_primal, INSERT_VALUES,
+                    SCATTER_FORWARD);
+    VecScatterEnd(vecscat, A_global_sol, u_primal, INSERT_VALUES,
+                  SCATTER_FORWARD);
+
+    ISDestroy(&localBIs);
+    ISDestroy(&globalBIs);
+    VecScatterDestroy(&vecscat);
+
+    recoverSolution(u_b, u_primal, vec);
+
+    KSPDestroy(&ksp);
+  }
+
+
   void PetscSolverFeti::solveReducedFetiMatrix(SystemVector &vec)
   {
     FUNCNAME("PetscSolverFeti::solveReducedFetiMatrix()");
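Put together, the rows copied into A_global assemble the unreduced FETI-DP saddle-point system: B rows first, then the primal rows (offset by nOverallB * nComponents), then the Lagrange rows. In matrix form (written out here for orientation, not text from the repository):

\[ \begin{pmatrix} K_{BB} & K_{B\Pi} & L^{T} \\ K_{\Pi B} & K_{\Pi\Pi} & 0 \\ L & 0 & 0 \end{pmatrix} \begin{pmatrix} u_B \\ u_\Pi \\ \lambda \end{pmatrix} = \begin{pmatrix} f_B \\ f_\Pi \\ 0 \end{pmatrix} . \]

Solving this monolithic system with a plain KSP is far more expensive than the reduced iteration, but it yields a reference solution against which the reduced solver can be checked, which is exactly the debugging role the new header comment assigns to solveFetiMatrix.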
@@ -1543,9 +1716,14 @@ namespace AMDiS {
     int debug = 0;
     Parameters::get("parallel->debug feti", debug);
 
-    // resetStatistics();
-    solveReducedFetiMatrix(vec);
-    // printStatistics();
+    if (debug) {
+      solveFetiMatrix(vec);
+    } else {
+      // resetStatistics();
+      solveReducedFetiMatrix(vec);
+      // printStatistics();
+    }
 
     VtkWriter::writeFile(&vec, "test-before.vtu");
     MeshDistributor::globalMeshDistributor->synchVector(vec);
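Since the new switch is read through Parameters::get, it can be toggled from the init file rather than by recompiling. Assuming the usual AMDiS key/value init-file syntax, a line such as the following (hypothetical example, not shipped in this commit) selects the unreduced debug solver:

    parallel->debug feti: 1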
AMDiS/src/parallel/PetscSolverFeti.h

@@ -69,11 +69,14 @@ namespace AMDiS {
     Mat *mat_lagrange;
 
     /// Temporal vectors on the B variables.
-    Vec tmp_vec_b0, tmp_vec_b1;
+    Vec tmp_vec_b;
 
     /// Temporal vector on the primal variables.
     Vec tmp_vec_primal;
 
+    /// Temporal vector on the lagrange variables.
+    Vec tmp_vec_lagrange;
+
     PetscSolverFeti *fetiSolver;
 
     /// Pointer to the solver of the schur complement on the primal variables.
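FetiData is the context handed to a PETSc shell matrix whose multiply callback is petscMultMatFeti. For readers unfamiliar with that pattern, a minimal self-contained sketch of wiring a context struct to a shell matrix (the struct, names, and the trivial operator are hypothetical illustrations, not the AMDiS implementation):

    #include <petscmat.h>

    // Hypothetical stand-in for the FetiData context.
    struct MyCtx {
      Vec tmp;
    };

    // Shell multiply callback with the signature PETSc expects: y = A x.
    PetscErrorCode myMult(Mat mat, Vec x, Vec y)
    {
      void *ctx;
      MatShellGetContext(mat, &ctx);
      MyCtx *data = static_cast<MyCtx*>(ctx);
      VecCopy(x, data->tmp);     // stand-in for the real operator
      VecCopy(data->tmp, y);
      return 0;
    }

    void createShell(PetscInt nLocal, PetscInt nGlobal, MyCtx *ctx, Mat *A)
    {
      MatCreateShell(PETSC_COMM_WORLD, nLocal, nLocal, nGlobal, nGlobal,
                     ctx, A);
      MatShellSetOperation(*A, MATOP_MULT, (void(*)(void)) myMult);
    }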
@@ -239,6 +242,15 @@ namespace AMDiS {
                           Vec &vec_sol_primal,
                           SystemVector &vec);
 
+    /** \brief
+     * Solves the FETI-DP system globally, thus without reducing it to the
+     * Lagrange multipliers. This should be used for debugging only to test
+     * if the FETI-DP system is setup correctly.
+     *
+     * \param[out] vec  Solution DOF vectors.
+     */
+    void solveFetiMatrix(SystemVector &vec);
+
     /** \brief
      * Solves the FETI-DP system with reducing it first to the Lagrange
      * multipliers. This is what one expects when using the FETI-DP methid :)