Project: amdis (Backofen, Rainer)
Commit: c0f4aba5
Parent: 90c8af7d
Authored: Feb 28, 2012, by Thomas Witkowski
Changed files: 9

    Just to make some tests; must remove the messages later.
AMDiS/src/ProblemStat.cc

@@ -821,8 +821,21 @@ namespace AMDiS {
       continue;
     }
 
-    if (assembleMatrix && matrix->getBoundaryManager())
+    if (assembleMatrix && matrix->getBoundaryManager()) {
+      MSG("TEST MAT FOR PERIODIC BC: %p on problem %s\n", matrix, name.c_str());
+      BoundaryIndexMap& boundaryMap = const_cast<BoundaryIndexMap&>(matrix->getBoundaryManager()->getBoundaryConditionMap());
+      MSG("PTR: %p\n", &boundaryMap);
+      BoundaryIndexMap::iterator it = boundaryMap.begin();
+      while (it != boundaryMap.end()) {
+        if (it->second->isPeriodic())
+          MSG("HAS PERIODIC!\n");
+        ++it;
+      }
+      MSG("TEST A\n");
       matrix->getBoundaryManager()->initMatrix(matrix);
+      MSG("TEST B\n");
+    }
 
     // The simplest case: either the right hand side has no operaters, no aux
     // fe spaces, or all aux fe spaces are equal to the row and col fe space.
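The debug block added above obtains a writable reference to the boundary-condition map through const_cast, because the manager only exposes a const getter, and then scans the map for periodic conditions before initializing the matrix. A minimal, self-contained sketch of that const_cast-and-scan pattern (BoundaryCondition and BoundaryManager below are simplified stand-ins, not the actual AMDiS classes):

#include <cstdio>
#include <map>

// Simplified stand-ins for the AMDiS types used in the hunk above.
struct BoundaryCondition {
  bool periodic;
  bool isPeriodic() const { return periodic; }
};
typedef std::map<int, BoundaryCondition*> BoundaryIndexMap;

struct BoundaryManager {
  BoundaryIndexMap conditions;
  // Const getter, as in the code above; callers that need a mutable
  // reference cast the constness away.
  const BoundaryIndexMap& getBoundaryConditionMap() const { return conditions; }
};

int main()
{
  BoundaryCondition dirichlet = { false }, periodic = { true };
  BoundaryManager manager;
  manager.conditions[1] = &dirichlet;
  manager.conditions[-1] = &periodic;

  // Same pattern as the debug block: cast to a writable reference, then scan.
  BoundaryIndexMap& boundaryMap =
    const_cast<BoundaryIndexMap&>(manager.getBoundaryConditionMap());
  for (BoundaryIndexMap::iterator it = boundaryMap.begin();
       it != boundaryMap.end(); ++it)
    if (it->second->isPeriodic())
      std::printf("HAS PERIODIC (type %d)!\n", it->first);
  return 0;
}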
AMDiS/src/parallel/ElementObjectDatabase.cc

@@ -161,7 +161,6 @@ namespace AMDiS {
         multPeriodicDof3.push_back(it->first);
     }
 
     if (mesh->getDim() == 2) {
       TEST_EXIT_DBG(multPeriodicDof2.size() == 0 ||
                     multPeriodicDof2.size() == 4)

@@ -173,8 +172,7 @@ namespace AMDiS {
                     multPeriodicDof3.size() == 8)
         ("Should not happen (%d)!\n", multPeriodicDof3.size());
     }
 
     if (multPeriodicDof2.size() > 0) {
       for (unsigned int i = 0; i < multPeriodicDof2.size(); i++) {
         DegreeOfFreedom dof0 = multPeriodicDof2[i];
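The checks above use the double-parenthesis syntax TEST_EXIT_DBG(cond)("msg", args). One possible way to obtain that call syntax is for the macro to evaluate the condition and yield a callable object that either ignores its arguments or prints them and aborts; the following is a sketch of that general mechanism only, not the actual AMDiS implementation:

#include <cstdarg>
#include <cstdio>
#include <cstdlib>

// Hypothetical printer object; the real AMDiS macro may differ.
struct ErrorPrinter {
  bool failed;
  explicit ErrorPrinter(bool f) : failed(f) {}
  void operator()(const char* fmt, ...) const
  {
    if (!failed)
      return;
    va_list args;
    va_start(args, fmt);
    std::vfprintf(stderr, fmt, args);
    va_end(args);
    std::abort();
  }
};

#define TEST_EXIT_DBG(cond) ErrorPrinter(!(cond))

int main()
{
  int size = 4;
  // Passes silently, like the multiplicity checks in the hunk above;
  // a failing condition would print the message and abort.
  TEST_EXIT_DBG(size == 0 || size == 4)("Should not happen (%d)!\n", size);
  return 0;
}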
AMDiS/src/parallel/MeshDistributor.cc

@@ -457,13 +457,20 @@ namespace AMDiS {
       deserialized = true;
     }
 
+    MSG("PUSH BACK THAT SHITT!\n");
+
     problemStat.push_back(probStat);
 
-    // If the mesh distributor is already initialized, don't forgett to set rank
-    // DOFs object to the matrices and vectors of the added stationary problem.
-    if (initialized)
+    // If the mesh distributor is already initialized, don't forget to set rank
+    // DOFs object to the matrices and vectors of the added stationary problem,
+    // and to remove the periodic boundary conditions on these objects.
+    if (initialized) {
+      MSG("INIT 0\n");
       setRankDofs(probStat);
+      removePeriodicBoundaryConditions(probStat);
+    } else {
+      MSG("INIT 1\n");
+    }
   }
@@ -717,23 +724,8 @@ namespace AMDiS {
     FUNCNAME("MeshDistributor::removePeriodicBoundaryConditions()");
 
     // Remove periodic boundaries in boundary manager on matrices and vectors.
-    for (unsigned int i = 0; i < problemStat.size(); i++) {
-      int nComponents = problemStat[i]->getNumComponents();
-
-      for (int j = 0; j < nComponents; j++) {
-        for (int k = 0; k < nComponents; k++) {
-          DOFMatrix* mat = problemStat[i]->getSystemMatrix(j, k);
-          if (mat && mat->getBoundaryManager())
-            removePeriodicBoundaryConditions(const_cast<BoundaryIndexMap&>(mat->getBoundaryManager()->getBoundaryConditionMap()));
-        }
-
-        if (problemStat[i]->getSolution()->getDOFVector(j)->getBoundaryManager())
-          removePeriodicBoundaryConditions(const_cast<BoundaryIndexMap&>(problemStat[i]->getSolution()->getDOFVector(j)->getBoundaryManager()->getBoundaryConditionMap()));
-
-        if (problemStat[i]->getRhs()->getDOFVector(j)->getBoundaryManager())
-          removePeriodicBoundaryConditions(const_cast<BoundaryIndexMap&>(problemStat[i]->getRhs()->getDOFVector(j)->getBoundaryManager()->getBoundaryConditionMap()));
-      }
-    }
+    for (unsigned int i = 0; i < problemStat.size(); i++)
+      removePeriodicBoundaryConditions(problemStat[i]);
 
     // Remove periodic boundaries on elements in mesh.
     TraverseStack stack;
@@ -748,15 +740,56 @@ namespace AMDiS {
   }
 
+  void MeshDistributor::removePeriodicBoundaryConditions(ProblemStatSeq *probStat)
+  {
+    FUNCNAME("MeshDistributor::removePeriodicBoundaryConditions()");
+
+    MSG("REMOVE ON PROB STAT %s\n", probStat->getName().c_str());
+
+    int nComponents = probStat->getNumComponents();
+    MSG("NUM: %d\n", nComponents);
+
+    for (int j = 0; j < nComponents; j++) {
+      for (int k = 0; k < nComponents; k++) {
+        DOFMatrix* mat = probStat->getSystemMatrix(j, k);
+        if (mat && mat->getBoundaryManager())
+          removePeriodicBoundaryConditions(const_cast<BoundaryIndexMap&>(mat->getBoundaryManager()->getBoundaryConditionMap()));
+      }
+
+      if (probStat->getSolution()->getDOFVector(j)->getBoundaryManager())
+        removePeriodicBoundaryConditions(const_cast<BoundaryIndexMap&>(probStat->getSolution()->getDOFVector(j)->getBoundaryManager()->getBoundaryConditionMap()));
+
+      if (probStat->getRhs()->getDOFVector(j)->getBoundaryManager())
+        removePeriodicBoundaryConditions(const_cast<BoundaryIndexMap&>(probStat->getRhs()->getDOFVector(j)->getBoundaryManager()->getBoundaryConditionMap()));
+    }
+  }
+
   void MeshDistributor::removePeriodicBoundaryConditions(BoundaryIndexMap& boundaryMap)
   {
     FUNCNAME("MeshDistributor::removePeriodicBoundaryConditions()");
 
+    MSG("START REMOVE: %p\n", &boundaryMap);
+
     BoundaryIndexMap::iterator it = boundaryMap.begin();
     while (it != boundaryMap.end()) {
-      if (it->second->isPeriodic())
+      if (it->second->isPeriodic()) {
+        MSG("FOUND AND REMOVE!\n");
         boundaryMap.erase(it++);
-      else
+      } else
         ++it;
     }
+
+    MSG("AND TEST AGAIN!\n");
+
+    it = boundaryMap.begin();
+    while (it != boundaryMap.end()) {
+      if (it->second->isPeriodic())
+        MSG("FOUND PER!\n");
+      ++it;
+    }
   }
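The removal loop above relies on the classic pre-C++11 idiom for erasing from a std::map while iterating: the iterator is post-incremented inside the erase() call, so it already points to the next element before the erased node is invalidated. A small stand-alone illustration of the same idiom, using a plain int-to-bool map instead of the AMDiS boundary types:

#include <cstdio>
#include <map>

int main()
{
  // Key: boundary type, value: "is periodic" flag (stand-in for the
  // BoundaryCondition objects used in the diff above).
  std::map<int, bool> boundaryMap;
  boundaryMap[1] = false;   // e.g. a Dirichlet condition
  boundaryMap[-1] = true;   // a periodic condition
  boundaryMap[-2] = true;   // another periodic condition

  std::map<int, bool>::iterator it = boundaryMap.begin();
  while (it != boundaryMap.end()) {
    if (it->second)
      // it++ is evaluated before erase() invalidates the old position,
      // so the loop continues safely with the next element.
      boundaryMap.erase(it++);
    else
      ++it;
  }

  std::printf("%u conditions left\n", (unsigned) boundaryMap.size());  // prints 1
  return 0;
}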
@@ -1561,12 +1594,16 @@ namespace AMDiS {
       if (elObjDb.isInRank(it->first.first, mpiRank) == false)
         continue;
 
+      // MSG("PERIODIC DOF: %d -> %d with ASOC %d\n", it->first.first, it->first.second, it->second);
+
       ElementObjectData& perDofEl0 = elObjDb.getElementsInRank(it->first.first)[mpiRank];
 
       for (map<int, ElementObjectData>::iterator elIt = elObjDb.getElementsInRank(it->first.second).begin();
            elIt != elObjDb.getElementsInRank(it->first.second).end(); ++elIt) {
+
+        // MSG("  in el %d %d\n", perDofEl0.elIndex, perDofEl0.ithObject);
+
         int otherElementRank = elIt->first;
         ElementObjectData& perDofEl1 = elIt->second;

@@ -2050,6 +2087,13 @@ namespace AMDiS {
   {
     FUNCNAME("MeshDistributor::createPeriodicMap()");
 
+    // vector<ElementObjectData> objs = elObjDb.getElements(0);
+    // MSG("SIZE OF 0-DOF is: %d\n", objs.size());
+    // for (int i = 0; i < objs.size(); i++)
+    //   MSG("0-DOF IN EL %d - %d\n", objs[i].elIndex, objs[i].ithObject);
+
     StdMpi<vector<int> > stdMpi(mpiComm, false);
 
     // === Each rank traverse its periodic boundaries and sends the DOF ===

@@ -2088,8 +2132,15 @@ namespace AMDiS {
         DegreeOfFreedom globalDof1 = dofFeData[feSpace].mapDofToGlobal[*(dofs1[j])];
 
-        if (!periodicMap.isPeriodicOnBound(feSpace, type, globalDof0))
+        if (!periodicMap.isPeriodicOnBound(feSpace, type, globalDof0)) {
+          // if (globalDof0 == 0) {
+          //   MSG("A-ADD DOF 0 at BOUND: %d %d %d <-> %d %d %d \n",
+          //       bound.rankObj.elIndex, bound.rankObj.subObj, bound.rankObj.ithObj,
+          //       bound.neighObj.elIndex, bound.neighObj.subObj, bound.neighObj.ithObj);
+          // }
           periodicMap.add(feSpace, type, globalDof0, globalDof1);
+        }
       }
     }
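The guard around periodicMap.add() in the hunk above only inserts an association if the (boundary type, DOF) pair has not been registered yet, so the same periodic pairing reported by several boundaries or ranks is stored only once. A reduced sketch of that data structure and guard, using nested std::map instead of the real PeriodicMap class (names below are simplified stand-ins):

#include <cstdio>
#include <map>

typedef int BoundaryType;
typedef int DegreeOfFreedom;
// boundary type -> (dof -> associated dof), a reduced stand-in for PeriodicMap.
typedef std::map<BoundaryType, std::map<DegreeOfFreedom, DegreeOfFreedom> > PeriodicDofMap;

static bool isPeriodicOnBound(const PeriodicDofMap& m, BoundaryType type, DegreeOfFreedom dof)
{
  PeriodicDofMap::const_iterator it = m.find(type);
  return it != m.end() && it->second.count(dof) > 0;
}

int main()
{
  PeriodicDofMap periodicMap;
  const BoundaryType type = -1;

  // The first report of the association 7 <-> 42 is stored ...
  if (!isPeriodicOnBound(periodicMap, type, 7))
    periodicMap[type][7] = 42;
  // ... a second report of the same DOF on the same boundary type is skipped.
  if (!isPeriodicOnBound(periodicMap, type, 7))
    periodicMap[type][7] = 99;

  std::printf("DOF 7 maps to %d\n", periodicMap[type][7]);  // prints 42
  return 0;
}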
@@ -2101,6 +2152,9 @@ namespace AMDiS {
       for (vector<AtomicBoundary>::iterator boundIt = it->second.begin();
            boundIt != it->second.end(); ++boundIt) {
 
+        // MSG("   NOW ADD DOFS ON PER BOUND EL %d %d with rank %d\n",
+        //     boundIt->rankObj.elIndex, boundIt->rankObj.subObj, it->first);
+
         int nDofs = dofs.size();
         boundIt->rankObj.el->getAllDofs(feSpace, boundIt->rankObj, dofs);

@@ -2111,7 +2165,7 @@ namespace AMDiS {
       // Send the global indices to the rank on the other side.
       stdMpi.getSendData(it->first).reserve(dofs.size());
       for (unsigned int i = 0; i < dofs.size(); i++)
         stdMpi.getSendData(it->first).push_back(dofFeData[feSpace].mapDofToGlobal[*(dofs[i])]);
 
       // Receive from this rank the same number of dofs.
       stdMpi.recv(it->first, dofs.size());

@@ -2143,8 +2197,14 @@ namespace AMDiS {
         // Check if this global DOF with the corresponding boundary type was
         // not added before by another periodic boundary from other rank.
-        if (!periodicMap.isPeriodicOnBound(feSpace, type, globalDofIndex))
+        if (!periodicMap.isPeriodicOnBound(feSpace, type, globalDofIndex)) {
+          // MSG("B-ADD DOF %d -> %d at BOUND from rank %d\n",
+          //     globalDofIndex,
+          //     mapGlobalDofIndex,
+          //     it->first);
           periodicMap.add(feSpace, type, globalDofIndex, mapGlobalDofIndex);
+        }
       }
     }

@@ -2175,11 +2235,17 @@ namespace AMDiS {
           periodicMap.getAssociations(feSpace, globalDof);
         TEST_EXIT_DBG(assoc.size() > 0)("Should not happen!\n");
 
         for (std::set<BoundaryType>::iterator perAscIt = assoc.begin();
              perAscIt != assoc.end(); ++perAscIt)
-          if (*perAscIt >= -3)
+          if (*perAscIt >= -3) {
+            // MSG("  add map DOF %d -> %d (%d)\n",
+            //     globalDof,
+            //     periodicMap.map(feSpace, *perAscIt, globalDof),
+            //     *perAscIt);
             perObjMap[*perAscIt][globalDof] = periodicMap.map(feSpace, *perAscIt, globalDof);
+          }
       }
     }
AMDiS/src/parallel/MeshDistributor.h

@@ -523,6 +523,10 @@ namespace AMDiS {
     /// vectors of all stationary problems and from the mesh itself.
     void removePeriodicBoundaryConditions();
 
+    /// Removes all periodic boundary condition information from all matrices and
+    /// vector of a given stationary problem.
+    void removePeriodicBoundaryConditions(ProblemStatSeq *probStat);
+
     // Removes all periodic boundaries from a given boundary map.
     void removePeriodicBoundaryConditions(BoundaryIndexMap& boundaryMap);
AMDiS/src/parallel/ParallelDebug.cc

@@ -165,9 +165,11 @@ namespace AMDiS {
       WorldVector<double> c;
       pdb.mesh->getDofIndexCoords(it->first, pdb.feSpaces[0], c);
       int nAssoc = it->second.size();
+#if 0
       TEST_EXIT_DBG(nAssoc == 1 || nAssoc == 3 || (pdb.mesh->getDim() == 3 && nAssoc == 7))
         ("Should not happen! DOF %d (%e %e %e) has %d periodic associations!\n",
          it->first, c[0], c[1], (pdb.mesh->getDim() == 2 ? 0.0 : c[2]), nAssoc);
+#endif
     }
AMDiS/src/parallel/PeriodicMap.cc

@@ -22,8 +22,10 @@ namespace AMDiS {
     for (PeriodicDofMap::iterator it = newMap.begin(); it != newMap.end(); ++it)
       for (DofMapping::iterator dofIt = it->second.begin();
-           dofIt != it->second.end(); ++dofIt)
+           dofIt != it->second.end(); ++dofIt) {
         add(feSpace, it->first, dofIt->second, dofIt->first);
+        // MSG("ADD MAP %d %d with type %d\n", dofIt->second, dofIt->first, it->first);
+      }
   }
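In the hunk above, each entry of the received map is re-registered through add() with its two DOFs swapped, i.e. the association is stored as seen from this rank's side. A minimal sketch of merging a received mapping in inverted form, using plain nested maps and hypothetical names rather than the real PeriodicMap interface:

#include <cstdio>
#include <map>

typedef int BoundaryType;
typedef int DegreeOfFreedom;
typedef std::map<DegreeOfFreedom, DegreeOfFreedom> DofMapping;
typedef std::map<BoundaryType, DofMapping> PeriodicDofMap;

// Merge a map received from another rank; each entry is stored with the
// two DOFs swapped, mirroring the add() call in the hunk above.
static void mergeSwapped(PeriodicDofMap& local, const PeriodicDofMap& received)
{
  for (PeriodicDofMap::const_iterator it = received.begin(); it != received.end(); ++it)
    for (DofMapping::const_iterator dofIt = it->second.begin();
         dofIt != it->second.end(); ++dofIt)
      local[it->first][dofIt->second] = dofIt->first;
}

int main()
{
  PeriodicDofMap local, received;
  received[-1][7] = 42;   // remote rank: DOF 7 is associated with DOF 42

  mergeSwapped(local, received);
  std::printf("local: DOF 42 -> %d\n", local[-1][42]);  // prints 7
  return 0;
}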
AMDiS/src/parallel/PetscSolverFeti.cc

@@ -221,9 +221,10 @@ namespace AMDiS {
       createPrimals(feSpace);
 
       createDuals(feSpace);
 
-      createLagrange(feSpace);
-
       createIndexB(feSpace);
     }
 
+    createLagrange(meshDistributor->getFeSpaces());
   }

@@ -382,18 +383,19 @@ namespace AMDiS {
     dualDofMap[feSpace].update(false);
 
     MSG("nRankDuals = %d  nOverallDuals = %d\n",
         dualDofMap[feSpace].nRankDofs,
         dualDofMap[feSpace].nOverallDofs);
   }
 
-  void PetscSolverFeti::createLagrange(const FiniteElemSpace *feSpace)
+  void PetscSolverFeti::createLagrange(vector<const FiniteElemSpace*> &feSpaces)
   {
     FUNCNAME("PetscSolverFeti::createLagrange()");
 
     // === Reserve for each dual node, on the rank that owns this node, the ===
     // === appropriate number of Lagrange constraints.                      ===
 
+#if 0
     int nRankLagrange = 0;
     DofMapping& dualMap = dualDofMap[feSpace].getMap();
     for (DofMapping::iterator it = dualMap.begin(); it != dualMap.end(); ++it) {

@@ -404,11 +406,10 @@ namespace AMDiS {
       }
     }
 
     lagrangeMap[feSpace].nRankDofs = nRankLagrange;
-    lagrangeMap[feSpace].update();
+    lagrangeMap[feSpace].update(false);
 
     MSG("nRankLagrange = %d  nOverallLagrange = %d\n",
-        lagrangeMap[feSpace].nRankDofs,
-        lagrangeMap[feSpace].nOverallDofs);
+        lagrangeMap[feSpace].nRankDofs, lagrangeMap[feSpace].nOverallDofs);
 
     // === Communicate lagrangeMap to all other ranks. ===

@@ -451,6 +452,7 @@ namespace AMDiS {
         }
       }
     }
+#endif
   }

@@ -490,8 +492,10 @@ namespace AMDiS {
     localDofMap[feSpace].update(false);
 
+#if 0
     dualDofMap[feSpace].addOffset(localDofMap[feSpace].rStartDofs +
                                   nLocalInterior);
+#endif
   }

@@ -499,8 +503,6 @@ namespace AMDiS {
   {
     FUNCNAME("PetscSolverFeti::createMatLagrange()");
 
-    const FiniteElemSpace* feSpace = meshDistributor->getFeSpace(0);
-
     // === Create distributed matrix for Lagrange constraints. ===
 
     MatCreateMPIAIJ(PETSC_COMM_WORLD,

@@ -518,33 +520,38 @@ namespace AMDiS {
     // === m == r, than the rank sets -1.0 for the corresponding ===
     // === constraint.                                           ===
 
-    DofMapping& dualMap = dualDofMap[feSpace].getMap();
-    for (DofMapping::iterator it = dualMap.begin(); it != dualMap.end(); ++it) {
-      TEST_EXIT_DBG(boundaryDofRanks.count(it->first))("Should not happen!\n");
-
-      // Global index of the first Lagrange constriant for this node.
-      int index = lagrangeMap[feSpace][it->first];
-      // Copy set of all ranks that contain this dual node.
-      vector<int> W(boundaryDofRanks[it->first].begin(),
-                    boundaryDofRanks[it->first].end());
-      // Number of ranks that contain this dual node.
-      int degree = W.size();
-
-      for (int i = 0; i < degree; i++) {
-        for (int j = i + 1; j < degree; j++) {
-          if (W[i] == mpiRank || W[j] == mpiRank) {
-            // Set the constraint for all components of the system.
-            for (int k = 0; k < nComponents; k++) {
-              int rowIndex = index * nComponents + k;
+    for (unsigned int feIdx = 0; feIdx < feSpaces.size(); feIdx++) {
+      const FiniteElemSpace* feSpace = feSpaces[feIdx];
+      DofMapping& dualMap = dualDofMap[feSpace].getMap();
+      for (DofMapping::iterator it = dualMap.begin(); it != dualMap.end(); ++it) {
+        TEST_EXIT_DBG(boundaryDofRanks.count(it->first))("Should not happen!\n");
+
+        // Global index of the first Lagrange constriant for this node.
+        int constraintMatIndex = lagrangeMap.mapGlobal(it->first, feIdx);
+        // Copy set of all ranks that contain this dual node.
+        vector<int> W(boundaryDofRanks[it->first].begin(),
+                      boundaryDofRanks[it->first].end());
+        // Number of ranks that contain this dual node.
+        int degree = W.size();
+
+        for (int i = 0; i < degree; i++) {
+          for (int j = i + 1; j < degree; j++) {
+            if (W[i] == mpiRank || W[j] == mpiRank) {
+#if 1
+              // Get global index of the dual variable
+              int colIndex = localDofMap.mapGlobal(it->first, feIdx);
+#else
               int colIndex = it->second * nComponents + k;
+#endif
               double value = (W[i] == mpiRank ? 1.0 : -1.0);
-              MatSetValue(mat_lagrange, rowIndex, colIndex, value, INSERT_VALUES);
+              MatSetValue(mat_lagrange, constraintMatIndex, colIndex, value, INSERT_VALUES);
             }
+            constraintMatIndex++;
           }
-          index++;
         }
       }
     }
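The loop in the last hunk writes, for every pair of ranks (W[i], W[j]) that share a dual node, one row of the Lagrange constraint matrix, with +1.0 contributed by the rank that owns the W[i] copy and -1.0 by the rank that owns the W[j] copy, so the constraint ties the two subdomain copies together. A compact sketch of how many such rows a node of a given multiplicity produces and which sign the local rank contributes (plain C++, no PETSc; the concrete numbers are made up for illustration):

#include <cstdio>
#include <vector>

int main()
{
  // Ranks that share one dual node; its "degree" is W.size(), as in the diff.
  std::vector<int> W;
  W.push_back(0);
  W.push_back(2);
  W.push_back(5);

  const int mpiRank = 2;   // pretend we are rank 2
  int degree = W.size();
  int constraintIndex = 0; // row counter, analogous to constraintMatIndex

  // One constraint per unordered pair (i, j); a rank only writes the rows
  // of pairs it belongs to: +1.0 if it is W[i], -1.0 if it is W[j].
  for (int i = 0; i < degree; i++) {
    for (int j = i + 1; j < degree; j++) {
      if (W[i] == mpiRank || W[j] == mpiRank) {
        double value = (W[i] == mpiRank ? 1.0 : -1.0);
        std::printf("row %d: ranks %d/%d, local entry %+0.1f\n",
                    constraintIndex, W[i], W[j], value);
      }
      constraintIndex++;
    }
  }
  // A node shared by n ranks yields n*(n-1)/2 constraint rows (3 here).
  return 0;
}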
AMDiS/src/parallel/PetscSolverFeti.h

@@ -83,7 +83,7 @@ namespace AMDiS {
     /// Create Lagrange multiplier variables corresponding to the dual
     /// variables.
-    void createLagrange(const FiniteElemSpace *feSpace);
+    void createLagrange(vector<const FiniteElemSpace*> &feSpaces);
 
     /// Creates a global index of the B variables.
     void createIndexB(const FiniteElemSpace *feSpace);

@@ -151,17 +151,18 @@ namespace AMDiS {
     /// Mapping from primal DOF indices to a global index of primals.
     FeSpaceData<GlobalDofMap> primalDofMap;
 
-    /// Mapping from dual DOF indices to a global index of duals.
+    /// Index for each non primal DOF to the global index of
+    /// B variables.
+    FeSpaceData<GlobalDofMap> localDofMap;
+
+    /// Mapping from dual DOF indices to a global index within the local (B)
+    /// variables.
     FeSpaceData<GlobalDofMap> dualDofMap;
 
     /// Stores to each dual DOF index the index of the first Lagrange
     /// constraint that is assigned to this DOF.
     FeSpaceData<GlobalDofMap> lagrangeMap;
 
-    /// Index for each non primal DOF to the global index of
-    /// B variables.
-    FeSpaceData<GlobalDofMap> localDofMap;
-
     /// Stores to each dual boundary DOF the set of ranks in which the DOF
     /// is contained in.
     DofIndexToPartitions boundaryDofRanks;
AMDiS/src/parallel/PetscSolverGlobalBlockMatrix.cc

@@ -95,9 +95,6 @@ namespace AMDiS {
     KSPSetOperators(solver, petscMatrix, petscMatrix, SAME_NONZERO_PATTERN);
     KSPSetFromOptions(solver);
 
-    KSPGetPC(solver, &pc);
-    setBlockPreconditioner(pc);
-
     MSG("Fill petsc matrix needed %.5f seconds\n", MPI::Wtime() - wtime);
   }

@@ -137,6 +134,9 @@ namespace AMDiS {
   {
     FUNCNAME("PetscSolverGlobalBlockMatrix::solvePetscMatrix()");
 
+    KSPGetPC(solver, &pc);
+    setBlockPreconditioner(pc);
+
     const FiniteElemSpace *feSpace = meshDistributor->getFeSpace(0);
     VecDuplicate(petscRhsVec, &petscSolVec);