Commit e4673262 authored by Thomas Witkowski

Add timing output for the initialization phases, retry a failed adaptive mesh repartitioning in refine-only mode, and clean up FETI-DP debug code.

parent ff250c55
......@@ -29,6 +29,7 @@
#include "parallel/CheckerPartitioner.h"
#include "parallel/MpiHelper.h"
#include "parallel/DofComm.h"
#include "parallel/ParallelProblemStatBase.h"
#include "io/ElementFileWriter.h"
#include "io/MacroInfo.h"
#include "io/MacroWriter.h"
......@@ -145,6 +146,10 @@ namespace AMDiS {
if (initialized)
return;
double first = MPI::Wtime();
MSG("Initialization phase 1 needed %.5f seconds\n",
first - ParallelProblemStatBase::initTimeStamp);
TEST_EXIT(mpiSize > 1)
("Parallelization does not work with only one process!\n");
TEST_EXIT(feSpaces.size() > 0)
......@@ -348,6 +353,7 @@ namespace AMDiS {
elObjDb.clear();
initialized = true;
MSG("Init parallelization needed %.5f seconds\n", MPI::Wtime() - first);
}
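Taken together, the hunks above implement a simple phase-timing pattern: record one shared start timestamp as early as possible, take its minimum across all ranks, and log elapsed wall-clock time at each phase boundary. A minimal free-standing sketch of that pattern, using the MPI C API instead of the C++ bindings; the phase contents are placeholders:

```cpp
#include <mpi.h>
#include <cstdio>

// Shared start-of-initialization timestamp, analogous to
// ParallelProblemStatBase::initTimeStamp in the patch.
static double initTimeStamp = 0.0;

int main(int argc, char** argv) {
  MPI_Init(&argc, &argv);

  // Record the start time and take the minimum over all ranks so every
  // process measures from the same origin (cf. mpi::globalMin in the patch).
  double local = MPI_Wtime();
  MPI_Allreduce(&local, &initTimeStamp, 1, MPI_DOUBLE, MPI_MIN, MPI_COMM_WORLD);

  // ... phase 0: sequential problem setup would run here ...
  std::printf("Initialization phase 0 needed %.5f seconds\n",
              MPI_Wtime() - initTimeStamp);

  double first = MPI_Wtime();
  // ... phase 1: mesh distribution would run here ...
  std::printf("Init parallelization needed %.5f seconds\n",
              MPI_Wtime() - first);

  MPI_Finalize();
  return 0;
}
```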
......@@ -1325,8 +1331,10 @@ namespace AMDiS {
bool partitioningSucceed =
partitioner->partition(elemWeights, ADAPTIVE_REPART);
if (!partitioningSucceed) {
MPI::COMM_WORLD.Barrier();
repartitioningFailed = 20;
MSG("Mesh partitioner created empty partition!\n");
MSG("Mesh repartitioning needed %.5f seconds\n", MPI::Wtime() - timePoint);
return;
}
......@@ -1334,9 +1342,23 @@ namespace AMDiS {
// In case the partitioner does not create a new mesh partition, return
// without any changes.
if (!partitioner->meshChanged()) {
repartitioningFailed = 20;
MSG("Mesh partition does not create a new partition!\n");
return;
MSG("Try to refine partitioning!\n");
partitioningSucceed = partitioner->partition(elemWeights, REFINE_PART);
if (partitioningSucceed) {
MSG("OKAY, ERST MAL GUT!\n");
if (partitioner->meshChanged())
MSG("UND JA, DAS WARS!\n");
else
MSG("NE, LEIDER NICHT!\n");
}
if (!partitioningSucceed || !partitioner->meshChanged()) {
MPI::COMM_WORLD.Barrier();
repartitioningFailed = 20;
MSG("Mesh repartitioning needed %.5f seconds\n", MPI::Wtime() - timePoint);
return;
}
}
......
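The fallback added above can be read as: if the adaptive repartitioning yields no new partition, retry once in refinement-only mode; if that also fails, set the counter (presumably a cooldown, so repartitioning is skipped for the next 20 timesteps) and bail out. A condensed sketch of that control flow; the `Partitioner` stand-in and its stubbed behavior are illustrative, not AMDiS's real interface:

```cpp
#include <map>

enum PartitionMode { ADAPTIVE_REPART, REFINE_PART };

// Stand-in for the mesh partitioner; stubbed so the sketch is self-contained.
struct Partitioner {
  bool changed = false;
  bool partition(const std::map<int, double>&, PartitionMode mode) {
    changed = (mode == REFINE_PART);  // stub: only the refine pass "changes" the mesh
    return true;
  }
  bool meshChanged() const { return changed; }
};

// Returns true if a usable new partition exists; otherwise sets the
// cooldown counter, mirroring the repartitioningFailed = 20 in the patch.
bool tryRepartition(Partitioner& p, const std::map<int, double>& weights,
                    int& repartitioningFailed) {
  bool ok = p.partition(weights, ADAPTIVE_REPART);
  if (ok && !p.meshChanged())
    ok = p.partition(weights, REFINE_PART);  // second attempt: refine-only
  if (!ok || !p.meshChanged()) {
    repartitioningFailed = 20;
    return false;
  }
  return true;
}
```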
......@@ -299,9 +299,9 @@ namespace AMDiS {
tpwgts[i] = 1.0 / static_cast<double>(nparts);
float scale = 10000.0 / maxWgt;
for (int i = 0; i < nElements; i++)
wgts[i] = floatWgts[i];
// wgts[i] = static_cast<int>(floatWgts[i] * scale);
// === Start ParMETIS. ===
......
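Note the semantics of the changed line: ParMETIS takes integer element weights, so assigning `floatWgts[i]` directly (as the patch now does) truncates toward zero, while the commented-out variant rescales the weights so the maximum maps to 10000 before converting, preserving relative resolution. A free-standing sketch of that rescaling, with assumed vector types:

```cpp
#include <cmath>
#include <cstddef>
#include <vector>

// Rescale float element weights to integers for ParMETIS so that the
// largest weight becomes 10000; assumes maxWgt > 0.
std::vector<int> scaleWeights(const std::vector<float>& floatWgts, float maxWgt) {
  std::vector<int> wgts(floatWgts.size());
  float scale = 10000.0f / maxWgt;
  for (std::size_t i = 0; i < floatWgts.size(); ++i)
    wgts[i] = static_cast<int>(std::lround(floatWgts[i] * scale));
  return wgts;
}
```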
......@@ -17,6 +17,19 @@
namespace AMDiS {
double ParallelProblemStatBase::initTimeStamp = 0.0;
ParallelProblemStatBase::ParallelProblemStatBase(std::string nameStr,
ProblemIterationInterface *problemIteration)
: ProblemStatSeq(nameStr, problemIteration),
meshDistributor(NULL)
{
initTimeStamp = MPI::Wtime();
mpi::globalMin(initTimeStamp);
}
void ParallelProblemStatBase::buildAfterCoarsen(AdaptInfo *adaptInfo, Flag flag,
bool assembleMatrix,
bool assembleVector)
......@@ -37,6 +50,9 @@ namespace AMDiS {
ProblemStatSeq *adoptProblem,
Flag adoptFlag)
{
MSG("Initialization phase 0 needed %.5f seconds\n",
MPI::Wtime() - initTimeStamp);
ProblemStatSeq::initialize(initFlag, adoptProblem, adoptFlag);
MeshDistributor::addProblemStatGlobal(this);
......
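`mpi::globalMin` is used in the constructor above to synchronize the timestamp across ranks; presumably it reduces a value to its global minimum in place. A minimal stand-in using the MPI C API:

```cpp
#include <mpi.h>

namespace mpi {
  // Replace value with its minimum over all ranks of MPI_COMM_WORLD.
  inline void globalMin(double& value) {
    double local = value;
    MPI_Allreduce(&local, &value, 1, MPI_DOUBLE, MPI_MIN, MPI_COMM_WORLD);
  }
}
```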
......@@ -32,10 +32,7 @@ namespace AMDiS {
{
public:
ParallelProblemStatBase(std::string nameStr,
ProblemIterationInterface *problemIteration = NULL)
: ProblemStatSeq(nameStr, problemIteration),
meshDistributor(NULL)
{}
ProblemIterationInterface *problemIteration = NULL);
virtual ~ParallelProblemStatBase() {}
......@@ -51,6 +48,9 @@ namespace AMDiS {
protected:
MeshDistributor *meshDistributor;
public:
static double initTimeStamp;
};
}
......
......@@ -1006,9 +1006,9 @@ namespace AMDiS {
// matTmp = inv(A_BB) trans(J) trans(Q)
Mat qT, jTqT;
MatTranspose(mat_augmented_lagrange, MAT_INITIAL_MATRIX, &qT);
Mat jT;
// Mat jT;
MSG("START COMPUTING MAT TRANS\n");
MatTranspose(mat_lagrange, MAT_INITIAL_MATRIX, &jT);
// MatTranspose(mat_lagrange, MAT_INITIAL_MATRIX, &jT);
MSG("DONE\n");
MatTransposeMatMult(mat_lagrange, qT, MAT_INITIAL_MATRIX, PETSC_DEFAULT,
&jTqT);
......@@ -2118,35 +2118,6 @@ namespace AMDiS {
interfaceDofMap.createVec(vecRhsInterface);
interfaceDofMap.createVec(vecSolInterface);
{
// PetscViewer petscView;
// PetscViewerBinaryOpen(PETSC_COMM_WORLD, "sol0.vec",
// FILE_MODE_READ, &petscView);
// VecLoad(vecSolInterface, petscView);
// PetscViewerDestroy(&petscView);
}
{
// PetscViewer petscView;
// PetscViewerBinaryOpen(PETSC_COMM_WORLD, "sol1.vec",
// FILE_MODE_READ, &petscView);
// VecLoad(vecSolLagrange, petscView);
// PetscViewerDestroy(&petscView);
}
{
int n;
VecGetSize(vecSolInterface, &n);
double sum;
VecSum(vecSolInterface, &sum);
sum = -sum / static_cast<double>(n);
MSG("AVRG = %e\n", sum);
}
Vec vecRhsArray[2] = {vecRhsInterface, vecRhsLagrange};
VecCreateNest(mpiCommGlobal, 2, PETSC_NULL, vecRhsArray, &vecRhs);
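The deleted debug block above only reports the negated mean of the interface (pressure) solution. If the intent behind it was the usual fix of the constant pressure null space, the natural follow-up would be to shift the vector by that mean; a speculative sketch of that step:

```cpp
#include <petscvec.h>

// Shift v so its entries have zero mean, e.g. to pin down the constant
// null space of a pressure solution.
PetscErrorCode zeroMean(Vec v) {
  PetscInt n;
  PetscScalar sum;
  PetscErrorCode ierr;
  ierr = VecGetSize(v, &n); CHKERRQ(ierr);
  ierr = VecSum(v, &sum); CHKERRQ(ierr);
  ierr = VecShift(v, -sum / n); CHKERRQ(ierr);
  return 0;
}
```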
......@@ -2262,38 +2233,8 @@ namespace AMDiS {
PetscSolverFetiDebug::debugFeti(*this, vecRhs);
// === Solve with FETI-DP operator. ===
KSPSetInitialGuessNonzero(ksp_feti, PETSC_TRUE);
KSPSolve(ksp_feti, vecRhs, vecSol);
{
int n;
VecGetSize(vecSolInterface, &n);
double sum;
VecSum(vecSolInterface, &sum);
sum = -sum / static_cast<double>(n);
MSG("SOL PRESSURE AVRG = %e\n", sum);
}
{
PetscViewer petscView;
PetscViewerBinaryOpen(PETSC_COMM_WORLD, "sol0.vec",
FILE_MODE_WRITE, &petscView);
VecView(vecSolInterface, petscView);
PetscViewerDestroy(&petscView);
}
{
PetscViewer petscView;
PetscViewerBinaryOpen(PETSC_COMM_WORLD, "sol1.vec",
FILE_MODE_WRITE, &petscView);
VecView(vecSolLagrange, petscView);
PetscViewerDestroy(&petscView);
}
if (printTimings) {
MPI::COMM_WORLD.Barrier();
MSG("FETI-DP timing 10: %.5f seconds (application of FETI-DP operator)\n",
......
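The deleted blocks wrote `vecSolInterface` and `vecSolLagrange` to binary files that the (also removed) commented-out code could reload as an initial guess. For reference, a minimal save/load round-trip with PETSc's binary viewer; the file name is taken from the removed code:

```cpp
#include <petscvec.h>
#include <petscviewer.h>

// Write sol to "sol0.vec", then read it back into reloaded.
PetscErrorCode saveAndReload(Vec sol, Vec reloaded) {
  PetscViewer viewer;
  PetscErrorCode ierr;
  ierr = PetscViewerBinaryOpen(PETSC_COMM_WORLD, "sol0.vec",
                               FILE_MODE_WRITE, &viewer); CHKERRQ(ierr);
  ierr = VecView(sol, viewer); CHKERRQ(ierr);
  ierr = PetscViewerDestroy(&viewer); CHKERRQ(ierr);

  ierr = PetscViewerBinaryOpen(PETSC_COMM_WORLD, "sol0.vec",
                               FILE_MODE_READ, &viewer); CHKERRQ(ierr);
  ierr = VecLoad(reloaded, viewer); CHKERRQ(ierr);
  ierr = PetscViewerDestroy(&viewer); CHKERRQ(ierr);
  return 0;
}
```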