Commit 0645a6bf authored by Thomas Witkowski's avatar Thomas Witkowski
Browse files

Added more support for parallel tests, including one parallel test for FETI-DP.

parent c6555c8c
......@@ -800,28 +800,28 @@ namespace AMDiS {
{
std::set<int> m;
m.insert(0); m.insert(1); m.insert(4); m.insert(5);
levelData.addLevel(m);
levelData.addLevel(m, 0);
}
break;
case 2: case 3: case 6: case 7:
{
std::set<int> m;
m.insert(2); m.insert(3); m.insert(6); m.insert(7);
levelData.addLevel(m);
levelData.addLevel(m, 1);
}
break;
case 8: case 9: case 12: case 13:
{
std::set<int> m;
m.insert(8); m.insert(9); m.insert(12); m.insert(13);
levelData.addLevel(m);
levelData.addLevel(m, 2);
}
break;
case 10: case 11: case 14: case 15:
{
std::set<int> m;
m.insert(10); m.insert(11); m.insert(14); m.insert(15);
levelData.addLevel(m);
levelData.addLevel(m, 3);
}
break;
}
......
......@@ -27,26 +27,31 @@ namespace AMDiS {
levelNeighbours.resize(1);
levelNeighbours[0] = neighbourRanks;
mpiComms.resize(1);
mpiComms[0] = MPI::COMM_WORLD;
}
void MeshLevelData::addLevel(std::set<int> &ranksInDomain)
void MeshLevelData::addLevel(std::set<int> &ranksInDomain, int domainId)
{
FUNCNAME("MeshLevelData()::addLevel()");
TEST_EXIT(nLevel >= 1)("Mesh level structure is not initialized()");
TEST_EXIT(nLevel == 1)("Only 2 level are supported yet!\n");
levelRanks.insert(levelRanks.begin(), ranksInDomain);
levelRanks.push_back(ranksInDomain);
nLevel++;
levelNeighbours.resize(2);
levelNeighbours[1] = levelNeighbours[0];
levelNeighbours[0].clear();
for (std::set<int>::iterator it = levelNeighbours[1].begin();
it != levelNeighbours[1].end(); ++it)
if (levelRanks[0].count(*it) == 0)
levelNeighbours[0].insert(*it);
levelNeighbours[1].clear();
for (std::set<int>::iterator it = levelNeighbours[0].begin();
it != levelNeighbours[0].end(); ++it)
if (levelRanks[1].count(*it) == 0)
levelNeighbours[1].insert(*it);
mpiComms.resize(2);
mpiComms[1] = mpiComms[0].Split(domainId, mpiComms[0].Get_rank());
}
......
......@@ -27,6 +27,7 @@
#include <iostream>
#include <set>
#include <vector>
#include <mpi.h>
#include "Global.h"
namespace AMDiS {
......@@ -42,7 +43,7 @@ namespace AMDiS {
void init(std::set<int> &neighbourRanks);
void addLevel(std::set<int> &ranksInDomain);
void addLevel(std::set<int> &ranksInDomain, int domainId);
// Writes all data of this object to an output stream.
void serialize(ostream &out);
......@@ -66,12 +67,19 @@ namespace AMDiS {
return levelNeighbours[level];
}
/// Returns the number of mesh levels currently stored in this object
/// (incremented by addLevel(), starts at 1 after init()).
int getLevelNumber()
{
return nLevel;
}
protected:
vector<std::set<int> > levelRanks;
int nLevel;
vector<std::set<int> > levelRanks;
vector<std::set<int> > levelNeighbours;
vector<MPI::Intracomm> mpiComms;
};
}
......
......@@ -305,7 +305,8 @@ namespace AMDiS {
{
FUNCNAME("ParallelDofMapping::operator[]()");
TEST_EXIT_DBG(data.count(feSpace))("Should not happen!\n");
TEST_EXIT_DBG(data.count(feSpace))
("No data for FE space at address %p!\n", feSpace);
return data.find(feSpace)->second;
}
......
......@@ -217,9 +217,34 @@ namespace AMDiS {
}
void PetscSolverFeti::updateDofData()
void PetscSolverFeti::initialize(vector<const FiniteElemSpace*> feSpaces)
{
FUNCNAME("PetscSolverFeti::updateDofData()");
FUNCNAME("PetscSolverFeti::initialize()");
if (subDomainSolver == NULL)
subDomainSolver = new SubDomainSolver(meshDistributor, mpiComm, mpiSelfComm);
primalDofMap.init(mpiComm, feSpaces, meshDistributor->getFeSpaces(),
true, true);
dualDofMap.init(mpiComm, feSpaces, meshDistributor->getFeSpaces(),
false, false);
lagrangeMap.init(mpiComm, feSpaces, meshDistributor->getFeSpaces(),
true, true);
localDofMap.init(mpiComm, feSpaces, meshDistributor->getFeSpaces(),
false, false);
if (fetiPreconditioner == FETI_DIRICHLET)
interiorDofMap.init(mpiComm, feSpaces, meshDistributor->getFeSpaces(),
false, false);
}
void PetscSolverFeti::createFetiData()
{
FUNCNAME("PetscSolverFeti::createFetiData()");
TEST_EXIT(meshDistributor)("No mesh distributor object defined!\n");
TEST_EXIT(meshDistributor->getFeSpaces().size() > 0)
("No FE space defined in mesh distributor!\n");
primalDofMap.clear();
dualDofMap.clear();
......@@ -893,26 +918,13 @@ namespace AMDiS {
{
FUNCNAME("PetscSolverFeti::fillPetscMatrix()");
if (subDomainSolver == NULL)
subDomainSolver = new SubDomainSolver(meshDistributor, mpiComm, mpiSelfComm);
// === Create all sets and indices. ===
vector<const FiniteElemSpace*> feSpaces = getFeSpaces(mat);
primalDofMap.init(mpiComm, feSpaces, meshDistributor->getFeSpaces(),
true, true);
dualDofMap.init(mpiComm, feSpaces, meshDistributor->getFeSpaces(),
false, false);
lagrangeMap.init(mpiComm, feSpaces, meshDistributor->getFeSpaces(),
true, true);
localDofMap.init(mpiComm, feSpaces, meshDistributor->getFeSpaces(),
false, false);
if (fetiPreconditioner == FETI_DIRICHLET)
interiorDofMap.init(mpiComm, feSpaces, meshDistributor->getFeSpaces(),
false, false);
initialize(feSpaces);
updateDofData();
createFetiData();
// === Create matrices for the FETI-DP method. ===
......
......@@ -69,12 +69,26 @@ namespace AMDiS {
MeshDistributor::BOUNDARY_FILL_INFO_RECV_DOFS;
}
protected:
/// Initialization of the data structures with a given list of the FE
/// spaces of all components.
void initialize(vector<const FiniteElemSpace*> feSpaces);
/// After mesh changes, or if the solver is called the first time, this
/// function creates all matrix and vector objects with the approriated
/// sizes.
void updateDofData();
/// function creates all information about primal nodes, dual nodes and
/// lagrange constraints.
void createFetiData();
/// Returns the number of primal variables of the FETI-DP system;
/// presumably the overall count over all ranks (getOverallDofs()) — TODO confirm.
int getNumberOfPrimals()
{
return primalDofMap.getOverallDofs();
}
/// Returns the number of dual variables of the FETI-DP system;
/// presumably the overall count over all ranks (getOverallDofs()) — TODO confirm.
int getNumberOfDuals()
{
return dualDofMap.getOverallDofs();
}
protected:
/// Defines which boundary nodes are primal. Creates global index of
/// the primal variables.
void createPrimals(const FiniteElemSpace *feSpace);
......
......@@ -25,6 +25,7 @@ ellipt->marker[0]->strategy: 0 % 0: no adaption 1: GR 2: MS 3: ES 4:GERS
ellipt->output->filename: output/ellipt.2d
ellipt->output->ParaView format: 1
parallel->log main rank: 0
parallel->pre refine: 0
parallel->partitioner: checker
parallel->log main rank: 0
parallel->pre refine: 0
parallel->partitioner: checker
parallel->multi level test: 1
dimension of world: 2
elliptMesh->macro file name: ./macro/macro.stand.p16.2d
elliptMesh->global refinements: 2
ellipt->mesh: elliptMesh
ellipt->dim: 2
ellipt->components: 1
ellipt->polynomial degree[0]: 1
ellipt->solver: cg
ellipt->solver->max iteration: 10
ellipt->solver->tolerance: 1.e-8
ellipt->solver->info: 10
ellipt->solver->left precon: diag
ellipt->solver->right precon: no
ellipt->estimator[0]: 0
ellipt->estimator[0]->error norm: 1 % 1: H1_NORM, 2: L2_NORM
ellipt->estimator[0]->C0: 0.1 % constant of element residual
ellipt->estimator[0]->C1: 0.1 % constant of jump residual
ellipt->marker[0]->strategy: 0 % 0: no adaption 1: GR 2: MS 3: ES 4:GERS
ellipt->output->filename: output/ellipt.2d
ellipt->output->ParaView format: 1
parallel->log main rank: 0
parallel->pre refine: 0
parallel->partitioner: checker
parallel->multi level test: 1
parallel->solver: petsc-feti
......@@ -8,7 +8,15 @@
using namespace AMDiS;
using namespace std;
BOOST_AUTO_TEST_CASE(blub)
/*
- Test for initial repartitioning using the checker partitioner on a standard
macro for 16 threads.
- Test for 2 level hierarchical decomposition of the mesh into 4 2x2
subdomains.
*/
BOOST_AUTO_TEST_CASE(amdis_mpi_simple_partitioning)
{
BOOST_REQUIRE(MPI::COMM_WORLD.Get_size() == 16);
......@@ -20,15 +28,17 @@ BOOST_AUTO_TEST_CASE(blub)
BOOST_REQUIRE(mesh->getNumberOfElements() == 2);
BOOST_REQUIRE(mesh->getNumberOfLeaves() == 2);
std::set<int> levelRanks =
MeshDistributor::globalMeshDistributor->getMeshLevelData().getLevelRanks(0);
MeshLevelData& md = MeshDistributor::globalMeshDistributor->getMeshLevelData();
BOOST_REQUIRE(md.getLevelNumber() == 2);
std::set<int> levelRanks = md.getLevelRanks(0);
std::set<int> rankTest;
rankTest.insert(-1);
BOOST_REQUIRE(levelRanks == rankTest);
std::set<int> levelNeighbours =
MeshDistributor::globalMeshDistributor->getMeshLevelData().getLevelNeighbours(0);
std::set<int> levelNeighbours = md.getLevelNeighbours(0);
std::set<int> neighTest;
switch (MPI::COMM_WORLD.Get_rank()) {
case 0:
......@@ -86,6 +96,71 @@ BOOST_AUTO_TEST_CASE(blub)
}
BOOST_REQUIRE(levelNeighbours == neighTest);
levelRanks = md.getLevelRanks(1);
rankTest.clear();
switch (MPI::COMM_WORLD.Get_rank()) {
case 0: case 1: case 4: case 5:
rankTest.insert(0); rankTest.insert(1); rankTest.insert(4); rankTest.insert(5);
break;
case 2: case 3: case 6: case 7:
rankTest.insert(2); rankTest.insert(3); rankTest.insert(6); rankTest.insert(7);
break;
case 8: case 9: case 12: case 13:
rankTest.insert(8); rankTest.insert(9); rankTest.insert(12); rankTest.insert(13);
break;
case 10: case 11: case 14: case 15:
rankTest.insert(10); rankTest.insert(11); rankTest.insert(14); rankTest.insert(15);
break;
}
BOOST_REQUIRE(levelRanks == rankTest);
levelNeighbours = md.getLevelNeighbours(1);
neighTest.clear();
switch (MPI::COMM_WORLD.Get_rank()) {
case 1:
neighTest.insert(2); neighTest.insert(6);
break;
case 2:
neighTest.insert(1);
break;
case 4:
neighTest.insert(8); neighTest.insert(9);
break;
case 5:
neighTest.insert(6); neighTest.insert(9); neighTest.insert(10);
break;
case 6:
neighTest.insert(1); neighTest.insert(5); neighTest.insert(10); neighTest.insert(11);
break;
case 7:
neighTest.insert(11);
break;
case 8:
neighTest.insert(4);
break;
case 9:
neighTest.insert(4); neighTest.insert(5); neighTest.insert(10); neighTest.insert(14);
break;
case 10:
neighTest.insert(5); neighTest.insert(6); neighTest.insert(9);
break;
case 11:
neighTest.insert(6); neighTest.insert(7);
break;
case 13:
neighTest.insert(14);
break;
case 14:
neighTest.insert(9); neighTest.insert(13);
break;
}
BOOST_REQUIRE(levelNeighbours == neighTest);
}
......
#define BOOST_TEST_DYN_LINK
#define BOOST_TEST_MODULE 0002
#define BOOST_TEST_NO_MAIN
#include <boost/test/unit_test.hpp>
#include <AMDiS.h>
using namespace AMDiS;
using namespace std;
/*
  - Runs on 16 MPI ranks with a 2-level hierarchical decomposition.
  - Initializes the FETI-DP solver data structures and checks the
    resulting numbers of primal and dual nodes.
*/
BOOST_AUTO_TEST_CASE(amdis_mpi_simple_partitioning)
{
  BOOST_REQUIRE(MPI::COMM_WORLD.Get_size() == 16);

  ProblemStat ellipt("ellipt");
  ellipt.initialize(INIT_ALL);

  MeshDistributor *meshDist = MeshDistributor::globalMeshDistributor;
  meshDist->initParallelization();

  Mesh* mesh = ellipt.getMesh();
  BOOST_REQUIRE(mesh->getNumberOfLeaves() == 8);

  // Two mesh levels: COMM_WORLD plus the 2x2 subdomain level.
  BOOST_REQUIRE(meshDist->getMeshLevelData().getLevelNumber() == 2);

  vector<const FiniteElemSpace*> feSpaces;
  feSpaces.push_back(ellipt.getFeSpace(0));

  // Guard the downcast: dereferencing a failed dynamic_cast is undefined
  // behaviour, so require that the configured solver really is FETI-DP.
  PetscSolverFeti *feti = dynamic_cast<PetscSolverFeti*>(ellipt.getPetscSolver());
  BOOST_REQUIRE(feti != NULL);

  feti->setMeshDistributor(meshDist);
  feti->initialize(feSpaces);
  feti->createFetiData();

  BOOST_REQUIRE(feti->getNumberOfPrimals() == 21);
  BOOST_REQUIRE(feti->getNumberOfDuals() == 48);
}
// Entry point: initializes AMDiS/MPI, runs the Boost.Test suite and
// propagates its result as the process exit code.
int main(int argc, char **argv)
{
  AMDiS::init(argc, argv, "./init/test0002.dat.2d");

  // unit_test_main returns a non-zero code on test failure; discarding it
  // (as before) made the process exit 0 even when assertions failed.
  int result = boost::unit_test::unit_test_main(&init_unit_test, argc, argv);

  AMDiS::finalize();

  return result;
}
Markdown is supported
0% or .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment