Commit 044d0db0 authored by Praetorius, Simon's avatar Praetorius, Simon

test for correct volumes added

parent 58fc030e
......@@ -25,6 +25,7 @@ dune_enable_all_packages()
add_subdirectory(src)
add_subdirectory(dune)
add_subdirectory(doc)
add_subdirectory(test)
add_subdirectory(cmake/modules)
# finalize the dune project, e.g. generating config.h etc.
......
......@@ -4,5 +4,8 @@ if (HAVE_ALBERTA)
add_dune_alberta_flags(GRIDDIM 3 WORLDDIM 3 "testiterator")
endif (HAVE_ALBERTA)
add_executable("localrefinement" localrefinement.cc)
target_link_dune_default_libraries("localrefinement")
add_executable("uggrid" uggrid.cc)
target_link_dune_default_libraries("uggrid")
// -*- tab-width: 4; indent-tabs-mode: nil; c-basic-offset: 2 -*-
// vi: set et ts=4 sw=2 sts=2:
#ifdef HAVE_CONFIG_H
# include "config.h"
#endif
#include <iostream>
#include <dune/common/parallel/mpihelper.hh> // An initializer of MPI
#include <dune/common/exceptions.hh> // We use exceptions
#include <dune/common/timer.hh>
#include <dune/grid/albertagrid.hh>
#include <dune/grid/yaspgrid.hh>
#include <dune/grid/uggrid.hh>
#include <dune/alugrid/grid.hh>
#include <dune/multimesh/multimesh.hh>
#include <dune/multimesh/mmgridfactory.hh>
#include <dune/multimesh/utility/structuredgridbuilder.hh>
#define GRID 2
using namespace Dune;
template <class Grid>
void printGrid(Grid const& grid)
{
volatile std::size_t n = 0;
Dune::Timer t;
for (auto const& entities : elements(grid.leafGridView())) {
n += grid[0].leafIndexSet().index(entities[0]);
std::cout << "indices = [";
for (std::size_t i = 0; i < entities.size(); ++i) {
std::cout << (i > 0 ? ", " : "") << grid[i].leafIndexSet().index(entities[i]);
}
std::cout << "]\n";
}
std::cout << "n = " << n << "\n";
std::cout << "time: " << t.elapsed() << "\n";
}
template <class Grid>
void printGrid2(Grid const& grid)
{
volatile std::size_t n = 0;
Dune::Timer t;
for (auto const& entity : elements(grid.leafGridView()))
n += grid.leafIndexSet().index(entity);
std::cout << n << "\n";
std::cout << "time: " << t.elapsed() << "\n";
}
int main(int argc, char** argv)
{
MPIHelper::instance(argc, argv);
FieldVector<double,2> lower_left = {-1.5, -1.5};
FieldVector<double,2> bbox = {1.5, 1.5};
std::array<unsigned int,2> num_elements = {2, 2};
using HostGrid = Dune::ALUGrid<2, 2, Dune::simplex, Dune::conforming>;
using Factory = StructuredGridBuilder<MultiMesh<HostGrid> >;
GridFactory<MultiMesh<HostGrid> > gridFactory(3);
auto gridPtr = Factory::createSimplexGrid(gridFactory, lower_left, bbox, num_elements);
auto& grid = *gridPtr;
std::cout << "number of grids = " << grid.size() << "\n";
printGrid2(grid[0]);
printGrid(grid);
grid[0].globalRefine(2);
grid[1].globalRefine(1);
// mark the first 5 elements for refinement
int num = 0;
for (auto const& entity : elements(grid[1].leafGridView())) {
if (num++ >= 5)
break;
grid[1].mark(1, entity);
}
grid[1].preAdapt();
grid[1].adapt();
grid[1].postAdapt();
printGrid2(grid[0]);
printGrid(grid);
}
dune_add_test(SOURCES testvolume.cc)
\ No newline at end of file
// -*- tab-width: 4; indent-tabs-mode: nil; c-basic-offset: 2 -*-
// vi: set et ts=4 sw=2 sts=2:
#ifdef HAVE_CONFIG_H
# include "config.h"
#endif
#include <algorithm>
#include <cmath>
#include <functional>
#include <iostream>
#include <numeric>
#include <dune/common/filledarray.hh>
#include <dune/common/parallel/mpihelper.hh>
#include <dune/grid/yaspgrid.hh>
#include <dune/multimesh/multimesh.hh>
using namespace Dune;
template <std::size_t dim>
bool test_dim()
{
  // Check that the volumes of the finest leaf entities of a refined
  // MultiMesh sum up to the volume of the domain [-1.5, 1.5]^dim.
  // \return true iff the summed entity volume matches the domain volume.
  FieldVector<double,dim> lower; lower = -1.5;
  FieldVector<double,dim> upper; upper = 1.5;
  auto num_elements = filledArray<dim>(2);

  using HostGrid = YaspGrid<dim, EquidistantOffsetCoordinates<double,dim>>;
  MultiMesh<HostGrid> grid(3, lower, upper, num_elements);

  // volume of the domain: product of the edge lengths |u_i - l_i|
  double domain = std::inner_product(lower.begin(), lower.end(), upper.begin(), 1.0,
    std::multiplies<>{},
    [](double l, double u) { return std::abs(u - l); });
  std::cout << "volume(domain<" << dim << ">) = " << domain << "\n";

  // refine the sub-grids to different depths so the leaf levels disagree
  grid[0].globalRefine(2);
  grid[1].globalRefine(1);
  grid[2].globalRefine(3);

  // calculate volume by summing up the entity volumes of the smallest leaf entities;
  // within each tuple the entity with the highest level is the smallest one
  double volume = 0.0;
  for (auto const& entities : elements(grid.leafGridView())) {
    auto it_small = std::max_element(entities.begin(), entities.end(),
      [](auto const& e1, auto const& e2) { return e1.level() < e2.level(); });
    volume += it_small->geometry().volume();
  }
  std::cout << "volume(elements<" << dim << ">) = " << volume << "\n";

  // absolute tolerance is adequate: the volumes involved are O(1)..O(30)
  return std::abs(volume - domain) <= 1.e-10;
}
int main(int argc, char** argv)
{
  MPIHelper::instance(argc, argv);

  // Run the volume test in 1d, 2d and 3d; exit nonzero if any dimension fails.
  bool const ok1 = test_dim<1>();
  bool const ok2 = test_dim<2>();
  bool const ok3 = test_dim<3>();
  return (ok1 && ok2 && ok3) ? 0 : 1;
}
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment