// -*- tab-width: 4; indent-tabs-mode: nil; c-basic-offset: 2 -*-
// vi: set et ts=4 sw=2 sts=2:

#ifdef HAVE_CONFIG_H
# include "config.h"
#endif
#include <iostream>
#include <dune/common/parallel/mpihelper.hh> // An initializer of MPI
#include <dune/common/exceptions.hh> // We use exceptions
#include <dune/common/timer.hh>

#include <dune/grid/yaspgrid.hh>
#if GRID == 2
#include <dune/grid/uggrid.hh>
#include <dune/grid/utility/structuredgridfactory.hh>
#endif

#include <dune/multimesh/multimesh.hh>

using namespace Dune;

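// Traverse the leaf elements of a MultiMesh and print the largest leaf index
// seen in sub-grid 0, plus the traversal time. Each iterate of the MultiMesh
// element range is assumed to be a stack of entities, one per sub-grid, so
// entities[0] is the element of sub-grid 0.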
template <class Grid>
void printGrid(Grid const& grid)
{
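  // volatile keeps the compiler from optimizing the benchmark loop away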
  volatile std::size_t n = 0;
  Dune::Timer t;
  for (auto const& entities : elements(grid.leafGridView()))
    n = std::max(std::size_t(n), std::size_t(grid.grid(0).leafIndexSet().index(entities[0])));
  std::cout << n << "\n";
  std::cout << "time: " << t.elapsed() << "\n";
}

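// Same measurement over a single (host) grid: traverse its leaf elements
// directly and print the largest leaf index and the traversal time.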
template <class Grid>
void printGrid2(Grid const& grid)
{
  volatile std::size_t n = 0;
  Dune::Timer t;
  for (auto const& entity : elements(grid.leafGridView()))
    n = std::max(std::size_t(n), std::size_t(grid.leafIndexSet().index(entity)));
  std::cout << n << "\n";
  std::cout << "time: " << t.elapsed() << "\n";
}


int main(int argc, char** argv)
{
  MPIHelper::instance(argc, argv);

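  // Cube domain [-1.5, 1.5]^3 with 16 elements per coordinate direction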
  FieldVector<double,3> lower_left = {-1.5, -1.5, -1.5};
  FieldVector<double,3> bbox = {1.5, 1.5, 1.5};
  std::array<int,3> num_elements = {16, 16, 16};

#if GRID == 1
  using HostGrid = YaspGrid<3, EquidistantOffsetCoordinates<double,3>>;
  // First constructor argument: number of sub-grids; the remaining arguments
  // are passed on to the YaspGrid host grids.
  MultiMesh<HostGrid> grid(2, lower_left, bbox, num_elements);
#elif GRID == 2
  using HostGrid = UGGrid<3>;
  // Sketch under the assumption that dune-multimesh specializes
  // StructuredGridFactory for MultiMesh: the constructor takes the number of
  // sub-grids and createCubeGrid() fills each sub-grid with the same
  // structured cube mesh.
  StructuredGridFactory<MultiMesh<HostGrid>> gridFactory(2);
  std::array<unsigned int,3> num_elements_u{unsigned(num_elements[0]),
                                            unsigned(num_elements[1]),
                                            unsigned(num_elements[2])};
  auto gridPtr = gridFactory.createCubeGrid(lower_left, bbox, num_elements_u);
  MultiMesh<HostGrid>& grid = *gridPtr;
#endif

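  // Compare iteration cost: directly over host grid 0, then over the
  // MultiMesh; repeat after globally refining sub-grid 0 once.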
  printGrid2(grid.grid(0));
  printGrid(grid);
  grid.grid(0).globalRefine(1);
  printGrid2(grid.grid(0));
  printGrid(grid);
}