diff --git a/CMakeLists.txt b/CMakeLists.txt
index 15f55e2e08e1534d4983eb25edd44f992fd2cd2c..371b6b4335e9579649be6e060912aa118ff14e8f 100644
--- a/CMakeLists.txt
+++ b/CMakeLists.txt
@@ -25,6 +25,7 @@ dune_enable_all_packages()
 add_subdirectory(src)
 add_subdirectory(dune)
 add_subdirectory(doc)
+add_subdirectory(test)
 add_subdirectory(cmake/modules)
 
 # finalize the dune project, e.g. generating config.h etc.
diff --git a/src/CMakeLists.txt b/src/CMakeLists.txt
index 26bc7fbed9166cf12d0b15bd97a0c229ac99c4a0..fc625bc6d979741093c2c8598b75828ec4673f45 100644
--- a/src/CMakeLists.txt
+++ b/src/CMakeLists.txt
@@ -4,5 +4,8 @@ if (HAVE_ALBERTA)
   add_dune_alberta_flags(GRIDDIM 3 WORLDDIM 3 "testiterator")
 endif (HAVE_ALBERTA)
 
+add_executable("localrefinement" localrefinement.cc)
+target_link_dune_default_libraries("localrefinement")
+
 add_executable("uggrid" uggrid.cc)
 target_link_dune_default_libraries("uggrid")
diff --git a/src/localrefinement.cc b/src/localrefinement.cc
new file mode 100644
index 0000000000000000000000000000000000000000..6d92b19c0cdb97c93b7ef5c5dd6e2d0df2a371d3
--- /dev/null
+++ b/src/localrefinement.cc
@@ -0,0 +1,95 @@
+// -*- tab-width: 4; indent-tabs-mode: nil; c-basic-offset: 2 -*-
+// vi: set et ts=4 sw=2 sts=2:
+
+#ifdef HAVE_CONFIG_H
+# include "config.h"
+#endif
+#include <iostream>
+#include <dune/common/parallel/mpihelper.hh> // An initializer of MPI
+#include <dune/common/exceptions.hh> // We use exceptions
+#include <dune/common/timer.hh>
+
+#include <dune/alugrid/grid.hh>
+
+#include <dune/multimesh/multimesh.hh>
+#include <dune/multimesh/mmgridfactory.hh>
+#include <dune/multimesh/utility/structuredgridbuilder.hh>
+
+using namespace Dune;
+
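+// Traverse the leaf view of the MultiMesh: each 'entities' is a container
+// holding the corresponding element of every sub-grid, and entities[i] is
+// indexed with the leaf index set of grid[i].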
+template <class Grid>
+void printGrid(Grid const& grid)
+{
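+  // volatile keeps the compiler from optimizing the accumulation away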
+  volatile std::size_t n = 0;
+  Dune::Timer t;
+  for (auto const& entities : elements(grid.leafGridView())) {
+    n += grid[0].leafIndexSet().index(entities[0]);
+    std::cout << "indices = [";
+    for (std::size_t i = 0; i < entities.size(); ++i) {
+      std::cout << (i > 0 ? ", " : "") << grid[i].leafIndexSet().index(entities[i]);
+    }
+    std::cout << "]\n";
+  }
+  std::cout << "n = " << n << "\n";
+  std::cout << "time: " << t.elapsed() << "\n";
+}
+
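+// Traverse the leaf view of a single sub-grid and accumulate its leaf
+// indices; serves as a timing baseline for the MultiMesh traversal above.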
+template <class Grid>
+void printGrid2(Grid const& grid)
+{
+  volatile std::size_t n = 0;
+  Dune::Timer t;
+  for (auto const& entity : elements(grid.leafGridView()))
+    n += grid.leafIndexSet().index(entity);
+  std::cout << n << "\n";
+  std::cout << "time: " << t.elapsed() << "\n";
+}
+
+
+int main(int argc, char** argv)
+{
+  MPIHelper::instance(argc, argv);
+
+  FieldVector<double,2> lower_left = {-1.5, -1.5};
+  FieldVector<double,2> bbox = {1.5, 1.5};
+  std::array<unsigned int,2> num_elements = {2, 2};
+  using HostGrid = Dune::ALUGrid<2, 2, Dune::simplex, Dune::conforming>;
+
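+  // build a MultiMesh that manages 3 copies of the host grid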
+  using Factory = StructuredGridBuilder<MultiMesh<HostGrid>>;
+  GridFactory<MultiMesh<HostGrid>> gridFactory(3);
+
+  auto gridPtr = Factory::createSimplexGrid(gridFactory, lower_left, bbox, num_elements);
+  auto& grid = *gridPtr;
+
+  std::cout << "number of grids = " << grid.size() << "\n";
+
+  printGrid2(grid[0]);
+  printGrid(grid);
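+  // refine the sub-grids to different levels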
+  grid[0].globalRefine(2);
+  grid[1].globalRefine(1);
+
+  // mark the first 5 elements for refinement
+  int num = 0;
+  for (auto const& entity : elements(grid[1].leafGridView())) {
+    if (num++ >= 5)
+      break;
+
+    grid[1].mark(1, entity);
+  }
+
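+  // adapt grid 1 using the standard DUNE preAdapt/adapt/postAdapt cycle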
+  grid[1].preAdapt();
+  grid[1].adapt();
+  grid[1].postAdapt();
+
+  printGrid2(grid[0]);
+  printGrid(grid);
+}
diff --git a/test/CMakeLists.txt b/test/CMakeLists.txt
new file mode 100644
index 0000000000000000000000000000000000000000..b30cb10f7e2850ec22ab1fe41f4fcd7e4fc0a448
--- /dev/null
+++ b/test/CMakeLists.txt
@@ -0,0 +1 @@
+dune_add_test(SOURCES testvolume.cc)
diff --git a/test/testvolume.cc b/test/testvolume.cc
new file mode 100644
index 0000000000000000000000000000000000000000..a38f235d2ab3ffa6c5f1bf4acf92d26f77e67e07
--- /dev/null
+++ b/test/testvolume.cc
@@ -0,0 +1,65 @@
+// -*- tab-width: 4; indent-tabs-mode: nil; c-basic-offset: 2 -*-
+// vi: set et ts=4 sw=2 sts=2:
+
+#ifdef HAVE_CONFIG_H
+# include "config.h"
+#endif
+
+#include <functional>
+#include <iostream>
+#include <numeric>
+
+#include <dune/common/filledarray.hh>
+#include <dune/common/parallel/mpihelper.hh>
+
+#include <dune/grid/yaspgrid.hh>
+
+#include <dune/multimesh/multimesh.hh>
+
+using namespace Dune;
+
+template <std::size_t dim>
+bool test_dim()
+{
+  FieldVector<double,dim> lower; lower = -1.5;
+  FieldVector<double,dim> upper; upper =  1.5;
+  auto num_elements = filledArray<dim>(2);
+
+  using HostGrid = YaspGrid<dim, EquidistantOffsetCoordinates<double,dim>>;
+  MultiMesh<HostGrid> grid(3, lower, upper, num_elements);
+
+  // volume of the domain
+  double domain = std::inner_product(lower.begin(), lower.end(), upper.begin(), 1.0, std::multiplies<>{},
+    [](double l, double u) { return std::abs(u - l); });
+  std::cout << "volume(domain<" << dim << ">) = " << domain << "\n";
+
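+  // refine each sub-grid to a different level so the leaf views differ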
+  grid[0].globalRefine(2);
+  grid[1].globalRefine(1);
+  grid[2].globalRefine(3);
+
+  // each leaf "element" of the MultiMesh holds one entity per sub-grid;
+  // summing the volume of the finest (highest-level) entity of every
+  // element must reproduce the volume of the domain
+  double volume = 0.0;
+  for (auto const& entities : elements(grid.leafGridView())) {
+    auto it_small = std::max_element(entities.begin(), entities.end(),
+      [](auto const& e1, auto const& e2) { return e1.level() < e2.level(); });
+
+    auto geo_small = it_small->geometry();
+    volume += geo_small.volume();
+  }
+  std::cout << "volume(elements<" << dim << ">) = " << volume << "\n";
+
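+  // compare against the analytic domain volume, allowing for round-off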
+  return std::abs(volume - domain) <= 1.e-10;
+}
+
+int main(int argc, char** argv)
+{
+  MPIHelper::instance(argc, argv);
+  bool b1 = test_dim<1>();
+  bool b2 = test_dim<2>();
+  bool b3 = test_dim<3>();
+  return b1 && b2 && b3 ? 0 : 1;
+}