From f5e012b5419f7090e5c77cad24a531db1e92da4b Mon Sep 17 00:00:00 2001
From: Thomas Witkowski <thomas.witkowski@gmx.de>
Date: Wed, 2 May 2012 10:38:53 +0000
Subject: [PATCH] Small changes and a new test for parallel interior
 boundaries.

---
 AMDiS/src/AMDiS.h                             |   1 +
 AMDiS/src/BoundaryObject.cc                   |   7 ++
 AMDiS/src/BoundaryObject.h                    |   2 +
 AMDiS/src/Mesh.cc                             |  12 ++++
 AMDiS/src/Mesh.h                              |   4 ++
 AMDiS/src/Serializer.h                        |  42 +++++------
 AMDiS/src/io/FileWriter.hh                    |  20 +++---
 AMDiS/src/parallel/DofComm.cc                 |  26 +++++++
 AMDiS/src/parallel/DofComm.h                  |  29 +++-----
 AMDiS/src/parallel/ElementObjectDatabase.cc   |  56 ++++++---------
 AMDiS/src/parallel/InteriorBoundary.cc        |  46 ++++++++----
 AMDiS/src/parallel/InteriorBoundary.h         |  18 +++--
 AMDiS/src/parallel/MeshDistributor.cc         |  61 ++++++----------
 AMDiS/src/parallel/MeshDistributor.h          |  13 ++--
 AMDiS/src/parallel/ParallelDebug.cc           |  15 ++--
 AMDiS/src/parallel/ParallelDebug.h            |  56 +++++++++------
 test/mpi/data/data0002a                       |  17 +++++
 test/mpi/data/data0002b                       |  16 +++++
 test/mpi/data/data0004a                       |  33 +++++++++
 .../data/data0005/interior_boundary_p0.ser    | Bin 0 -> 266 bytes
 .../data/data0005/interior_boundary_p1.ser    | Bin 0 -> 366 bytes
 .../data/data0005/interior_boundary_p10.ser   | Bin 0 -> 520 bytes
 .../data/data0005/interior_boundary_p11.ser   | Bin 0 -> 458 bytes
 .../data/data0005/interior_boundary_p12.ser   | Bin 0 -> 258 bytes
 .../data/data0005/interior_boundary_p13.ser   | Bin 0 -> 458 bytes
 .../data/data0005/interior_boundary_p14.ser   | Bin 0 -> 458 bytes
 .../data/data0005/interior_boundary_p15.ser   | Bin 0 -> 358 bytes
 .../data/data0005/interior_boundary_p2.ser    | Bin 0 -> 366 bytes
 .../data/data0005/interior_boundary_p3.ser    | Bin 0 -> 258 bytes
 .../data/data0005/interior_boundary_p4.ser    | Bin 0 -> 366 bytes
 .../data/data0005/interior_boundary_p5.ser    | Bin 0 -> 520 bytes
 .../data/data0005/interior_boundary_p6.ser    | Bin 0 -> 520 bytes
 .../data/data0005/interior_boundary_p7.ser    | Bin 0 -> 458 bytes
 .../data/data0005/interior_boundary_p8.ser    | Bin 0 -> 366 bytes
 .../data/data0005/interior_boundary_p9.ser    | Bin 0 -> 520 bytes
 test/mpi/init/test0004.dat.2d                 |  25 +++++++
 test/mpi/init/test0005.dat.2d                 |  25 +++++++
 test/mpi/src/test0005.cc                      |  66 ++++++++++++++++++
 38 files changed, 416 insertions(+), 174 deletions(-)
 create mode 100644 test/mpi/data/data0002a
 create mode 100644 test/mpi/data/data0002b
 create mode 100644 test/mpi/data/data0004a
 create mode 100644 test/mpi/data/data0005/interior_boundary_p0.ser
 create mode 100644 test/mpi/data/data0005/interior_boundary_p1.ser
 create mode 100644 test/mpi/data/data0005/interior_boundary_p10.ser
 create mode 100644 test/mpi/data/data0005/interior_boundary_p11.ser
 create mode 100644 test/mpi/data/data0005/interior_boundary_p12.ser
 create mode 100644 test/mpi/data/data0005/interior_boundary_p13.ser
 create mode 100644 test/mpi/data/data0005/interior_boundary_p14.ser
 create mode 100644 test/mpi/data/data0005/interior_boundary_p15.ser
 create mode 100644 test/mpi/data/data0005/interior_boundary_p2.ser
 create mode 100644 test/mpi/data/data0005/interior_boundary_p3.ser
 create mode 100644 test/mpi/data/data0005/interior_boundary_p4.ser
 create mode 100644 test/mpi/data/data0005/interior_boundary_p5.ser
 create mode 100644 test/mpi/data/data0005/interior_boundary_p6.ser
 create mode 100644 test/mpi/data/data0005/interior_boundary_p7.ser
 create mode 100644 test/mpi/data/data0005/interior_boundary_p8.ser
 create mode 100644 test/mpi/data/data0005/interior_boundary_p9.ser
 create mode 100644 test/mpi/init/test0004.dat.2d
 create mode 100644 test/mpi/init/test0005.dat.2d
 create mode 100644 test/mpi/src/test0005.cc

diff --git a/AMDiS/src/AMDiS.h b/AMDiS/src/AMDiS.h
index 2df55bcd..b71d46d6 100644
--- a/AMDiS/src/AMDiS.h
+++ b/AMDiS/src/AMDiS.h
@@ -142,6 +142,7 @@
 #if HAVE_PARALLEL_DOMAIN_AMDIS
 #include "parallel/InteriorBoundary.h"
 #include "parallel/MpiHelper.h"
+#include "parallel/ParallelDebug.h"
 #include "parallel/StdMpi.h"
 
 #if HAVE_PARALLEL_MTL4
diff --git a/AMDiS/src/BoundaryObject.cc b/AMDiS/src/BoundaryObject.cc
index 435fc002..7890608b 100644
--- a/AMDiS/src/BoundaryObject.cc
+++ b/AMDiS/src/BoundaryObject.cc
@@ -135,4 +135,11 @@ namespace AMDiS {
 	    type == other.type);
   }
 
+  bool AtomicBoundary::operator!=(const AtomicBoundary& other) const
+  {
+    return (rankObj != other.rankObj ||
+	    neighObj != other.neighObj ||
+	    type != other.type);
+  }
+
 }
diff --git a/AMDiS/src/BoundaryObject.h b/AMDiS/src/BoundaryObject.h
index 37f55b54..d34cda37 100644
--- a/AMDiS/src/BoundaryObject.h
+++ b/AMDiS/src/BoundaryObject.h
@@ -110,6 +110,8 @@ namespace AMDiS {
 
     bool operator==(const AtomicBoundary& other) const;
 
+    bool operator!=(const AtomicBoundary& other) const;
+
     /// The rank's part of the boundary.
     BoundaryObject rankObj;
 
diff --git a/AMDiS/src/Mesh.cc b/AMDiS/src/Mesh.cc
index 451b0703..a7f78628 100644
--- a/AMDiS/src/Mesh.cc
+++ b/AMDiS/src/Mesh.cc
@@ -1498,4 +1498,16 @@ namespace AMDiS {
 
     Element::deletedDOFs.clear();
   }
+
+
+  void Mesh::getElementIndexMap(map<int, Element*> &elIndexMap)
+  {
+    TraverseStack stack;
+    ElInfo *elInfo = stack.traverseFirst(this, -1, Mesh::CALL_EVERY_EL_PREORDER);
+    while (elInfo) {
+      Element *el = elInfo->getElement();
+      elIndexMap[el->getIndex()] = el;      
+      elInfo = stack.traverseNext(elInfo);
+    }
+  }
 }
diff --git a/AMDiS/src/Mesh.h b/AMDiS/src/Mesh.h
index 6d727bdf..1bf6e974 100644
--- a/AMDiS/src/Mesh.h
+++ b/AMDiS/src/Mesh.h
@@ -629,6 +629,10 @@ namespace AMDiS {
     }
 #endif
 
+    /// Creates a map for all elements in mesh that maps from element indices
+    /// to the corresponding pointers.
+    void getElementIndexMap(map<int, Element*> &elIndexMap);
+
   public:
     ///
     static const Flag FILL_NOTHING;
diff --git a/AMDiS/src/Serializer.h b/AMDiS/src/Serializer.h
index 4df563aa..531d8ba5 100644
--- a/AMDiS/src/Serializer.h
+++ b/AMDiS/src/Serializer.h
@@ -37,6 +37,8 @@
 
 namespace AMDiS {
 
+  using namespace std;
+
   template<typename ProblemType>
   class Serializer : public FileWriterInterface
   {
@@ -64,7 +66,7 @@ namespace AMDiS {
     }
 
 
-    Serializer(ProblemType *prob, std::string filename, int writeEveryIth)
+    Serializer(ProblemType *prob, string filename, int writeEveryIth)
       : name(filename),
 	problem(prob),
 	tsModulo(writeEveryIth),
@@ -119,10 +121,10 @@ namespace AMDiS {
       }
 
 #if HAVE_PARALLEL_DOMAIN_AMDIS
-      filename += ".p" + boost::lexical_cast<std::string>(MPI::COMM_WORLD.Get_rank());
+      filename += ".p" + boost::lexical_cast<string>(MPI::COMM_WORLD.Get_rank());
 #endif
 
-      std::ofstream out(filename.c_str());
+      ofstream out(filename.c_str());
       TEST_EXIT(out.is_open())("Cannot open serialization file!\n");
       out.write(reinterpret_cast<const char*>(&amdisRevisionNumber), sizeof(int));
       problem->serialize(out);
@@ -134,7 +136,7 @@ namespace AMDiS {
 
   protected:
     /// Name of file to which the problem is serialized.
-    std::string name;
+    string name;
 
     /// Pointer to the problem.
     ProblemType *problem;
@@ -159,40 +161,40 @@ namespace AMDiS {
   namespace SerUtil {
 
     template<typename T>
-    void serialize(std::ostream& out, T& data)
+    void serialize(ostream& out, T& data)
     {
       out.write(reinterpret_cast<const char*>(&data), sizeof(T));
     }   
 
     template<typename T>
-    void deserialize(std::istream& in, T& data)
+    void deserialize(istream& in, T& data)
     {
       in.read(reinterpret_cast<char*>(&data), sizeof(T));
     }   
 
 
 
-    void serialize(std::ostream& out, DofEdge& data);
+    void serialize(ostream& out, DofEdge& data);
 
-    void deserialize(std::istream& in, DofEdge& data);
+    void deserialize(istream& in, DofEdge& data);
 
 
 
-    void serialize(std::ostream& out, DofFace& data);
+    void serialize(ostream& out, DofFace& data);
 
-    void deserialize(std::istream& in, DofFace& data);
+    void deserialize(istream& in, DofFace& data);
 
 
 
     template<typename T, typename U>
-    void serialize(std::ostream& out, std::pair<T, U>& data)
+    void serialize(ostream& out, pair<T, U>& data)
     {
       serialize(out, data.first);
       serialize(out, data.second);
     }
 
     template<typename T, typename U>
-    void deserialize(std::istream& in, std::pair<T, U>& data)
+    void deserialize(istream& in, pair<T, U>& data)
     {
       deserialize(in, data.first);
       deserialize(in, data.second);
@@ -201,11 +203,11 @@ namespace AMDiS {
 
 
     template<typename T>
-    void serialize(std::ostream& out, std::vector<T>& data)
+    void serialize(ostream& out, vector<T>& data)
     {
       int vecSize = data.size();
       serialize(out, vecSize);
-      for (typename std::vector<T>::iterator it = data.begin(); 
+      for (typename vector<T>::iterator it = data.begin(); 
 	   it != data.end(); ++it) {
 	T v = *it;
 	serialize(out, v);
@@ -213,7 +215,7 @@ namespace AMDiS {
     }
 
     template<typename T>
-    void deserialize(std::istream& in, std::vector<T>& data)
+    void deserialize(istream& in, vector<T>& data)
     {
       data.clear();
 
@@ -231,7 +233,7 @@ namespace AMDiS {
 
 
     template<typename T>
-    void serialize(std::ostream& out, std::set<T>& data)
+    void serialize(ostream& out, std::set<T>& data)
     {
       int setSize = data.size();
       serialize(out, setSize);
@@ -243,7 +245,7 @@ namespace AMDiS {
     }
 
     template<typename T>
-    void deserialize(std::istream& in, std::set<T>& data)
+    void deserialize(istream& in, std::set<T>& data)
     {
       data.clear();
 
@@ -260,12 +262,12 @@ namespace AMDiS {
 
 
     template<typename T1, typename T2>
-    void serialize(std::ostream& out, std::map<T1, T2>& data)
+    void serialize(ostream& out, map<T1, T2>& data)
     {
       int mapSize = data.size();
       serialize(out, mapSize);
 
-      for (typename std::map<T1,T2>::iterator it = data.begin(); 
+      for (typename map<T1,T2>::iterator it = data.begin(); 
 	   it != data.end(); ++it) {
 	T1 v1 = it->first;
 	T2 v2 = it->second;
@@ -275,7 +277,7 @@ namespace AMDiS {
     }
 
     template<typename T1, typename T2>
-    void deserialize(std::istream& in, std::map<T1, T2>& data)
+    void deserialize(istream& in, map<T1, T2>& data)
     {
       data.clear();
 
diff --git a/AMDiS/src/io/FileWriter.hh b/AMDiS/src/io/FileWriter.hh
index 58aff48d..07f78fcc 100644
--- a/AMDiS/src/io/FileWriter.hh
+++ b/AMDiS/src/io/FileWriter.hh
@@ -174,10 +174,10 @@ namespace AMDiS {
 
   template<typename T>
   void FileWriterTemplated<T>::writeFiles(AdaptInfo *adaptInfo,
-			      bool force,
-			      int level,
-			      Flag flag,
-			      bool (*writeElem)(ElInfo*))
+					  bool force,
+					  int level,
+					  Flag flag,
+					  bool (*writeElem)(ElInfo*))
   {
     FUNCNAME("FileWriterTemplated<T>::writeFiles()");
 
@@ -186,19 +186,19 @@ namespace AMDiS {
 
     // Containers, which store the data to be written;
     std::vector<DataCollector<T>*> dataCollectors(solutionVecs.size());
-
+    
     if (writeElem) {
       for (int i = 0; i < static_cast<int>(dataCollectors.size()); i++)
 	dataCollectors[i] = new DataCollector<T>(feSpace, solutionVecs[i], 
-					      level, flag, writeElem);
+						 level, flag, writeElem);
     } else {
       for (int i = 0; i < static_cast<int>(dataCollectors.size()); i++)
 	dataCollectors[i] = new DataCollector<T>(feSpace, solutionVecs[i], 
-					      traverseLevel, 
-					      flag | traverseFlag, 
-					      writeElement);
+						 traverseLevel, 
+						 flag | traverseFlag, 
+						 writeElement);
     }
-
+    
     std::string fn = filename;
 
 #if HAVE_PARALLEL_DOMAIN_AMDIS
diff --git a/AMDiS/src/parallel/DofComm.cc b/AMDiS/src/parallel/DofComm.cc
index 53d241c4..52660d1b 100644
--- a/AMDiS/src/parallel/DofComm.cc
+++ b/AMDiS/src/parallel/DofComm.cc
@@ -18,6 +18,30 @@ namespace AMDiS {
 
   using namespace std;
 
+
+  void DofComm::init(int level, 
+		     MeshLevelData &ld,
+		     vector<const FiniteElemSpace*> &fe)
+  {
+    FUNCNAME("DofComm::init()");
+    
+    meshLevel = level;
+    levelData = &ld;
+    feSpaces = fe;
+    
+    nLevel = levelData->getLevelNumber() - meshLevel;
+    TEST_EXIT_DBG(nLevel >= 1)("Should not happen!\n");
+
+    sendDofs.clear();
+    recvDofs.clear();
+    periodicDofs.clear();
+    
+    sendDofs.resize(nLevel);
+    recvDofs.resize(nLevel);
+    periodicDofs.resize(nLevel);
+  }
+
+
   void DofComm::create(InteriorBoundary &boundary)
   {
     createContainer(boundary.getOwn(), sendDofs);
@@ -28,6 +52,8 @@ namespace AMDiS {
   void DofComm::createContainer(RankToBoundMap &boundary,
 				LevelDataType &data)
   {
+    FUNCNAME("DofComm::createContainer()");
+
     // === Fill data. ===
 
     for (unsigned int i = 0; i < feSpaces.size(); i++)
diff --git a/AMDiS/src/parallel/DofComm.h b/AMDiS/src/parallel/DofComm.h
index 7e3a7bea..ffd747dd 100644
--- a/AMDiS/src/parallel/DofComm.h
+++ b/AMDiS/src/parallel/DofComm.h
@@ -38,7 +38,10 @@ namespace AMDiS {
     DofComm() 
       : recvDofs(1),
 	sendDofs(1),
-	periodicDofs(0)
+	periodicDofs(0),
+	meshLevel(-1),
+	nLevel(0),
+	levelData(NULL)
     {}
     
     typedef map<const FiniteElemSpace*, DofContainer> FeMapType;
@@ -48,23 +51,9 @@ namespace AMDiS {
     // meshLevel: map[rank -> map[feSpace -> DofContainer]]
     typedef vector<DataType> LevelDataType;
 
-    void init(int n, vector<const FiniteElemSpace*> &fe)
-    {
-      FUNCNAME("DofComm::init()");
-
-      TEST_EXIT_DBG(n >= 1)("Should not happen!\n");
-
-      nLevel = n;
-      feSpaces = fe;
-
-      sendDofs.clear();
-      recvDofs.clear();
-      periodicDofs.clear();
-
-      sendDofs.resize(nLevel);
-      recvDofs.resize(nLevel);
-      periodicDofs.resize(nLevel);
-    }
+    void init(int level, 
+	      MeshLevelData &levelData, 
+	      vector<const FiniteElemSpace*> &fe);
 
     void create(InteriorBoundary &boundary);
 
@@ -119,8 +108,12 @@ namespace AMDiS {
     /// considered here. 
     LevelDataType periodicDofs;
 
+    int meshLevel;
+
     int nLevel;
 
+    MeshLevelData *levelData;
+
     vector<const FiniteElemSpace*> feSpaces;
 
     friend class Iterator;
diff --git a/AMDiS/src/parallel/ElementObjectDatabase.cc b/AMDiS/src/parallel/ElementObjectDatabase.cc
index 5ab67ef9..930b5fef 100644
--- a/AMDiS/src/parallel/ElementObjectDatabase.cc
+++ b/AMDiS/src/parallel/ElementObjectDatabase.cc
@@ -579,35 +579,23 @@ namespace AMDiS {
     TEST_EXIT_DBG(macroElementRankMap)("Should not happen!\n");
 
     int owner = -1;
-
-    switch (iterGeoPos) {
-    case VERTEX:
-      {
-	vector<ElementObjectData>& vertexData = vertexElements[vertexIter->first];
-	for (vector<ElementObjectData>::iterator it = vertexData.begin();
-	     it != vertexData.end(); ++it)
-	  owner = std::max(owner, (*macroElementRankMap)[it->elIndex]);
-      }
-      break;
-    case EDGE:
-      {
-	vector<ElementObjectData>& edgeData = edgeElements[edgeIter->first];
-	for (vector<ElementObjectData>::iterator it = edgeData.begin();
-	     it != edgeData.end(); ++it)
-	  owner = std::max(owner, (*macroElementRankMap)[it->elIndex]);
-      }
-      break;
-    case FACE:
-      {
-      	vector<ElementObjectData>& faceData = faceElements[faceIter->first];
-	for (vector<ElementObjectData>::iterator it = faceData.begin();
-	     it != faceData.end(); ++it)
-	  owner = std::max(owner, (*macroElementRankMap)[it->elIndex]);
-      }
-      break;
-    default:
-	ERROR_EXIT("Should not happen!\n");
-    }    
+    vector<ElementObjectData> *objData;
+    
+    switch (iterGeoPos) {
+    case VERTEX:
+      objData = &(vertexElements[vertexIter->first]);
+      break;
+    case EDGE:
+      objData = &(edgeElements[edgeIter->first]);
+      break;
+    case FACE:
+      objData = &(faceElements[faceIter->first]);
+      break;
+    default:
+      ERROR_EXIT("Should not happen!\n");
+    }
+    for (vector<ElementObjectData>::iterator it = objData->begin(); it != objData->end(); ++it)
+      owner = std::max(owner, (*macroElementRankMap)[it->elIndex]);
 
     return owner;
   }
@@ -641,10 +629,12 @@ namespace AMDiS {
       }
       break;
     case FACE:
-      vector<ElementObjectData>& faceData = faceElements[faceIter->first];
-      for (vector<ElementObjectData>::iterator it = faceData.begin();
-	   it != faceData.end(); ++it)
-	ranksInLevel[0].insert((*macroElementRankMap)[it->elIndex]);
+      {
+	vector<ElementObjectData>& faceData = faceElements[faceIter->first];
+	for (vector<ElementObjectData>::iterator it = faceData.begin();
+	     it != faceData.end(); ++it)
+	  ranksInLevel[0].insert((*macroElementRankMap)[it->elIndex]);
+      }
       break;
     default:
       ERROR_EXIT("Should not happen!\n");
diff --git a/AMDiS/src/parallel/InteriorBoundary.cc b/AMDiS/src/parallel/InteriorBoundary.cc
index b7547cc3..863a5f79 100644
--- a/AMDiS/src/parallel/InteriorBoundary.cc
+++ b/AMDiS/src/parallel/InteriorBoundary.cc
@@ -19,6 +19,9 @@
 
 namespace AMDiS {
 
+  using namespace std;
+
+
   void InteriorBoundary::create(MPI::Intracomm &mpiComm,
 				ElementObjectDatabase &elObjDb)
   { 
@@ -345,13 +348,19 @@ namespace AMDiS {
   }
 
 
-  void InteriorBoundary::serialize(std::ostream &out)
+  void InteriorBoundary::serialize(ostream &out)
   {
-    FUNCNAME("InteriorBoundary::serialize()");
+    serialize(out, own);
+    serialize(out, other);
+    serialize(out, periodic);
+  }
 
-    ERROR_EXIT("REWRITE TO MULTILEVEL STRUCTURE!\n");
 
-#if 0
+  void InteriorBoundary::serialize(ostream &out,
+				   RankToBoundMap& boundary)
+  {
+    FUNCNAME("InteriorBoundary::serialize()");
+
     int mSize = boundary.size();
     SerUtil::serialize(out, mSize);
     for (RankToBoundMap::iterator it = boundary.begin(); 
@@ -380,18 +389,26 @@ namespace AMDiS {
 	SerUtil::serialize(out, bound.type);
       }
     }
-#endif
   }
 
 
-  void InteriorBoundary::deserialize(std::istream &in, 
-				     std::map<int, Element*> &elIndexMap)
+  void InteriorBoundary::deserialize(istream &in, Mesh *mesh)				     
   {
-    FUNCNAME("InteriorBoundary::deserialize()");
+    map<int, Element*> elIndexMap;
+    mesh->getElementIndexMap(elIndexMap);
+
+    deserialize(in, own, elIndexMap);
+    deserialize(in, other, elIndexMap);
+    deserialize(in, periodic, elIndexMap);
+  }
 
-    ERROR_EXIT("REWRITE TO MULTILEVEL STRUCTURE!\n");
 
-#if 0
+  void InteriorBoundary::deserialize(istream &in, 
+				     RankToBoundMap& boundary,
+				     map<int, Element*> &elIndexMap)
+  {
+    FUNCNAME("InteriorBoundary::deserialize()");
+
     int mSize = 0;
     SerUtil::deserialize(in, mSize);
     for (int i = 0; i < mSize; i++) {
@@ -431,14 +448,13 @@ namespace AMDiS {
 
 	// For the case of periodic interior boundaries, a rank may have an
 	// boundary with itself. In this case, also the pointer to the neighbour
-	//  object must be set correctly.
+	// object must be set correctly.
 	if (elIndexMap.count(bound.neighObj.elIndex))
 	  bound.neighObj.el = elIndexMap[bound.neighObj.elIndex];
 	else
 	  bound.neighObj.el = NULL;
       }
     }
-#endif
   }
 
 
@@ -466,7 +482,7 @@ namespace AMDiS {
   }
 
 
-  void InteriorBoundary::serializeExcludeList(std::ostream &out, 
+  void InteriorBoundary::serializeExcludeList(ostream &out, 
 					      ExcludeList &list)
   {
     int size = list.size();
@@ -478,7 +494,7 @@ namespace AMDiS {
   }
 
 
-  void InteriorBoundary::deserializeExcludeList(std::istream &in, 
+  void InteriorBoundary::deserializeExcludeList(istream &in, 
 						ExcludeList &list)
   {
     int size = 0;
@@ -492,7 +508,7 @@ namespace AMDiS {
 
       SerUtil::deserialize(in, a);
       SerUtil::deserialize(in, b);
-      list.push_back(std::make_pair(a, b));
+      list.push_back(make_pair(a, b));
     }
   }
 
diff --git a/AMDiS/src/parallel/InteriorBoundary.h b/AMDiS/src/parallel/InteriorBoundary.h
index 0695df09..293b854b 100644
--- a/AMDiS/src/parallel/InteriorBoundary.h
+++ b/AMDiS/src/parallel/InteriorBoundary.h
@@ -44,12 +44,6 @@ namespace AMDiS {
     void create(MPI::Intracomm &mpiComm,
 		ElementObjectDatabase &elObjDb);
 
-    /// Writes this object to a file.
-    void serialize(ostream &out);
-
-    /// Reads the state of an interior boundary from a file.
-    void deserialize(istream &in, map<int, Element*> &elIndexMap);
-
     RankToBoundMap& getOwn()
     {
       return own;
@@ -70,6 +64,12 @@ namespace AMDiS {
       return static_cast<bool>(periodic.size());
     }
 
+    /// Writes this object to a file.
+    void serialize(ostream &out);
+
+    /// Reads the state of an interior boundary from a file.
+    void deserialize(istream &in, Mesh *mesh);
+
   private:
     AtomicBoundary& getNewOwn(int rank);
 
@@ -77,6 +77,12 @@ namespace AMDiS {
 
     AtomicBoundary& getNewPeriodic(int rank);
 
+    void serialize(ostream &out, RankToBoundMap& boundary);
+
+    void deserialize(istream &in, 
+		     RankToBoundMap& boundary,
+		     map<int, Element*> &elIndexMap);
+
     void serializeExcludeList(ostream &out, ExcludeList &list);
 
     void deserializeExcludeList(istream &in, ExcludeList &list);    
diff --git a/AMDiS/src/parallel/MeshDistributor.cc b/AMDiS/src/parallel/MeshDistributor.cc
index 8dc62837..fa91fcb0 100644
--- a/AMDiS/src/parallel/MeshDistributor.cc
+++ b/AMDiS/src/parallel/MeshDistributor.cc
@@ -228,12 +228,8 @@ namespace AMDiS {
     createMeshLevelStructure();
 
     // Create interior boundary information.
-    createInteriorBoundaryInfo();
+    createInteriorBoundary(true);
 
-#if (DEBUG != 0)    
-    ParallelDebug::printBoundaryInfo(*this);
-#endif
-    
     // === Remove neighbourhood relations due to periodic bounday conditions. ===
 
     for (deque<MacroElement*>::iterator it = mesh->firstMacroElement();
@@ -1438,7 +1434,7 @@ namespace AMDiS {
     mesh->dofCompress();
     partitioner->createPartitionMap(partitionMap);
 
-    updateInteriorBoundaryInfo();
+    createInteriorBoundary(false);
     updateLocalGlobalNumbering();
 
     
@@ -1509,19 +1505,12 @@ namespace AMDiS {
   }
 
 
-  void MeshDistributor::createInteriorBoundaryInfo()
+  void MeshDistributor::createInteriorBoundary(bool firstCall)
   {
-    FUNCNAME("MeshDistributor::createInteriorBoundaryInfo()");
-
-    elObjDb.create(partitionMap, levelData);
-    elObjDb.updateRankData();
-    intBoundary.create(mpiComm, elObjDb);
-  }
+    FUNCNAME("MeshDistributor::createInteriorBoundary()");
 
-
-  void MeshDistributor::updateInteriorBoundaryInfo()
-  {
-    FUNCNAME("MeshDistributor::updateInteriorBoundaryInfo()");
+    if (firstCall)
+      elObjDb.create(partitionMap, levelData);
 
     elObjDb.updateRankData();
     intBoundary.create(mpiComm, elObjDb);
@@ -1536,17 +1525,18 @@ namespace AMDiS {
   {
     FUNCNAME("MeshDistributor::createBoundaryDofs()");
 
-    dofComm.init(levelData.getLevelNumber(), feSpaces);
-    dofComm.create(intBoundary);
-
-    createBoundaryDofInfo();
-  }
+    // === Create DOF communicator. ===
 
+    dofComm.init(0, levelData, feSpaces);
+    dofComm.create(intBoundary);
 
-  void MeshDistributor::createBoundaryDofInfo()
-  {
-    FUNCNAME("MeshDistributor::createBoundaryDofInfo()");
+    if (levelData.getLevelNumber() > 1) {
+      dofCommSd.init(1, levelData, feSpaces);
+      dofCommSd.create(intBoundary);
+    }
 
+    // === If requested, create more information on communication DOFs. ===
+    
     if (!createBoundaryDofFlag.isSet(BOUNDARY_SUBOBJ_SORTED))
       return;
 
@@ -1960,24 +1950,17 @@ namespace AMDiS {
     SerUtil::deserialize(in, elemWeights);
     SerUtil::deserialize(in, partitionMap);
 
-    // Create two maps: one from from element indices to the corresponding element 
-    // pointers, and one map from Dof indices to the corresponding dof pointers.
-    map<int, Element*> elIndexMap;
+    // Create a map from DOF indices to the corresponding DOF pointers.
     map<const FiniteElemSpace*, map<int, const DegreeOfFreedom*> > dofIndexMap;
     for (unsigned int i = 0; i < feSpaces.size(); i++) {
       ElementDofIterator elDofIter(feSpaces[i]);
       TraverseStack stack;
-      ElInfo *elInfo = stack.traverseFirst(mesh, -1, Mesh::CALL_EVERY_EL_PREORDER);
+      ElInfo *elInfo = stack.traverseFirst(mesh, -1, Mesh::CALL_LEAF_EL);
       while (elInfo) {
-	Element *el = elInfo->getElement();
-	elIndexMap[el->getIndex()] = el;
-	
-	if (el->isLeaf()) {
-	  elDofIter.reset(el);
-	  do {
-	    dofIndexMap[feSpaces[i]][elDofIter.getDof()] = elDofIter.getDofPtr();
-	  } while (elDofIter.next());      
-	}
+	elDofIter.reset(elInfo->getElement());
+	do {
+	  dofIndexMap[feSpaces[i]][elDofIter.getDof()] = elDofIter.getDofPtr();
+	} while (elDofIter.next());      
 	
 	elInfo = stack.traverseNext(elInfo);
       }
@@ -1985,7 +1968,7 @@ namespace AMDiS {
 
     elObjDb.deserialize(in);
    
-    intBoundary.deserialize(in, elIndexMap);
+    intBoundary.deserialize(in, mesh);
 
     dofComm.deserialize(in, dofIndexMap);
 
diff --git a/AMDiS/src/parallel/MeshDistributor.h b/AMDiS/src/parallel/MeshDistributor.h
index 4299edf7..24dfbd4f 100644
--- a/AMDiS/src/parallel/MeshDistributor.h
+++ b/AMDiS/src/parallel/MeshDistributor.h
@@ -162,6 +162,11 @@ namespace AMDiS {
       return dofComm;
     }
 
+    InteriorBoundary& getIntBoundary()
+    {
+      return intBoundary;
+    }
+
     inline long getLastMeshChangeIndex()
     {
       return lastMeshChangeIndex;
@@ -282,14 +287,10 @@ namespace AMDiS {
 
     /// Determines the interior boundaries, i.e. boundaries between ranks, and
     /// stores all information about them in \ref interiorBoundary.
-    void createInteriorBoundaryInfo();
-
-    void updateInteriorBoundaryInfo();
+    void createInteriorBoundary(bool firstCall);
 
     void createBoundaryDofs();
 
-    void createBoundaryDofInfo();
-
     /// Removes all macro elements from the mesh that are not part of ranks 
     /// partition.
     void removeMacroElements();
@@ -472,6 +473,8 @@ namespace AMDiS {
 
     DofComm dofComm;
 
+    DofComm dofCommSd;
+
     PeriodicMap periodicMap;
 
     /// This set of values must be interchanged between ranks when the mesh is 
diff --git a/AMDiS/src/parallel/ParallelDebug.cc b/AMDiS/src/parallel/ParallelDebug.cc
index a8ed8ee6..b1ff9947 100644
--- a/AMDiS/src/parallel/ParallelDebug.cc
+++ b/AMDiS/src/parallel/ParallelDebug.cc
@@ -715,16 +715,19 @@ namespace AMDiS {
   }
 
 
-  void ParallelDebug::printBoundaryInfo(MeshDistributor &pdb)
+  void ParallelDebug::printBoundaryInfo(MeshDistributor &pdb, 
+					int level, 
+					bool force)
   {
     FUNCNAME("ParallelDebug::printBoundaryInfo()");
 
     int tmp = 0;
     Parameters::get("parallel->debug->print boundary info", tmp);
-    if (tmp <= 0)
+    if (tmp <= 0 && force == false)
       return;
 
-    for (InteriorBoundary::iterator it(pdb.intBoundary.own); !it.end(); ++it) {
+    for (InteriorBoundary::iterator it(pdb.intBoundary.own, level); 
+	 !it.end(); ++it) {
       MSG("Rank owned boundary with rank %d: \n", it.getRank());
       MSG("  ranks obj-ind: %d  sub-obj: %d   ith-obj: %d\n",
 	  it->rankObj.elIndex, it->rankObj.subObj, it->rankObj.ithObj);
@@ -732,7 +735,8 @@ namespace AMDiS {
 	  it->neighObj.elIndex, it->neighObj.subObj, it->neighObj.ithObj);
     }
 
-    for (InteriorBoundary::iterator it(pdb.intBoundary.other); !it.end(); ++it) {
+    for (InteriorBoundary::iterator it(pdb.intBoundary.other, level); 
+	 !it.end(); ++it) {
       MSG("Other owned boundary with rank %d: \n", it.getRank());
       MSG("  ranks obj-ind: %d  sub-obj: %d   ith-obj: %d\n",
 	  it->rankObj.elIndex, it->rankObj.subObj, it->rankObj.ithObj);
@@ -740,7 +744,8 @@ namespace AMDiS {
 	  it->neighObj.elIndex, it->neighObj.subObj, it->neighObj.ithObj);
     }
 
-    for (InteriorBoundary::iterator it(pdb.intBoundary.periodic); !it.end(); ++it) {
+    for (InteriorBoundary::iterator it(pdb.intBoundary.periodic, level); 
+	 !it.end(); ++it) {
       MSG("Periodic boundary (ID %d) with rank %d: \n", 
 	  it->type, it.getRank());
       MSG("  ranks obj-ind: %d  sub-obj: %d   ith-obj: %d\n",
diff --git a/AMDiS/src/parallel/ParallelDebug.h b/AMDiS/src/parallel/ParallelDebug.h
index 5536f802..ee87bfc4 100644
--- a/AMDiS/src/parallel/ParallelDebug.h
+++ b/AMDiS/src/parallel/ParallelDebug.h
@@ -62,22 +62,22 @@ namespace AMDiS {
 				     const FiniteElemSpace *feSpace);
 
     /** \brief
-     * This function is used for debugging only. It traverses all interior boundaries
-     * and compares the DOF indices on them with the DOF indices of the boundarys
-     * neighbours. The function fails, when DOF indices on an interior boundary do
-     * not fit together.
+     * This function is used for debugging only. It traverses all interior
+     * boundaries and compares the DOF indices on them with the DOF indices of
+     * the boundary's neighbours. The function fails when DOF indices on an
+     * interior boundary do not fit together.
      *
      * \param[in]  pdb           Parallel problem definition used for debugging.
-     * \param[in]  printCoords   If true, the coords of all common dofs are printed
-     *                           to the screen.
+     * \param[in]  printCoords   If true, the coords of all common dofs are
+     *                           printed to the screen.
      */
     static void testCommonDofs(MeshDistributor &pdb, bool printCoords = false);
 
     /** \brief
-     * This function is used for debugging only. It checks if on all ranks DOFs with
-     * the same coordinates have the same global index. For this, the function genartes
-     * on all ranks a list of all DOFs with their coordinates and global indices and 
-     * sends the list to all neighbouring ranks.
+     * This function is used for debugging only. It checks if on all ranks DOFs
+     * with the same coordinates have the same global index. For this, the
+     * function generates on all ranks a list of all DOFs with their coordinates
+     * and global indices and sends the list to all neighbouring ranks.
      *
      * \param[in]  pdb           Parallel problem definition used for debugging.
      */
@@ -122,14 +122,14 @@ namespace AMDiS {
     static void printMapPeriodic(MeshDistributor &pdb, int rank = -1);
 
     /** \brief
-     * This function is used for debugging only. It prints information about dofs
+     * This function is used for debugging only. It prints information about DOFs
      * in rank's partition.
      *
      * \param[in]  pdb           Parallel problem definition used for debugging.
-     * \param[in]  rank          If specified, only the information from the given 
-     *                           rank is printed.
-     * \param[in]  rankDofs      List of all dofs in ranks partition that are owned 
-     *                           by rank.
+     * \param[in]  rank          If specified, only the information from the
+     *                           given rank is printed.
+     * \param[in]  rankDofs      List of all dofs in ranks partition that are
+     *                           owned by rank.
      * \param[in]  rankAllDofs   List of all dofs in ranks partition.
      */
     static void printRankDofs(MeshDistributor &pdb,
@@ -138,28 +138,38 @@ namespace AMDiS {
 			      DofContainer& rankAllDofs);
 
     /** \brief
-     * This functions prints all information about all interior boundaries on all ranks.
+     * This function prints all information about all interior boundaries on
+     * all ranks.
      *
      * \param[in]  pdb           Parallel problem definition used for debugging.
+     * \param[in]  level         Mesh level number for which the boundary should
+     *                           be printed.
+     * \param[in]  force         If true, the information is always printed to
+     *                           screen. Otherwise, this is done only if AMDiS
+     *                           is compiled in debug mode or if the init file
+     *                           parameter "parallel->debug->print boundary info"
+     *                           is set.
      */
-    static void printBoundaryInfo(MeshDistributor &pdb);
+    static void printBoundaryInfo(MeshDistributor &pdb, 
+				  int level = 0, 
+				  bool force = false);
 
     static void writeDebugFile(MeshDistributor &pdb,
 			       std::string prefix, std::string postfix);
 
     /** \brief
-     * This functions create a Paraview file with the macro mesh where the elements
-     * are colored by the partition they are part of.
+     * This function creates a Paraview file with the macro mesh where the
+     * elements are colored by the partition they are part of.
      */
     static void writePartitioning(MeshDistributor &pdb, string filename);
 
     /** \brief
-     * The mesh is written to a value and all values are assigned by the rank number
-     * where the vertex is contained in.
+     * The mesh is written to a file and all values are assigned by the rank
+     * number in which the vertex is contained.
      *
      * \param[in]  filename    Name of the output file without extension (.vtu).
-     * \param[in]  counter     Counter index. If not negative, this number is added
-     *                         to the filename.
+     * \param[in]  counter     Counter index. If not negative, this number is
+     *                         added to the filename.
      * \param[in]  feSpace
      */
     static void writePartitioningFile(std::string filename,
diff --git a/test/mpi/data/data0002a b/test/mpi/data/data0002a
new file mode 100644
index 00000000..6e844b2f
--- /dev/null
+++ b/test/mpi/data/data0002a
@@ -0,0 +1,17 @@
+# rank_id primals duals dofs start_dofs overall_dofs
+0 0 2 4 0 81
+1 1 3 4 4 81
+2 1 3 4 8 81
+3 1 2 6 12 81
+4 1 3 4 18 81
+5 1 4 4 22 81
+6 1 4 4 26 81
+7 2 3 6 30 81
+8 1 3 4 36 81
+9 1 4 4 40 81
+10 1 4 4 44 81
+11 2 3 6 48 81
+12 1 2 6 54 81
+13 2 3 6 60 81
+14 2 3 6 66 81
+15 3 2 9 72 81
diff --git a/test/mpi/data/data0002b b/test/mpi/data/data0002b
new file mode 100644
index 00000000..cac41705
--- /dev/null
+++ b/test/mpi/data/data0002b
@@ -0,0 +1,16 @@
+0 0 30 256 0 4225
+1 1 45 256 256 4225
+2 1 45 256 512 4225
+3 1 30 272 768 4225
+4 1 45 256 1040 4225
+5 1 60 256 1296 4225
+6 1 60 256 1552 4225
+7 2 45 272 1808 4225
+8 1 45 256 2080 4225
+9 1 60 256 2336 4225
+10 1 60 256 2592 4225
+11 2 45 272 2848 4225
+12 1 30 272 3120 4225
+13 2 45 272 3392 4225
+14 2 45 272 3664 4225
+15 3 30 289 3936 4225
diff --git a/test/mpi/data/data0004a b/test/mpi/data/data0004a
new file mode 100644
index 00000000..99136fa9
--- /dev/null
+++ b/test/mpi/data/data0004a
@@ -0,0 +1,33 @@
+# rank_id level_id send_dofs recv_dofs
+0 0 0 3
+0 1 0 0
+1 0 1 3
+1 1 0 2
+2 0 1 3
+2 1 1 1
+3 0 1 2
+3 1 0 0
+4 0 1 3
+4 1 0 2
+5 0 1 3
+5 1 0 3
+6 0 1 3
+6 1 1 2
+7 0 2 2
+7 1 0 2
+8 0 1 3
+8 1 1 1
+9 0 1 3
+9 1 1 2
+10 0 1 3
+10 1 1 2
+11 0 2 2
+11 1 2 0
+12 0 1 2
+12 1 0 0
+13 0 2 2
+13 1 0 2
+14 0 2 2
+14 1 2 0
+15 0 3 0
+15 1 0 0
diff --git a/test/mpi/data/data0005/interior_boundary_p0.ser b/test/mpi/data/data0005/interior_boundary_p0.ser
new file mode 100644
index 0000000000000000000000000000000000000000..c230d070cb5668e1c088410570479ed22a04ef0d
GIT binary patch
literal 266
zcmZQzU|?WoU|?WmU|?W^(jYMyAH)VL`2YX^e~=&ug9SkZj04jN=7TsO)gT%y08$CU
zEKoZ@Y!C)Xf)s(+44e!M3?N+~K@bKDA`>u`U`d#2m>^ssE7T8QRZuouk{h2L0OiIM
AJpcdz

literal 0
HcmV?d00001

diff --git a/test/mpi/data/data0005/interior_boundary_p1.ser b/test/mpi/data/data0005/interior_boundary_p1.ser
new file mode 100644
index 0000000000000000000000000000000000000000..e761ef7007126b2425b681804a5be5060226477d
GIT binary patch
literal 366
zcmZQ%fB+^21_ovblK~_K!eB0p0ir>Qz=9wG#sSeFNst&=D~JITgiC<T1_^>Nhz(K%
zk_7QVYz9^a28RFt|NjRGf-qPRM8H&oRDvWxn1KbV8YT!*2*w~|K!P9)5&)@&u^G6b
rx<G;;3{n8bFcmNwED2K$69lUS5o`<$V2{FF238G{0AXHSc7P-S4wMvX

literal 0
HcmV?d00001

diff --git a/test/mpi/data/data0005/interior_boundary_p10.ser b/test/mpi/data/data0005/interior_boundary_p10.ser
new file mode 100644
index 0000000000000000000000000000000000000000..9b96093fcea131e02e930cce55756f5f398c58c4
GIT binary patch
literal 520
zcmZQ(U|?WnU|?WmU|<ksfB+C1guy&+FbPorWkM)61_lNuT)KE5YQV;V6hXNNJ5U5+
zs$qg~%Q^AcAq+JXq#C9ZEF=OZA+7^Sg1Io&AeA6N5C%(tLIC7RkOXoFfcRhq;tUK7
z|NsC057Gg`U_lT8Qw>rHk^o@_F{o;oAV?t?^FjRra~()END)|47OD#*2*O}N5CKyS
bQwf%YsfG!H6oN57mY4)ffCzb9c7P-S%9j=i

literal 0
HcmV?d00001

diff --git a/test/mpi/data/data0005/interior_boundary_p11.ser b/test/mpi/data/data0005/interior_boundary_p11.ser
new file mode 100644
index 0000000000000000000000000000000000000000..b609dde15d8df081332b8559196ce595e8e34085
GIT binary patch
literal 458
zcmZQ(U|?WlU|?WmU|<kufB+C1guy&sFbPorWkM)+1_lO@ZmhcaAZozIGC|p33RyK=
zf*&k}#SWNixJr;hnAtEvxKp_p7#Ki;FkK)C5C&;t5CxMEJ3xw{Tm}X)C<`Qrp&BL#
zmjE$9&IS1#rV}IvQV2Fko`Hek|NsC0L4qI*76cK<t^)DF7+E#k2#^fSY?vU}D41%P
JAV?t`0|4uh4c-6%

literal 0
HcmV?d00001

diff --git a/test/mpi/data/data0005/interior_boundary_p12.ser b/test/mpi/data/data0005/interior_boundary_p12.ser
new file mode 100644
index 0000000000000000000000000000000000000000..fbbaf66529e4c006aa747fb4f96a3537255a3493
GIT binary patch
literal 258
zcmZQ%U|`^2U|?WkU|^7BfB=vf2!nY7V3L6WBmhzb<sww02*Omu1VOsMIzS9w1_lOZ
zG<#qQ8KfB)82<nN{~x3fgu%K%1hT6@d@z<mQVll(Bm+|o5(F6o!VECgFhP()I0gXO
CE)5p|

literal 0
HcmV?d00001

diff --git a/test/mpi/data/data0005/interior_boundary_p13.ser b/test/mpi/data/data0005/interior_boundary_p13.ser
new file mode 100644
index 0000000000000000000000000000000000000000..2321c99fb0e81c4966a733a9448d4baeee7f27e6
GIT binary patch
literal 458
zcmZQ(U|`^2U|?WmU|^7DfB+C1guy&PFbPorWkM)U1_lNuT)KoHYQV;V6hXNNJ5U5+
zs$qg~%Xt_W7(gz>>M}{Fp&->TonRp;FbQ!oTmo4&R31XXRD)E4i~@_oRKo<}5+DX2
z7FU53f_2I=Ffjc8|NlQoAqay7K?JfNKzuNkK~fDj0we=d4H5(y1Hue2)i6PjLO2Eh
DH+c>q

literal 0
HcmV?d00001

diff --git a/test/mpi/data/data0005/interior_boundary_p14.ser b/test/mpi/data/data0005/interior_boundary_p14.ser
new file mode 100644
index 0000000000000000000000000000000000000000..cc4ecf931e4c60fc9e2637c277dac6d0e8a0f547
GIT binary patch
literal 458
zcmZQ(U|`^6U|?WmU|^7CfB+C1guy&vFbPorWkM(}1_lNuT)IRcYQV;V6hXNNJ5U5+
zs$qg~%Xt|X7(gz>>N07lp&->TonRpuFbQ!oTmo4&R31XXRD)E4i~@_oRKo<}5+DXY
z7FU53f_2I>Ffjc8|NlQoAqay7K?JfNKzuNkLsAVl0we=d4H5(y1Hue2)i6PjLO2Eh
DU7rp}

literal 0
HcmV?d00001

diff --git a/test/mpi/data/data0005/interior_boundary_p15.ser b/test/mpi/data/data0005/interior_boundary_p15.ser
new file mode 100644
index 0000000000000000000000000000000000000000..7c839a8ae54d482cb748ff0cfaade00d5f148445
GIT binary patch
literal 358
zcmZQ(U|`^4U|?WmU|^7EfB+C1guy&fFbPorWkM)!1_lO@Zmhb*AZozIGC|p33RyK=
zLL4lG#SWNixJr;hnAtEvxKsJ?xkwhI73xu#POy+1n1omkmq1nxm4{F;)gYB{i(#r^
Lf^Z3l9tH*gTaW?O

literal 0
HcmV?d00001

diff --git a/test/mpi/data/data0005/interior_boundary_p2.ser b/test/mpi/data/data0005/interior_boundary_p2.ser
new file mode 100644
index 0000000000000000000000000000000000000000..a092854f7310131479f73841b3527d9bf6dc59cc
GIT binary patch
literal 366
zcmZQ%U|;}YCI$uuRt5+F@j)2OV+NB5MNlpS0}GS|(utuOCWs;dG72PtY!8SJHi(^p
zf#LuE|NlWcKo~3tB4DaPDnSw;%)ka!4HE<@1Y?i{ND$^akZKSgEXfPi1rh{dupo$l
isfMWpOTtvc1VIYH801ut3Yg14Y_J4~;KyYLND=@h;uMMi

literal 0
HcmV?d00001

diff --git a/test/mpi/data/data0005/interior_boundary_p3.ser b/test/mpi/data/data0005/interior_boundary_p3.ser
new file mode 100644
index 0000000000000000000000000000000000000000..5a54c7ebbff83cf11d48807c413891f33ec56411
GIT binary patch
literal 258
zcmZQ%U|?W^Vs-`y0EvMxn8ykx85lqUAcasa0|OhB1rh{Fg1Io&FhRHkhygO6nE{~_
zBnDCl*2&Mn!0`Y7|NkIC5C#i^2#_uijUtGw8g2wg6|$>9e6TK<Y7~_qSr7&QfIbX%

literal 0
HcmV?d00001

diff --git a/test/mpi/data/data0005/interior_boundary_p4.ser b/test/mpi/data/data0005/interior_boundary_p4.ser
new file mode 100644
index 0000000000000000000000000000000000000000..06fc02c92bc368b896d364ba1091c9d37b1e5a27
GIT binary patch
literal 366
zcmZQ%fB+^21_n+DlK~_K!cZ=R0ttW=L3l{2Q3PSKFhP(muoyD~0|P4q0|Q6`*&Yxd
ztbm(=f#LuE|NlWcKo~3tB4DaPDnSw;%)kXz4HE<@1Y-`U9UvMc08$N71Y$D?LUn-z
pK^QCuB4DavD#4O4)i6PjLNEro2&4i;g9Jcq5Dmt{xa<JQ0suip6u1BY

literal 0
HcmV?d00001

diff --git a/test/mpi/data/data0005/interior_boundary_p5.ser b/test/mpi/data/data0005/interior_boundary_p5.ser
new file mode 100644
index 0000000000000000000000000000000000000000..2144bb7c888416c787b6682e3a897c7f453c26a3
GIT binary patch
literal 520
zcmZQ(fB;4Y1_o{jlL5pAVK5iQfYD$<5CIZk!ler&fvgC`24k2VFa=;i7zaj!j0Q`v
zFfcHH1Yx>B5+Dqg<OGupVCRAqLAeYJTu>HB5JNRg5H0}<0X8i5fE0oa;$>i9`2YX^
ze~=Ck1`C1+n5#f4K@uR$zynnc69g#)V~}$}f-u*CRD<|nNnxlikRS+y1wjN%HB2Q~
d5~dm^2vP{fAk`ohFqeVYU<nW*ipvg=BmmV`6(axu

literal 0
HcmV?d00001

diff --git a/test/mpi/data/data0005/interior_boundary_p6.ser b/test/mpi/data/data0005/interior_boundary_p6.ser
new file mode 100644
index 0000000000000000000000000000000000000000..1c5b3568d2b0bc55612309217199d86f8605c8fe
GIT binary patch
literal 520
zcmZQ(U|?W`VqOLafbqdRkOB|`BnYD6n2CXb0fe#YVu30H=>jQ&av2z4s!;@Cs$qg4
zU0@xo`0U^YD}gv4rV}j0112G^gBt}?4N?gb1Yxi`kO$eZ*aK1sR>04|!0`Y7|NkHz
zAPg1+5y*Z4Nq{f|A5=9+J6Hgu5`?*+cEDT*k_0IN$uWpRb%6vy7%T`PV5(s%!ICi5
bFhP()Fa~)NqypwL5F0E3BE)go0g?m&25%Kn

literal 0
HcmV?d00001

diff --git a/test/mpi/data/data0005/interior_boundary_p7.ser b/test/mpi/data/data0005/interior_boundary_p7.ser
new file mode 100644
index 0000000000000000000000000000000000000000..85239548e96dd77d26e94833484d3f6b73511c23
GIT binary patch
literal 458
zcmZQ(U|?WkU|?WmU|`^9fB+C1guy&kFbPorWkM*BP7ubbiw&X%Y%IulC>PZZxCA>?
z8g40+4|fq<B}f8hHcSw%3nT#&#OfkmkXEQiL5iSU1_nMT3nU0P3Z@z+2$ujcxEUB2
zKt6}*1c`wZf(;U9U|{(F|NnoGAP9p6K?Jg^KzuMpRt+}-Bm*-WCI~hPrWz&)QV7QY
DQ!x!b

literal 0
HcmV?d00001

diff --git a/test/mpi/data/data0005/interior_boundary_p8.ser b/test/mpi/data/data0005/interior_boundary_p8.ser
new file mode 100644
index 0000000000000000000000000000000000000000..887f19bac40fc774ea9797238e29c05ab873e854
GIT binary patch
literal 366
zcmZQ%U|?WjU|?WkU|<krfB=vf2!nYXV3L6WBmhzb<sww02*Omu1VOsMI+z(47&x)m
z15?N#%)r3#|NsC0ARQnK)&(M9t^%n9Nq{he5L7ix5Tp={d7yR(fO%-nmW1j883Dpj
pH4F?e)ga?Qf?!FQYM3BM0*rYXAm+kc238H0VPIg8#$^Xc0RW<X6*vF@

literal 0
HcmV?d00001

diff --git a/test/mpi/data/data0005/interior_boundary_p9.ser b/test/mpi/data/data0005/interior_boundary_p9.ser
new file mode 100644
index 0000000000000000000000000000000000000000..c5faae49ef912126acd87a9481f4b224d88a89c3
GIT binary patch
literal 520
zcmZQ(U|?WjU|?WmU|<ktfB+C1guy&cFbPorWkM)c1_lNuT)Ma*YQV;V6hXNNJ5U5+
zs$qg~%Q^7bAqX`Tq#C9ZEF=UbA+7^Sg1Io&AeA6N5C%(tLIC6dkOXoFfcRhqq6`cS
z|NsC057Gg`U_lT8Qw>rHk^o@_5vXdIAV?t?^FsXsa~()END)|48mbE<2*O}N5CKyS
bQwf%YsfG!H6oN4ymY4)ffCyP!c7P-SnT-|O

literal 0
HcmV?d00001

diff --git a/test/mpi/init/test0004.dat.2d b/test/mpi/init/test0004.dat.2d
new file mode 100644
index 00000000..379825ba
--- /dev/null
+++ b/test/mpi/init/test0004.dat.2d
@@ -0,0 +1,25 @@
+dimension of world:             2
+
+elliptMesh->macro file name:    ./macro/macro.stand.p16.2d
+elliptMesh->global refinements: 1
+
+ellipt->mesh:                   elliptMesh
+ellipt->dim:                    2
+ellipt->components:             1
+ellipt->polynomial degree[0]:   1
+ 
+ellipt->solver:                 cg
+ellipt->solver->max iteration:  10
+ellipt->solver->tolerance:      1.e-8
+ellipt->solver->info:           10
+ellipt->solver->left precon:    diag
+ellipt->solver->right precon:   no
+
+ellipt->estimator[0]:              0
+ellipt->marker[0]->strategy:       0 
+
+parallel->log main rank:     0
+parallel->pre refine:        0
+parallel->partitioner:       checker
+parallel->multi level test:  1
+parallel->solver:            petsc-feti
diff --git a/test/mpi/init/test0005.dat.2d b/test/mpi/init/test0005.dat.2d
new file mode 100644
index 00000000..379825ba
--- /dev/null
+++ b/test/mpi/init/test0005.dat.2d
@@ -0,0 +1,25 @@
+dimension of world:             2
+
+elliptMesh->macro file name:    ./macro/macro.stand.p16.2d
+elliptMesh->global refinements: 1
+
+ellipt->mesh:                   elliptMesh
+ellipt->dim:                    2
+ellipt->components:             1
+ellipt->polynomial degree[0]:   1
+ 
+ellipt->solver:                 cg
+ellipt->solver->max iteration:  10
+ellipt->solver->tolerance:      1.e-8
+ellipt->solver->info:           10
+ellipt->solver->left precon:    diag
+ellipt->solver->right precon:   no
+
+ellipt->estimator[0]:              0
+ellipt->marker[0]->strategy:       0 
+
+parallel->log main rank:     0
+parallel->pre refine:        0
+parallel->partitioner:       checker
+parallel->multi level test:  1
+parallel->solver:            petsc-feti
diff --git a/test/mpi/src/test0005.cc b/test/mpi/src/test0005.cc
new file mode 100644
index 00000000..a136c26b
--- /dev/null
+++ b/test/mpi/src/test0005.cc
@@ -0,0 +1,66 @@
+#define BOOST_TEST_DYN_LINK
+#define BOOST_TEST_MODULE 0005
+#define BOOST_TEST_NO_MAIN
+
+#include <boost/test/unit_test.hpp>
+#include <AMDiS.h>
+
+using namespace AMDiS;
+using namespace std;
+
+/*
+  - Compares deserialized reference interior boundaries against those computed by the MeshDistributor on 16 ranks.
+*/
+
+
+void boundaryTest(RankToBoundMap &map0, RankToBoundMap &map1)
+{
+  BOOST_CHECK_MESSAGE(map0.size() == map1.size(), 
+		      "Boundaries have different size!\n");
+
+  for (RankToBoundMap::iterator it = map0.begin(); it != map0.end(); ++it) {
+    if (map1.count(it->first) == 0) {
+      MSG("Boundary with rank %d missing!\n", it->first);
+      BOOST_ERROR("");
+    }
+
+    BOOST_CHECK_MESSAGE(it->second.size() == map1[it->first].size(), 
+			"Wrong boundary size!\n");
+
+    for (unsigned int i = 0; i < it->second.size(); i++)
+      BOOST_CHECK_MESSAGE(it->second[i] == map1[it->first][i],
+			  "Boundary is not equal!\n");
+  }
+}
+
+BOOST_AUTO_TEST_CASE(amdis_mpi_feti_multilevel)
+{
+  BOOST_REQUIRE(MPI::COMM_WORLD.Get_size() == 16);
+
+  ProblemStat ellipt("ellipt");
+  ellipt.initialize(INIT_ALL);
+
+  MeshDistributor *meshDist = MeshDistributor::globalMeshDistributor;
+  meshDist->initParallelization();
+  
+  InteriorBoundary testBoundary;  
+  string filename = "data/data0005/interior_boundary_p" + 
+    lexical_cast<string>(MPI::COMM_WORLD.Get_rank()) + ".ser";
+  ifstream myfile(filename.c_str());
+  testBoundary.deserialize(myfile, ellipt.getMesh());
+  myfile.close();
+
+  boundaryTest(testBoundary.getOwn(), meshDist->getIntBoundary().getOwn());
+  boundaryTest(testBoundary.getOther(), meshDist->getIntBoundary().getOther());
+  boundaryTest(testBoundary.getPeriodic(), meshDist->getIntBoundary().getPeriodic());
+}
+
+
+int main(int argc, char **argv)
+{
+  AMDiS::init(argc, argv, "./init/test0005.dat.2d");
+
+  boost::unit_test::unit_test_main(&init_unit_test, argc, argv);
+
+  AMDiS::finalize();
+}
-- 
GitLab