diff --git a/AMDiS/src/AMDiS.h b/AMDiS/src/AMDiS.h
index 2df55bcdca8dd2f8fb20d8f163f62c73a0b51305..b71d46d65d26f40563a040c50a1819dd29293d0c 100644
--- a/AMDiS/src/AMDiS.h
+++ b/AMDiS/src/AMDiS.h
@@ -142,6 +142,7 @@
 #if HAVE_PARALLEL_DOMAIN_AMDIS
 #include "parallel/InteriorBoundary.h"
 #include "parallel/MpiHelper.h"
+#include "parallel/ParallelDebug.h"
 #include "parallel/StdMpi.h"
 
 #if HAVE_PARALLEL_MTL4
diff --git a/AMDiS/src/BoundaryObject.cc b/AMDiS/src/BoundaryObject.cc
index 435fc00285a165ead0979d170f662ebc35c0351a..7890608b3ba2d048817fb7c4ca16baa1e2b313f1 100644
--- a/AMDiS/src/BoundaryObject.cc
+++ b/AMDiS/src/BoundaryObject.cc
@@ -135,4 +135,11 @@ namespace AMDiS {
 	    type == other.type);
   }
 
+  bool AtomicBoundary::operator!=(const AtomicBoundary& other) const
+  {
+    return (rankObj != other.rankObj ||
+	    neighObj != other.neighObj ||
+	    type != other.type);
+  }
+
 }
diff --git a/AMDiS/src/BoundaryObject.h b/AMDiS/src/BoundaryObject.h
index 37f55b542cb4790484f52e773151cf3a40a07d21..d34cda375b7f12a5db7eeda672963d5b61914865 100644
--- a/AMDiS/src/BoundaryObject.h
+++ b/AMDiS/src/BoundaryObject.h
@@ -110,6 +110,8 @@ namespace AMDiS {
 
     bool operator==(const AtomicBoundary& other) const;
 
+    bool operator!=(const AtomicBoundary& other) const;
+
     /// The rank's part of the boundary.
     BoundaryObject rankObj;
 
diff --git a/AMDiS/src/Mesh.cc b/AMDiS/src/Mesh.cc
index 451b0703f2b82005a193e105fc87d239a4876ff2..a7f7862834e3deec6f16799abbfa5ca80a140711 100644
--- a/AMDiS/src/Mesh.cc
+++ b/AMDiS/src/Mesh.cc
@@ -1498,4 +1498,16 @@ namespace AMDiS {
 
     Element::deletedDOFs.clear();
   }
+
+
+  void Mesh::getElementIndexMap(map<int, Element*> &elIndexMap)
+  {
+    TraverseStack stack;
+    ElInfo *elInfo = stack.traverseFirst(this, -1, Mesh::CALL_EVERY_EL_PREORDER);
+    while (elInfo) {
+      Element *el = elInfo->getElement();
+      elIndexMap[el->getIndex()] = el;
+      elInfo = stack.traverseNext(elInfo);
+    }
+  }
 }
diff --git a/AMDiS/src/Mesh.h b/AMDiS/src/Mesh.h
index 6d727bdfbcb54178422360f2282c2d0bc363e8ef..1bf6e97404efc6bdb9e13f9315fd22c5611dacc8 100644
--- a/AMDiS/src/Mesh.h
+++ b/AMDiS/src/Mesh.h
@@ -629,6 +629,10 @@ namespace AMDiS {
     }
 #endif
 
+    /// Creates a map for all elements in mesh that maps from element indices
+    /// to the corresponding pointers.
+    void getElementIndexMap(map<int, Element*> &elIndexMap);
+
   public:
     ///
     static const Flag FILL_NOTHING;
diff --git a/AMDiS/src/Serializer.h b/AMDiS/src/Serializer.h
index 4df563aab4a7973549d66e118fec35a9fb080ef5..531d8ba50089e6d3e3552ca85c8ec42ccf6502f8 100644
--- a/AMDiS/src/Serializer.h
+++ b/AMDiS/src/Serializer.h
@@ -37,6 +37,8 @@
 
 namespace AMDiS {
 
+  using namespace std;
+
   template<typename ProblemType>
   class Serializer : public FileWriterInterface
   {
@@ -64,7 +66,7 @@ namespace AMDiS {
     }
 
 
-    Serializer(ProblemType *prob, std::string filename, int writeEveryIth)
+    Serializer(ProblemType *prob, string filename, int writeEveryIth)
       : name(filename),
 	problem(prob),
 	tsModulo(writeEveryIth),
@@ -119,10 +121,10 @@ namespace AMDiS {
       }
 
 #if HAVE_PARALLEL_DOMAIN_AMDIS
-      filename += ".p" + boost::lexical_cast<std::string>(MPI::COMM_WORLD.Get_rank());
+      filename += ".p" + boost::lexical_cast<string>(MPI::COMM_WORLD.Get_rank());
 #endif
 
-      std::ofstream out(filename.c_str());
+      ofstream out(filename.c_str());
       TEST_EXIT(out.is_open())("Cannot open serialization file!\n");
       out.write(reinterpret_cast<const char*>(&amdisRevisionNumber), sizeof(int));
       problem->serialize(out);
@@ -134,7 +136,7 @@ namespace AMDiS {
 
   protected:
     /// Name of file to which the problem is serialized.
-    std::string name;
+    string name;
 
     /// Pointer to the problem.
     ProblemType *problem;
@@ -159,40 +161,40 @@ namespace AMDiS {
   namespace SerUtil {
 
     template<typename T>
-    void serialize(std::ostream& out, T& data)
+    void serialize(ostream& out, T& data)
     {
       out.write(reinterpret_cast<const char*>(&data), sizeof(T));
     }   
 
     template<typename T>
-    void deserialize(std::istream& in, T& data)
+    void deserialize(istream& in, T& data)
     {
       in.read(reinterpret_cast<char*>(&data), sizeof(T));
     }   
 
 
 
-    void serialize(std::ostream& out, DofEdge& data);
+    void serialize(ostream& out, DofEdge& data);
 
-    void deserialize(std::istream& in, DofEdge& data);
+    void deserialize(istream& in, DofEdge& data);
 
 
 
-    void serialize(std::ostream& out, DofFace& data);
+    void serialize(ostream& out, DofFace& data);
 
-    void deserialize(std::istream& in, DofFace& data);
+    void deserialize(istream& in, DofFace& data);
 
 
 
     template<typename T, typename U>
-    void serialize(std::ostream& out, std::pair<T, U>& data)
+    void serialize(ostream& out, pair<T, U>& data)
     {
       serialize(out, data.first);
       serialize(out, data.second);
     }
 
     template<typename T, typename U>
-    void deserialize(std::istream& in, std::pair<T, U>& data)
+    void deserialize(istream& in, pair<T, U>& data)
     {
       deserialize(in, data.first);
       deserialize(in, data.second);
@@ -201,11 +203,11 @@ namespace AMDiS {
 
 
     template<typename T>
-    void serialize(std::ostream& out, std::vector<T>& data)
+    void serialize(ostream& out, vector<T>& data)
     {
       int vecSize = data.size();
       serialize(out, vecSize);
-      for (typename std::vector<T>::iterator it = data.begin(); 
+      for (typename vector<T>::iterator it = data.begin(); 
 	   it != data.end(); ++it) {
 	T v = *it;
 	serialize(out, v);
@@ -213,7 +215,7 @@ namespace AMDiS {
     }
 
     template<typename T>
-    void deserialize(std::istream& in, std::vector<T>& data)
+    void deserialize(istream& in, vector<T>& data)
     {
       data.clear();
 
@@ -231,7 +233,7 @@ namespace AMDiS {
 
 
     template<typename T>
-    void serialize(std::ostream& out, std::set<T>& data)
+    void serialize(ostream& out, std::set<T>& data)
     {
       int setSize = data.size();
       serialize(out, setSize);
@@ -243,7 +245,7 @@ namespace AMDiS {
     }
 
     template<typename T>
-    void deserialize(std::istream& in, std::set<T>& data)
+    void deserialize(istream& in, std::set<T>& data)
     {
       data.clear();
 
@@ -260,12 +262,12 @@ namespace AMDiS {
 
 
     template<typename T1, typename T2>
-    void serialize(std::ostream& out, std::map<T1, T2>& data)
+    void serialize(ostream& out, map<T1, T2>& data)
     {
       int mapSize = data.size();
       serialize(out, mapSize);
 
-      for (typename std::map<T1,T2>::iterator it = data.begin(); 
+      for (typename map<T1,T2>::iterator it = data.begin(); 
 	   it != data.end(); ++it) {
 	T1 v1 = it->first;
 	T2 v2 = it->second;
@@ -275,7 +277,7 @@ namespace AMDiS {
     }
 
     template<typename T1, typename T2>
-    void deserialize(std::istream& in, std::map<T1, T2>& data)
+    void deserialize(istream& in, map<T1, T2>& data)
     {
       data.clear();
 
diff --git a/AMDiS/src/io/FileWriter.hh b/AMDiS/src/io/FileWriter.hh
index 58aff48d157094e655b3f071dd570a64ef9a340e..07f78fccde85e5b2391da9fb006c5e9f74166954 100644
--- a/AMDiS/src/io/FileWriter.hh
+++ b/AMDiS/src/io/FileWriter.hh
@@ -174,10 +174,10 @@ namespace AMDiS {
 
   template<typename T>
   void FileWriterTemplated<T>::writeFiles(AdaptInfo *adaptInfo,
-			      bool force,
-			      int level,
-			      Flag flag,
-			      bool (*writeElem)(ElInfo*))
+					  bool force,
+					  int level,
+					  Flag flag,
+					  bool (*writeElem)(ElInfo*))
   {
     FUNCNAME("FileWriterTemplated<T>::writeFiles()");
 
@@ -186,19 +186,19 @@ namespace AMDiS {
 
     // Containers, which store the data to be written;
     std::vector<DataCollector<T>*> dataCollectors(solutionVecs.size());
-
+
     if (writeElem) {
       for (int i = 0; i < static_cast<int>(dataCollectors.size()); i++)
 	dataCollectors[i] = new DataCollector<T>(feSpace, solutionVecs[i], 
-					      level, flag, writeElem);
+						 level, flag, writeElem);
     } else {
       for (int i = 0; i < static_cast<int>(dataCollectors.size()); i++)
 	dataCollectors[i] = new DataCollector<T>(feSpace, solutionVecs[i], 
-					      traverseLevel, 
-					      flag | traverseFlag, 
-					      writeElement);
+						 traverseLevel, 
+						 flag | traverseFlag, 
+						 writeElement);
     }
-
+
     std::string fn = filename;
 
 #if HAVE_PARALLEL_DOMAIN_AMDIS
diff --git a/AMDiS/src/parallel/DofComm.cc b/AMDiS/src/parallel/DofComm.cc
index 53d241c45b632d495cc64fc45e8933191f52ea26..52660d1b4c8c0b2158ef379821de368e10a4cab8 100644
--- a/AMDiS/src/parallel/DofComm.cc
+++ b/AMDiS/src/parallel/DofComm.cc
@@ -18,6 +18,30 @@ namespace AMDiS {
 
   using namespace std;
 
+
+  void DofComm::init(int level, 
+		     MeshLevelData &ld,
+		     vector<const FiniteElemSpace*> &fe)
+  {
+    FUNCNAME("DofComm::init()");
+    
+    meshLevel = level;
+    levelData = &ld;
+    feSpaces = fe;
+    
+    nLevel = levelData->getLevelNumber() - meshLevel;
+    TEST_EXIT_DBG(nLevel >= 1)("Should not happen!\n");
+
+    sendDofs.clear();
+    recvDofs.clear();
+    periodicDofs.clear();
+    
+    sendDofs.resize(nLevel);
+    recvDofs.resize(nLevel);
+    periodicDofs.resize(nLevel);
+  }
+
+
   void DofComm::create(InteriorBoundary &boundary)
   {
     createContainer(boundary.getOwn(), sendDofs);
@@ -28,6 +52,8 @@ namespace AMDiS {
   void DofComm::createContainer(RankToBoundMap &boundary,
 				LevelDataType &data)
   {
+    FUNCNAME("DofComm::createContainer()");
+
     // === Fill data. ===
 
     for (unsigned int i = 0; i < feSpaces.size(); i++)
diff --git a/AMDiS/src/parallel/DofComm.h b/AMDiS/src/parallel/DofComm.h
index 7e3a7bea4ec2890de2675de770b5b6998054bfe9..ffd747dd82951bb7f04ceb54c3f5a6caa23c7eea 100644
--- a/AMDiS/src/parallel/DofComm.h
+++ b/AMDiS/src/parallel/DofComm.h
@@ -38,7 +38,10 @@ namespace AMDiS {
     DofComm() 
       : recvDofs(1),
 	sendDofs(1),
-	periodicDofs(0)
+	periodicDofs(0),
+	meshLevel(-1),
+	nLevel(0),
+	levelData(NULL)
     {}
     
     typedef map<const FiniteElemSpace*, DofContainer> FeMapType;
@@ -48,23 +51,9 @@ namespace AMDiS {
     // meshLevel: map[rank -> map[feSpace -> DofContainer]]
     typedef vector<DataType> LevelDataType;
 
-    void init(int n, vector<const FiniteElemSpace*> &fe)
-    {
-      FUNCNAME("DofComm::init()");
-
-      TEST_EXIT_DBG(n >= 1)("Should not happen!\n");
-
-      nLevel = n;
-      feSpaces = fe;
-
-      sendDofs.clear();
-      recvDofs.clear();
-      periodicDofs.clear();
-
-      sendDofs.resize(nLevel);
-      recvDofs.resize(nLevel);
-      periodicDofs.resize(nLevel);
-    }
+    void init(int level, 
+	      MeshLevelData &levelData, 
+	      vector<const FiniteElemSpace*> &fe);
 
     void create(InteriorBoundary &boundary);
 
@@ -119,8 +108,12 @@ namespace AMDiS {
     /// considered here. 
     LevelDataType periodicDofs;
 
+    int meshLevel;
+
     int nLevel;
 
+    MeshLevelData *levelData;
+
     vector<const FiniteElemSpace*> feSpaces;
 
     friend class Iterator;
diff --git a/AMDiS/src/parallel/ElementObjectDatabase.cc b/AMDiS/src/parallel/ElementObjectDatabase.cc
index 5ab67ef92d18f369703b75139a2ad37feec7542b..930b5fef6ea9104660c61c6eea8ea700644dc441 100644
--- a/AMDiS/src/parallel/ElementObjectDatabase.cc
+++ b/AMDiS/src/parallel/ElementObjectDatabase.cc
@@ -579,35 +579,23 @@ namespace AMDiS {
     TEST_EXIT_DBG(macroElementRankMap)("Should not happen!\n");
 
     int owner = -1;
-
-    switch (iterGeoPos) {
-    case VERTEX:
-      {
-	vector<ElementObjectData>& vertexData = vertexElements[vertexIter->first];
-	for (vector<ElementObjectData>::iterator it = vertexData.begin();
-	     it != vertexData.end(); ++it)
-	  owner = std::max(owner, (*macroElementRankMap)[it->elIndex]);
-      }
-      break;
-    case EDGE:
-      {
-	vector<ElementObjectData>& edgeData = edgeElements[edgeIter->first];
-	for (vector<ElementObjectData>::iterator it = edgeData.begin();
-	     it != edgeData.end(); ++it)
-	  owner = std::max(owner, (*macroElementRankMap)[it->elIndex]);
-      }
-      break;
-    case FACE:
-      {
-      	vector<ElementObjectData>& faceData = faceElements[faceIter->first];
-	for (vector<ElementObjectData>::iterator it = faceData.begin();
-	     it != faceData.end(); ++it)
-	  owner = std::max(owner, (*macroElementRankMap)[it->elIndex]);
-      }
-      break;
-    default:
-	ERROR_EXIT("Should not happen!\n");
-    }    
+    vector<ElementObjectData> *objData = NULL;
+
+    // Keep the default branch from the old code: without it objData
+    // would stay uninitialized and be dereferenced below.
+    switch (iterGeoPos) {
+    case VERTEX:
+      objData = &(vertexElements[vertexIter->first]); break;
+    case EDGE:
+      objData = &(edgeElements[edgeIter->first]); break;
+    case FACE:
+      objData = &(faceElements[faceIter->first]); break;
+    default:
+      ERROR_EXIT("Should not happen!\n");
+    }
+
+    for (vector<ElementObjectData>::iterator it = objData->begin();
+	 it != objData->end(); ++it)
+      owner = std::max(owner, (*macroElementRankMap)[it->elIndex]);
 
     return owner;
   }
@@ -641,10 +629,12 @@ namespace AMDiS {
       }
       break;
     case FACE:
-      vector<ElementObjectData>& faceData = faceElements[faceIter->first];
-      for (vector<ElementObjectData>::iterator it = faceData.begin();
-	   it != faceData.end(); ++it)
-	ranksInLevel[0].insert((*macroElementRankMap)[it->elIndex]);
+      {
+	vector<ElementObjectData>& faceData = faceElements[faceIter->first];
+	for (vector<ElementObjectData>::iterator it = faceData.begin();
+	     it != faceData.end(); ++it)
+	  ranksInLevel[0].insert((*macroElementRankMap)[it->elIndex]);
+      }
       break;
     default:
       ERROR_EXIT("Should not happen!\n");
diff --git a/AMDiS/src/parallel/InteriorBoundary.cc b/AMDiS/src/parallel/InteriorBoundary.cc
index b7547cc315fbea5d3f7b92aaa611beaa1300516c..863a5f79ed7533d41bab791380782c45ec256e62 100644
--- a/AMDiS/src/parallel/InteriorBoundary.cc
+++ b/AMDiS/src/parallel/InteriorBoundary.cc
@@ -19,6 +19,9 @@
 
 namespace AMDiS {
 
+  using namespace std;
+
+
   void InteriorBoundary::create(MPI::Intracomm &mpiComm,
 				ElementObjectDatabase &elObjDb)
   { 
@@ -345,13 +348,19 @@ namespace AMDiS {
   }
 
 
-  void InteriorBoundary::serialize(std::ostream &out)
+  void InteriorBoundary::serialize(ostream &out)
   {
-    FUNCNAME("InteriorBoundary::serialize()");
+    serialize(out, own);
+    serialize(out, other);
+    serialize(out, periodic);
+  }
 
-    ERROR_EXIT("REWRITE TO MULTILEVEL STRUCTURE!\n");
 
-#if 0
+  void InteriorBoundary::serialize(ostream &out,
+				   RankToBoundMap& boundary)
+  {
+    FUNCNAME("InteriorBoundary::serialize()");
+
     int mSize = boundary.size();
     SerUtil::serialize(out, mSize);
     for (RankToBoundMap::iterator it = boundary.begin(); 
@@ -380,18 +389,26 @@ namespace AMDiS {
 	SerUtil::serialize(out, bound.type);
       }
     }
-#endif
   }
 
 
-  void InteriorBoundary::deserialize(std::istream &in, 
-				     std::map<int, Element*> &elIndexMap)
+  void InteriorBoundary::deserialize(istream &in, Mesh *mesh)
   {
-    FUNCNAME("InteriorBoundary::deserialize()");
+    map<int, Element*> elIndexMap;
+    mesh->getElementIndexMap(elIndexMap);
+
+    deserialize(in, own, elIndexMap);
+    deserialize(in, other, elIndexMap);
+    deserialize(in, periodic, elIndexMap);
+  }
 
-    ERROR_EXIT("REWRITE TO MULTILEVEL STRUCTURE!\n");
 
-#if 0
+  void InteriorBoundary::deserialize(istream &in, 
+				     RankToBoundMap& boundary,
+				     map<int, Element*> &elIndexMap)
+  {
+    FUNCNAME("InteriorBoundary::deserialize()");
+
     int mSize = 0;
     SerUtil::deserialize(in, mSize);
     for (int i = 0; i < mSize; i++) {
@@ -431,14 +448,13 @@ namespace AMDiS {
 
 	// For the case of periodic interior boundaries, a rank may have an
 	// boundary with itself. In this case, also the pointer to the neighbour
-	//  object must be set correctly.
+	// object must be set correctly.
 	if (elIndexMap.count(bound.neighObj.elIndex))
 	  bound.neighObj.el = elIndexMap[bound.neighObj.elIndex];
 	else
 	  bound.neighObj.el = NULL;
       }
     }
-#endif
   }
 
 
@@ -466,7 +482,7 @@ namespace AMDiS {
   }
 
 
-  void InteriorBoundary::serializeExcludeList(std::ostream &out, 
+  void InteriorBoundary::serializeExcludeList(ostream &out, 
 					      ExcludeList &list)
   {
     int size = list.size();
@@ -478,7 +494,7 @@ namespace AMDiS {
   }
 
 
-  void InteriorBoundary::deserializeExcludeList(std::istream &in, 
+  void InteriorBoundary::deserializeExcludeList(istream &in, 
 						ExcludeList &list)
   {
     int size = 0;
@@ -492,7 +508,7 @@ namespace AMDiS {
 
       SerUtil::deserialize(in, a);
       SerUtil::deserialize(in, b);
-      list.push_back(std::make_pair(a, b));
+      list.push_back(make_pair(a, b));
     }
   }
 
diff --git a/AMDiS/src/parallel/InteriorBoundary.h b/AMDiS/src/parallel/InteriorBoundary.h
index 0695df09d9b8badf6917fc03a8d9885b77db10f2..293b854be6470fb34134e9ee0fda8928d2684d56 100644
--- a/AMDiS/src/parallel/InteriorBoundary.h
+++ b/AMDiS/src/parallel/InteriorBoundary.h
@@ -44,12 +44,6 @@ namespace AMDiS {
     void create(MPI::Intracomm &mpiComm,
 		ElementObjectDatabase &elObjDb);
 
-    /// Writes this object to a file.
-    void serialize(ostream &out);
-
-    /// Reads the state of an interior boundary from a file.
-    void deserialize(istream &in, map<int, Element*> &elIndexMap);
-
     RankToBoundMap& getOwn()
     {
       return own;
@@ -70,6 +64,12 @@ namespace AMDiS {
       return static_cast<bool>(periodic.size());
     }
 
+    /// Writes this object to a file.
+    void serialize(ostream &out);
+
+    /// Reads the state of an interior boundary from a file.
+    void deserialize(istream &in, Mesh *mesh);
+
   private:
     AtomicBoundary& getNewOwn(int rank);
 
@@ -77,6 +77,12 @@ namespace AMDiS {
 
     AtomicBoundary& getNewPeriodic(int rank);
 
+    void serialize(ostream &out, RankToBoundMap& boundary);
+
+    void deserialize(istream &in, 
+		     RankToBoundMap& boundary,
+		     map<int, Element*> &elIndexMap);
+
     void serializeExcludeList(ostream &out, ExcludeList &list);
 
     void deserializeExcludeList(istream &in, ExcludeList &list);    
diff --git a/AMDiS/src/parallel/MeshDistributor.cc b/AMDiS/src/parallel/MeshDistributor.cc
index 8dc62837e905ea9f82719b362d5665d355d1462f..fa91fcb0e8e473394e1426037954881934e02f3e 100644
--- a/AMDiS/src/parallel/MeshDistributor.cc
+++ b/AMDiS/src/parallel/MeshDistributor.cc
@@ -228,12 +228,8 @@ namespace AMDiS {
     createMeshLevelStructure();
 
     // Create interior boundary information.
-    createInteriorBoundaryInfo();
+    createInteriorBoundary(true);
 
-#if (DEBUG != 0)    
-    ParallelDebug::printBoundaryInfo(*this);
-#endif
-    
     // === Remove neighbourhood relations due to periodic bounday conditions. ===
 
     for (deque<MacroElement*>::iterator it = mesh->firstMacroElement();
@@ -1438,7 +1434,7 @@ namespace AMDiS {
     mesh->dofCompress();
     partitioner->createPartitionMap(partitionMap);
 
-    updateInteriorBoundaryInfo();
+    createInteriorBoundary(false);
     updateLocalGlobalNumbering();
 
     
@@ -1509,19 +1505,12 @@ namespace AMDiS {
   }
 
 
-  void MeshDistributor::createInteriorBoundaryInfo()
+  void MeshDistributor::createInteriorBoundary(bool firstCall)
   {
-    FUNCNAME("MeshDistributor::createInteriorBoundaryInfo()");
-
-    elObjDb.create(partitionMap, levelData);
-    elObjDb.updateRankData();
-    intBoundary.create(mpiComm, elObjDb);
-  }
+    FUNCNAME("MeshDistributor::createInteriorBoundary()");
 
-
-  void MeshDistributor::updateInteriorBoundaryInfo()
-  {
-    FUNCNAME("MeshDistributor::updateInteriorBoundaryInfo()");
+    if (firstCall)
+      elObjDb.create(partitionMap, levelData);
 
     elObjDb.updateRankData();
     intBoundary.create(mpiComm, elObjDb);
@@ -1536,17 +1525,18 @@ namespace AMDiS {
   {
     FUNCNAME("MeshDistributor::createBoundaryDofs()");
 
-    dofComm.init(levelData.getLevelNumber(), feSpaces);
-    dofComm.create(intBoundary);
-
-    createBoundaryDofInfo();
-  }
+    // === Create DOF communicator. ===
 
+    dofComm.init(0, levelData, feSpaces);
+    dofComm.create(intBoundary);
 
-  void MeshDistributor::createBoundaryDofInfo()
-  {
-    FUNCNAME("MeshDistributor::createBoundaryDofInfo()");
+    if (levelData.getLevelNumber() > 1) {
+      dofCommSd.init(1, levelData, feSpaces);
+      dofCommSd.create(intBoundary);
+    }
 
+    // === If requested, create more information on communication DOFs. ===
+    
     if (!createBoundaryDofFlag.isSet(BOUNDARY_SUBOBJ_SORTED))
       return;
 
@@ -1960,24 +1950,17 @@ namespace AMDiS {
     SerUtil::deserialize(in, elemWeights);
     SerUtil::deserialize(in, partitionMap);
 
-    // Create two maps: one from from element indices to the corresponding element 
-    // pointers, and one map from Dof indices to the corresponding dof pointers.
-    map<int, Element*> elIndexMap;
+    // Create a map from DOF indices to the corresponding  DOF pointers.
     map<const FiniteElemSpace*, map<int, const DegreeOfFreedom*> > dofIndexMap;
     for (unsigned int i = 0; i < feSpaces.size(); i++) {
       ElementDofIterator elDofIter(feSpaces[i]);
       TraverseStack stack;
-      ElInfo *elInfo = stack.traverseFirst(mesh, -1, Mesh::CALL_EVERY_EL_PREORDER);
+      ElInfo *elInfo = stack.traverseFirst(mesh, -1, Mesh::CALL_LEAF_EL);
       while (elInfo) {
-	Element *el = elInfo->getElement();
-	elIndexMap[el->getIndex()] = el;
-	
-	if (el->isLeaf()) {
-	  elDofIter.reset(el);
-	  do {
-	    dofIndexMap[feSpaces[i]][elDofIter.getDof()] = elDofIter.getDofPtr();
-	  } while (elDofIter.next());      
-	}
+	elDofIter.reset(elInfo->getElement());
+	do {
+	  dofIndexMap[feSpaces[i]][elDofIter.getDof()] = elDofIter.getDofPtr();
+	} while (elDofIter.next());      
 	
 	elInfo = stack.traverseNext(elInfo);
       }
@@ -1985,7 +1968,7 @@ namespace AMDiS {
 
     elObjDb.deserialize(in);
    
-    intBoundary.deserialize(in, elIndexMap);
+    intBoundary.deserialize(in, mesh);
 
     dofComm.deserialize(in, dofIndexMap);
 
diff --git a/AMDiS/src/parallel/MeshDistributor.h b/AMDiS/src/parallel/MeshDistributor.h
index 4299edf77c4ff976c0240c57e5215ecb1f770429..24dfbd4fc0d359222786785efd72ca26fc01bc8a 100644
--- a/AMDiS/src/parallel/MeshDistributor.h
+++ b/AMDiS/src/parallel/MeshDistributor.h
@@ -162,6 +162,11 @@ namespace AMDiS {
       return dofComm;
     }
 
+    InteriorBoundary& getIntBoundary()
+    {
+      return intBoundary;
+    }
+
     inline long getLastMeshChangeIndex()
     {
       return lastMeshChangeIndex;
@@ -282,14 +287,10 @@ namespace AMDiS {
 
     /// Determines the interior boundaries, i.e. boundaries between ranks, and
     /// stores all information about them in \ref interiorBoundary.
-    void createInteriorBoundaryInfo();
-
-    void updateInteriorBoundaryInfo();
+    void createInteriorBoundary(bool firstCall);
 
     void createBoundaryDofs();
 
-    void createBoundaryDofInfo();
-
     /// Removes all macro elements from the mesh that are not part of ranks 
     /// partition.
     void removeMacroElements();
@@ -472,6 +473,8 @@ namespace AMDiS {
 
     DofComm dofComm;
 
+    DofComm dofCommSd;
+
     PeriodicMap periodicMap;
 
     /// This set of values must be interchanged between ranks when the mesh is 
diff --git a/AMDiS/src/parallel/ParallelDebug.cc b/AMDiS/src/parallel/ParallelDebug.cc
index a8ed8ee6190e3d509db64dd192e05587edc445eb..b1ff9947c465460e7a5e425ab766c45d11a9b5a6 100644
--- a/AMDiS/src/parallel/ParallelDebug.cc
+++ b/AMDiS/src/parallel/ParallelDebug.cc
@@ -715,16 +715,19 @@ namespace AMDiS {
   }
 
 
-  void ParallelDebug::printBoundaryInfo(MeshDistributor &pdb)
+  void ParallelDebug::printBoundaryInfo(MeshDistributor &pdb, 
+					int level, 
+					bool force)
   {
     FUNCNAME("ParallelDebug::printBoundaryInfo()");
 
     int tmp = 0;
     Parameters::get("parallel->debug->print boundary info", tmp);
-    if (tmp <= 0)
+    if (tmp <= 0 && force == false)
       return;
 
-    for (InteriorBoundary::iterator it(pdb.intBoundary.own); !it.end(); ++it) {
+    for (InteriorBoundary::iterator it(pdb.intBoundary.own, level); 
+	 !it.end(); ++it) {
       MSG("Rank owned boundary with rank %d: \n", it.getRank());
       MSG("  ranks obj-ind: %d  sub-obj: %d   ith-obj: %d\n",
 	  it->rankObj.elIndex, it->rankObj.subObj, it->rankObj.ithObj);
@@ -732,7 +735,8 @@ namespace AMDiS {
 	  it->neighObj.elIndex, it->neighObj.subObj, it->neighObj.ithObj);
     }
 
-    for (InteriorBoundary::iterator it(pdb.intBoundary.other); !it.end(); ++it) {
+    for (InteriorBoundary::iterator it(pdb.intBoundary.other, level); 
+	 !it.end(); ++it) {
       MSG("Other owned boundary with rank %d: \n", it.getRank());
       MSG("  ranks obj-ind: %d  sub-obj: %d   ith-obj: %d\n",
 	  it->rankObj.elIndex, it->rankObj.subObj, it->rankObj.ithObj);
@@ -740,7 +744,8 @@ namespace AMDiS {
 	  it->neighObj.elIndex, it->neighObj.subObj, it->neighObj.ithObj);
     }
 
-    for (InteriorBoundary::iterator it(pdb.intBoundary.periodic); !it.end(); ++it) {
+    for (InteriorBoundary::iterator it(pdb.intBoundary.periodic, level); 
+	 !it.end(); ++it) {
       MSG("Periodic boundary (ID %d) with rank %d: \n", 
 	  it->type, it.getRank());
       MSG("  ranks obj-ind: %d  sub-obj: %d   ith-obj: %d\n",
diff --git a/AMDiS/src/parallel/ParallelDebug.h b/AMDiS/src/parallel/ParallelDebug.h
index 5536f8024572352a09b1febbe479b376e4d6ecab..ee87bfc4b19ab993bcaf5308e8baba2b4603680a 100644
--- a/AMDiS/src/parallel/ParallelDebug.h
+++ b/AMDiS/src/parallel/ParallelDebug.h
@@ -62,22 +62,22 @@ namespace AMDiS {
 				     const FiniteElemSpace *feSpace);
 
     /** \brief
-     * This function is used for debugging only. It traverses all interior boundaries
-     * and compares the DOF indices on them with the DOF indices of the boundarys
-     * neighbours. The function fails, when DOF indices on an interior boundary do
-     * not fit together.
+     * This function is used for debugging only. It traverses all interior
+     * boundaries and compares the DOF indices on them with the DOF indices of
+     * the boundary's neighbours. The function fails when DOF indices on an
+     * interior boundary do not fit together.
      *
      * \param[in]  pdb           Parallel problem definition used for debugging.
-     * \param[in]  printCoords   If true, the coords of all common dofs are printed
-     *                           to the screen.
+     * \param[in]  printCoords   If true, the coords of all common dofs are
+     *                           printed to the screen.
      */
     static void testCommonDofs(MeshDistributor &pdb, bool printCoords = false);
 
     /** \brief
-     * This function is used for debugging only. It checks if on all ranks DOFs with
-     * the same coordinates have the same global index. For this, the function genartes
-     * on all ranks a list of all DOFs with their coordinates and global indices and 
-     * sends the list to all neighbouring ranks.
+     * This function is used for debugging only. It checks if on all ranks DOFs
+     * with the same coordinates have the same global index. For this, the
+     * function generates on all ranks a list of all DOFs with their coordinates
+     * and global indices and sends the list to all neighbouring ranks.
      *
      * \param[in]  pdb           Parallel problem definition used for debugging.
      */
@@ -122,14 +122,14 @@ namespace AMDiS {
     static void printMapPeriodic(MeshDistributor &pdb, int rank = -1);
 
     /** \brief
-     * This function is used for debugging only. It prints information about dofs
+     * This function is used for debugging only. It prints information about DOFs
      * in rank's partition.
      *
      * \param[in]  pdb           Parallel problem definition used for debugging.
-     * \param[in]  rank          If specified, only the information from the given 
-     *                           rank is printed.
-     * \param[in]  rankDofs      List of all dofs in ranks partition that are owned 
-     *                           by rank.
+     * \param[in]  rank          If specified, only the information from the
+     *                           given rank is printed.
+     * \param[in]  rankDofs      List of all dofs in ranks partition that are
+     *                           owned by rank.
      * \param[in]  rankAllDofs   List of all dofs in ranks partition.
      */
     static void printRankDofs(MeshDistributor &pdb,
@@ -138,28 +138,38 @@ namespace AMDiS {
 			      DofContainer& rankAllDofs);
 
     /** \brief
-     * This functions prints all information about all interior boundaries on all ranks.
+     * This functions prints all information about all interior boundaries on 
+     * all ranks.
      *
      * \param[in]  pdb           Parallel problem definition used for debugging.
+     * \param[in]  level         Mesh level number for which the boundary should
+     *                           be printed.
+     * \param[in]  force         If true, the information is always printed to
+     *                           screen. Otherwise, this is done only if AMDiS
+     *                           is compiled in debug mode or if the init file
+     *                           parameter "parallel->debug->print boundary info"
+     *                           is set.
      */
-    static void printBoundaryInfo(MeshDistributor &pdb);
+    static void printBoundaryInfo(MeshDistributor &pdb, 
+				  int level = 0, 
+				  bool force = false);
 
     static void writeDebugFile(MeshDistributor &pdb,
 			       std::string prefix, std::string postfix);
 
     /** \brief
-     * This functions create a Paraview file with the macro mesh where the elements
-     * are colored by the partition they are part of.
+     * This functions create a Paraview file with the macro mesh where the
+     * elements are colored by the partition they are part of.
      */
     static void writePartitioning(MeshDistributor &pdb, string filename);
 
     /** \brief
-     * The mesh is written to a value and all values are assigned by the rank number
-     * where the vertex is contained in.
+     * The mesh is written to a value and all values are assigned by the rank
+     * number where the vertex is contained in.
      *
      * \param[in]  filename    Name of the output file without extension (.vtu).
-     * \param[in]  counter     Counter index. If not negative, this number is added
-     *                         to the filename.
+     * \param[in]  counter     Counter index. If not negative, this number is
+     *                         added to the filename.
      * \param[in]  feSpace
      */
     static void writePartitioningFile(std::string filename,
diff --git a/test/mpi/data/data0002a b/test/mpi/data/data0002a
new file mode 100644
index 0000000000000000000000000000000000000000..6e844b2f4343946a869d7dbceb0f92a8a4a0dcff
--- /dev/null
+++ b/test/mpi/data/data0002a
@@ -0,0 +1,17 @@
+# rank_id primals duals dofs start_dofs overall_dofs
+0 0 2 4 0 81
+1 1 3 4 4 81
+2 1 3 4 8 81
+3 1 2 6 12 81
+4 1 3 4 18 81
+5 1 4 4 22 81
+6 1 4 4 26 81
+7 2 3 6 30 81
+8 1 3 4 36 81
+9 1 4 4 40 81
+10 1 4 4 44 81
+11 2 3 6 48 81
+12 1 2 6 54 81
+13 2 3 6 60 81
+14 2 3 6 66 81
+15 3 2 9 72 81
diff --git a/test/mpi/data/data0002b b/test/mpi/data/data0002b
new file mode 100644
index 0000000000000000000000000000000000000000..cac4170557b313346d907e6ba17668cf46b93094
--- /dev/null
+++ b/test/mpi/data/data0002b
@@ -0,0 +1,16 @@
+0 0 30 256 0 4225
+1 1 45 256 256 4225
+2 1 45 256 512 4225
+3 1 30 272 768 4225
+4 1 45 256 1040 4225
+5 1 60 256 1296 4225
+6 1 60 256 1552 4225
+7 2 45 272 1808 4225
+8 1 45 256 2080 4225
+9 1 60 256 2336 4225
+10 1 60 256 2592 4225
+11 2 45 272 2848 4225
+12 1 30 272 3120 4225
+13 2 45 272 3392 4225
+14 2 45 272 3664 4225
+15 3 30 289 3936 4225
diff --git a/test/mpi/data/data0004a b/test/mpi/data/data0004a
new file mode 100644
index 0000000000000000000000000000000000000000..99136fa9bfdea59e4a996b618f4a4124c781a0eb
--- /dev/null
+++ b/test/mpi/data/data0004a
@@ -0,0 +1,33 @@
+# rank_id level_id send_dofs recv_dofs
+0 0 0 3
+0 1 0 0
+1 0 1 3
+1 1 0 2
+2 0 1 3
+2 1 1 1
+3 0 1 2
+3 1 0 0
+4 0 1 3
+4 1 0 2
+5 0 1 3
+5 1 0 3
+6 0 1 3
+6 1 1 2
+7 0 2 2
+7 1 0 2
+8 0 1 3
+8 1 1 1
+9 0 1 3
+9 1 1 2
+10 0 1 3
+10 1 1 2
+11 0 2 2
+11 1 2 0
+12 0 1 2
+12 1 0 0
+13 0 2 2
+13 1 0 2
+14 0 2 2
+14 1 2 0
+15 0 3 0
+15 1 0 0
diff --git a/test/mpi/data/data0005/interior_boundary_p0.ser b/test/mpi/data/data0005/interior_boundary_p0.ser
new file mode 100644
index 0000000000000000000000000000000000000000..c230d070cb5668e1c088410570479ed22a04ef0d
Binary files /dev/null and b/test/mpi/data/data0005/interior_boundary_p0.ser differ
diff --git a/test/mpi/data/data0005/interior_boundary_p1.ser b/test/mpi/data/data0005/interior_boundary_p1.ser
new file mode 100644
index 0000000000000000000000000000000000000000..e761ef7007126b2425b681804a5be5060226477d
Binary files /dev/null and b/test/mpi/data/data0005/interior_boundary_p1.ser differ
diff --git a/test/mpi/data/data0005/interior_boundary_p10.ser b/test/mpi/data/data0005/interior_boundary_p10.ser
new file mode 100644
index 0000000000000000000000000000000000000000..9b96093fcea131e02e930cce55756f5f398c58c4
Binary files /dev/null and b/test/mpi/data/data0005/interior_boundary_p10.ser differ
diff --git a/test/mpi/data/data0005/interior_boundary_p11.ser b/test/mpi/data/data0005/interior_boundary_p11.ser
new file mode 100644
index 0000000000000000000000000000000000000000..b609dde15d8df081332b8559196ce595e8e34085
Binary files /dev/null and b/test/mpi/data/data0005/interior_boundary_p11.ser differ
diff --git a/test/mpi/data/data0005/interior_boundary_p12.ser b/test/mpi/data/data0005/interior_boundary_p12.ser
new file mode 100644
index 0000000000000000000000000000000000000000..fbbaf66529e4c006aa747fb4f96a3537255a3493
Binary files /dev/null and b/test/mpi/data/data0005/interior_boundary_p12.ser differ
diff --git a/test/mpi/data/data0005/interior_boundary_p13.ser b/test/mpi/data/data0005/interior_boundary_p13.ser
new file mode 100644
index 0000000000000000000000000000000000000000..2321c99fb0e81c4966a733a9448d4baeee7f27e6
Binary files /dev/null and b/test/mpi/data/data0005/interior_boundary_p13.ser differ
diff --git a/test/mpi/data/data0005/interior_boundary_p14.ser b/test/mpi/data/data0005/interior_boundary_p14.ser
new file mode 100644
index 0000000000000000000000000000000000000000..cc4ecf931e4c60fc9e2637c277dac6d0e8a0f547
Binary files /dev/null and b/test/mpi/data/data0005/interior_boundary_p14.ser differ
diff --git a/test/mpi/data/data0005/interior_boundary_p15.ser b/test/mpi/data/data0005/interior_boundary_p15.ser
new file mode 100644
index 0000000000000000000000000000000000000000..7c839a8ae54d482cb748ff0cfaade00d5f148445
Binary files /dev/null and b/test/mpi/data/data0005/interior_boundary_p15.ser differ
diff --git a/test/mpi/data/data0005/interior_boundary_p2.ser b/test/mpi/data/data0005/interior_boundary_p2.ser
new file mode 100644
index 0000000000000000000000000000000000000000..a092854f7310131479f73841b3527d9bf6dc59cc
Binary files /dev/null and b/test/mpi/data/data0005/interior_boundary_p2.ser differ
diff --git a/test/mpi/data/data0005/interior_boundary_p3.ser b/test/mpi/data/data0005/interior_boundary_p3.ser
new file mode 100644
index 0000000000000000000000000000000000000000..5a54c7ebbff83cf11d48807c413891f33ec56411
Binary files /dev/null and b/test/mpi/data/data0005/interior_boundary_p3.ser differ
diff --git a/test/mpi/data/data0005/interior_boundary_p4.ser b/test/mpi/data/data0005/interior_boundary_p4.ser
new file mode 100644
index 0000000000000000000000000000000000000000..06fc02c92bc368b896d364ba1091c9d37b1e5a27
Binary files /dev/null and b/test/mpi/data/data0005/interior_boundary_p4.ser differ
diff --git a/test/mpi/data/data0005/interior_boundary_p5.ser b/test/mpi/data/data0005/interior_boundary_p5.ser
new file mode 100644
index 0000000000000000000000000000000000000000..2144bb7c888416c787b6682e3a897c7f453c26a3
Binary files /dev/null and b/test/mpi/data/data0005/interior_boundary_p5.ser differ
diff --git a/test/mpi/data/data0005/interior_boundary_p6.ser b/test/mpi/data/data0005/interior_boundary_p6.ser
new file mode 100644
index 0000000000000000000000000000000000000000..1c5b3568d2b0bc55612309217199d86f8605c8fe
Binary files /dev/null and b/test/mpi/data/data0005/interior_boundary_p6.ser differ
diff --git a/test/mpi/data/data0005/interior_boundary_p7.ser b/test/mpi/data/data0005/interior_boundary_p7.ser
new file mode 100644
index 0000000000000000000000000000000000000000..85239548e96dd77d26e94833484d3f6b73511c23
Binary files /dev/null and b/test/mpi/data/data0005/interior_boundary_p7.ser differ
diff --git a/test/mpi/data/data0005/interior_boundary_p8.ser b/test/mpi/data/data0005/interior_boundary_p8.ser
new file mode 100644
index 0000000000000000000000000000000000000000..887f19bac40fc774ea9797238e29c05ab873e854
Binary files /dev/null and b/test/mpi/data/data0005/interior_boundary_p8.ser differ
diff --git a/test/mpi/data/data0005/interior_boundary_p9.ser b/test/mpi/data/data0005/interior_boundary_p9.ser
new file mode 100644
index 0000000000000000000000000000000000000000..c5faae49ef912126acd87a9481f4b224d88a89c3
Binary files /dev/null and b/test/mpi/data/data0005/interior_boundary_p9.ser differ
diff --git a/test/mpi/init/test0004.dat.2d b/test/mpi/init/test0004.dat.2d
new file mode 100644
index 0000000000000000000000000000000000000000..379825ba905b5743ffeb6f8cc408dcdd32fa894d
--- /dev/null
+++ b/test/mpi/init/test0004.dat.2d
@@ -0,0 +1,25 @@
+dimension of world:             2
+
+elliptMesh->macro file name:    ./macro/macro.stand.p16.2d
+elliptMesh->global refinements: 1
+
+ellipt->mesh:                   elliptMesh
+ellipt->dim:                    2
+ellipt->components:             1
+ellipt->polynomial degree[0]:   1
+ 
+ellipt->solver:                 cg
+ellipt->solver->max iteration:  10
+ellipt->solver->tolerance:      1.e-8
+ellipt->solver->info:           10
+ellipt->solver->left precon:    diag
+ellipt->solver->right precon:   no
+
+ellipt->estimator[0]:              0
+ellipt->marker[0]->strategy:       0 
+
+parallel->log main rank:     0
+parallel->pre refine:        0
+parallel->partitioner:       checker
+parallel->multi level test:  1
+parallel->solver:            petsc-feti
diff --git a/test/mpi/init/test0005.dat.2d b/test/mpi/init/test0005.dat.2d
new file mode 100644
index 0000000000000000000000000000000000000000..379825ba905b5743ffeb6f8cc408dcdd32fa894d
--- /dev/null
+++ b/test/mpi/init/test0005.dat.2d
@@ -0,0 +1,25 @@
+dimension of world:             2
+
+elliptMesh->macro file name:    ./macro/macro.stand.p16.2d
+elliptMesh->global refinements: 1
+
+ellipt->mesh:                   elliptMesh
+ellipt->dim:                    2
+ellipt->components:             1
+ellipt->polynomial degree[0]:   1
+ 
+ellipt->solver:                 cg
+ellipt->solver->max iteration:  10
+ellipt->solver->tolerance:      1.e-8
+ellipt->solver->info:           10
+ellipt->solver->left precon:    diag
+ellipt->solver->right precon:   no
+
+ellipt->estimator[0]:              0
+ellipt->marker[0]->strategy:       0 
+
+parallel->log main rank:     0
+parallel->pre refine:        0
+parallel->partitioner:       checker
+parallel->multi level test:  1
+parallel->solver:            petsc-feti
diff --git a/test/mpi/src/test0005.cc b/test/mpi/src/test0005.cc
new file mode 100644
index 0000000000000000000000000000000000000000..a136c26b0a16e759e65979c2d117ea71460c3c84
--- /dev/null
+++ b/test/mpi/src/test0005.cc
@@ -0,0 +1,66 @@
+#define BOOST_TEST_DYN_LINK
+#define BOOST_TEST_MODULE 0005
+#define BOOST_TEST_NO_MAIN
+
+#include <boost/test/unit_test.hpp>
+#include <AMDiS.h>
+
+using namespace AMDiS;
+using namespace std;
+
+/*
+  - Checks the computed interior boundaries of 16 ranks against serialized reference data.
+*/
+
+
+void boundaryTest(RankToBoundMap &map0, RankToBoundMap &map1)
+{
+  BOOST_CHECK_MESSAGE(map0.size() == map1.size(),
+		      "Boundaries have different size!\n");
+  for (RankToBoundMap::iterator it = map0.begin(); it != map0.end(); ++it) {
+    if (map1.count(it->first) == 0) {
+      MSG("Boundary with rank %d missing!\n", it->first);
+      BOOST_ERROR("");
+      continue;  // nothing to compare for this rank
+    }
+    BOOST_CHECK_MESSAGE(it->second.size() == map1[it->first].size(),
+			"Wrong boundary size!\n");
+    // Compare only the common prefix to avoid out-of-bounds access.
+    size_t n = std::min(it->second.size(), map1[it->first].size());
+    for (size_t i = 0; i < n; i++)
+      BOOST_CHECK_MESSAGE(it->second[i] == map1[it->first][i],
+			  "Boundary is not equal!\n");
+  }
+}
+
+BOOST_AUTO_TEST_CASE(amdis_mpi_feti_multilevel)
+{
+  BOOST_REQUIRE(MPI::COMM_WORLD.Get_size() == 16);  // reference data was generated for exactly 16 ranks
+
+  ProblemStat ellipt("ellipt");
+  ellipt.initialize(INIT_ALL);
+
+  MeshDistributor *meshDist = MeshDistributor::globalMeshDistributor;
+  meshDist->initParallelization();
+  // Read this rank's serialized reference interior boundary from disk.
+  InteriorBoundary testBoundary;  
+  string filename = "data/data0005/interior_boundary_p" + 
+    lexical_cast<string>(MPI::COMM_WORLD.Get_rank()) + ".ser";
+  ifstream myfile(filename.c_str());
+  testBoundary.deserialize(myfile, ellipt.getMesh());
+  myfile.close();
+  // Compare the freshly computed boundaries against the reference data.
+  boundaryTest(testBoundary.getOwn(), meshDist->getIntBoundary().getOwn());
+  boundaryTest(testBoundary.getOther(), meshDist->getIntBoundary().getOther());
+  boundaryTest(testBoundary.getPeriodic(), meshDist->getIntBoundary().getPeriodic());
+}
+
+
+int main(int argc, char **argv)
+{
+  AMDiS::init(argc, argv, "./init/test0005.dat.2d");
+  // Return the Boost.Test status so failures show up in the exit code.
+  int result = boost::unit_test::unit_test_main(&init_unit_test, argc, argv);
+  AMDiS::finalize();
+  return result;
+}