diff --git a/AMDiS/src/parallel/DofComm.cc b/AMDiS/src/parallel/DofComm.cc
index 0ec8f96357cf1038d3fecf9996f958968ad7e589..ecb568199760fcef8894e8362c105dbb95a5a7fc 100644
--- a/AMDiS/src/parallel/DofComm.cc
+++ b/AMDiS/src/parallel/DofComm.cc
@@ -46,10 +46,7 @@ namespace AMDiS {
   {
     FUNCNAME("DofComm::create()");
 
-    MSG("---> senddofs\n");
     createContainer(boundary.getOwn(), sendDofs);
-
-    MSG("---> recvdofs\n");
     createContainer(boundary.getOther(), recvDofs);
   }
 
diff --git a/AMDiS/src/parallel/MeshDistributor.cc b/AMDiS/src/parallel/MeshDistributor.cc
index 3786a0d9529750105ee35372f70ab6527e028ef0..f7700e1f29f9dcbd7b60321214ef746c0eb51601 100644
--- a/AMDiS/src/parallel/MeshDistributor.cc
+++ b/AMDiS/src/parallel/MeshDistributor.cc
@@ -1536,7 +1536,6 @@ namespace AMDiS {
     dofComm.create(intBoundary);
 
     if (levelData.getLevelNumber() > 1) {
-      MSG("START CREATE DOF-COMM-SD\n");
       dofCommSd.init(0, levelData, feSpaces);
       dofCommSd.create(intBoundarySd);
     }
@@ -1609,6 +1608,8 @@ namespace AMDiS {
   {
     FUNCNAME("MeshDistributor::updateLocalGlobalNumbering()");
 
+    mesh->dofCompress();
+
 #if (DEBUG != 0)
     debug::ElementIdxToDofs elMap;
     debug::createSortedDofs(mesh, elMap);   
@@ -1618,17 +1619,37 @@ namespace AMDiS {
     TEST_EXIT_DBG(nLevels >= 1)("Should not happen!\n");
 
     dofMap.init(levelData, feSpaces, feSpaces, true, true);
+    dofMap.setMpiComm(levelData.getMpiComm(0), 0);
     dofMap.setDofComm(dofComm);
     dofMap.clear();
 
+    if (nLevels > 1) {
+      dofMapSd.init(levelData, feSpaces, feSpaces, true, true);
+      dofMapSd.setMpiComm(levelData.getMpiComm(1), 1);
+      dofMapSd.setDofComm(dofCommSd);
+      dofMapSd.clear();
+    }
+
     createBoundaryDofs();
 
     for (unsigned int i = 0; i < feSpaces.size(); i++)
-      updateLocalGlobalNumbering(feSpaces[i]);
-
+      updateLocalGlobalNumbering(dofMap, dofComm, feSpaces[i]);
     dofMap.update();
 
+    if (nLevels > 1) {
+      for (unsigned int i = 0; i < feSpaces.size(); i++)
+	updateLocalGlobalNumbering(dofMapSd, dofCommSd, feSpaces[i]);
+      dofMapSd.update();
+    }
+
+    // === Update DOF admins due to new number of DOFs. ===
+
+    lastMeshChangeIndex = mesh->getChangeIndex();
+
+
 #if (DEBUG != 0)
+    ParallelDebug::testDofContainerCommunication(*this);
+
     MSG("------------- Debug information -------------\n");
     MSG("|  number of levels:         %d\n", nLevels);
     MSG("|  number of FE spaces:      %d\n", feSpaces.size());
@@ -1642,9 +1663,19 @@ namespace AMDiS {
       }
     }
 
-    stringstream oss;
-    oss << debugOutputDir << "elementIndex-" << mpiRank << ".vtu";
-    debug::writeElementIndexMesh(mesh, oss.str());
+    if (nLevels > 1) {
+      for (int level = 0; level < nLevels; level++) {
+	for (unsigned int i = 0; i < feSpaces.size(); i++) {
+	  MSG("|  level = %d   FE space = %d:\n", level, i);
+	  MSG("|      nRankDofs    = %d\n", dofMapSd[feSpaces[i]].nRankDofs[level]);
+	  MSG("|      nOverallDofs = %d\n", dofMapSd[feSpaces[i]].nOverallDofs[level]);
+	  MSG("|      rStartDofs   = %d\n", dofMapSd[feSpaces[i]].rStartDofs[level]);
+	}
+      }
+    }
+
+    debug::writeElementIndexMesh(mesh, debugOutputDir + "elementIndex-" + 
+				 lexical_cast<string>(mpiRank) + ".vtu");
     ParallelDebug::writeDebugFile(*this, debugOutputDir + "mpi-dbg", "dat");
     debug::testSortedDofs(mesh, elMap);
     ParallelDebug::testCommonDofs(*this, true);
@@ -1658,12 +1689,12 @@ namespace AMDiS {
   }
 
 
-  void MeshDistributor::updateLocalGlobalNumbering(const FiniteElemSpace *feSpace)
+  void MeshDistributor::updateLocalGlobalNumbering(ParallelDofMapping &dmap,
+						   DofComm &dcom,
+						   const FiniteElemSpace *feSpace)
   {
     FUNCNAME("MeshDistributor::updateLocalGlobalNumbering()");
 
-    mesh->dofCompress();
-
     // === Get all DOFs in ranks partition. ===
 
     std::set<const DegreeOfFreedom*> rankDofSet;
@@ -1677,29 +1708,19 @@ namespace AMDiS {
     int nLevels = levelData.getLevelNumber();
     for (int level = 0; level < nLevels; level++) {
       DofContainerSet nonRankDofs;
-      for (DofComm::Iterator it(dofComm.getRecvDofs(), level, feSpace); 
+      for (DofComm::Iterator it(dcom.getRecvDofs(), level, feSpace); 
 	   !it.end(); it.nextRank())
 	for (; !it.endDofIter(); it.nextDof())
 	  nonRankDofs.insert(it.getDof());
       
       for (unsigned int i = 0; i < rankDofs.size(); i++)
 	if (nonRankDofs.count(rankDofs[i]) == 0)
-	  dofMap[feSpace].insertRankDof(level, *(rankDofs[i]));
+	  dmap[feSpace].insertRankDof(level, *(rankDofs[i]));
       
-      for (DofComm::Iterator it(dofComm.getRecvDofs(), level, feSpace); 
-	   !it.end(); it.nextRank())
-	for (; !it.endDofIter(); it.nextDof())
-	  dofMap[feSpace].insertNonRankDof(level, it.getDofIndex());
+      for (DofContainerSet::iterator it = nonRankDofs.begin();
+	   it != nonRankDofs.end(); ++it)
+	dmap[feSpace].insertNonRankDof(level, **it);
     }
-
-    // === Update DOF admins due to new number of DOFs. ===
-  
-    lastMeshChangeIndex = mesh->getChangeIndex();
-
-
-#if (DEBUG != 0)
-    ParallelDebug::testDofContainerCommunication(*this);
-#endif
   }
 
 
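The refactored updateLocalGlobalNumbering takes the mapping and the DOF communicator as parameters, so the same classification runs once for dofMap/dofComm and, on multi-level decompositions, again for dofMapSd/dofCommSd. A minimal sketch of that classification in plain STL terms (classifyDofs and its arguments are illustrative stand-ins, not the AMDiS API): every DOF that arrives on the receive side of the communicator is owned by a neighbour; all remaining DOFs of the rank's partition are owned locally.

    #include <set>
    #include <vector>

    typedef int DegreeOfFreedom;

    // Split the DOFs of this rank's partition into locally owned DOFs and
    // ghost DOFs that are owned by a neighbouring rank.
    void classifyDofs(const std::vector<DegreeOfFreedom> &rankDofs,
                      const std::vector<DegreeOfFreedom> &recvDofs,
                      std::set<DegreeOfFreedom> &owned,
                      std::set<DegreeOfFreedom> &ghosts)
    {
      // DOFs received from other ranks are by definition not owned here.
      ghosts.insert(recvDofs.begin(), recvDofs.end());

      // Everything else in the rank's partition is owned by this rank.
      for (size_t i = 0; i < rankDofs.size(); i++)
        if (ghosts.count(rankDofs[i]) == 0)
          owned.insert(rankDofs[i]);
    }
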
diff --git a/AMDiS/src/parallel/MeshDistributor.h b/AMDiS/src/parallel/MeshDistributor.h
index c0a2d1ab7fa8dd99ccd1983a569d3ba43b071d13..9d49c7600824c179f9e503af814f665f87b5c29f 100644
--- a/AMDiS/src/parallel/MeshDistributor.h
+++ b/AMDiS/src/parallel/MeshDistributor.h
@@ -309,7 +309,9 @@ namespace AMDiS {
 
     /// Updates the local and global DOF numbering after the mesh has been 
     /// changed.
-    void updateLocalGlobalNumbering(const FiniteElemSpace *feSpace);
+    void updateLocalGlobalNumbering(ParallelDofMapping &dmap,
+				    DofComm &dcom,
+				    const FiniteElemSpace *feSpace);
 
     /// Calls \ref createPeriodicMap(feSpace) for all FE spaces that are
     /// handled by the mesh distributor.
@@ -473,6 +475,8 @@ namespace AMDiS {
     /// Mapping object to map from local DOF indices to global ones.
     ParallelDofMapping dofMap;
 
+    ParallelDofMapping dofMapSd;
+
     /// Database to store and query all sub-objects of all elements of the 
     /// macro mesh.
     ElementObjectDatabase elObjDb;
diff --git a/AMDiS/src/parallel/ParallelDofMapping.cc b/AMDiS/src/parallel/ParallelDofMapping.cc
index 2673338a09580829ca90ab46c855cd73b0c24a87..872b4a70d5c9d9f13e2f2994a6876df889e3be4e 100644
--- a/AMDiS/src/parallel/ParallelDofMapping.cc
+++ b/AMDiS/src/parallel/ParallelDofMapping.cc
@@ -55,7 +55,7 @@ namespace AMDiS {
       
       nOverallDofs[i] = 0;
       rStartDofs[i] = 0;
-      mpi::getDofNumbering(levelData->getMpiComm(0), 
+      mpi::getDofNumbering(mpiComm,
 			   nRankDofs[i], rStartDofs[i], nOverallDofs[i]);
       
       // === If required, compute also the global indices. ===
@@ -89,13 +89,20 @@ namespace AMDiS {
     // === Send all global indices of DOFs that are owned by the rank to all ===
     // === other ranks that also include this DOF.                           ===
 
-    StdMpi<vector<int> > stdMpi(levelData->getMpiComm(0));
+    StdMpi<vector<int> > stdMpi(mpiComm);
 
     for (DofComm::Iterator it(dofComm->getSendDofs(), level, feSpace); 
-	 !it.end(); it.nextRank())
+	 !it.end(); it.nextRank()) {
+      int rank = it.getRank();
+      if (meshLevel > 0)
+	rank = levelData->mapRank(rank, 0, meshLevel);
+
       for (; !it.endDofIter(); it.nextDof())
-	if (dofMap[level].count(it.getDofIndex()) && !nonRankDofs[level].count(it.getDofIndex()))
-	  stdMpi.getSendData(it.getRank()).push_back(dofMap[level][it.getDofIndex()].global);
+	if (dofMap[level].count(it.getDofIndex()) && 
+	    !nonRankDofs[level].count(it.getDofIndex()))
+	  stdMpi.getSendData(rank).
+	    push_back(dofMap[level][it.getDofIndex()].global);
+    }
 
     stdMpi.updateSendDataSize();
 
@@ -112,8 +119,13 @@ namespace AMDiS {
 	}
       }
 
-      if (recvFromRank)
-	stdMpi.recv(it.getRank());
+      if (recvFromRank) {
+	int rank = it.getRank();
+	if (meshLevel > 0)
+	  rank = levelData->mapRank(rank, 0, meshLevel);
+
+	stdMpi.recv(rank);
+      }
     }
 
 
@@ -126,10 +138,14 @@ namespace AMDiS {
     
     for (DofComm::Iterator it(dofComm->getRecvDofs(), level, feSpace);
 	 !it.end(); it.nextRank()) {
+      int rank = it.getRank();
+      if (meshLevel > 0)
+	rank = levelData->mapRank(rank, 0, meshLevel);
+
       int i = 0;
       for (; !it.endDofIter(); it.nextDof())
 	if (nonRankDofs[level].count(it.getDofIndex()))
-	  dofMap[level][it.getDofIndex()].global = stdMpi.getRecvData(it.getRank())[i++];
+	  dofMap[level][it.getDofIndex()].global = stdMpi.getRecvData(rank)[i++];
     }
   }
 
@@ -185,6 +201,17 @@ namespace AMDiS {
     }
   }
 
+
+  void ParallelDofMapping::setMpiComm(MPI::Intracomm &m, int l)
+  {
+    mpiComm = m;
+    meshLevel = l;
+
+    for (vector<const FiniteElemSpace*>::iterator it = feSpacesUnique.begin();
+	 it != feSpacesUnique.end(); ++it)
+      data[*it].setMpiComm(m, l);
+  }
+
 
   void ParallelDofMapping::setDofComm(DofComm &dc)
   {
@@ -350,7 +377,7 @@ namespace AMDiS {
       // === Communicate the matrix indices for all DOFs that are on some ===
       // === interior boundaries.                                         ===
 
-      StdMpi<vector<DegreeOfFreedom> > stdMpi(levelData->getMpiComm(0));
+      StdMpi<vector<DegreeOfFreedom> > stdMpi(mpiComm);
       for (DofComm::Iterator it(dofComm->getSendDofs(), level, feSpaces[i]); 
 	   !it.end(); it.nextRank()) {
 	vector<DegreeOfFreedom> sendGlobalDofs;
@@ -362,22 +389,35 @@ namespace AMDiS {
 	    else
 	      sendGlobalDofs.push_back(dofToMatIndex[level].get(i, it.getDofIndex()));
 	
-	stdMpi.send(it.getRank(), sendGlobalDofs);
+	int rank = it.getRank();
+	if (meshLevel > 0)
+	  rank = levelData->mapRank(rank, 0, meshLevel);
+
+	stdMpi.send(rank, sendGlobalDofs);
       }
       
       for (DofComm::Iterator it(dofComm->getRecvDofs(), level, feSpaces[i]); 
-	   !it.end(); it.nextRank())
-	stdMpi.recv(it.getRank());
+	   !it.end(); it.nextRank()) {
+	int rank = it.getRank();
+	if (meshLevel > 0)
+	  rank = levelData->mapRank(rank, 0, meshLevel);
+
+	stdMpi.recv(rank);
+      }
       
       stdMpi.startCommunication();
       
       {
 	for (DofComm::Iterator it(dofComm->getRecvDofs(), level, feSpaces[i]); 
 	     !it.end(); it.nextRank()) {
+	  int rank = it.getRank();
+	  if (meshLevel > 0)
+	    rank = levelData->mapRank(rank, 0, meshLevel);
+
 	  int counter = 0;
 	  for (; !it.endDofIter(); it.nextDof()) {
 	    if (dofMap.count(it.getDofIndex())) {
-	      DegreeOfFreedom d = stdMpi.getRecvData(it.getRank())[counter++];
+	      DegreeOfFreedom d = stdMpi.getRecvData(rank)[counter++];
 	      if (globalIndex)
 		dofToMatIndex[level].add(i, dofMap[it.getDofIndex()].global, d);
 	      else
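
A pattern repeated through these hunks: DofComm stores neighbour ranks numbered in the level-0 communicator, so whenever meshLevel > 0 the rank is first translated into the subdomain communicator via levelData->mapRank before StdMpi sends to or receives from it. mapRank itself is not part of this diff; a plausible standalone equivalent, assuming both communicators contain the process in question, is MPI_Group_translate_ranks:

    #include <mpi.h>

    // Translate a rank number from one communicator (e.g. the global
    // level-0 communicator) into another (e.g. a subdomain communicator).
    // Returns MPI_UNDEFINED if the process is not in the target group.
    int translateRank(MPI_Comm from, MPI_Comm to, int rankInFrom)
    {
      MPI_Group fromGroup, toGroup;
      MPI_Comm_group(from, &fromGroup);
      MPI_Comm_group(to, &toGroup);

      int rankInTo;
      MPI_Group_translate_ranks(fromGroup, 1, &rankInFrom, toGroup, &rankInTo);

      MPI_Group_free(&fromGroup);
      MPI_Group_free(&toGroup);
      return rankInTo;
    }
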
diff --git a/AMDiS/src/parallel/ParallelDofMapping.h b/AMDiS/src/parallel/ParallelDofMapping.h
index 1c2a58198d4c33aedea5505469104cfd66eb1639..3f14568434fee213e0dafc1c05e31fcebc1ddcf8 100644
--- a/AMDiS/src/parallel/ParallelDofMapping.h
+++ b/AMDiS/src/parallel/ParallelDofMapping.h
@@ -216,6 +216,12 @@ namespace AMDiS {
       dofComm = &dc;
     }
 
+    void setMpiComm(MPI::Intracomm &m, int l)
+    {
+      mpiComm = m;
+      meshLevel = l;
+    }
+
   private:
     /// Computes a global mapping from the local one.
     void computeGlobalMapping(int level);
@@ -230,6 +236,10 @@ namespace AMDiS {
     /// DOF communicator for all DOFs on interior boundaries.
     DofComm *dofComm;
 
+    MPI::Intracomm mpiComm;
+
+    int meshLevel;
+
     /// The FE space this mapping belongs to. This is used only to get the
     /// correct DOF communicator in \ref dofComm.
     const FiniteElemSpace *feSpace;
@@ -297,6 +307,8 @@ namespace AMDiS {
 	      vector<const FiniteElemSpace*> &uniqueFe,
 	      bool needGlobalMapping,
 	      bool bNonLocalDofs);
+
+    void setMpiComm(MPI::Intracomm &m, int l);
     
     /// Clear all data.
     void clear();
@@ -412,6 +424,10 @@ namespace AMDiS {
     int computeStartDofs(int level);
 
   private:
+    MPI::Intracomm mpiComm;
+
+    int meshLevel;
+
     MeshLevelData *levelData;
 
     /// DOF communicator for all DOFs on interior boundaries.
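
The new setMpiComm members let mpi::getDofNumbering (see the ParallelDofMapping.cc hunk above) run on the stored level communicator instead of hard-wiring getMpiComm(0). Assuming the helper computes the usual prefix-sum numbering, an equivalent standalone version in terms of the MPI C API would be:

    #include <mpi.h>

    // Parallel DOF numbering on an arbitrary communicator: rStart is the
    // exclusive prefix sum of nRankDofs over the ranks, nOverall the total.
    void getDofNumbering(MPI_Comm comm, int nRankDofs,
                         int &rStart, int &nOverall)
    {
      rStart = 0;
      MPI_Exscan(&nRankDofs, &rStart, 1, MPI_INT, MPI_SUM, comm);

      int rank;
      MPI_Comm_rank(comm, &rank);
      if (rank == 0)
        rStart = 0;  // MPI_Exscan leaves the result undefined on rank 0

      MPI_Allreduce(&nRankDofs, &nOverall, 1, MPI_INT, MPI_SUM, comm);
    }
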
diff --git a/AMDiS/src/parallel/PetscSolverFeti.cc b/AMDiS/src/parallel/PetscSolverFeti.cc
index b7163f260e11139302effd260e063664772767fa..7a3b02f6bc3330f7f4255317de97fc16490a06a0 100644
--- a/AMDiS/src/parallel/PetscSolverFeti.cc
+++ b/AMDiS/src/parallel/PetscSolverFeti.cc
@@ -363,7 +363,7 @@ namespace AMDiS {
     // === create local indices of the primals starting at zero.          ===
 
     for (DofIndexSet::iterator it = primals.begin(); it != primals.end(); ++it)
-      if (meshDistributor->getDofMap()[feSpace].isRankDof(*it, meshLevel))
+      if (meshDistributor->getDofMap()[feSpace].isRankDof(*it, 0))
 	primalDofMap[feSpace].insertRankDof(meshLevel, *it);
       else
   	primalDofMap[feSpace].insertNonRankDof(meshLevel, *it);
@@ -453,7 +453,7 @@ namespace AMDiS {
     DofMap& dualMap = dualDofMap[feSpace].getMap(meshLevel);
     for (DofMap::iterator it = dualMap.begin(); it != dualMap.end(); ++it) {
 
-      if (meshDistributor->getDofMap()[feSpace].isRankDof(it->first, meshLevel)) {
+      if (meshDistributor->getDofMap()[feSpace].isRankDof(it->first, 0)) {
 	lagrangeMap[feSpace].insertRankDof(meshLevel, it->first, nRankLagrange);
 	int degree = boundaryDofRanks[feSpace][it->first].size();
 	nRankLagrange += (degree * (degree - 1)) / 2;
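
Both PetscSolverFeti hunks change only the level on which ownership is queried: isRankDof is now always asked on level 0, while the FETI-specific mappings are still filled on meshLevel. A self-contained sketch of that split, with hypothetical simplified types (GlobalDofMap and LevelDofMap are illustrations, not the AMDiS classes):

    #include <set>

    typedef int DegreeOfFreedom;

    // Ownership of a DOF is decided once, on mesh level 0.
    struct GlobalDofMap {
      std::set<DegreeOfFreedom> ownedOnLevel0;
      bool isRankDof(DegreeOfFreedom d) const
      { return ownedOnLevel0.count(d) > 0; }
    };

    // A per-level mapping, e.g. for primal or Lagrange DOFs.
    struct LevelDofMap {
      std::set<DegreeOfFreedom> rankDofs, nonRankDofs;
      void insertRankDof(DegreeOfFreedom d)    { rankDofs.insert(d); }
      void insertNonRankDof(DegreeOfFreedom d) { nonRankDofs.insert(d); }
    };

    // Fill a level mapping, testing ownership against level 0 as the
    // hunks above do.
    void classifyPrimals(const GlobalDofMap &global,
                         const std::set<DegreeOfFreedom> &primals,
                         LevelDofMap &primalMap)
    {
      for (std::set<DegreeOfFreedom>::const_iterator it = primals.begin();
           it != primals.end(); ++it)
        if (global.isRankDof(*it))
          primalMap.insertRankDof(*it);
        else
          primalMap.insertNonRankDof(*it);
    }
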
diff --git a/test/mpi/src/test0006.cc b/test/mpi/src/test0006.cc
index 3c7f9ae580b013368e6f8c7a548f2afb0e983807..a9f0b467665c675e4a9d3b92131921cff9219d9d 100644
--- a/test/mpi/src/test0006.cc
+++ b/test/mpi/src/test0006.cc
@@ -1,5 +1,5 @@
 #define BOOST_TEST_DYN_LINK
-#define BOOST_TEST_MODULE 0005
+#define BOOST_TEST_MODULE 0006
 #define BOOST_TEST_NO_MAIN
 
 #include <boost/test/unit_test.hpp>