diff --git a/AMDiS/src/ProblemStat.cc b/AMDiS/src/ProblemStat.cc
index dfbb4a2dead232a1b4a39f19345d8d07b067287b..992dcf770222c1f36ba64280c354c2c70606eb48 100644
--- a/AMDiS/src/ProblemStat.cc
+++ b/AMDiS/src/ProblemStat.cc
@@ -870,6 +870,17 @@ namespace AMDiS {
     INFO(info, 8)("buildAfterCoarsen needed %.5f seconds\n", 
 		  TIME_USED(first, clock()));
 #endif
+
+#if 0
+    VtkWriter::writeFile(rhs, "rhs0.vtu");
+    for (int i = 0; i < nComponents; i++) {
+      double s = rhs->getDOFVector(i)->sum();
+      s /= -static_cast<double>(rhs->getDOFVector(i)->getUsedSize());
+      MSG("MOVE RHS: %f\n", s);
+      add(*(rhs->getDOFVector(i)), s, *(rhs->getDOFVector(i)));
+    }
+    VtkWriter::writeFile(rhs, "rhs1.vtu");
+#endif
   }
 
 
diff --git a/AMDiS/src/parallel/FeSpaceMapping.cc b/AMDiS/src/parallel/FeSpaceMapping.cc
index f48ece76d6fa927ff17c698d88b7a1924420a393..4e54865920e3cf401a017ffeed43f341205e5ff5 100644
--- a/AMDiS/src/parallel/FeSpaceMapping.cc
+++ b/AMDiS/src/parallel/FeSpaceMapping.cc
@@ -41,6 +41,8 @@ namespace AMDiS {
 
     if (overlap)
       computeNonLocalIndices();
+
+    computeMatIndex(0);
   }
 
 
@@ -89,6 +91,52 @@ namespace AMDiS {
   }
 
 
+  void GlobalDofMap::computeMatIndex(int offset)
+  {
+    FUNCNAME("GlobalDofMap::computeMatIndex()");
+
+    map<DegreeOfFreedom, int> dofToMatIndex;
+
+    for (DofMapping::iterator it = dofMap.begin(); it != dofMap.end(); ++it) {
+      if (!nonRankDofs.count(it->first)) {
+	int globalMatIndex = it->second - rStartDofs + offset;
+	dofToMatIndex[it->first] = globalMatIndex;
+      }
+    }
+
+    if (!overlap)
+      return;
+
+    TEST_EXIT_DBG(sendDofs != NULL && recvDofs != NULL)
+      ("No communicator given!\n");
+
+    StdMpi<vector<DegreeOfFreedom> > stdMpi(*mpiComm);
+    for (DofComm::Iterator it(*sendDofs, feSpace); !it.end(); it.nextRank()) {
+      vector<DegreeOfFreedom> sendGlobalDofs;
+
+      for (; !it.endDofIter(); it.nextDof())
+	if (dofMap.count(it.getDofIndex()))
+	  sendGlobalDofs.push_back(dofToMatIndex[it.getDofIndex()]);
+
+      stdMpi.send(it.getRank(), sendGlobalDofs);
+    }
+
+    for (DofComm::Iterator it(*recvDofs, feSpace); !it.end(); it.nextRank())
+      stdMpi.recv(it.getRank());
+
+    stdMpi.startCommunication();
+
+    for (DofComm::Iterator it(*recvDofs, feSpace); !it.end(); it.nextRank()) {
+      int i = 0;
+
+      for (; !it.endDofIter(); it.nextDof())
+	if (dofMap.count(it.getDofIndex()))
+	  dofToMatIndex[it.getDofIndex()] = 
+	    stdMpi.getRecvData(it.getRank())[i++];
+    }
+  }
+
+
   void GlobalDofMap::print()
   {
     FUNCNAME("GlobalDofMap::print()");
diff --git a/AMDiS/src/parallel/FeSpaceMapping.h b/AMDiS/src/parallel/FeSpaceMapping.h
index 0170e3e35f29d6407d66b5d9c8e456e1c042452a..d75e1a2e6e24efc40303546a3b7ac4d430c30421 100644
--- a/AMDiS/src/parallel/FeSpaceMapping.h
+++ b/AMDiS/src/parallel/FeSpaceMapping.h
@@ -46,6 +46,8 @@ namespace AMDiS {
     GlobalDofMap(MPI::Intracomm* m)
       : mpiComm(m),
 	feSpace(NULL),
+	sendDofs(NULL),
+	recvDofs(NULL),
 	nRankDofs(0),
 	nOverallDofs(0),
 	rStartDofs(0),
@@ -102,6 +104,8 @@ namespace AMDiS {
 
     void computeNonLocalIndices();
 
+    void computeMatIndex(int offset);
+
     void print();
 
     void setFeSpace(const FiniteElemSpace *fe)
diff --git a/AMDiS/src/parallel/MeshDistributor.cc b/AMDiS/src/parallel/MeshDistributor.cc
index 264232029bde5da500407b1609b58a921a128c8e..51e052e1c9ceb0412a0af2bce7675683de1c7762 100644
--- a/AMDiS/src/parallel/MeshDistributor.cc
+++ b/AMDiS/src/parallel/MeshDistributor.cc
@@ -31,6 +31,7 @@
 #include "parallel/DofComm.h"
 #include "io/ElementFileWriter.h"
 #include "io/MacroInfo.h"
+#include "io/MacroWriter.h"
 #include "io/VtkWriter.h"
 #include "Mesh.h"
 #include "Traverse.h"
diff --git a/AMDiS/src/parallel/PetscSolverFeti.cc b/AMDiS/src/parallel/PetscSolverFeti.cc
index 8319276d7d05712d93afc9024d29cadb7cc7dc7e..bf407e0a824557a0211cb3d7d333a13bc6346244 100644
--- a/AMDiS/src/parallel/PetscSolverFeti.cc
+++ b/AMDiS/src/parallel/PetscSolverFeti.cc
@@ -260,8 +260,7 @@ namespace AMDiS {
     primalDofMap[feSpace].update();
 
     MSG("nRankPrimals = %d   nOverallPrimals = %d\n", 
-	primalDofMap[feSpace].nRankDofs, 
-	primalDofMap[feSpace].nOverallDofs);
+	primalDofMap[feSpace].nRankDofs, primalDofMap[feSpace].nOverallDofs);
 
     TEST_EXIT_DBG(primals.size() == primalDofMap[feSpace].size())
       ("Number of primals %d, but number of global primals on this rank is %d!\n",
@@ -433,8 +432,7 @@ namespace AMDiS {
     
     localDofMap[feSpace].update(false);
 
-    TEST_EXIT_DBG(nLocalInterior + 
-		  primalDofMap[feSpace].size() + 
+    TEST_EXIT_DBG(nLocalInterior + primalDofMap[feSpace].size() + 
 		  dualDofMap[feSpace].size() == 
 		  static_cast<unsigned int>(admin->getUsedDofs()))
       ("Should not happen!\n");
diff --git a/AMDiS/src/parallel/PetscSolverGlobalMatrix.cc b/AMDiS/src/parallel/PetscSolverGlobalMatrix.cc
index 888bc3e4c356c0c5bf756500b4ec11c57b83b0dc..b5300c8d232effa2be9e594eb5592c0397250d06 100644
--- a/AMDiS/src/parallel/PetscSolverGlobalMatrix.cc
+++ b/AMDiS/src/parallel/PetscSolverGlobalMatrix.cc
@@ -129,7 +129,7 @@ namespace AMDiS {
   void PetscSolverGlobalMatrix::fillPetscRhs(SystemVector *vec)
   {
     FUNCNAME("PetscSolverGlobalMatrix::fillPetscRhs()");
-   
+
     TEST_EXIT_DBG(vec)("No DOF vector defined!\n");
     TEST_EXIT_DBG(meshDistributor)("No mesh distributor defined!\n");
 
@@ -211,7 +211,6 @@ namespace AMDiS {
     // Print iteration counter and residual norm of the solution.
     printSolutionInfo(adaptInfo);
 
-
     // === Destroy PETSc's variables. ===
 
     VecDestroy(&petscRhsVec);