diff --git a/AMDiS/src/parallel/MatrixNnzStructure.cc b/AMDiS/src/parallel/MatrixNnzStructure.cc
index e8484c33a272276177b6c283f6d20a288cf94393..9793fbbaab75ac605e09fa4091c15a0581472e52 100644
--- a/AMDiS/src/parallel/MatrixNnzStructure.cc
+++ b/AMDiS/src/parallel/MatrixNnzStructure.cc
@@ -191,13 +191,13 @@ namespace AMDiS {
 		     icend = end<nz>(cursor); icursor != icend; ++icursor) {
 		if (colDofMap[colFeSpace].find(col(*icursor), colDofIndex) == false)
 		  continue;
-
+		
 		// Set of periodic row associations (is empty, if row DOF is not 
 		// periodic.
-		std::set<int> perColAsc;
+		std::set<int> perColAsc = perRowAsc;
 		if (perMap)
 		  perMap->fillAssociations(colFeSpace, colDofIndex.global, elObjDb, perColAsc);
-
+		
 		if (perColAsc.empty()) {
 		  if (colDofMap[colFeSpace].isRankDof(col(*icursor)))
 		    dnnz[localPetscRowIdx]++;
@@ -206,7 +206,7 @@ namespace AMDiS {
 		} else {
 		  vector<int> newCols;
 		  perMap->mapDof(colFeSpace, colDofIndex.global, perColAsc, newCols);
-
+		  
 		  for (int aa = 0; aa < newCols.size(); aa++) {
 		    int petscColIdx = colDofMap.getMatIndex(colComp, newCols[aa]);
 		    
@@ -221,6 +221,14 @@ namespace AMDiS {
 		  }
 		}
 	      }
+	      
+	      if (!perRowAsc.empty()) {
+		vector<int> newRows;
+		perMap->mapDof(rowFeSpace, rowIt->second.global, perRowAsc, newRows);
+		
+		dnnz[localPetscRowIdx] += 
+		  (newRows.size() - 1) * (onnz[localPetscRowIdx] + dnnz[localPetscRowIdx]);
+	      }
 	    }
 	  } else {
 	    // === The current row DOF is not a rank DOF, i.e., its values   ===
diff --git a/AMDiS/src/parallel/MeshDistributor.cc b/AMDiS/src/parallel/MeshDistributor.cc
index f215ae2d7bbad3e4a42973d7694fd576b3dd673c..795c09956cb0020a7c0428b9021683a00393660f 100644
--- a/AMDiS/src/parallel/MeshDistributor.cc
+++ b/AMDiS/src/parallel/MeshDistributor.cc
@@ -1818,13 +1818,13 @@ namespace AMDiS {
     TEST_EXIT(levelData.getLevelNumber() == 1)
       ("Periodic DOF map does not support multi level domain decomposition!\n");
 
-    MPI::COMM_WORLD.Barrier();
+    //    MPI::COMM_WORLD.Barrier();   // TODO: disabled because not all ranks have a periodic map
     double first = MPI::Wtime();
     
     for (unsigned int i = 0; i < feSpaces.size(); i++)
       createPeriodicMap(feSpaces[i]);
 
-    MPI::COMM_WORLD.Barrier();
+    //    MPI::COMM_WORLD.Barrier();
     INFO(info, 8)("Creation of periodic mapping needed %.5f seconds\n", 
 		  MPI::Wtime() - first);
   }
diff --git a/AMDiS/src/parallel/ParallelDofMapping.h b/AMDiS/src/parallel/ParallelDofMapping.h
index c5b6681495f7921ad08f169879800478379602e6..16d1a4bf578af83926dc30d17b7cd7e5e8d33c0d 100644
--- a/AMDiS/src/parallel/ParallelDofMapping.h
+++ b/AMDiS/src/parallel/ParallelDofMapping.h
@@ -184,13 +184,18 @@ namespace AMDiS {
       return static_cast<bool>(dofMap.count(dof));
     }
 
-    /// Checks if a given DOF is a rank owned DOF of the DOF mapping. The DOF must
-    /// a DOF of the mapping (this is not checked here), otherwise the result is
-    /// meaningsless.
+    /// Checks if a given DOF is a rank owned DOF of the DOF mapping. The DOF
+    /// must be a DOF of the mapping (this is not checked here), otherwise the
+    /// result is meaningless.
     bool isRankDof(DegreeOfFreedom dof)
     {
       return !(static_cast<bool>(nonRankDofs.count(dof)));
     }
+
+    bool isRankGlobalDof(int dof)
+    {
+      return (dof >= rStartDofs && dof < rStartDofs + nRankDofs);
+    }
     
     /// Returns number of DOFs in the mapping.
     unsigned int size()
diff --git a/AMDiS/src/parallel/PeriodicMap.cc b/AMDiS/src/parallel/PeriodicMap.cc
index 1fe38a99e749157b42bafafc2027905b880fe85f..0a32c2379150800528f96ee16ac74b2c9b5c8272 100644
--- a/AMDiS/src/parallel/PeriodicMap.cc
+++ b/AMDiS/src/parallel/PeriodicMap.cc
@@ -72,6 +72,37 @@ namespace AMDiS {
   }
 
 
+  void PeriodicMap::mapDof(const FiniteElemSpace* rowFeSpace,
+			   const FiniteElemSpace* colFeSpace,
+			   pair<int, int> globalDofIndex, 
+			   const std::set<int>& perAsc, 
+			   vector<pair<int, int> >& mappedDofs)
+  {
+    mappedDofs.clear();
+    mappedDofs.push_back(globalDofIndex);
+
+    for (std::set<int>::iterator it = perAsc.begin(); 
+	 it != perAsc.end(); ++it) {
+      int nDofs = static_cast<int>(mappedDofs.size());
+      for (int i = 0; i < nDofs; i++) {
+	int perRowDof = 0;
+	if (isPeriodic(rowFeSpace, *it, mappedDofs[i].first))
+	  perRowDof = map(rowFeSpace, *it, mappedDofs[i].first);
+	else
+	  perRowDof = mappedDofs[i].first;
+	
+	int perColDof;
+	if (isPeriodic(colFeSpace, *it, mappedDofs[i].second))
+	  perColDof = map(colFeSpace, *it, mappedDofs[i].second);
+	else
+	  perColDof = mappedDofs[i].second;
+	
+	mappedDofs.push_back(make_pair(perRowDof, perColDof));
+      }
+    }    
+  }
+
+
   void PeriodicMap::serialize(ostream &out, 
 			      vector<const FiniteElemSpace*> feSpaces)
   {
diff --git a/AMDiS/src/parallel/PeriodicMap.h b/AMDiS/src/parallel/PeriodicMap.h
index 0670eeb96370bbcd15178330c4a34933a7d2ad4c..3442787a9fdb3f26d8e151f48e004d73a0dbbe6d 100644
--- a/AMDiS/src/parallel/PeriodicMap.h
+++ b/AMDiS/src/parallel/PeriodicMap.h
@@ -159,6 +159,21 @@ namespace AMDiS {
 		const std::set<int>& perAsc, 
 		vector<int>& mappedDofs);
 
+    /** \brief
+     * Maps a given DOF index pair for all given periodic DOF associations.
+     *
+     * \param[in]   rowFeSpace      feSpace of the DOFs on the first component.
+     * \param[in]   colFeSpace      feSpace of the DOFs on the second component.
+     * \param[in]   globalDofIndex  pair of global index of a DOF.
+     * \param[in]   perAsc          set of periodic associations.
+     * \param[out]  mappedDofs      set of pairs of global DOF indices.
+     */    
+    void mapDof(const FiniteElemSpace* rowFeSpace,
+		const FiniteElemSpace* colFeSpace,
+		pair<int, int> globalDofIndex, 
+		const std::set<int>& perAsc, 
+		vector<pair<int, int> >& mappedDofs);
+
 
     /// Returns true, if the DOF (global index) is a periodic DOF.
     inline bool isPeriodic(const FiniteElemSpace *feSpace, int globalDofIndex)
diff --git a/AMDiS/src/parallel/PetscSolverGlobalMatrix.cc b/AMDiS/src/parallel/PetscSolverGlobalMatrix.cc
index 70b1c65e70a0438ac3405c9acf68de87638dee4b..7e20c803c2ab0ed08b5e772d19b23afbd79bbbc5 100644
--- a/AMDiS/src/parallel/PetscSolverGlobalMatrix.cc
+++ b/AMDiS/src/parallel/PetscSolverGlobalMatrix.cc
@@ -805,32 +805,8 @@ namespace AMDiS {
 	  // === associations of the row and column indices.             ===
 
 	  vector<pair<int, int> > entry;
-	  
-	  // First, add the original entry.
-	  entry.push_back(make_pair(globalRowDof, globalColDof));
-
-	  // Then, traverse the periodic associations of the row and column
-	  // indices and create the corresponding entries.
-	  for (std::set<int>::iterator it = perAsc.begin(); it != perAsc.end(); ++it) {
-	    int nEntry = static_cast<int>(entry.size());
-	    for (int i = 0; i < nEntry; i++) {
-	      int perRowDof = 0;
-
-	      if (perMap.isPeriodic(rowFe, *it, entry[i].first))
-		perRowDof = perMap.map(rowFe, *it, entry[i].first);
-	      else
-		perRowDof = entry[i].first;
-
-	      int perColDof;
-	      if (perMap.isPeriodic(colFe, *it, entry[i].second))
-		perColDof = perMap.map(colFe, *it, entry[i].second);
-	      else
-		perColDof = entry[i].second;	      	      
-	      
-	      entry.push_back(make_pair(perRowDof, perColDof));
-	    }
-	  }
-
+	  perMap.mapDof(rowFe, colFe, make_pair(globalRowDof, globalColDof),
+			perAsc, entry);
 
 	  // === Translate the matrix entries to PETSc's matrix.