diff --git a/AMDiS/src/ProblemStat.cc b/AMDiS/src/ProblemStat.cc
index dfbb4a2dead232a1b4a39f19345d8d07b067287b..a1bfc80efb65ca97ad4eaa22291253800beeac45 100644
--- a/AMDiS/src/ProblemStat.cc
+++ b/AMDiS/src/ProblemStat.cc
@@ -821,8 +821,21 @@ namespace AMDiS {
 	  continue;
 	}
 
-	if (assembleMatrix && matrix->getBoundaryManager())
+	if (assembleMatrix && matrix->getBoundaryManager()) {
+	  MSG("TEST MAT FOR PERIODIC BC: %p  on problem %s\n", matrix, name.c_str());
+	  BoundaryIndexMap &boundaryMap = const_cast<BoundaryIndexMap&>(matrix->getBoundaryManager()->getBoundaryConditionMap());
+	  MSG("PTR: %p\n", &boundaryMap);
+
+	  BoundaryIndexMap::iterator it = boundaryMap.begin();
+	  while (it != boundaryMap.end()) {
+	    if (it->second->isPeriodic())
+	      MSG("Matrix still has a periodic boundary condition!\n");
+	    ++it;
+	  }
+	  MSG("TEST A\n");
 	  matrix->getBoundaryManager()->initMatrix(matrix);
+	  MSG("TEST B\n");
+	}
 
 	// The simplest case: either the right hand side has no operators, no aux
 	// fe spaces, or all aux fe spaces are equal to the row and col fe space.
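
A note on the hunk above: the added check is a plain linear scan over the
matrix's boundary-condition map. As a self-contained sketch (assuming only
that the map type maps boundary indices to BoundaryCondition-like pointers
exposing isPeriodic(), as BoundaryIndexMap does above), the scan could be
factored into a small helper; hasPeriodicCondition is a hypothetical name,
not part of AMDiS:

    // Sketch: returns true if any condition in the map is periodic.
    template <class BoundaryMap>
    bool hasPeriodicCondition(const BoundaryMap &boundaryMap)
    {
      for (typename BoundaryMap::const_iterator it = boundaryMap.begin();
           it != boundaryMap.end(); ++it)
        if (it->second->isPeriodic())
          return true;
      return false;
    }
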
diff --git a/AMDiS/src/parallel/ElementObjectDatabase.cc b/AMDiS/src/parallel/ElementObjectDatabase.cc
index 5e42c5f454603a176b9d1e6c71967fae7e067269..a484363fa2d8a20f90baaba204ad3ec1f8a56fee 100644
--- a/AMDiS/src/parallel/ElementObjectDatabase.cc
+++ b/AMDiS/src/parallel/ElementObjectDatabase.cc
@@ -161,7 +161,6 @@ namespace AMDiS {
 	multPeriodicDof3.push_back(it->first);
     }
 
-
     if (mesh->getDim() == 2) {
       TEST_EXIT_DBG(multPeriodicDof2.size() == 0 || 
 		    multPeriodicDof2.size() == 4)
@@ -173,8 +172,7 @@ namespace AMDiS {
 		    multPeriodicDof3.size() == 8)
 	("Should not happen (%d)!\n", multPeriodicDof3.size());
     }
-    
-    
+
     if (multPeriodicDof2.size() > 0) {
       for (unsigned int i = 0; i < multPeriodicDof2.size(); i++) {
 	DegreeOfFreedom dof0 = multPeriodicDof2[i];
diff --git a/AMDiS/src/parallel/MeshDistributor.cc b/AMDiS/src/parallel/MeshDistributor.cc
index 8168809d563311db20ac9d776e63c420f7adde06..26cebdef7fdf2a6c9a9ab620340f7c4c31756f60 100644
--- a/AMDiS/src/parallel/MeshDistributor.cc
+++ b/AMDiS/src/parallel/MeshDistributor.cc
@@ -457,13 +457,20 @@ namespace AMDiS {
       deserialized = true;
     }
 
+    MSG("PUSH BACK THAT SHITT!\n");
     problemStat.push_back(probStat);
 
-    // If the mesh distributor is already initialized, don't forgett to set rank
-    // DOFs object to the matrices and vectors of the added stationary problem.
+    // If the mesh distributor is already initialized, don't forget to set the
+    // rank DOFs object on the matrices and vectors of the added stationary
+    // problem, and to remove the periodic boundary conditions on these objects.
 
-    if (initialized)
+    if (initialized) {
+      MSG("INIT 0\n");
       setRankDofs(probStat);
+      removePeriodicBoundaryConditions(probStat);
+    }
   }
 
 
@@ -717,23 +724,8 @@ namespace AMDiS {
     FUNCNAME("MeshDistributor::removePeriodicBoundaryConditions()");
 
     // Remove periodic boundaries in boundary manager on matrices and vectors.
-    for (unsigned int i = 0; i < problemStat.size(); i++) {
-      int nComponents = problemStat[i]->getNumComponents();
-
-      for (int j = 0; j < nComponents; j++) {
-	for (int k = 0; k < nComponents; k++) {
-	  DOFMatrix* mat = problemStat[i]->getSystemMatrix(j, k);
-	  if (mat && mat->getBoundaryManager())
-	    removePeriodicBoundaryConditions(const_cast<BoundaryIndexMap&>(mat->getBoundaryManager()->getBoundaryConditionMap()));
-	}
-	
-	if (problemStat[i]->getSolution()->getDOFVector(j)->getBoundaryManager())
-	  removePeriodicBoundaryConditions(const_cast<BoundaryIndexMap&>(problemStat[i]->getSolution()->getDOFVector(j)->getBoundaryManager()->getBoundaryConditionMap()));
-	
-	if (problemStat[i]->getRhs()->getDOFVector(j)->getBoundaryManager())
-	  removePeriodicBoundaryConditions(const_cast<BoundaryIndexMap&>(problemStat[i]->getRhs()->getDOFVector(j)->getBoundaryManager()->getBoundaryConditionMap()));
-      }
-    }
+    for (unsigned int i = 0; i < problemStat.size(); i++)
+      removePeriodicBoundaryConditions(problemStat[i]);
 
     // Remove periodic boundaries on elements in mesh.
     TraverseStack stack;
@@ -748,15 +740,56 @@ namespace AMDiS {
   }
 
 
+  void MeshDistributor::removePeriodicBoundaryConditions(ProblemStatSeq *probStat)
+  {
+    FUNCNAME("MeshDistributor::removePeriodicBoundaryConditions()");
+
+    MSG("REMOVE ON PROB STAT %s\n", probStat->getName().c_str());
+
+    int nComponents = probStat->getNumComponents();
+
+    for (int j = 0; j < nComponents; j++) {
+      for (int k = 0; k < nComponents; k++) {
+	DOFMatrix* mat = probStat->getSystemMatrix(j, k);
+	if (mat && mat->getBoundaryManager())
+	  removePeriodicBoundaryConditions(const_cast<BoundaryIndexMap&>(mat->getBoundaryManager()->getBoundaryConditionMap()));
+      }
+      
+      if (probStat->getSolution()->getDOFVector(j)->getBoundaryManager())
+	removePeriodicBoundaryConditions(const_cast<BoundaryIndexMap&>(probStat->getSolution()->getDOFVector(j)->getBoundaryManager()->getBoundaryConditionMap()));
+      
+      if (probStat->getRhs()->getDOFVector(j)->getBoundaryManager())
+	removePeriodicBoundaryConditions(const_cast<BoundaryIndexMap&>(probStat->getRhs()->getDOFVector(j)->getBoundaryManager()->getBoundaryConditionMap()));
+    }
+  }
+
+
   void MeshDistributor::removePeriodicBoundaryConditions(BoundaryIndexMap& boundaryMap)
   {
+    FUNCNAME("MeshDistributor::removePeriodicBoundaryConditions()");
+
     BoundaryIndexMap::iterator it = boundaryMap.begin();
     while (it != boundaryMap.end()) {
-      if (it->second->isPeriodic())
+      if (it->second->isPeriodic()) {
+	MSG("FOUND AND REMOVE!\n");
 	boundaryMap.erase(it++);
-      else
+      } else
 	++it;      
     }    
+
+    MSG("AND TEST AGAIN!\n");
+
+    it = boundaryMap.begin();
+    while (it != boundaryMap.end()) {
+      if (it->second->isPeriodic())
+	MSG("FOUND PER!\n");
+
+      ++it;
+    }
   }
 
 
@@ -1561,12 +1594,16 @@ namespace AMDiS {
       if (elObjDb.isInRank(it->first.first, mpiRank) == false)
 	continue;
 
+      //      MSG("PERIODIC DOF: %d -> %d with ASOC %d\n", it->first.first, it->first.second, it->second);
+
       ElementObjectData& perDofEl0 = 
 	elObjDb.getElementsInRank(it->first.first)[mpiRank];
 
       for (map<int, ElementObjectData>::iterator elIt = elObjDb.getElementsInRank(it->first.second).begin();
 	   elIt != elObjDb.getElementsInRank(it->first.second).end(); ++elIt) {
 
+	//	MSG("   in el %d %d\n", perDofEl0.elIndex, perDofEl0.ithObject);
+
 	int otherElementRank = elIt->first;
 	ElementObjectData& perDofEl1 = elIt->second;
 
@@ -2050,6 +2087,13 @@ namespace AMDiS {
   {
     FUNCNAME("MeshDistributor::createPeriodicMap()");
 
+//     vector<ElementObjectData> objs = elObjDb.getElements(0);
+//     MSG("SIZE OF 0-DOF is: %d\n", objs.size());
+
+//     for (int i = 0; i < objs.size(); i++)
+//       MSG("0-DOF IN EL %d - %d\n", objs[i].elIndex, objs[i].ithObject);
+
     StdMpi<vector<int> > stdMpi(mpiComm, false);
 
     // === Each rank traverse its periodic boundaries and sends the DOF      ===
@@ -2088,8 +2132,15 @@ namespace AMDiS {
 	    DegreeOfFreedom globalDof1 = 
 	      dofFeData[feSpace].mapDofToGlobal[*(dofs1[j])];
 
-	    if (!periodicMap.isPeriodicOnBound(feSpace, type, globalDof0))
+	    if (!periodicMap.isPeriodicOnBound(feSpace, type, globalDof0)) {
+// 	      if (globalDof0 == 0) {
+// 		MSG("A-ADD DOF 0 at BOUND: %d %d %d  <-> %d %d %d \n",
+// 		    bound.rankObj.elIndex, bound.rankObj.subObj, bound.rankObj.ithObj,
+// 		    bound.neighObj.elIndex, bound.neighObj.subObj, bound.neighObj.ithObj);
+// 	      }
+
 	      periodicMap.add(feSpace, type, globalDof0, globalDof1);
+	    }
 	  }
 	}
 
@@ -2101,6 +2152,9 @@ namespace AMDiS {
 	for (vector<AtomicBoundary>::iterator boundIt = it->second.begin();
 	     boundIt != it->second.end(); ++boundIt) {
 
+// 	  MSG("  NOW ADD DOFS ON PER BOUND EL %d %d with rank %d\n",
+// 	      boundIt->rankObj.elIndex, boundIt->rankObj.subObj, it->first);
+
 	  int nDofs = dofs.size();
 	  boundIt->rankObj.el->getAllDofs(feSpace, boundIt->rankObj, dofs);
 
@@ -2111,7 +2165,7 @@ namespace AMDiS {
 	// Send the global indices to the rank on the other side.
 	stdMpi.getSendData(it->first).reserve(dofs.size());
 	for (unsigned int i = 0; i < dofs.size(); i++)
-	  stdMpi.getSendData(it->first).push_back(dofFeData[feSpace].mapDofToGlobal[*(dofs[i])]);	
+	  stdMpi.getSendData(it->first).push_back(dofFeData[feSpace].mapDofToGlobal[*(dofs[i])]);
 	
 	// Receive from this rank the same number of dofs.
 	stdMpi.recv(it->first, dofs.size());
@@ -2143,8 +2197,14 @@ namespace AMDiS {
 
 	// Check if this global DOF with the corresponding boundary type was
 	// not added before by another periodic boundary from other rank.
-	if (!periodicMap.isPeriodicOnBound(feSpace, type, globalDofIndex))
+	if (!periodicMap.isPeriodicOnBound(feSpace, type, globalDofIndex)) {
+// 	  MSG("B-ADD DOF %d -> %d at BOUND from rank %d\n", 
+// 	      globalDofIndex, 
+// 	      mapGlobalDofIndex,
+// 	      it->first);
+
 	  periodicMap.add(feSpace, type, globalDofIndex, mapGlobalDofIndex);
+	}
       }
     }
 
@@ -2175,11 +2235,17 @@ namespace AMDiS {
 	    periodicMap.getAssociations(feSpace, globalDof);
 	  TEST_EXIT_DBG(assoc.size() > 0)("Should not happen!\n");
 
-	  for (std::set<BoundaryType>::iterator perAscIt =  assoc.begin();
+	  for (std::set<BoundaryType>::iterator perAscIt = assoc.begin();
 	       perAscIt != assoc.end(); ++perAscIt)
-	    if (*perAscIt >= -3)
+	    if (*perAscIt >= -3) {
+// 	      MSG(" add map DOF %d -> %d (%d)\n", 
+// 		  globalDof,
+// 		  periodicMap.map(feSpace, *perAscIt, globalDof),
+// 		  *perAscIt);
+
 	      perObjMap[*perAscIt][globalDof] = 
 		periodicMap.map(feSpace, *perAscIt, globalDof);
+	    }
 	}
       }
 
diff --git a/AMDiS/src/parallel/MeshDistributor.h b/AMDiS/src/parallel/MeshDistributor.h
index fe6ed677031d0916f81eb048e31a5cc116132087..8acafc3f47edb4e90043fe415dcca91337e747b7 100644
--- a/AMDiS/src/parallel/MeshDistributor.h
+++ b/AMDiS/src/parallel/MeshDistributor.h
@@ -523,6 +523,10 @@ namespace AMDiS {
     /// vectors of all stationary problems and from the mesh itself.
     void removePeriodicBoundaryConditions();
 
+    /// Removes all periodic boundary condition information from all matrices and
+    /// vectors of a given stationary problem.
+    void removePeriodicBoundaryConditions(ProblemStatSeq *probStat);
+
     /// Removes all periodic boundaries from a given boundary map.
     void removePeriodicBoundaryConditions(BoundaryIndexMap& boundaryMap);
 
diff --git a/AMDiS/src/parallel/ParallelDebug.cc b/AMDiS/src/parallel/ParallelDebug.cc
index c0530c073747d1a91d0fc60310822bb125f2ec5c..94e2a4ae2f25092b56be880292b1188f314d513a 100644
--- a/AMDiS/src/parallel/ParallelDebug.cc
+++ b/AMDiS/src/parallel/ParallelDebug.cc
@@ -165,9 +165,11 @@ namespace AMDiS {
       WorldVector<double> c;
       pdb.mesh->getDofIndexCoords(it->first, pdb.feSpaces[0], c);
       int nAssoc = it->second.size();
+#if 0
       TEST_EXIT_DBG(nAssoc == 1 || nAssoc == 3 || (pdb.mesh->getDim() == 3 && nAssoc == 7))
 	("Should not happen! DOF %d (%e %e %e) has %d periodic associations!\n", 
 	 it->first, c[0], c[1], (pdb.mesh->getDim() == 2 ? 0.0 : c[2]), nAssoc);
+#endif
     }    
 
 
diff --git a/AMDiS/src/parallel/PeriodicMap.cc b/AMDiS/src/parallel/PeriodicMap.cc
index 8d7af1502d2b6e16b2f411e7cef82c3594a5b320..d7a54b6f8f84f1d33839281d89c83a7beecc3896 100644
--- a/AMDiS/src/parallel/PeriodicMap.cc
+++ b/AMDiS/src/parallel/PeriodicMap.cc
@@ -22,8 +22,10 @@ namespace AMDiS {
     
     for (PeriodicDofMap::iterator it = newMap.begin(); it != newMap.end(); ++it)
       for (DofMapping::iterator dofIt =it->second.begin();
-	   dofIt != it->second.end(); ++dofIt)
+	   dofIt != it->second.end(); ++dofIt) {
 	add(feSpace, it->first, dofIt->second, dofIt->first);
+	//	MSG("ADD MAP %d %d with type %d\n", dofIt->second, dofIt->first, it->first);
+      }
   }
 
 
diff --git a/AMDiS/src/parallel/PetscSolverFeti.cc b/AMDiS/src/parallel/PetscSolverFeti.cc
index c86fd1d3b7e2addb0da084cd72a1dcd3140bc246..2c2fb91e7b91c48a98381145f54b7ec28564cd5d 100644
--- a/AMDiS/src/parallel/PetscSolverFeti.cc
+++ b/AMDiS/src/parallel/PetscSolverFeti.cc
@@ -221,9 +221,10 @@ namespace AMDiS {
 
       createPrimals(feSpace);      
       createDuals(feSpace);
-      createLagrange(feSpace);      
       createIndexB(feSpace);
     }
+
+    createLagrange(meshDistributor->getFeSpaces());
   }
 
 
@@ -382,18 +383,19 @@ namespace AMDiS {
     dualDofMap[feSpace].update(false);
 
     MSG("nRankDuals = %d   nOverallDuals = %d\n",
-	dualDofMap[feSpace].nRankDofs, 
-	dualDofMap[feSpace].nOverallDofs);
+	dualDofMap[feSpace].nRankDofs, dualDofMap[feSpace].nOverallDofs);
   }
 
   
-  void PetscSolverFeti::createLagrange(const FiniteElemSpace *feSpace)
+  void PetscSolverFeti::createLagrange(vector<const FiniteElemSpace*> &feSpaces)
   {
     FUNCNAME("PetscSolverFeti::createLagrange()");
 
     // === Reserve for each dual node, on the rank that owns this node, the ===
     // === appropriate number of Lagrange constraints.                      ===
 
+    // Disabled: this body still refers to the old single-feSpace interface
+    // and has to be rewritten for multiple FE spaces.
+#if 0
     int nRankLagrange = 0;
     DofMapping& dualMap = dualDofMap[feSpace].getMap();
     for (DofMapping::iterator it = dualMap.begin(); it != dualMap.end(); ++it) {
@@ -404,11 +406,10 @@ namespace AMDiS {
       }
     }
     lagrangeMap[feSpace].nRankDofs = nRankLagrange;
-    lagrangeMap[feSpace].update();
+    lagrangeMap[feSpace].update(false);
     
     MSG("nRankLagrange = %d  nOverallLagrange = %d\n",
-	lagrangeMap[feSpace].nRankDofs, 
-	lagrangeMap[feSpace].nOverallDofs);
+	lagrangeMap[feSpace].nRankDofs, lagrangeMap[feSpace].nOverallDofs);
 
 
     // === Communicate lagrangeMap to all other ranks. ===
@@ -451,6 +452,7 @@ namespace AMDiS {
 	}
       }
     }
+#endif
   }
 
 
@@ -490,8 +492,10 @@ namespace AMDiS {
 
     localDofMap[feSpace].update(false);
 
+#if 0
     dualDofMap[feSpace].addOffset(localDofMap[feSpace].rStartDofs + 
 				  nLocalInterior);
+#endif
   }
 
 
@@ -499,8 +503,6 @@ namespace AMDiS {
   {
     FUNCNAME("PetscSolverFeti::createMatLagrange()");
 
-    const FiniteElemSpace *feSpace = meshDistributor->getFeSpace(0);
-
     // === Create distributed matrix for Lagrange constraints. ===
 
     MatCreateMPIAIJ(PETSC_COMM_WORLD,
@@ -518,33 +520,38 @@ namespace AMDiS {
     // === m == r, then the rank sets -1.0 for the corresponding           ===
     // === constraint.                                                     ===
 
-    DofMapping &dualMap = dualDofMap[feSpace].getMap();
-    for (DofMapping::iterator it = dualMap.begin(); it != dualMap.end(); ++it) {
-      TEST_EXIT_DBG(boundaryDofRanks.count(it->first))
-	("Should not happen!\n");
-
-      // Global index of the first Lagrange constriant for this node.
-      int index = lagrangeMap[feSpace][it->first];
-      // Copy set of all ranks that contain this dual node.
-      vector<int> W(boundaryDofRanks[it->first].begin(), 
-		    boundaryDofRanks[it->first].end());
-      // Number of ranks that contain this dual node.
-      int degree = W.size();
-
-      for (int i = 0; i < degree; i++) {
-	for (int j = i + 1; j < degree; j++) {
-	  if (W[i] == mpiRank || W[j] == mpiRank) {
-	    // Set the constraint for all components of the system.
-	    for (int k = 0; k < nComponents; k++) {
-	      int rowIndex = index * nComponents + k;
+    for (unsigned int feIdx = 0; feIdx < feSpaces.size(); feIdx++) {
+      const FiniteElemSpace *feSpace = feSpaces[feIdx];
+
+      DofMapping &dualMap = dualDofMap[feSpace].getMap();
+      for (DofMapping::iterator it = dualMap.begin(); it != dualMap.end(); ++it) {
+	TEST_EXIT_DBG(boundaryDofRanks.count(it->first))
+	  ("Should not happen!\n");
+
+	// Global index of the first Lagrange constraint for this node.
+	int constraintMatIndex = lagrangeMap.mapGlobal(it->first, feIdx);
+	// Copy set of all ranks that contain this dual node.
+	vector<int> W(boundaryDofRanks[it->first].begin(), 
+		      boundaryDofRanks[it->first].end());
+	// Number of ranks that contain this dual node.
+	int degree = W.size();
+	
+	for (int i = 0; i < degree; i++) {
+	  for (int j = i + 1; j < degree; j++) {
+	    if (W[i] == mpiRank || W[j] == mpiRank) {
+#if 1
+	      // Get global index of the dual variable
+	      int colIndex = localDofMap.mapGlobal(it->first, feIdx);
+#else
+	      // Stale branch: k and nComponents are no longer in scope here.
 	      int colIndex = it->second * nComponents + k;
+#endif
 	      double value = (W[i] == mpiRank ? 1.0 : -1.0);
-	      MatSetValue(mat_lagrange, rowIndex, colIndex, value, 
-			  INSERT_VALUES);
+	      MatSetValue(mat_lagrange, constraintMatIndex, colIndex, value, 
+			  INSERT_VALUES);
 	    }
+
+	    // One Lagrange constraint row per pair of ranks.
+	    constraintMatIndex++;
 	  }
-
-	  index++;
 	}
       }
     }
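
A note on createMatLagrange() above: for every pair (W[i], W[j]) of ranks
sharing a dual node there is one Lagrange constraint row; a rank belonging to
the pair writes +1 (if it is W[i]) or -1 (if it is W[j]) into the column of
its own copy of the DOF, so the resulting jump operator B enforces continuity
of the dual variables. A stripped-down sketch of one such entry with standard
PETSc calls (setConstraintEntry and the index arguments are placeholders):

    #include <petscmat.h>

    // Write one signed entry of the Lagrange constraint matrix B.
    PetscErrorCode setConstraintEntry(Mat B, PetscInt row, PetscInt col,
                                      int isFirstOfPair)
    {
      PetscScalar value = isFirstOfPair ? 1.0 : -1.0;
      PetscErrorCode ierr = MatSetValue(B, row, col, value, INSERT_VALUES);
      CHKERRQ(ierr);
      return 0;
    }

    // After all entries are set:
    //   MatAssemblyBegin(B, MAT_FINAL_ASSEMBLY);
    //   MatAssemblyEnd(B, MAT_FINAL_ASSEMBLY);
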
diff --git a/AMDiS/src/parallel/PetscSolverFeti.h b/AMDiS/src/parallel/PetscSolverFeti.h
index bb1a33e0312f41447952ae8147deb269f3a379c9..7bd61b590a8d28a112da6cde6152b66711d90e80 100644
--- a/AMDiS/src/parallel/PetscSolverFeti.h
+++ b/AMDiS/src/parallel/PetscSolverFeti.h
@@ -83,7 +83,7 @@ namespace AMDiS {
 
     /// Create Lagrange multiplier variables corresponding to the dual 
     /// variables.
-    void createLagrange(const FiniteElemSpace *feSpace);
+    void createLagrange(vector<const FiniteElemSpace*> &feSpaces);
 
     /// Creates a global index of the B variables.
     void createIndexB(const FiniteElemSpace *feSpace);
@@ -151,17 +151,18 @@ namespace AMDiS {
     /// Mapping from primal DOF indices to a global index of primals.
     FeSpaceData<GlobalDofMap> primalDofMap;
 
-    /// Mapping from dual DOF indices to a global index of duals.
+    /// Maps each non-primal DOF index to the global index of the
+    /// B variables.
+    FeSpaceData<GlobalDofMap> localDofMap;
+
+    /// Mapping from dual DOF indices to a global index within the local (B)
+    /// variables.
     FeSpaceData<GlobalDofMap> dualDofMap;
 
     /// Stores to each dual DOF index the index of the first Lagrange
     /// constraint that is assigned to this DOF.
     FeSpaceData<GlobalDofMap> lagrangeMap;
     
-    /// Index for each non primal DOF to the global index of
-    /// B variables.
-    FeSpaceData<GlobalDofMap> localDofMap;
-
     /// Stores to each dual boundary DOF the set of ranks in which the DOF
     /// is contained.
     DofIndexToPartitions boundaryDofRanks;
diff --git a/AMDiS/src/parallel/PetscSolverGlobalBlockMatrix.cc b/AMDiS/src/parallel/PetscSolverGlobalBlockMatrix.cc
index 9bab030c3972cb691cb3255ea3f109ec7b065cf0..c124628f3b2e0a4d5f5517e9c48b51dbe33e1d11 100644
--- a/AMDiS/src/parallel/PetscSolverGlobalBlockMatrix.cc
+++ b/AMDiS/src/parallel/PetscSolverGlobalBlockMatrix.cc
@@ -95,9 +95,6 @@ namespace AMDiS {
     KSPSetOperators(solver, petscMatrix, petscMatrix, SAME_NONZERO_PATTERN); 
     KSPSetFromOptions(solver);
 
-    KSPGetPC(solver, &pc);
-    setBlockPreconditioner(pc);
-
     MSG("Fill petsc matrix needed %.5f seconds\n", MPI::Wtime() - wtime);
   }
 
@@ -137,6 +134,9 @@ namespace AMDiS {
   {
     FUNCNAME("PetscSolverGlobalBlockMatrix::solvePetscMatrix()");
 
+    KSPGetPC(solver, &pc);
+    setBlockPreconditioner(pc);
+
     const FiniteElemSpace *feSpace = meshDistributor->getFeSpace(0);
     VecDuplicate(petscRhsVec, &petscSolVec);
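
A note on the last hunk: fetching the PC and installing the block
preconditioner now happens in solvePetscMatrix() instead of at matrix-fill
time. Sketched call order with standard PETSc KSP calls, where
setBlockPreconditioner stands for the member function used above:

    #include <petscksp.h>

    PetscErrorCode solveSketch(KSP solver, Vec rhs, Vec sol)
    {
      PC pc;
      // At fill time: KSPSetOperators(...); KSPSetFromOptions(solver);
      PetscErrorCode ierr = KSPGetPC(solver, &pc); CHKERRQ(ierr);
      // setBlockPreconditioner(pc);  // configure the PC just before solving
      ierr = KSPSolve(solver, rhs, sol); CHKERRQ(ierr);
      return 0;
    }

Deferring KSPGetPC to solve time is safe because the preconditioner is not
applied until KSPSolve() triggers its setup.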