diff --git a/AMDiS/src/parallel/MeshDistributor.cc b/AMDiS/src/parallel/MeshDistributor.cc
index 386a578e7b162c1423c4c4e8ab1da85f553c8295..936f157c8ccd3db7e74575e23eb3165a1ce113c5 100644
--- a/AMDiS/src/parallel/MeshDistributor.cc
+++ b/AMDiS/src/parallel/MeshDistributor.cc
@@ -1581,7 +1581,6 @@ namespace AMDiS {
       dofCommSd.create(intBoundarySd);
     }
 
-
     // === If requested, create more information on communication DOFs. ===
     
     if (!createBoundaryDofFlag.isSet(BOUNDARY_SUBOBJ_SORTED))
@@ -1632,6 +1631,7 @@ namespace AMDiS {
     }
   }
 
+
   void MeshDistributor::removeMacroElements()
   {
     FUNCNAME("MeshDistributor::removeMacroElements()");
diff --git a/AMDiS/src/parallel/PetscSolverFeti.cc b/AMDiS/src/parallel/PetscSolverFeti.cc
index 7dbacfb16ad658a3dcb7fefc4a26182403ff24d0..352c0a205a659c4b586da60858538b2664471c2b 100644
--- a/AMDiS/src/parallel/PetscSolverFeti.cc
+++ b/AMDiS/src/parallel/PetscSolverFeti.cc
@@ -1129,6 +1129,105 @@ namespace AMDiS {
       MatView(subdomain->getMatCoarseCoarse(), petscView);
       PetscViewerDestroy(&petscView);
     }
+
+
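+    // Debug output: if requested via the parameter below, write the Schur
+    // primal matrix to the file "schurprimal.mat" in PETSc binary format.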
+    int writeSchurPrimalMatrix = 0;
+    Parameters::get("parallel->debug->write schur primal matrix",
+		    writeSchurPrimalMatrix);
+    if (writeSchurPrimalMatrix) {
+      PetscViewer petscView;
+      PetscViewerBinaryOpen(PETSC_COMM_WORLD, "schurprimal.mat", 
+			    FILE_MODE_WRITE, &petscView);
+      MatView(mat_schur_primal, petscView);
+      PetscViewerDestroy(&petscView);
+    }
+
+
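+    // Debug output: assemble an explicit representation of the FETI-DP
+    // operator by applying it to all unit vectors; the i-th matrix-vector
+    // product yields the i-th column of the operator.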
+    int writeFetiMatrix = 0;
+    Parameters::get("parallel->debug->write feti matrix",
+		    writeFetiMatrix);
+    if (writeFetiMatrix) {
+      MSG("Start creating explicit FETI-DP matrix!\n");
+
+      Vec unitVector;
+      Vec resultVector;
+
+      Mat fetiMat;
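+      // Worst-case (dense) preallocation, since the sparsity pattern of the
+      // explicit operator is not known in advance; this is a debug-only path.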
+      MatCreateAIJ(mpiCommGlobal,
+		   lagrangeMap.getRankDofs(), 
+		   lagrangeMap.getRankDofs(),
+		   lagrangeMap.getOverallDofs(), 
+		   lagrangeMap.getOverallDofs(),
+		   lagrangeMap.getOverallDofs(), 
+		   PETSC_NULL, 
+		   lagrangeMap.getOverallDofs(), 
+		   PETSC_NULL, &fetiMat);
+
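+      // The unit and result vectors are distributed in the same way as the
+      // Lagrange multipliers.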
+      VecCreateMPI(mpiCommGlobal,
+		   lagrangeMap.getRankDofs(), lagrangeMap.getOverallDofs(), 
+		   &unitVector);
+      VecCreateMPI(mpiCommGlobal,
+		   lagrangeMap.getRankDofs(), lagrangeMap.getOverallDofs(), 
+		   &resultVector);
+
+      PetscInt low, high;
+      VecGetOwnershipRange(unitVector, &low, &high);
+      int nLocal = high - low;
+      int nnzCounter = 0;
+
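+      // Column-by-column extraction: set the i-th unit vector, apply the
+      // FETI-DP operator, and keep all entries above the drop tolerance.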
+      for (int i = 0; i < lagrangeMap.getOverallDofs(); i++) {
+	VecSet(unitVector, 0.0);
+	if (i >= low && i < high)
+	  VecSetValue(unitVector, i, 1.0, INSERT_VALUES);
+	VecAssemblyBegin(unitVector);
+	VecAssemblyEnd(unitVector);
+
+	MatMult(mat_feti, unitVector, resultVector);
+
+	PetscScalar *vals;
+	VecGetArray(resultVector, &vals);
+
+	for (int j = 0; j < nLocal; j++)
+	  if (fabs(vals[j]) > 1e-30) {
+	    MatSetValue(fetiMat, low + j, i, vals[j], INSERT_VALUES);
+	    nnzCounter++;
+	  }	 
+
+	VecRestoreArray(resultVector, &vals);
+      }
+
+      MatAssemblyBegin(fetiMat, MAT_FINAL_ASSEMBLY);
+      MatAssemblyEnd(fetiMat, MAT_FINAL_ASSEMBLY);
+
+      VecDestroy(&unitVector);
+      VecDestroy(&resultVector);
+
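+      // Sum the local nonzero counters over all ranks.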
+      mpi::globalAdd(nnzCounter);
+
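+      // Write the explicit FETI-DP matrix to "feti.mat" in PETSc binary format.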
+      PetscViewer petscView;
+      PetscViewerBinaryOpen(PETSC_COMM_WORLD, "feti.mat", 
+			    FILE_MODE_WRITE, &petscView);
+      MatView(fetiMat, petscView);
+      PetscViewerDestroy(&petscView);
+
+      MatDestroy(&fetiMat);
+
+      MSG("FETI-DP matrix written: %d x %d mat with %d nnz\n", 
+	  lagrangeMap.getOverallDofs(), lagrangeMap.getOverallDofs(), nnzCounter);
+    }
 #endif
   }