From 41a2b884da42aebe9f64433d936bb4e70f3e0db5 Mon Sep 17 00:00:00 2001
From: Thomas Witkowski <thomas.witkowski@gmx.de>
Date: Mon, 20 Aug 2012 07:59:27 +0000
Subject: [PATCH] Fix several parallel bugs and improve DOF debug output

- Debug.cc: let printInfoByDof() also print the coordinates of the DOF
  and stop the element traversal once the level-0 parent element has
  been found.
- InteriorBoundary.cc: re-enable the periodic boundary handling that
  had been disabled with "#if 0".
- PeriodicMap.cc: in mapDof(), skip non-periodic DOF associations
  instead of failing a debug assertion on them.
- PetscSolver: manage one coarse space DOF mapping per component
  (std::map<int, ParallelDofMapping*> instead of a single pointer) and
  remove the duplicated default argument from the out-of-line
  definition of setCoarseSpaceDofMapping(); the default is already
  given in the declaration. Restore the Mat members matIntInt,
  matCoarseCoarse, matIntCoarse and matCoarseInt and drop a stray
  "vector<vector<mat> >" fragment from the header.
- PetscSolverFeti.cc: hand the primal DOF map to the subdomain solver
  via setCoarseSpaceDofMapping() instead of a second argument to
  setDofMapping().
- PetscSolverGlobalMatrix.cc: access the coarse space mapping per
  component; the coarse space NNZ computation is disabled for now.
---
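Note below the "---" line (not part of the commit message): a minimal
usage sketch of the reworked coarse space interface, assuming a
FETI-like setup in which "subdomain" is a PetscSolver* and localDofMap
and primalDofMap are the ParallelDofMapping objects used in
PetscSolverFeti.cc below:

    // The interior and the coarse space DOF mappings are now set by
    // two separate calls; setCoarseSpaceDofMapping() also accepts an
    // optional component index that defaults to -1.
    subdomain->setDofMapping(&localDofMap);
    subdomain->setCoarseSpaceDofMapping(&primalDofMap);
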
 AMDiS/src/Debug.cc                            |  9 +++++++++
 AMDiS/src/parallel/InteriorBoundary.cc        |  4 ----
 AMDiS/src/parallel/PeriodicMap.cc             | 13 ++++++-------
 AMDiS/src/parallel/PetscSolver.cc             |  5 ++---
 AMDiS/src/parallel/PetscSolver.h              |  7 ++-----
 AMDiS/src/parallel/PetscSolverFeti.cc         |  3 ++-
 AMDiS/src/parallel/PetscSolverGlobalMatrix.cc | 11 +++++++----
 7 files changed, 28 insertions(+), 24 deletions(-)

diff --git a/AMDiS/src/Debug.cc b/AMDiS/src/Debug.cc
index f949ef91..2b652c1d 100644
--- a/AMDiS/src/Debug.cc
+++ b/AMDiS/src/Debug.cc
@@ -329,9 +329,17 @@ namespace AMDiS {
     {
       FUNCNAME("debug::printInfoByDof()");
 
+      WorldVector<double> coords;
+      feSpace->getMesh()->getDofIndexCoords(dof, feSpace, coords);
+
       Element *el = getDofIndexElement(feSpace, dof);
       Element *parEl = getLevel0ParentElement(feSpace->getMesh(), el);
       
+      if (coords.getSize() == 2)
+	MSG("[DBG] DOF-INFO:  dof = %d  coords: %e %e\n", dof, coords[0], coords[1]);
+      else 
+	MSG("[DBG] DOF-INFO:  dof = %d  coords: %e %e %e\n", dof, coords[0], coords[1], coords[2]);
+
       MSG("[DBG] DOF-INFO:  dof = %d  elidx = %d  pelidx = %d\n", 
 	  dof, el->getIndex(), parEl->getIndex());
       
@@ -341,6 +349,7 @@ namespace AMDiS {
       while (elInfo) {
 	if (elInfo->getElement()->getIndex() == parEl->getIndex()) {
 	  MSG("[DBG] EL INFO TO %d: type = %d\n", parEl->getIndex(), elInfo->getType());
+	  break;
 	}
 
 	elInfo = stack.traverseNext(elInfo);
diff --git a/AMDiS/src/parallel/InteriorBoundary.cc b/AMDiS/src/parallel/InteriorBoundary.cc
index d63bc4c6..feb0f5cd 100644
--- a/AMDiS/src/parallel/InteriorBoundary.cc
+++ b/AMDiS/src/parallel/InteriorBoundary.cc
@@ -477,8 +477,6 @@ namespace AMDiS {
     }
 
 
-#if 0
-
     // === Do the same for the periodic boundaries. ===
 
     if (periodic.size() > 0) {
@@ -533,8 +531,6 @@ namespace AMDiS {
 	}
       }     
     } // periodicBoundary.boundary.size() > 0
-
-#endif
   }
 
 
diff --git a/AMDiS/src/parallel/PeriodicMap.cc b/AMDiS/src/parallel/PeriodicMap.cc
index 0a32c237..2199b75c 100644
--- a/AMDiS/src/parallel/PeriodicMap.cc
+++ b/AMDiS/src/parallel/PeriodicMap.cc
@@ -54,6 +54,8 @@ namespace AMDiS {
 			   const std::set<int>& perAsc, 
 			   vector<int>& mappedDofs)
   {
+    FUNCNAME("PeriodicMap::mapDof()");
+
     mappedDofs.clear();
     mappedDofs.push_back(globalDofIndex);
     
@@ -61,13 +63,9 @@ namespace AMDiS {
 	 it != perAsc.end(); ++it) {
       int nDofs = static_cast<int>(mappedDofs.size());
       
-      for (int i = 0; i < nDofs; i++) {
-	TEST_EXIT_DBG(isPeriodic(feSpace, *it, mappedDofs[i]))
-	  ("Wrong periodic DOF associations at boundary %d with DOF %d!\n",
-	   *it, mappedDofs[i]);
-	
-	mappedDofs.push_back(map(feSpace, *it, mappedDofs[i]));
-      }
+      for (int i = 0; i < nDofs; i++)
+	if (isPeriodic(feSpace, *it, mappedDofs[i]))
+	  mappedDofs.push_back(map(feSpace, *it, mappedDofs[i]));
     }
   }
 
@@ -84,6 +82,7 @@ namespace AMDiS {
     for (std::set<int>::iterator it = perAsc.begin(); 
 	 it != perAsc.end(); ++it) {
       int nDofs = static_cast<int>(mappedDofs.size());
+
       for (int i = 0; i < nDofs; i++) {
 	int perRowDof = 0;	
 	if (isPeriodic(rowFeSpace, *it, mappedDofs[i].first))
diff --git a/AMDiS/src/parallel/PetscSolver.cc b/AMDiS/src/parallel/PetscSolver.cc
index 25ebd53f..51af213a 100644
--- a/AMDiS/src/parallel/PetscSolver.cc
+++ b/AMDiS/src/parallel/PetscSolver.cc
@@ -14,7 +14,7 @@
 #include "parallel/PetscSolver.h"
 #include "parallel/StdMpi.h"
 #include "parallel/MpiHelper.h"
-
+#include "parallel/ParallelDofMapping.h"
 
 namespace AMDiS {
 
@@ -24,7 +24,6 @@ namespace AMDiS {
     : meshDistributor(NULL),      
       subdomainLevel(0),
       interiorMap(NULL),
-      coarseSpaceMap(NULL),
       mpiRank(-1),
       kspPrefix(""),
       removeRhsNullspace(false),
@@ -43,7 +42,7 @@ namespace AMDiS {
 
 
   void PetscSolver::setCoarseSpaceDofMapping(ParallelDofMapping *coarseDofs, 
-					     int component = -1)
+					     int component)
   {
     FUNCNAME("PetscSolver::setCoarseSpaceDofMapping()");
 
diff --git a/AMDiS/src/parallel/PetscSolver.h b/AMDiS/src/parallel/PetscSolver.h
index 211de191..39d64ebe 100644
--- a/AMDiS/src/parallel/PetscSolver.h
+++ b/AMDiS/src/parallel/PetscSolver.h
@@ -251,7 +251,7 @@ namespace AMDiS {
 
     /// Parallel DOF mapping of the (optional) coarse space. Allows to define
     /// different coarse spaces for different components.
-    map<int, ParallelDofMapping*> coarseSpaceMap;
+    std::map<int, ParallelDofMapping*> coarseSpaceMap;
 
     int mpiRank;
 
@@ -259,11 +259,8 @@ namespace AMDiS {
 
     MPI::Intracomm mpiCommLocal;
 
-
-    vector<vector<mat> >
-
     /// Petsc's matrix structure.
-    //Mat matIntInt, matCoarseCoarse, matIntCoarse, matCoarseInt;
+    Mat matIntInt, matCoarseCoarse, matIntCoarse, matCoarseInt;
 
     /// PETSc's vector structures for the rhs vector, the solution vector and a
     /// temporary vector for calculating the final residuum.
diff --git a/AMDiS/src/parallel/PetscSolverFeti.cc b/AMDiS/src/parallel/PetscSolverFeti.cc
index e1fb938f..991a277e 100644
--- a/AMDiS/src/parallel/PetscSolverFeti.cc
+++ b/AMDiS/src/parallel/PetscSolverFeti.cc
@@ -409,7 +409,8 @@ namespace AMDiS {
 	("Should not happen!\n");
     }
 
-    subdomain->setDofMapping(&localDofMap, &primalDofMap);
+    subdomain->setDofMapping(&localDofMap);
+    subdomain->setCoarseSpaceDofMapping(&primalDofMap);
 
     if (printTimings) {
       MPI::COMM_WORLD.Barrier();
diff --git a/AMDiS/src/parallel/PetscSolverGlobalMatrix.cc b/AMDiS/src/parallel/PetscSolverGlobalMatrix.cc
index 85bf351a..02cacfe7 100644
--- a/AMDiS/src/parallel/PetscSolverGlobalMatrix.cc
+++ b/AMDiS/src/parallel/PetscSolverGlobalMatrix.cc
@@ -153,9 +153,12 @@ namespace AMDiS {
 			 localMatrix);
 
       if (coarseSpaceMap.size()) {
+	MSG("NO COARSE SPACE NNZ!\n");
+	/*
 	nnzCoarse.create(mat, mpiCommGlobal, *coarseSpaceMap, NULL, meshDistributor->getElementObjectDb());
 	nnzCoarseInt.create(mat, mpiCommGlobal, *coarseSpaceMap, *interiorMap, NULL, meshDistributor->getElementObjectDb());
 	nnzIntCoarse.create(mat, mpiCommGlobal, *interiorMap, *coarseSpaceMap, NULL, meshDistributor->getElementObjectDb());
+	*/
       }
     }
 
@@ -173,8 +176,8 @@ namespace AMDiS {
 
    
     if (coarseSpaceMap.size()) {
-      int nRowsRankCoarse = coarseSpaceMap->getRankDofs();
-      int nRowsOverallCoarse = coarseSpaceMap->getOverallDofs();
+      int nRowsRankCoarse = coarseSpaceMap[0]->getRankDofs();
+      int nRowsOverallCoarse = coarseSpaceMap[0]->getOverallDofs();
 
       MatCreateAIJ(mpiCommGlobal,
 		   nRowsRankCoarse, nRowsRankCoarse,
@@ -369,8 +372,8 @@ namespace AMDiS {
 
     if (coarseSpaceMap.size()) 
       VecCreateMPI(mpiCommGlobal, 
-		   coarseSpaceMap->getRankDofs(), 
-		   coarseSpaceMap->getOverallDofs(),
+		   coarseSpaceMap[0]->getRankDofs(), 
+		   coarseSpaceMap[0]->getOverallDofs(),
 		   &rhsCoarseSpace);
     
     TEST_EXIT_DBG(vec)("No DOF vector defined!\n");
-- 
GitLab