From f22281e0dfeddbd20aa3f4880180e942ea1c0de9 Mon Sep 17 00:00:00 2001
From: Thomas Witkowski <thomas.witkowski@gmx.de>
Date: Mon, 16 Apr 2012 13:09:14 +0000
Subject: [PATCH] DOES NOT COMPILE .... but must be submitted.

---
 AMDiS/src/parallel/DofComm.h          | 15 +++++++-----
 AMDiS/src/parallel/InteriorBoundary.h | 33 +++++++++++++++++++-------
 AMDiS/src/parallel/MeshDistributor.cc | 34 ++++++++++++++++++---------
 AMDiS/src/parallel/MeshDistributor.h  |  2 +-
 AMDiS/src/parallel/ParallelDebug.cc   |  3 ++-
 AMDiS/src/parallel/PetscSolverFeti.cc | 13 ++++++----
 AMDiS/src/parallel/PetscSolverFeti.h  |  2 ++
 7 files changed, 71 insertions(+), 31 deletions(-)

diff --git a/AMDiS/src/parallel/DofComm.h b/AMDiS/src/parallel/DofComm.h
index 58d5dd25..23572f74 100644
--- a/AMDiS/src/parallel/DofComm.h
+++ b/AMDiS/src/parallel/DofComm.h
@@ -34,16 +34,19 @@ namespace AMDiS {
   class DofComm
   {
   public:
-    DofComm() {}
+    DofComm() 
+      : data(1)
+    {}
     
     typedef map<const FiniteElemSpace*, DofContainer> FeMapType;
     typedef FeMapType::iterator FeMapIter;
-    typedef map<int, FeMapType> DataType;
+    // meshLevel: map[rank -> map[feSpace -> DofContainer]]
+    typedef vector<map<int, FeMapType> > DataType;
-    typedef DataType::iterator DataIter;
+    typedef map<int, FeMapType>::iterator DataIter;
 
-    DofContainer& getDofCont(int rank, const FiniteElemSpace *feSpace)
+    DofContainer& getDofContainer(int rank, const FiniteElemSpace *feSpace)
     {
-      return data[rank][feSpace];
+      return data[0][rank][feSpace];
     }
 
     void removeEmpty();
@@ -75,7 +78,7 @@ namespace AMDiS {
       {
 	FUNCNAME("DofComm::Iterator::Iterator()");
 
-	dataIter = dofComm.data.begin();
+	dataIter = dofComm.data[0].begin();
 
 	while (setNextFeMap() == false)
 	  ++dataIter;
@@ -83,7 +86,7 @@ namespace AMDiS {
 
       inline bool end()
       {
-	return (dataIter == dofComm.data.end());
+	return (dataIter == dofComm.data[0].end());
       }
       
       inline void nextRank()
diff --git a/AMDiS/src/parallel/InteriorBoundary.h b/AMDiS/src/parallel/InteriorBoundary.h
index 1865c2c3..fa105823 100644
--- a/AMDiS/src/parallel/InteriorBoundary.h
+++ b/AMDiS/src/parallel/InteriorBoundary.h
@@ -30,10 +30,13 @@
 #include "MacroElement.h"
 #include "Element.h"
 #include "Boundary.h"
+#include "parallel/MeshLevelData.h"
 
 namespace AMDiS {
 
-  typedef std::vector<std::pair<GeoIndex, int> > ExcludeList;
+  using namespace std;
+
+  typedef vector<pair<GeoIndex, int> > ExcludeList;
 
   /// Defines the geometrical objects that forms the boundary;
   struct BoundaryObject {
@@ -128,13 +131,23 @@ namespace AMDiS {
    */
   class InteriorBoundary {
   public:
-    typedef std::map<int, std::vector<AtomicBoundary> > RankToBoundMap;
+    typedef map<int, vector<AtomicBoundary> > RankToBoundMap;
 
     /// Iterator for the interior boundary object.
     class iterator {      
     public:
       iterator(InteriorBoundary &b)
-	: bound(b)
+	: bound(b),
+	  levelData(NULL),
+	  level(0)
+      {
+	reset();
+      }
+
+      iterator(InteriorBoundary &b, MeshLevelData &levelData, int level)
+	: bound(b),
+	  levelData(&levelData),
+	  level(level)
       {
 	reset();
       }
@@ -209,9 +222,13 @@ namespace AMDiS {
     protected:
       RankToBoundMap::iterator mapIt;
 
-      std::vector<AtomicBoundary>::iterator vecIt;
+      vector<AtomicBoundary>::iterator vecIt;
 
       InteriorBoundary &bound;
+
+      MeshLevelData *levelData;
+
+      int level;
     };
 
   public:
@@ -225,19 +242,19 @@ namespace AMDiS {
     AtomicBoundary& getNewAtomic(int rank);
 
     /// Writes this object to a file.
-    void serialize(std::ostream &out);
+    void serialize(ostream &out);
 
     /// Reads the state of an interior boundary from a file.
-    void deserialize(std::istream &in, std::map<int, Element*> &elIndexMap);
+    void deserialize(istream &in, map<int, Element*> &elIndexMap);
 
     /// Compares this interior boundaries with some other. The order of the
     /// boundary elements within the object does not play a role.
     bool operator==(const InteriorBoundary& other) const;
 
   protected:
-    void serializeExcludeList(std::ostream &out, ExcludeList &list);
+    void serializeExcludeList(ostream &out, ExcludeList &list);
 
-    void deserializeExcludeList(std::istream &in, ExcludeList &list);
+    void deserializeExcludeList(istream &in, ExcludeList &list);
 
   public:
     RankToBoundMap boundary;
diff --git a/AMDiS/src/parallel/MeshDistributor.cc b/AMDiS/src/parallel/MeshDistributor.cc
index c0e642b5..e94a1fbd 100644
--- a/AMDiS/src/parallel/MeshDistributor.cc
+++ b/AMDiS/src/parallel/MeshDistributor.cc
@@ -1874,16 +1874,22 @@ namespace AMDiS {
     sendDofs.clear();
     recvDofs.clear();
 
+    int nLevels = levelData.getLevelNumber();
+
     for (unsigned int i = 0; i < feSpaces.size(); i++)
-      createBoundaryDofs(feSpaces[i]);
+      for (int j = 0; j < nLevels; j++)
+	createBoundaryDofs(feSpaces[i], j);
   }
 
 
-  void MeshDistributor::createBoundaryDofs(const FiniteElemSpace *feSpace)
+  void MeshDistributor::createBoundaryDofs(const FiniteElemSpace *feSpace, int level)
   {
     FUNCNAME("MeshDistributor::createBoundaryDofs()");
 
     if (createBoundaryDofFlag.isSet(BOUNDARY_SUBOBJ_SORTED)) {
+      
+      TEST_EXIT(level == 0)
+	("This function does not support the usage of multi level structure!\n");
 
       // === Clear data. ===
       for (int geo = FACE; geo >= VERTEX; geo--)
@@ -1896,7 +1902,7 @@ namespace AMDiS {
 	    DofContainer dofs;
 	    it->rankObj.el->getAllDofs(feSpace, it->rankObj, dofs);
 
-	    DofContainer& tmp = sendDofs.getDofCont(it.getRank(), feSpace);
+	    DofContainer& tmp = sendDofs.getDofContainer(it.getRank(), feSpace);
 	    tmp.insert(tmp.end(), dofs.begin(), dofs.end());
 
 	    if (createBoundaryDofFlag.isSet(BOUNDARY_FILL_INFO_SEND_DOFS))
@@ -1912,7 +1918,7 @@ namespace AMDiS {
 	    DofContainer dofs;
 	    it->rankObj.el->getAllDofs(feSpace, it->rankObj, dofs);
 
-	    DofContainer& tmp = recvDofs.getDofCont(it.getRank(), feSpace);
+	    DofContainer& tmp = recvDofs.getDofContainer(it.getRank(), feSpace);
 	    tmp.insert(tmp.end(), dofs.begin(), dofs.end());
 
 	    if (createBoundaryDofFlag.isSet(BOUNDARY_FILL_INFO_RECV_DOFS))
@@ -1921,13 +1927,15 @@ namespace AMDiS {
 	}
       }
     } else {
-      for (InteriorBoundary::iterator it(rankIntBoundary); !it.end(); ++it)
+      for (InteriorBoundary::iterator it(rankIntBoundary, levelData, level);
+	   !it.end(); ++it)
 	it->rankObj.el->getAllDofs(feSpace, it->rankObj, 
-				   sendDofs.getDofCont(it.getRank(), feSpace));
+				   sendDofs.getDofContainer(it.getRank(), feSpace));
       
-      for (InteriorBoundary::iterator it(otherIntBoundary); !it.end(); ++it)
+      for (InteriorBoundary::iterator it(otherIntBoundary, levelData, level); 
+	   !it.end(); ++it)
 	it->rankObj.el->getAllDofs(feSpace, it->rankObj, 
-				   recvDofs.getDofCont(it.getRank(), feSpace));
+				   recvDofs.getDofContainer(it.getRank(), feSpace));
     }
 
     // === Delete all empty DOF send and recv positions ===
@@ -2010,8 +2018,12 @@ namespace AMDiS {
     sort(rankDofs.begin(), rankDofs.end(), cmpDofsByValue);
     int nRankAllDofs = rankDofs.size();
 
+
     // === Traverse interior boundaries and get all DOFs on them. ===
-    createBoundaryDofs(feSpace);
+
+    int nLevels = levelData.getLevelNumber();
+    for (int i = 0; i < nLevels; i++)
+      createBoundaryDofs(feSpace, i);
 
     // All DOFs that must be received are DOFs not owned by rank and have 
     // therefore to be removed from the set 'rankDofs'.
@@ -2170,7 +2182,7 @@ namespace AMDiS {
 	// Here we have a periodic boundary between two ranks.
 
 	// Create DOF indices on the boundary. 
-	DofContainer& dofs = periodicDofs.getDofCont(it->first, feSpace);
+	DofContainer& dofs = periodicDofs.getDofContainer(it->first, feSpace);
 	for (vector<AtomicBoundary>::iterator boundIt = it->second.begin();
 	     boundIt != it->second.end(); ++boundIt) {
 
@@ -2203,7 +2215,7 @@ namespace AMDiS {
 
     for (RankToBoundMap::iterator it = periodicBoundary.boundary.begin();
 	 it != periodicBoundary.boundary.end(); ++it) {
-      DofContainer& dofs = periodicDofs.getDofCont(it->first, feSpace);
+      DofContainer& dofs = periodicDofs.getDofContainer(it->first, feSpace);
       vector<int>& types = rankToDofType[it->first];
 
       TEST_EXIT_DBG(dofs.size() == types.size())("Should not happen!\n");
diff --git a/AMDiS/src/parallel/MeshDistributor.h b/AMDiS/src/parallel/MeshDistributor.h
index a4de458d..1fe305d9 100644
--- a/AMDiS/src/parallel/MeshDistributor.h
+++ b/AMDiS/src/parallel/MeshDistributor.h
@@ -472,7 +472,7 @@ namespace AMDiS {
 
     void createBoundaryDofs();
 
-    void createBoundaryDofs(const FiniteElemSpace *feSpace);
+    void createBoundaryDofs(const FiniteElemSpace *feSpace, int level);
 
     /// Removes all macro elements from the mesh that are not part of ranks 
     /// partition.
diff --git a/AMDiS/src/parallel/ParallelDebug.cc b/AMDiS/src/parallel/ParallelDebug.cc
index ed6bf80d..427599af 100644
--- a/AMDiS/src/parallel/ParallelDebug.cc
+++ b/AMDiS/src/parallel/ParallelDebug.cc
@@ -454,7 +454,8 @@ namespace AMDiS {
 	    MSG("%s\n", oss.str().c_str());
 	    
 	    debug::printInfoByDof(feSpace, 
-				  *(pdb.recvDofs.getDofCont(it->first, feSpace)[i]));
+				  *(pdb.recvDofs.getDofContainer(it->first, 
+								 feSpace)[i]));
 	  }
 	  ERROR("Wrong DOFs in rank %d!\n", pdb.mpiRank);
 	  foundError = 1;
diff --git a/AMDiS/src/parallel/PetscSolverFeti.cc b/AMDiS/src/parallel/PetscSolverFeti.cc
index 69db7a8f..5a8d36c3 100644
--- a/AMDiS/src/parallel/PetscSolverFeti.cc
+++ b/AMDiS/src/parallel/PetscSolverFeti.cc
@@ -188,7 +188,8 @@ namespace AMDiS {
     : PetscSolver(),
       schurPrimalSolver(0),
       multiLevelTest(false),
-      subDomainSolver(NULL)
+      subDomainSolver(NULL),
+      meshLevel(0)
   {
     FUNCNAME("PetscSolverFeti::PetscSolverFeti()");
 
@@ -214,6 +215,8 @@ namespace AMDiS {
        schurPrimalSolver);
 
     Parameters::get("parallel->multi level test", multiLevelTest);
+    if (multiLevelTest)
+      meshLevel = 1;
   }
 
 
@@ -221,6 +224,9 @@ namespace AMDiS {
   {
     FUNCNAME("PetscSolverFeti::initialize()");
 
+    TEST_EXIT_DBG(meshLevel + 1 == meshDistributor->getMeshLevelData().getLevelNumber())
+      ("Mesh hierarchy does not contain %d levels!\n", meshLevel + 1);
+
     if (subDomainSolver == NULL)
       subDomainSolver = new SubDomainSolver(meshDistributor, mpiComm, mpiSelfComm);
 
@@ -1253,11 +1259,10 @@ namespace AMDiS {
     int debug = 0;
     Parameters::get("parallel->debug feti", debug);
 
-    if (debug) {
+    if (debug)
       solveFetiMatrix(vec);
-    } else {
+    else
       solveReducedFetiMatrix(vec);
-    } 
 
     MeshDistributor::globalMeshDistributor->synchVector(vec);
   }
diff --git a/AMDiS/src/parallel/PetscSolverFeti.h b/AMDiS/src/parallel/PetscSolverFeti.h
index a889a6b9..912e94ef 100644
--- a/AMDiS/src/parallel/PetscSolverFeti.h
+++ b/AMDiS/src/parallel/PetscSolverFeti.h
@@ -243,6 +243,8 @@ namespace AMDiS {
     bool multiLevelTest;
 
     SubDomainSolver *subDomainSolver;
+
+    int meshLevel;
   };
 
 }
-- 
GitLab