From 1e7a784a79aafdeb35d5eb9fbf7dafca5e0e0e19 Mon Sep 17 00:00:00 2001
From: Thomas Witkowski <thomas.witkowski@gmx.de>
Date: Wed, 30 May 2012 09:26:03 +0000
Subject: [PATCH] Bugfix for setDofVector in the global matrix solver when
 using a coarse space.
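
When a coarse space is used, setDofVector() in
PetscSolverGlobalMatrix.cc computed the global row index of every DOF
via the interior DOF mapping before testing for coarse space
membership. For DOFs that live only in the coarse space this lookup is
invalid and can trigger the TEST_EXIT_DBG in
ParallelDofMapping::operator[]. The coarse space test is now performed
first, so only interior DOFs are mapped to interior matrix indices and
checked for periodic associations.

Further changes:
- destroyMatrixData() now also destroys the coarse space matrices
  (matCoarseCoarse, matCoarseInt, matIntCoarse) when a coarse space
  map is set, and destroyVectorData() destroys rhsCoarseSpace.
- petscSolVec is reset to PETSC_NULL only after it has actually been
  destroyed.
- The TEST_EXIT_DBG in ParallelDofMapping::operator[] reports the
  offending DOF index instead of a generic message.
- Disabled (#if 0) memory usage reporting was added at three stages in
  PetscProblemStat.cc.

The corrected branch order, as a minimal standalone sketch (hypothetical
names and containers, not the AMDiS API):

    #include <cassert>
    #include <map>
    #include <set>

    int main()
    {
      std::map<int, int> interiorIndex;  // interior DOF -> matrix index
      std::set<int> coarseDofs;          // DOFs owned by the coarse space

      interiorIndex[0] = 0;
      interiorIndex[1] = 1;
      coarseDofs.insert(2);              // DOF 2 lives only in the coarse space

      for (int dof = 0; dof <= 2; ++dof) {
        // Test coarse space membership first ...
        if (coarseDofs.count(dof))
          continue;                      // handled via the coarse space map

        // ... so only interior DOFs ever reach the interior lookup.
        assert(interiorIndex.count(dof) == 1);
      }
      return 0;
    }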

---
 AMDiS/src/parallel/ParallelDofMapping.h       |  2 +-
 AMDiS/src/parallel/PetscProblemStat.cc        | 28 ++++++++
 AMDiS/src/parallel/PetscSolverGlobalMatrix.cc | 65 ++++++++++---------
 3 files changed, 65 insertions(+), 30 deletions(-)

diff --git a/AMDiS/src/parallel/ParallelDofMapping.h b/AMDiS/src/parallel/ParallelDofMapping.h
index 02be39fa..e8ff50e6 100644
--- a/AMDiS/src/parallel/ParallelDofMapping.h
+++ b/AMDiS/src/parallel/ParallelDofMapping.h
@@ -120,7 +120,7 @@ namespace AMDiS {
     /// global index must not be set.
     MultiIndex& operator[](DegreeOfFreedom d)
     {
-      TEST_EXIT_DBG(dofMap.count(d))("Should not happen!\n");
+      TEST_EXIT_DBG(dofMap.count(d))("DOF %d is not in map!\n", d);
 
       return dofMap[d];
     }
diff --git a/AMDiS/src/parallel/PetscProblemStat.cc b/AMDiS/src/parallel/PetscProblemStat.cc
index 72569145..251bd0f7 100644
--- a/AMDiS/src/parallel/PetscProblemStat.cc
+++ b/AMDiS/src/parallel/PetscProblemStat.cc
@@ -86,6 +86,16 @@ namespace AMDiS {
 
     double wtime = MPI::Wtime();
 
+#if 0
+    double vm, rss;
+    processMemUsage(vm, rss);
+    MSG("STAGE 1\n");
+    MSG("My memory usage is VM = %.1f MB    RSS = %.1f MB\n", vm, rss);
+    mpi::globalAdd(vm);
+    mpi::globalAdd(rss);
+    MSG("Overall memory usage is VM = %.1f MB    RSS = %.1f MB\n", vm, rss);
+#endif
+
     if (createMatrixData) {
       petscSolver->setMeshDistributor(meshDistributor, 
 				      meshDistributor->getMpiComm(),
@@ -96,6 +106,15 @@ namespace AMDiS {
 
     petscSolver->fillPetscRhs(rhs);
 
+#if 0
+    processMemUsage(vm, rss);
+    MSG("STAGE 2\n");
+    MSG("My memory usage is VM = %.1f MB    RSS = %.1f MB\n", vm, rss);
+    mpi::globalAdd(vm);
+    mpi::globalAdd(rss);
+    MSG("Overall memory usage is VM = %.1f MB    RSS = %.1f MB\n", vm, rss);
+#endif
+
     petscSolver->solvePetscMatrix(*solution, adaptInfo);   
 
     petscSolver->destroyVectorData();
@@ -103,6 +122,15 @@ namespace AMDiS {
     if (!storeMatrixData)
       petscSolver->destroyMatrixData();
 
+#if 0
+    processMemUsage(vm, rss);
+    MSG("STAGE 3\n");
+    MSG("My memory usage is VM = %.1f MB    RSS = %.1f MB\n", vm, rss);
+    mpi::globalAdd(vm);
+    mpi::globalAdd(rss);
+    MSG("Overall memory usage is VM = %.1f MB    RSS = %.1f MB\n", vm, rss);
+#endif
+
     INFO(info, 8)("solution of discrete system needed %.5f seconds\n", 
 		  MPI::Wtime() - wtime);
   }
diff --git a/AMDiS/src/parallel/PetscSolverGlobalMatrix.cc b/AMDiS/src/parallel/PetscSolverGlobalMatrix.cc
index 21a19842..78146dc3 100644
--- a/AMDiS/src/parallel/PetscSolverGlobalMatrix.cc
+++ b/AMDiS/src/parallel/PetscSolverGlobalMatrix.cc
@@ -450,9 +450,16 @@ namespace AMDiS {
     MatDestroy(&matIntInt);
     KSPDestroy(&kspInterior);
 
-    if (petscSolVec != PETSC_NULL) 
+    if (coarseSpaceMap) {
+      MatDestroy(&matCoarseCoarse);
+      MatDestroy(&matCoarseInt);
+      MatDestroy(&matIntCoarse);
+    }
+
+    if (petscSolVec != PETSC_NULL) {
       VecDestroy(&petscSolVec);
-    petscSolVec = PETSC_NULL;
+      petscSolVec = PETSC_NULL;
+    }
   }
 
 
@@ -461,6 +468,9 @@ namespace AMDiS {
     FUNCNAME("PetscSolverGlobalMatrix::destroyVectorData()");
 
     VecDestroy(&rhsInterior);
+
+    if (coarseSpaceMap)
+      VecDestroy(&rhsCoarseSpace);
   }
 
 
@@ -702,22 +712,26 @@ namespace AMDiS {
       if (rankOnly && !(*interiorMap)[feSpace].isRankDof(dofIt.getDOFIndex()))
 	continue;
 
-      // Calculate global row index of the DOF.
-      DegreeOfFreedom globalRowDof = 
-	(*interiorMap)[feSpace][dofIt.getDOFIndex()].global;
-      // Get PETSc's mat index of the row DOF.
-      int index = 0;
-      if (interiorMap->isMatIndexFromGlobal())
-	index = 
-	  interiorMap->getMatIndex(nRowVec, globalRowDof) + rStartInterior;
-      else
-	index =
-	  interiorMap->getMatIndex(nRowVec, dofIt.getDOFIndex()) + rStartInterior;
-
-      if (perMap.isPeriodic(feSpace, globalRowDof)) {
-	if (coarseSpaceMap && isCoarseSpace(feSpace, dofIt.getDOFIndex())) {
-	  ERROR_EXIT("Periodic coarse space not yet supported!\n");
-	} else {
+      if (coarseSpaceMap && isCoarseSpace(feSpace, dofIt.getDOFIndex())) {
+	TEST_EXIT_DBG(vecCoarse != PETSC_NULL)("Coarse space vector is not created!\n");
+
+	int index = coarseSpaceMap->getMatIndex(nRowVec, dofIt.getDOFIndex());
+	VecSetValue(vecCoarse, index, *dofIt, ADD_VALUES);
+      } else {
+	// Calculate global row index of the DOF.
+	DegreeOfFreedom globalRowDof = 
+	  (*interiorMap)[feSpace][dofIt.getDOFIndex()].global;
+
+	// Get PETSc's mat index of the row DOF.
+	int index = 0;
+	if (interiorMap->isMatIndexFromGlobal())
+	  index = 
+	    interiorMap->getMatIndex(nRowVec, globalRowDof) + rStartInterior;
+	else
+	  index =
+	    interiorMap->getMatIndex(nRowVec, dofIt.getDOFIndex()) + rStartInterior;
+
+	if (perMap.isPeriodic(feSpace, globalRowDof)) {
 	  std::set<int>& perAsc = perMap.getAssociations(feSpace, globalRowDof);
 	  double value = *dofIt / (perAsc.size() + 1.0);
 	  VecSetValue(vecInterior, index, value, ADD_VALUES);
@@ -727,17 +741,10 @@ namespace AMDiS {
 	    int mappedDof = perMap.map(feSpace, *perIt, globalRowDof);
 	    int mappedIndex = interiorMap->getMatIndex(nRowVec, mappedDof);
 	    VecSetValue(vecInterior, mappedIndex, value, ADD_VALUES);
-	  }
-	}
-      } else {	  
-	// The DOF index is not periodic.
-
-	if (coarseSpaceMap && isCoarseSpace(feSpace, dofIt.getDOFIndex())) {
-	  TEST_EXIT_DBG(vecCoarse != PETSC_NULL)("Should not happen!\n");
-
-	  index = coarseSpaceMap->getMatIndex(nRowVec, dofIt.getDOFIndex());
-	  VecSetValue(vecCoarse, index, *dofIt, ADD_VALUES);
-	} else {
+	  }
+	} else {
+	  // The DOF index is not periodic.
+
 	  VecSetValue(vecInterior, index, *dofIt, ADD_VALUES);
 	}
       }
-- 
GitLab