Commit ae15b32c authored by Thomas Witkowski's avatar Thomas Witkowski
Browse files

Now it compiles but does not work, for sure.

parent 586b8402
...@@ -19,17 +19,16 @@ namespace AMDiS { ...@@ -19,17 +19,16 @@ namespace AMDiS {
void PetscSolverGlobalBlockMatrix::fillPetscMatrix(Matrix<DOFMatrix*> *seqMat) void PetscSolverGlobalBlockMatrix::fillPetscMatrix(Matrix<DOFMatrix*> *seqMat)
{ {
FUNCNAME("PetscSolverGlobalBlockMatrix::fillPetscMatrix()"); FUNCNAME("PetscSolverGlobalBlockMatrix::fillPetscMatrix()");
#if 0
TEST_EXIT_DBG(meshDistributor)("No mesh distributor object defined!\n"); TEST_EXIT_DBG(meshDistributor)("No mesh distributor object defined!\n");
TEST_EXIT_DBG(interiorMap)("No parallel mapping object defined!\n"); TEST_EXIT_DBG(interiorMap)("No parallel mapping object defined!\n");
TEST_EXIT_DBG(seqMat)("No DOF matrix defined!\n"); TEST_EXIT_DBG(seqMat)("No DOF matrix defined!\n");
mat.resize(1); double wtime = MPI::Wtime();
mat[0].resize(1);
Mat &matIntInt = mat[0][0];
petscData.create(interiorMap, coarseSpaceMap,
subdomainLevel, mpiCommLocal, mpiCommGlobal);
double wtime = MPI::Wtime();
const FiniteElemSpace *feSpace = meshDistributor->getFeSpace(0); const FiniteElemSpace *feSpace = meshDistributor->getFeSpace(0);
nComponents = seqMat->getNumRows(); nComponents = seqMat->getNumRows();
int nRankRows = (*interiorMap)[feSpace].nRankDofs; int nRankRows = (*interiorMap)[feSpace].nRankDofs;
...@@ -87,22 +86,22 @@ namespace AMDiS { ...@@ -87,22 +86,22 @@ namespace AMDiS {
MatCreateNest(mpiCommGlobal, MatCreateNest(mpiCommGlobal,
nBlocks, PETSC_NULL, nBlocks, PETSC_NULL, nBlocks, PETSC_NULL, nBlocks, PETSC_NULL,
&(nestMat[0]), &matIntInt); &(nestMat[0]), &petscData.getInteriorMat());
#if (DEBUG != 0) #if (DEBUG != 0)
MSG("Fill petsc matrix 2 needed %.5f seconds\n", MPI::Wtime() - wtime); MSG("Fill petsc matrix 2 needed %.5f seconds\n", MPI::Wtime() - wtime);
#endif #endif
MatAssemblyBegin(matIntInt, MAT_FINAL_ASSEMBLY); petscData.assembly();
MatAssemblyEnd(matIntInt, MAT_FINAL_ASSEMBLY);
// === Init PETSc solver. === // === Init PETSc solver. ===
KSPCreate(mpiCommGlobal, &kspInterior); KSPCreate(mpiCommGlobal, &kspInterior);
KSPSetOperators(kspInterior, matIntInt, matIntInt, SAME_NONZERO_PATTERN); KSPSetOperators(kspInterior,
petscData.getInteriorMat(),
petscData.getInteriorMat(), SAME_NONZERO_PATTERN);
KSPSetFromOptions(kspInterior); KSPSetFromOptions(kspInterior);
MSG("Fill petsc matrix needed %.5f seconds\n", MPI::Wtime() - wtime); MSG("Fill petsc matrix needed %.5f seconds\n", MPI::Wtime() - wtime);
#endif
} }
......
...@@ -180,9 +180,8 @@ namespace AMDiS { ...@@ -180,9 +180,8 @@ namespace AMDiS {
{ {
FUNCNAME("PetscSolverSchur::fillPetscMatrix()"); FUNCNAME("PetscSolverSchur::fillPetscMatrix()");
mat.resize(1); petscData.create(interiorMap, coarseSpaceMap,
mat[0].resize(1); subdomainLevel, mpiCommLocal, mpiCommGlobal);
Mat &matIntInt = mat[0][0];
const FiniteElemSpace *feSpace = meshDistributor->getFeSpace(0); const FiniteElemSpace *feSpace = meshDistributor->getFeSpace(0);
int nComponents = seqMat->getNumRows(); int nComponents = seqMat->getNumRows();
...@@ -242,11 +241,11 @@ namespace AMDiS { ...@@ -242,11 +241,11 @@ namespace AMDiS {
tmpIS[0] = interiorIs; tmpIS[0] = interiorIs;
tmpIS[1] = boundaryIs; tmpIS[1] = boundaryIs;
MatCreateNest(mpiCommGlobal, 2, &tmpIS[0], 2, &tmpIS[0], &tmpMat[0][0], &matIntInt); MatCreateNest(mpiCommGlobal, 2, &tmpIS[0], 2, &tmpIS[0], &tmpMat[0][0],
MatNestSetVecType(matIntInt, VECNEST); &petscData.getInteriorMat());
MatAssemblyBegin(matIntInt, MAT_FINAL_ASSEMBLY); MatNestSetVecType(petscData.getInteriorMat(), VECNEST);
MatAssemblyEnd(matIntInt, MAT_FINAL_ASSEMBLY);
petscData.assembly();
int nRankRows = (*interiorMap)[feSpace].nRankDofs * nComponents; int nRankRows = (*interiorMap)[feSpace].nRankDofs * nComponents;
int nOverallRows = (*interiorMap)[feSpace].nOverallDofs * nComponents; int nOverallRows = (*interiorMap)[feSpace].nOverallDofs * nComponents;
...@@ -284,7 +283,9 @@ namespace AMDiS { ...@@ -284,7 +283,9 @@ namespace AMDiS {
KSPCreate(mpiCommGlobal, &kspInterior); KSPCreate(mpiCommGlobal, &kspInterior);
KSPSetOperators(kspInterior, mat[0][0], mat[0][0], SAME_NONZERO_PATTERN); KSPSetOperators(kspInterior,
petscData.getInteriorMat(),
petscData.getInteriorMat(), SAME_NONZERO_PATTERN);
KSPSetTolerances(kspInterior, 0.0, 1e-8, PETSC_DEFAULT, PETSC_DEFAULT); KSPSetTolerances(kspInterior, 0.0, 1e-8, PETSC_DEFAULT, PETSC_DEFAULT);
KSPSetFromOptions(kspInterior); KSPSetFromOptions(kspInterior);
...@@ -335,7 +336,8 @@ namespace AMDiS { ...@@ -335,7 +336,8 @@ namespace AMDiS {
MatDestroy(&matA12); MatDestroy(&matA12);
MatDestroy(&matA21); MatDestroy(&matA21);
MatDestroy(&matA22); MatDestroy(&matA22);
MatDestroy(&mat[0][0]);
petscData.destroy();
KSPDestroy(&kspInterior); KSPDestroy(&kspInterior);
} }
......
Markdown is supported
0% or .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment