Commit ae15b32c authored by Thomas Witkowski

Now it compiles but does not work, for sure.

parent 586b8402
@@ -19,17 +19,16 @@ namespace AMDiS {
   void PetscSolverGlobalBlockMatrix::fillPetscMatrix(Matrix<DOFMatrix*> *seqMat)
   {
     FUNCNAME("PetscSolverGlobalBlockMatrix::fillPetscMatrix()");
 #if 0
     TEST_EXIT_DBG(meshDistributor)("No mesh distributor object defined!\n");
     TEST_EXIT_DBG(interiorMap)("No parallel mapping object defined!\n");
     TEST_EXIT_DBG(seqMat)("No DOF matrix defined!\n");
 
-    mat.resize(1);
-    mat[0].resize(1);
-    Mat &matIntInt = mat[0][0];
-    double wtime = MPI::Wtime();
+    petscData.create(interiorMap, coarseSpaceMap,
+                     subdomainLevel, mpiCommLocal, mpiCommGlobal);
 
+    double wtime = MPI::Wtime();
     const FiniteElemSpace *feSpace = meshDistributor->getFeSpace(0);
     nComponents = seqMat->getNumRows();
     int nRankRows = (*interiorMap)[feSpace].nRankDofs;
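The diff never shows the type behind petscData. Judging only from the calls used in this commit (create, getInteriorMat, assembly, destroy) and from the fact that &petscData.getInteriorMat() is handed to PETSc creation routines below, the wrapper plausibly looks like the following sketch. All names, parameter types, and signatures here are inferred for illustration, not taken from the AMDiS sources.

// Hypothetical sketch of the matrix wrapper implied by this commit.
class PetscMatrixData
{
public:
  // Remembers the DOF mappings and communicators and creates the
  // managed PETSc matrices.
  void create(ParallelDofMapping *interiorMap,
              ParallelDofMapping *coarseSpaceMap,
              int subdomainLevel,
              MPI::Intracomm &mpiCommLocal,
              MPI::Intracomm &mpiCommGlobal);

  // Returns a reference, so callers can take its address and pass it
  // to creation routines, as in MatCreateNest(..., &getInteriorMat()).
  Mat& getInteriorMat()
  {
    return matIntInt;
  }

  void assembly();   // MatAssemblyBegin/End on the managed matrices
  void destroy();    // MatDestroy on the managed matrices

private:
  Mat matIntInt;     // interior-by-interior block
};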
@@ -87,22 +86,22 @@ namespace AMDiS {
     MatCreateNest(mpiCommGlobal,
                   nBlocks, PETSC_NULL, nBlocks, PETSC_NULL,
-                  &(nestMat[0]), &matIntInt);
+                  &(nestMat[0]), &petscData.getInteriorMat());
 
 #if (DEBUG != 0)
     MSG("Fill petsc matrix 2 needed %.5f seconds\n", MPI::Wtime() - wtime);
 #endif
 
-    MatAssemblyBegin(matIntInt, MAT_FINAL_ASSEMBLY);
-    MatAssemblyEnd(matIntInt, MAT_FINAL_ASSEMBLY);
+    petscData.assembly();
 
     // === Init PETSc solver. ===
 
     KSPCreate(mpiCommGlobal, &kspInterior);
-    KSPSetOperators(kspInterior, matIntInt, matIntInt, SAME_NONZERO_PATTERN);
+    KSPSetOperators(kspInterior,
+                    petscData.getInteriorMat(),
+                    petscData.getInteriorMat(), SAME_NONZERO_PATTERN);
     KSPSetFromOptions(kspInterior);
 
     MSG("Fill petsc matrix needed %.5f seconds\n", MPI::Wtime() - wtime);
 #endif
   }
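The petscData.assembly() call above stands in for the removed MatAssemblyBegin/MatAssemblyEnd pair. Under the assumed names from the sketch above, its body would be little more than:

// Possible body of assembly(): finalize the managed matrix so PETSc
// solvers may use it (assumed names, for illustration only).
void PetscMatrixData::assembly()
{
  MatAssemblyBegin(matIntInt, MAT_FINAL_ASSEMBLY);
  MatAssemblyEnd(matIntInt, MAT_FINAL_ASSEMBLY);
}

Note also that KSPSetOperators(ksp, A, P, SAME_NONZERO_PATTERN) with the MatStructure flag is the pre-3.5 PETSc interface; PETSc 3.5 removed that fourth argument from the call.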
@@ -180,9 +180,8 @@ namespace AMDiS {
   {
     FUNCNAME("PetscSolverSchur::fillPetscMatrix()");
 
-    mat.resize(1);
-    mat[0].resize(1);
-    Mat &matIntInt = mat[0][0];
+    petscData.create(interiorMap, coarseSpaceMap,
+                     subdomainLevel, mpiCommLocal, mpiCommGlobal);
 
     const FiniteElemSpace *feSpace = meshDistributor->getFeSpace(0);
     int nComponents = seqMat->getNumRows();
......@@ -242,11 +241,11 @@ namespace AMDiS {
tmpIS[0] = interiorIs;
tmpIS[1] = boundaryIs;
MatCreateNest(mpiCommGlobal, 2, &tmpIS[0], 2, &tmpIS[0], &tmpMat[0][0], &matIntInt);
MatNestSetVecType(matIntInt, VECNEST);
MatAssemblyBegin(matIntInt, MAT_FINAL_ASSEMBLY);
MatAssemblyEnd(matIntInt, MAT_FINAL_ASSEMBLY);
MatCreateNest(mpiCommGlobal, 2, &tmpIS[0], 2, &tmpIS[0], &tmpMat[0][0],
&petscData.getInteriorMat());
MatNestSetVecType(petscData.getInteriorMat(), VECNEST);
petscData.assembly();
int nRankRows = (*interiorMap)[feSpace].nRankDofs * nComponents;
int nOverallRows = (*interiorMap)[feSpace].nOverallDofs * nComponents;
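MatNestSetVecType(..., VECNEST) makes vectors derived from the nested operator blocked as well, so the interior and boundary parts can be pulled out individually. A minimal usage sketch against the same PETSc generation (MatGetVecs is the old name of MatCreateVecs; the variable names are placeholders):

// Vectors obtained from the MATNEST operator are VECNEST, so each
// block, here defined by interiorIs and boundaryIs, is reachable.
Vec x, b;
MatGetVecs(petscData.getInteriorMat(), &x, &b);

Vec xInterior, xBoundary;
VecNestGetSubVec(x, 0, &xInterior);   // block built from interiorIs
VecNestGetSubVec(x, 1, &xBoundary);   // block built from boundaryIs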
@@ -284,7 +283,9 @@ namespace AMDiS {
     KSPCreate(mpiCommGlobal, &kspInterior);
-    KSPSetOperators(kspInterior, mat[0][0], mat[0][0], SAME_NONZERO_PATTERN);
+    KSPSetOperators(kspInterior,
+                    petscData.getInteriorMat(),
+                    petscData.getInteriorMat(), SAME_NONZERO_PATTERN);
     KSPSetTolerances(kspInterior, 0.0, 1e-8, PETSC_DEFAULT, PETSC_DEFAULT);
     KSPSetFromOptions(kspInterior);
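The argument order of KSPSetTolerances is rtol, abstol, dtol, maxits, so this call switches the relative criterion off (rtol = 0.0) and stops on an absolute residual norm of 1e-8. A hedged sketch of the solve that would typically follow, with petscRhsVec and petscSolVec as hypothetical placeholders:

// Solve and report convergence; with rtol = 0.0 the stopping point
// is governed purely by the absolute tolerance of 1e-8.
KSPSolve(kspInterior, petscRhsVec, petscSolVec);

KSPConvergedReason reason;
PetscInt nIter;
KSPGetConvergedReason(kspInterior, &reason);
KSPGetIterationNumber(kspInterior, &nIter);
MSG("KSP finished with reason %d after %d iterations\n",
    (int) reason, (int) nIter);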
@@ -335,7 +336,8 @@ namespace AMDiS {
     MatDestroy(&matA12);
     MatDestroy(&matA21);
     MatDestroy(&matA22);
-    MatDestroy(&mat[0][0]);
+    petscData.destroy();
 
     KSPDestroy(&kspInterior);
   }
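Symmetric to create(), the new petscData.destroy() bundles the cleanup of the wrapped matrix that MatDestroy(&mat[0][0]) used to perform. Under the assumed names from the first sketch it reduces to:

// Possible body of destroy() (assumed names); MatDestroy also resets
// the handle, so a later create() can start from a clean state.
void PetscMatrixData::destroy()
{
  MatDestroy(&matIntInt);
}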