From 267dab656e093637da8a07bd45f019a74886f9c9 Mon Sep 17 00:00:00 2001
From: Thomas Witkowski <thomas.witkowski@gmx.de>
Date: Mon, 2 Jul 2012 07:41:30 +0000
Subject: [PATCH] Added possibility to analyze matrices from FETI-DP.

---
 AMDiS/src/parallel/MeshDistributor.cc |  2 +-
 AMDiS/src/parallel/PetscSolverFeti.cc | 86 +++++++++++++++++++++++++++
 2 files changed, 87 insertions(+), 1 deletion(-)

diff --git a/AMDiS/src/parallel/MeshDistributor.cc b/AMDiS/src/parallel/MeshDistributor.cc
index 386a578e..936f157c 100644
--- a/AMDiS/src/parallel/MeshDistributor.cc
+++ b/AMDiS/src/parallel/MeshDistributor.cc
@@ -1581,7 +1581,6 @@ namespace AMDiS {
       dofCommSd.create(intBoundarySd);
     }
 
-
     // === If requested, create more information on communication DOFs. ===
 
     if (!createBoundaryDofFlag.isSet(BOUNDARY_SUBOBJ_SORTED))
@@ -1632,6 +1631,7 @@ namespace AMDiS {
     }
   }
 
+
   void MeshDistributor::removeMacroElements()
   {
     FUNCNAME("MeshDistributor::removeMacroElements()");
diff --git a/AMDiS/src/parallel/PetscSolverFeti.cc b/AMDiS/src/parallel/PetscSolverFeti.cc
index 7dbacfb1..352c0a20 100644
--- a/AMDiS/src/parallel/PetscSolverFeti.cc
+++ b/AMDiS/src/parallel/PetscSolverFeti.cc
@@ -1129,6 +1129,92 @@ namespace AMDiS {
       MatView(subdomain->getMatCoarseCoarse(), petscView);
       PetscViewerDestroy(&petscView);
     }
+
+
+    int writeSchurPrimalMatrix = 0;
+    Parameters::get("parallel->debug->write schur primal matrix",
+                    writeSchurPrimalMatrix);
+    if (writeSchurPrimalMatrix) {
+      PetscViewer petscView;
+      PetscViewerBinaryOpen(PETSC_COMM_WORLD, "schurprimal.mat",
+                            FILE_MODE_WRITE, &petscView);
+      MatView(mat_schur_primal, petscView);
+      PetscViewerDestroy(&petscView);
+    }
+
+
+    int writeFetiMatrix = 0;
+    Parameters::get("parallel->debug->write feti matrix",
+                    writeFetiMatrix);
+    if (writeFetiMatrix) {
+      MSG("Start creating explicit FETI-DP matrix!\n");
+
+      Vec unitVector;
+      Vec resultVector;
+
+      Mat fetiMat;
+      MatCreateAIJ(mpiCommGlobal,
+                   lagrangeMap.getRankDofs(),
+                   lagrangeMap.getRankDofs(),
+                   lagrangeMap.getOverallDofs(),
+                   lagrangeMap.getOverallDofs(),
+                   lagrangeMap.getOverallDofs(),
+                   PETSC_NULL,
+                   lagrangeMap.getOverallDofs(),
+                   PETSC_NULL, &fetiMat);
+
+      VecCreateMPI(mpiCommGlobal,
+                   lagrangeMap.getRankDofs(), lagrangeMap.getOverallDofs(),
+                   &unitVector);
+      VecCreateMPI(mpiCommGlobal,
+                   lagrangeMap.getRankDofs(), lagrangeMap.getOverallDofs(),
+                   &resultVector);
+
+      PetscInt low, high;
+      VecGetOwnershipRange(unitVector, &low, &high);
+      int nLocal = high - low;
+      int nnzCounter = 0;
+
+      for (int i = 0; i < lagrangeMap.getOverallDofs(); i++) {
+        VecSet(unitVector, 0.0);
+        if (i >= low && i < high)
+          VecSetValue(unitVector, i, 1.0, INSERT_VALUES);
+        VecAssemblyBegin(unitVector);
+        VecAssemblyEnd(unitVector);
+
+        MatMult(mat_feti, unitVector, resultVector);
+
+        PetscScalar *vals;
+        VecGetArray(resultVector, &vals);
+
+        for (int j = 0; j < nLocal; j++)
+          if (fabs(vals[j]) > 1e-30) {
+            MatSetValue(fetiMat, low + j, i, vals[j], INSERT_VALUES);
+            nnzCounter++;
+          }
+
+        VecRestoreArray(resultVector, &vals);
+      }
+
+      MatAssemblyBegin(fetiMat, MAT_FINAL_ASSEMBLY);
+      MatAssemblyEnd(fetiMat, MAT_FINAL_ASSEMBLY);
+
+      VecDestroy(&unitVector);
+      VecDestroy(&resultVector);
+
+      mpi::globalAdd(nnzCounter);
+
+      PetscViewer petscView;
+      PetscViewerBinaryOpen(PETSC_COMM_WORLD, "feti.mat",
+                            FILE_MODE_WRITE, &petscView);
+      MatView(fetiMat, petscView);
+      PetscViewerDestroy(&petscView);
+
+      MatDestroy(&fetiMat);
+
+      MSG("FETI-DP matrix written: %d x %d mat with %d nnz\n",
+          lagrangeMap.getOverallDofs(), lagrangeMap.getOverallDofs(), nnzCounter);
+    }
 #endif
   }
 
-- 
GitLab