diff --git a/AMDiS/src/DOFMatrix.cc b/AMDiS/src/DOFMatrix.cc
index 20f90cf4cda1f8d72fedf7b6ce07d8f4ee45a0aa..7b78a901e2e74271fe6ce99f853c38fcbbe785c1 100644
--- a/AMDiS/src/DOFMatrix.cc
+++ b/AMDiS/src/DOFMatrix.cc
@@ -243,6 +243,65 @@ namespace AMDiS {
   }
 
 
+  void DOFMatrix::assembleOperator(Operator &op)
+  {
+    FUNCNAME("DOFMatrix::assembleOperator()");
+
+    TEST_EXIT(rowFeSpace->getMesh() == colFeSpace->getMesh())
+      ("This function does not support the multi-mesh procedure!\n");
+    TEST_EXIT(op.getRowFeSpace() == rowFeSpace)
+      ("Row FE spaces do not fit together!\n");
+    TEST_EXIT(op.getColFeSpace() == colFeSpace)
+      ("Column FE spaces do not fit together!\n");
+
+    clearOperators();
+    addOperator(&op);
+
+    matrix.change_dim(rowFeSpace->getAdmin()->getUsedSize(),
+                      colFeSpace->getAdmin()->getUsedSize());
+
+    Mesh *mesh = rowFeSpace->getMesh();
+    mesh->dofCompress();
+    const BasisFunction *basisFcts = rowFeSpace->getBasisFcts();
+
+    Flag assembleFlag = getAssembleFlag() |
+      Mesh::CALL_LEAF_EL |
+      Mesh::FILL_COORDS |
+      Mesh::FILL_DET |
+      Mesh::FILL_GRD_LAMBDA |
+      Mesh::FILL_NEIGH |
+      Mesh::FILL_BOUND;
+
+    BoundaryType *bound = new BoundaryType[basisFcts->getNumber()];
+
+    calculateNnz();
+    if (getBoundaryManager())
+      getBoundaryManager()->initMatrix(this);
+
+    startInsertion(getNnz());
+
+    TraverseStack stack;
+    ElInfo *elInfo = stack.traverseFirst(mesh, -1, assembleFlag);
+    while (elInfo) {
+      basisFcts->getBound(elInfo, bound);
+
+      assemble(1.0, elInfo, bound);
+
+      if (getBoundaryManager())
+        getBoundaryManager()->fillBoundaryConditions(elInfo, this);
+
+      elInfo = stack.traverseNext(elInfo);
+    }
+
+    clearDirichletRows();
+    finishAssembling();
+    finishInsertion();
+    getBoundaryManager()->exitMatrix(this);
+
+    delete [] bound;
+  }
+
+
   double DOFMatrix::logAcc(DegreeOfFreedom a, DegreeOfFreedom b) const
   {
     return matrix[a][b];
@@ -396,13 +455,13 @@ namespace AMDiS {
 
   void DOFMatrix::axpy(double a, const DOFMatrix& x, const DOFMatrix& y)
   {
-    matrix+= a * x.matrix + y.matrix;
+    matrix += a * x.matrix + y.matrix;
   }
 
 
   void DOFMatrix::scal(double b)
   {
-    matrix*= b;
+    matrix *= b;
   }
 
 
@@ -414,6 +473,14 @@ namespace AMDiS {
   }
 
 
+  void DOFMatrix::clearOperators()
+  {
+    operators.clear();
+    operatorFactor.clear();
+    operatorEstFactor.clear();
+  }
+
+
   void DOFMatrix::serialize(std::ostream &out)
   {
     using namespace mtl;
diff --git a/AMDiS/src/DOFMatrix.h b/AMDiS/src/DOFMatrix.h
index d000661864cdc5328aa2bf5c811b2e807de0d331..e9d0a3cd22973298767639f4eca9d8c43fd5cb43 100644
--- a/AMDiS/src/DOFMatrix.h
+++ b/AMDiS/src/DOFMatrix.h
@@ -172,6 +172,9 @@ namespace AMDiS {
                     double* factor = NULL,
                     double* estFactor = NULL);
 
+    /// Removes all operators and their factors that are registered in this matrix.
+    void clearOperators();
+
     inline vector<double*>::iterator getOperatorFactorBegin()
     {
       return operatorFactor.begin();
@@ -250,6 +253,9 @@ namespace AMDiS {
                  ElInfo* rowElInfo,
                  ElInfo* colElInfo);
 
+    /// Assembles the given operator into this matrix; previously added operators are removed first.
+    void assembleOperator(Operator &op);
+
     /// That function must be called after the matrix assembling has been
     /// finished. This makes it possible to start some cleanup or matrix
     /// data compressing procedures.
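The new DOFMatrix::assembleOperator() is used further below to build the pressure mass matrix for the FETI preconditioner. A minimal usage sketch, not part of the patch, with `feSpace` standing for any valid FiniteElemSpace*:

    // Sketch only: assemble the mass matrix M_ij = int phi_i phi_j on a single FE space.
    DOFMatrix massMatrix(feSpace, feSpace);   // row and column space must live on the same mesh
    Operator op(feSpace, feSpace);
    Simple_ZOT zot;                           // zero-order term with factor 1
    op.addTerm(&zot);
    massMatrix.assembleOperator(op);          // clears old operators, traverses all leaf elements, fills the matrix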
diff --git a/AMDiS/src/ProblemStat.h b/AMDiS/src/ProblemStat.h
index 707e3b87dbefa97c16ca25e9ccc8e8b5d35cb046..ee6ead03d1988b525790d572b9bdf16c28d029c0 100644
--- a/AMDiS/src/ProblemStat.h
+++ b/AMDiS/src/ProblemStat.h
@@ -263,7 +263,8 @@ namespace AMDiS {
                        DOFVector<double> *solution,
                        Mesh *mesh,
                        Flag assembleFlag);
-    
+
+
   /** \name getting methods
    * \{
    */
diff --git a/AMDiS/src/parallel/MatrixNnzStructure.cc b/AMDiS/src/parallel/MatrixNnzStructure.cc
index c7d4c977f60237f3bd7aff902e965a9cdfdc654f..1997e38260d1c9ecb37785ce52017d64ac07300a 100644
--- a/AMDiS/src/parallel/MatrixNnzStructure.cc
+++ b/AMDiS/src/parallel/MatrixNnzStructure.cc
@@ -311,7 +311,7 @@ namespace AMDiS {
     // matrices, the problem may arise, that the result is larger than the
     // number of elements in a row. This is fixed in the following.
 
-    if (nRankRows < 100)
+    if (nRankRows < 100 || nRankCols < 100)
       for (int i = 0; i < nRankRows; i++)
         dnnz[i] = std::min(dnnz[i], nRankCols);
diff --git a/AMDiS/src/parallel/ParallelCoarseSpaceMatVec.cc b/AMDiS/src/parallel/ParallelCoarseSpaceMatVec.cc
index cad7db04841f57c8601d404c89df93018e7605e7..cc12d3d2476cbdb4190edfbe5074397e646c5f22 100644
--- a/AMDiS/src/parallel/ParallelCoarseSpaceMatVec.cc
+++ b/AMDiS/src/parallel/ParallelCoarseSpaceMatVec.cc
@@ -197,7 +197,7 @@ namespace AMDiS {
       int nColsRankMat = (j == 0 ? nRankInteriorRows : uniqueCoarseMap[j - 1]->getRankDofs());
       int nColsOverallMat = (j == 0 ? nOverallInteriorRows : uniqueCoarseMap[j - 1]->getOverallDofs());
-      
+
       MatCreateAIJ(mpiCommGlobal,
                    nRowsRankMat, nColsRankMat,
                    nRowsOverallMat, nColsOverallMat,
diff --git a/AMDiS/src/parallel/PetscSolver.cc b/AMDiS/src/parallel/PetscSolver.cc
index 619801b976b37357dd879a22c9cf6d99d50d2020..a554c166275e475169275dc862e7b9417fac3888 100644
--- a/AMDiS/src/parallel/PetscSolver.cc
+++ b/AMDiS/src/parallel/PetscSolver.cc
@@ -38,6 +38,14 @@ namespace AMDiS {
   }
 
 
+  void PetscSolver::fillPetscMatrix(DOFMatrix* mat)
+  {
+    Matrix<DOFMatrix*> sysMat(1, 1);
+    sysMat[0][0] = mat;
+    fillPetscMatrix(&sysMat);
+  }
+
+
   void PetscSolver::solve(Vec &rhs, Vec &sol)
   {
     FUNCNAME("PetscSolver::solve()");
diff --git a/AMDiS/src/parallel/PetscSolver.h b/AMDiS/src/parallel/PetscSolver.h
index 2a541b8f0e72fbec2a539154717f8f2bd9c68db9..759f076e9e3ab175669e80b4316af2c23c621205 100644
--- a/AMDiS/src/parallel/PetscSolver.h
+++ b/AMDiS/src/parallel/PetscSolver.h
@@ -62,6 +62,9 @@ namespace AMDiS {
      */
     virtual void fillPetscMatrix(Matrix<DOFMatrix*> *mat) = 0;
 
+    /// Wraps a single DOFMatrix into a 1x1 system matrix and calls \ref fillPetscMatrix.
+    void fillPetscMatrix(DOFMatrix* mat);
+
     /** \brief
      * Create a PETSc vector and fills it with the rhs values of the system.
      *
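The new overload PetscSolver::fillPetscMatrix(DOFMatrix*) simply wraps the single matrix into a 1x1 Matrix<DOFMatrix*> and forwards to the virtual fillPetscMatrix(). A usage sketch along the lines of its use in PetscSolverFeti.cc below; `meshDistributor`, the MPI communicators, `dofMap` and `massMatrix` are assumed to come from the surrounding parallel setup:

    // Sketch only: hand a scalar auxiliary matrix (e.g. an assembled mass matrix) to a PETSc solver.
    PetscSolver *solver = new PetscSolverGlobalMatrix;
    solver->setMeshDistributor(meshDistributor, mpiCommGlobal, mpiCommLocal);
    solver->setDofMapping(&dofMap);         // ParallelDofMapping of the matrix rows/columns
    solver->fillPetscMatrix(&massMatrix);   // internally builds Matrix<DOFMatrix*> sysMat(1, 1)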
diff --git a/AMDiS/src/parallel/PetscSolverFeti.cc b/AMDiS/src/parallel/PetscSolverFeti.cc
index 6403ac73c4b690f13ffedae1c4e871c70979a535..da0f522c19c349211eae99d1a3535716b0628845 100644
--- a/AMDiS/src/parallel/PetscSolverFeti.cc
+++ b/AMDiS/src/parallel/PetscSolverFeti.cc
@@ -25,28 +25,28 @@ namespace AMDiS {
 
   using namespace std;
 
-  PetscErrorCode KSPMonitorFeti(KSP ksp, PetscInt n, PetscReal rnorm, void *dummy)
+  PetscErrorCode KSPMonitorFeti(KSP ksp, PetscInt n, PetscReal rnorm, void *data)
   {
-    Vec Br,v,w;
-    Mat A;
+    Vec Br,v,w;
+    VecDuplicate(static_cast<FetiKspData*>(data)->draft, &v);
+    VecDuplicate(static_cast<FetiKspData*>(data)->draft, &w);
 
-    KSPGetOperators(ksp, &A, PETSC_NULL, PETSC_NULL);
-    MatGetVecs(A, &w, &v);
     KSPBuildResidual(ksp, v, w, &Br);
 
     Vec nest0, nest1;
     VecNestGetSubVec(Br, 0, &nest0);
     VecNestGetSubVec(Br, 1, &nest1);
 
-    PetscScalar norm0, norm1;
+    PetscScalar norm, norm0, norm1;
+    VecNorm(Br, NORM_2, &norm);
     VecNorm(nest0, NORM_2, &norm0);
     VecNorm(nest1, NORM_2, &norm1);
 
     VecDestroy(&v);
     VecDestroy(&w);
 
-    PetscPrintf(PETSC_COMM_WORLD, "%3D KSP Component residual norm [%1.12e %1.12e] and relative [%1.12e]\n",
-                norm0, norm1, rnorm);
+    PetscPrintf(PETSC_COMM_WORLD, "%3D KSP residual norm %1.12e [%1.12e %1.12e] and preconditioned norm [%1.12e]\n",
+                n, norm, norm0, norm1, rnorm);
 
     return 0;
   }
@@ -56,6 +56,7 @@ namespace AMDiS {
       schurPrimalSolver(0),
       multiLevelTest(false),
       subdomain(NULL),
+      massMatrixSolver(NULL),
       meshLevel(0),
       rStartInterior(0),
       nGlobalOverallInterior(0),
@@ -867,7 +868,8 @@ namespace AMDiS {
     KSPSetType(ksp_feti, KSPGMRES);
     KSPSetTolerances(ksp_feti, 0, 1e-8, 1e+3, 1000);
     KSPSetFromOptions(ksp_feti);
-
+    if (stokesMode)
+      KSPMonitorSet(ksp_feti, KSPMonitorFeti, &fetiKspData, PETSC_NULL);
 
 
@@ -979,6 +981,8 @@ namespace AMDiS {
     }
 
     if (stokesMode) {
+      // === Create H2 vec ===
+
       DOFVector<double> tmpvec(pressureFeSpace, "tmpvec");
       createH2vec(tmpvec);
       interfaceDofMap.createVec(fetiInterfaceLumpedPreconData.h2vec);
@@ -994,19 +998,44 @@ namespace AMDiS {
       VecAssemblyBegin(fetiInterfaceLumpedPreconData.h2vec);
       VecAssemblyEnd(fetiInterfaceLumpedPreconData.h2vec);
 
+
+      // === Create mass matrix solver ===
+
+      if (!massMatrixSolver) {
+        MSG("START CREATE MASS MATRIX!\n");
+        DOFMatrix massMatrix(pressureFeSpace, pressureFeSpace);
+        Operator op(pressureFeSpace, pressureFeSpace);
+        Simple_ZOT zot;
+        op.addTerm(&zot);
+        massMatrix.assembleOperator(op);
+
+        ParallelDofMapping massMatMapping = interfaceDofMap;
+        massMatrixSolver = new PetscSolverGlobalMatrix;
+        massMatrixSolver->setKspPrefix("mass_");
+        massMatrixSolver->setMeshDistributor(meshDistributor,
+                                             mpiCommGlobal,
+                                             mpiCommLocal);
+        massMatrixSolver->setDofMapping(&massMatMapping);
+        massMatrixSolver->fillPetscMatrix(&massMatrix);
+
+        int r, c;
+        MatGetSize(massMatrixSolver->getMatInterior(), &r, &c);
+        MatInfo info;
+        MatGetInfo(massMatrixSolver->getMatInterior(), MAT_GLOBAL_SUM, &info);
+        MSG("MASS MAT INFO: size = %d x %d nnz = %d\n",
+            r, c,
+            static_cast<int>(info.nz_used));
+        MSG("END CREATE MASS MATRIX!\n");
+
+        fetiInterfaceLumpedPreconData.ksp_mass =
+          massMatrixSolver->getSolver();
+      }
+
+
+      // === Create tmp vectors ===
       localDofMap.createVec(fetiInterfaceLumpedPreconData.tmp_vec_b1);
       primalDofMap.createVec(fetiInterfaceLumpedPreconData.tmp_primal);
       fetiInterfaceLumpedPreconData.subSolver = subdomain;
-
-
-      KSPCreate(PETSC_COMM_WORLD, &fetiInterfaceLumpedPreconData.ksp_primal);
-      KSPSetOperators(fetiInterfaceLumpedPreconData.ksp_primal,
-                      subdomain->getMatCoarse(0, 0),
-                      subdomain->getMatCoarse(0, 0),
-                      SAME_NONZERO_PATTERN);
-      KSPSetOptionsPrefix(fetiInterfaceLumpedPreconData.ksp_primal, "primal_");
-      KSPSetFromOptions(fetiInterfaceLumpedPreconData.ksp_primal);
     }
 
     KSPGetPC(ksp_feti, &precon_feti);
@@ -1455,7 +1484,7 @@ namespace AMDiS {
   void PetscSolverFeti::fillPetscMatrix(Matrix<DOFMatrix*> *mat)
   {
     FUNCNAME("PetscSolverFeti::fillPetscMatrix()");
-    
+
     // === Create all sets and indices. ===
 
     vector<const FiniteElemSpace*> feSpaces = AMDiS::getComponentFeSpaces(*mat);
@@ -1790,6 +1819,7 @@ namespace AMDiS {
       VecAssemblyEnd(vecSol);
     }
 
+    VecDuplicate(vecSol, &fetiKspData.draft);
 
     // === Create reduced RHS ===
@@ -2028,10 +2058,12 @@ namespace AMDiS {
 
     meshDistributor->synchAddVector(vec);
 
+    double scalePower = 2.0;
+    Parameters::get("lp->scale power", scalePower);
     DOFIterator<double> dofIt(&vec, USED_DOFS);
     for (dofIt.reset(); !dofIt.end(); ++dofIt) {
       DegreeOfFreedom d = dofIt.getDOFIndex();
-      vec[d] = 1.0 / pow(vec[d], 2.0);
+      vec[d] = 1.0 / pow(vec[d], scalePower);
     }
   }
diff --git a/AMDiS/src/parallel/PetscSolverFeti.h b/AMDiS/src/parallel/PetscSolverFeti.h
index a16e93a02564e81409cc2eb907d5dafdd683087e..3cf47f3afe4cad189e96c182ff22eec24691b963 100644
--- a/AMDiS/src/parallel/PetscSolverFeti.h
+++ b/AMDiS/src/parallel/PetscSolverFeti.h
@@ -313,6 +313,8 @@ namespace AMDiS {
 
     FetiInterfaceLumpedPreconData fetiInterfaceLumpedPreconData;
 
+    FetiKspData fetiKspData;
+
     /// Matrices for Dirichlet preconditioner.
     Mat mat_interior_interior, mat_duals_duals, mat_interior_duals, mat_duals_interior;
@@ -322,6 +324,8 @@ namespace AMDiS {
 
     PetscSolver *subdomain;
 
+    PetscSolver *massMatrixSolver;
+
     int meshLevel;
 
     int rStartInterior;
diff --git a/AMDiS/src/parallel/PetscSolverFetiOperators.cc b/AMDiS/src/parallel/PetscSolverFetiOperators.cc
index 72add750754926bb5dc61049dfa6f3c01df22a19..3e9ed7159fd29ee65218948a41f53c84632a14b5 100644
--- a/AMDiS/src/parallel/PetscSolverFetiOperators.cc
+++ b/AMDiS/src/parallel/PetscSolverFetiOperators.cc
@@ -301,15 +301,21 @@ namespace AMDiS {
     VecNestGetSubVec(yvec, 0, &y_interface);
     VecNestGetSubVec(yvec, 1, &y_lagrange);
 
-    VecCopy(x_interface, y_interface);
-    double scaleFactor = 1.0;
-    Parameters::get("lp->scal", scaleFactor);
-    bool mult = false;
-    Parameters::get("lp->mult", mult);
-    if (mult)
-      VecPointwiseMult(y_interface, x_interface, data->h2vec);
-    if (scaleFactor != 0.0)
-      VecScale(y_interface, scaleFactor);
+    bool useMassMatrix = false;
+    Parameters::get("lp->mass matrix", useMassMatrix);
+    if (useMassMatrix) {
+      KSPSolve(data->ksp_mass, x_interface, y_interface);
+    } else {
+      VecCopy(x_interface, y_interface);
+      double scaleFactor = 1.0;
+      Parameters::get("lp->scal", scaleFactor);
+      bool mult = false;
+      Parameters::get("lp->mult", mult);
+      if (mult)
+        VecPointwiseMult(y_interface, x_interface, data->h2vec);
+      if (scaleFactor != 0.0)
+        VecScale(y_interface, scaleFactor);
+    }
 
     MatMultTranspose(*(data->mat_lagrange_scaled), x_lagrange, data->tmp_vec_b0);
diff --git a/AMDiS/src/parallel/PetscSolverFetiStructs.h b/AMDiS/src/parallel/PetscSolverFetiStructs.h
index d475728779bed679abd5a6b2be003fb0935f7fbf..72ee3e15fafc10260317644198fa6fd0b6e4183d 100644
--- a/AMDiS/src/parallel/PetscSolverFetiStructs.h
+++ b/AMDiS/src/parallel/PetscSolverFetiStructs.h
@@ -122,16 +122,16 @@ namespace AMDiS {
 
     Vec h2vec;
 
-    KSP ksp_primal;
-
     Vec tmp_primal;
 
-    KSP ksp_feti_ex;
-
-    Mat mat_feti_ex;
+    KSP ksp_mass;
   };
 
 
+  struct FetiKspData {
+    Vec draft;
+  };
+
   typedef enum {
     FETI_NONE = 0,
     FETI_DIRICHLET = 1,
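Taken together, the interface block of the lumped FETI preconditioner now has two variants (notation ours, with Gamma denoting the interface/pressure unknowns): if `lp->mass matrix` is set, it applies the inverse pressure mass matrix, $y_\Gamma = M_p^{-1} x_\Gamma$, via KSPSolve on the new ksp_mass solver; otherwise the previous branch is kept, $y_\Gamma = \alpha\, x_\Gamma$ or, with `lp->mult` set, $y_\Gamma = \alpha\,(\mathrm{h2vec} \circ x_\Gamma)$, where $\alpha$ is read from `lp->scal` and the entries of h2vec are built in createH2vec() as $1/\mathrm{vec}[d]^{\,s}$ with the exponent $s$ now taken from `lp->scale power` (default 2).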
diff --git a/AMDiS/src/parallel/PetscSolverGlobalMatrix.cc b/AMDiS/src/parallel/PetscSolverGlobalMatrix.cc
index 24b2da760256dc4949da42397b5f4b77397da489..beecd4500a8235ed6fc86c7573ca5cea4e6181c7 100644
--- a/AMDiS/src/parallel/PetscSolverGlobalMatrix.cc
+++ b/AMDiS/src/parallel/PetscSolverGlobalMatrix.cc
@@ -524,8 +524,6 @@ namespace AMDiS {
 
     const FiniteElemSpace *rowFe = seqMat->getRowFeSpace();
     const FiniteElemSpace *colFe = seqMat->getColFeSpace();
-    DofMap& rowMap = (*interiorMap)[rowFe].getMap();
-    DofMap& colMap = (*interiorMap)[colFe].getMap();
 
     // === Traverse all rows of the DOF matrix and insert row wise the values ===
     // === to the PETSc matrix. ===
@@ -533,7 +531,11 @@ namespace AMDiS {
     for (cursor_type cursor = begin(seqMat->getBaseMatrix()),
            cend = end(seqMat->getBaseMatrix()); cursor != cend; ++cursor) {
       // Global index of the current row DOF.
-      int globalRowDof = rowMap[*cursor].global;
+      MultiIndex rowMultiIndex;
+      if ((*interiorMap)[rowFe].find(*cursor, rowMultiIndex) == false)
+        continue;
+
+      int globalRowDof = rowMultiIndex.global;
 
       // Test if the current row DOF is a periodic DOF.
       bool periodicRow = perMap.isPeriodic(rowFe, globalRowDof);
@@ -551,7 +553,11 @@ namespace AMDiS {
            icursor != icend; ++icursor) {
 
         // Global index of the current column index.
-        int globalColDof = colMap[col(*icursor)].global;
+        MultiIndex colMultiIndex;
+        if ((*interiorMap)[colFe].find(col(*icursor), colMultiIndex) == false)
+          continue;
+
+        int globalColDof = colMultiIndex.global;
         // Test if the current col dof is a periodic dof.
         bool periodicCol = perMap.isPeriodic(colFe, globalColDof);
         // Get PETSc's mat col index.
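For reference, the run-time switches read by the changes above could be set in the init file roughly as follows. This is a sketch only: the key names are exactly those passed to Parameters::get(), the `key: value` syntax follows the usual AMDiS parameter-file convention, and the "mass_" prefix set via setKspPrefix() presumably exposes the mass-matrix solver to PETSc options such as -mass_ksp_type.

    lp->mass matrix:  1      % apply M_p^{-1} in the interface part of the lumped preconditioner
    lp->scale power:  2.0    % exponent s used in createH2vec(), previously hard-coded to 2.0
    lp->scal:         1.0    % scaling factor alpha of the fallback branch
    lp->mult:         0      % pointwise multiplication with h2vec in the fallback branch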