Commit 5574b023 authored by Thomas Witkowski's avatar Thomas Witkowski
Browse files

Fixed some small problems for FETI-DP with mixed finite elements.

parent e8fa1fc3
...@@ -347,7 +347,10 @@ namespace AMDiS { ...@@ -347,7 +347,10 @@ namespace AMDiS {
TraverseStack stack; TraverseStack stack;
ElInfo *elInfo = stack.traverseFirst(fe_space->getMesh(), level, flag); ElInfo *elInfo = stack.traverseFirst(fe_space->getMesh(), level, flag);
double a = 0.0;
double b = 0.0;
while (elInfo) { while (elInfo) {
elinfo = elInfo; elinfo = elInfo;
...@@ -364,18 +367,22 @@ namespace AMDiS { ...@@ -364,18 +367,22 @@ namespace AMDiS {
int_uh_vec += quadFast->getWeight(i) * uh_vec[i]; int_uh_vec += quadFast->getWeight(i) * uh_vec[i];
} }
l2_err_el = pow(fabs(det * int_u_vec - det * int_uh_vec), 2.0); a += det * int_u_vec - det * int_uh_vec;
b += fabs(det * int_u_vec - det * int_uh_vec);
l2_err_el = pow(fabs(det * int_u_vec - det * int_uh_vec), 2.0);
l2Err2 += l2_err_el; l2Err2 += l2_err_el;
maxErr = std::max(maxErr, l2_err_el); maxErr = std::max(maxErr, l2_err_el);
// if (writeInLeafData) if (writeInLeafData)
// elInfo->getElement()->setEstimation(l2_err_el, component); elInfo->getElement()->setEstimation(l2_err_el, component);
estMap[elInfo->getElement()->getIndex()] = l2_err_el; estMap[elInfo->getElement()->getIndex()] = l2_err_el;
elInfo = stack.traverseNext(elInfo); elInfo = stack.traverseNext(elInfo);
} }
MSG("L2ERR values %e %e %e %e\n", a, b, l2Err2, sqrt(l2Err2));
delete [] u_vec; delete [] u_vec;
if (max) if (max)
......
...@@ -113,6 +113,7 @@ namespace AMDiS { ...@@ -113,6 +113,7 @@ namespace AMDiS {
void ParallelDofMapping::init(MPI::Intracomm *m, void ParallelDofMapping::init(MPI::Intracomm *m,
vector<const FiniteElemSpace*> &fe, vector<const FiniteElemSpace*> &fe,
vector<const FiniteElemSpace*> &uniqueFe,
bool needGlobalMapping, bool needGlobalMapping,
bool bNonLocalDofs) bool bNonLocalDofs)
{ {
...@@ -120,17 +121,13 @@ namespace AMDiS { ...@@ -120,17 +121,13 @@ namespace AMDiS {
mpiComm = m; mpiComm = m;
feSpaces = fe; feSpaces = fe;
feSpacesUnique = uniqueFe;
hasNonLocalDofs = bNonLocalDofs; hasNonLocalDofs = bNonLocalDofs;
// === Create a set of unique FE spaces. ===
for (unsigned int i = 0; i < feSpaces.size(); i++)
feSpacesUnique.insert(feSpaces[i]);
// === Init the mapping for all different FE spaces. === // === Init the mapping for all different FE spaces. ===
for (std::set<const FiniteElemSpace*>::iterator it = feSpacesUnique.begin(); for (vector<const FiniteElemSpace*>::iterator it = feSpacesUnique.begin();
it != feSpacesUnique.end(); ++it) { it != feSpacesUnique.end(); ++it) {
addFeSpace(*it); addFeSpace(*it);
data[*it].setNeedGlobalMapping(needGlobalMapping); data[*it].setNeedGlobalMapping(needGlobalMapping);
...@@ -143,7 +140,7 @@ namespace AMDiS { ...@@ -143,7 +140,7 @@ namespace AMDiS {
{ {
FUNCNAME("ParallelDofMapping::clear()"); FUNCNAME("ParallelDofMapping::clear()");
for (std::set<const FiniteElemSpace*>::iterator it = feSpacesUnique.begin(); for (vector<const FiniteElemSpace*>::iterator it = feSpacesUnique.begin();
it != feSpacesUnique.end(); ++it) it != feSpacesUnique.end(); ++it)
data[*it].clear(); data[*it].clear();
...@@ -164,7 +161,7 @@ namespace AMDiS { ...@@ -164,7 +161,7 @@ namespace AMDiS {
recvDofs = &pRecv; recvDofs = &pRecv;
// Add the DOF communicator also to all FE space DOF mappings. // Add the DOF communicator also to all FE space DOF mappings.
for (std::set<const FiniteElemSpace*>::iterator it = feSpacesUnique.begin(); for (vector<const FiniteElemSpace*>::iterator it = feSpacesUnique.begin();
it != feSpacesUnique.end(); ++it) it != feSpacesUnique.end(); ++it)
data[*it].setDofComm(pSend, pRecv); data[*it].setDofComm(pSend, pRecv);
} }
...@@ -244,7 +241,7 @@ namespace AMDiS { ...@@ -244,7 +241,7 @@ namespace AMDiS {
FUNCNAME("ParallelDofMapping::update()"); FUNCNAME("ParallelDofMapping::update()");
// First, update all FE space DOF mappings. // First, update all FE space DOF mappings.
for (std::set<const FiniteElemSpace*>::iterator it = feSpacesUnique.begin(); for (vector<const FiniteElemSpace*>::iterator it = feSpacesUnique.begin();
it != feSpacesUnique.end(); ++it) it != feSpacesUnique.end(); ++it)
data[*it].update(); data[*it].update();
......
...@@ -282,7 +282,10 @@ namespace AMDiS { ...@@ -282,7 +282,10 @@ namespace AMDiS {
* *
* \param[in] m MPI communicator. * \param[in] m MPI communicator.
* \param[in] fe The FE spaces of all components of the * \param[in] fe The FE spaces of all components of the
* PDE to be solved. * PDE to be solved.
* \param[in] uniqueFe Unique list of FE spaces. Thus, two
* arbitrary elements of this list are always
* different.
* \param[in] needGlobalMapping If true, the mapping computes also a global * \param[in] needGlobalMapping If true, the mapping computes also a global
* index for the DOFs. * index for the DOFs.
* \param[in] bNonLocalDofs If true, at least one rank's mapping con- * \param[in] bNonLocalDofs If true, at least one rank's mapping con-
...@@ -290,6 +293,7 @@ namespace AMDiS { ...@@ -290,6 +293,7 @@ namespace AMDiS {
*/ */
void init(MPI::Intracomm *m, void init(MPI::Intracomm *m,
vector<const FiniteElemSpace*> &fe, vector<const FiniteElemSpace*> &fe,
vector<const FiniteElemSpace*> &uniqueFe,
bool needGlobalMapping, bool needGlobalMapping,
bool bNonLocalDofs); bool bNonLocalDofs);
...@@ -404,7 +408,7 @@ namespace AMDiS { ...@@ -404,7 +408,7 @@ namespace AMDiS {
/// The set of all FE spaces. It uniquly contains all different FE spaces /// The set of all FE spaces. It uniquly contains all different FE spaces
/// from \ref feSpaces. /// from \ref feSpaces.
std::set<const FiniteElemSpace*> feSpacesUnique; vector<const FiniteElemSpace*> feSpacesUnique;
/// Number of DOFs owned by rank. /// Number of DOFs owned by rank.
int nRankDofs; int nRankDofs;
......
...@@ -216,9 +216,6 @@ namespace AMDiS { ...@@ -216,9 +216,6 @@ namespace AMDiS {
{ {
FUNCNAME("PetscSolverFeti::updateDofData()"); FUNCNAME("PetscSolverFeti::updateDofData()");
TEST_EXIT(meshDistributor->getFeSpace()->getBasisFcts()->getDegree() == 1)
("Works for linear basis functions only!\n");
primalDofMap.clear(); primalDofMap.clear();
dualDofMap.clear(); dualDofMap.clear();
lagrangeMap.clear(); lagrangeMap.clear();
...@@ -227,6 +224,7 @@ namespace AMDiS { ...@@ -227,6 +224,7 @@ namespace AMDiS {
for (unsigned int i = 0; i < meshDistributor->getFeSpaces().size(); i++) { for (unsigned int i = 0; i < meshDistributor->getFeSpaces().size(); i++) {
const FiniteElemSpace *feSpace = meshDistributor->getFeSpace(i); const FiniteElemSpace *feSpace = meshDistributor->getFeSpace(i);
createPrimals(feSpace); createPrimals(feSpace);
createDuals(feSpace); createDuals(feSpace);
createLagrange(feSpace); createLagrange(feSpace);
...@@ -247,14 +245,15 @@ namespace AMDiS { ...@@ -247,14 +245,15 @@ namespace AMDiS {
for (unsigned int i = 0; i < meshDistributor->getFeSpaces().size(); i++) { for (unsigned int i = 0; i < meshDistributor->getFeSpaces().size(); i++) {
const FiniteElemSpace *feSpace = meshDistributor->getFeSpace(i); const FiniteElemSpace *feSpace = meshDistributor->getFeSpace(i);
MSG("FETI-DP data for %d-ith FE space:\n", i);
MSG("nRankPrimals = %d nOverallPrimals = %d\n", MSG(" nRankPrimals = %d nOverallPrimals = %d\n",
primalDofMap[feSpace].nRankDofs, primalDofMap[feSpace].nOverallDofs); primalDofMap[feSpace].nRankDofs, primalDofMap[feSpace].nOverallDofs);
MSG("nRankDuals = %d nOverallDuals = %d\n", MSG(" nRankDuals = %d nOverallDuals = %d\n",
dualDofMap[feSpace].nRankDofs, dualDofMap[feSpace].nOverallDofs); dualDofMap[feSpace].nRankDofs, dualDofMap[feSpace].nOverallDofs);
MSG("nRankLagrange = %d nOverallLagrange = %d\n", MSG(" nRankLagrange = %d nOverallLagrange = %d\n",
lagrangeMap[feSpace].nRankDofs, lagrangeMap[feSpace].nOverallDofs); lagrangeMap[feSpace].nRankDofs, lagrangeMap[feSpace].nOverallDofs);
TEST_EXIT_DBG(localDofMap[feSpace].size() + primalDofMap[feSpace].size() == TEST_EXIT_DBG(localDofMap[feSpace].size() + primalDofMap[feSpace].size() ==
...@@ -904,12 +903,17 @@ namespace AMDiS { ...@@ -904,12 +903,17 @@ namespace AMDiS {
vector<const FiniteElemSpace*> feSpaces = getFeSpaces(mat); vector<const FiniteElemSpace*> feSpaces = getFeSpaces(mat);
primalDofMap.init(mpiComm, feSpaces, true, true); primalDofMap.init(mpiComm, feSpaces, meshDistributor->getFeSpaces(),
dualDofMap.init(mpiComm, feSpaces, false, false); true, true);
lagrangeMap.init(mpiComm, feSpaces, true, true); dualDofMap.init(mpiComm, feSpaces, meshDistributor->getFeSpaces(),
localDofMap.init(mpiComm, feSpaces, false, false); false, false);
lagrangeMap.init(mpiComm, feSpaces, meshDistributor->getFeSpaces(),
true, true);
localDofMap.init(mpiComm, feSpaces, meshDistributor->getFeSpaces(),
false, false);
if (fetiPreconditioner == FETI_DIRICHLET) if (fetiPreconditioner == FETI_DIRICHLET)
interiorDofMap.init(mpiComm, feSpaces, false, false); interiorDofMap.init(mpiComm, feSpaces, meshDistributor->getFeSpaces(),
false, false);
updateDofData(); updateDofData();
...@@ -920,23 +924,23 @@ namespace AMDiS { ...@@ -920,23 +924,23 @@ namespace AMDiS {
int nRowsRankPrimal = primalDofMap.getRankDofs(); int nRowsRankPrimal = primalDofMap.getRankDofs();
int nRowsOverallPrimal = primalDofMap.getOverallDofs(); int nRowsOverallPrimal = primalDofMap.getOverallDofs();
MatCreateSeqAIJ(PETSC_COMM_SELF, nRowsRankB, nRowsRankB, 30, PETSC_NULL, MatCreateSeqAIJ(PETSC_COMM_SELF, nRowsRankB, nRowsRankB, 60, PETSC_NULL,
&mat_b_b); &mat_b_b);
MatCreateMPIAIJ(PETSC_COMM_WORLD, MatCreateMPIAIJ(PETSC_COMM_WORLD,
nRowsRankPrimal, nRowsRankPrimal, nRowsRankPrimal, nRowsRankPrimal,
nRowsOverallPrimal, nRowsOverallPrimal, nRowsOverallPrimal, nRowsOverallPrimal,
30, PETSC_NULL, 30, PETSC_NULL, &mat_primal_primal); 60, PETSC_NULL, 60, PETSC_NULL, &mat_primal_primal);
MatCreateMPIAIJ(PETSC_COMM_WORLD, MatCreateMPIAIJ(PETSC_COMM_WORLD,
nRowsRankB, nRowsRankPrimal, nRowsRankB, nRowsRankPrimal,
nRowsOverallB, nRowsOverallPrimal, nRowsOverallB, nRowsOverallPrimal,
30, PETSC_NULL, 30, PETSC_NULL, &mat_b_primal); 60, PETSC_NULL, 60, PETSC_NULL, &mat_b_primal);
MatCreateMPIAIJ(PETSC_COMM_WORLD, MatCreateMPIAIJ(PETSC_COMM_WORLD,
nRowsRankPrimal, nRowsRankB, nRowsRankPrimal, nRowsRankB,
nRowsOverallPrimal, nRowsOverallB, nRowsOverallPrimal, nRowsOverallB,
15, PETSC_NULL, 15, PETSC_NULL, &mat_primal_b); 30, PETSC_NULL, 30, PETSC_NULL, &mat_primal_b);
// === Create matrices for FETI-DP preconditioner. === // === Create matrices for FETI-DP preconditioner. ===
...@@ -945,22 +949,22 @@ namespace AMDiS { ...@@ -945,22 +949,22 @@ namespace AMDiS {
int nRowsDual = dualDofMap.getRankDofs(); int nRowsDual = dualDofMap.getRankDofs();
MatCreateSeqAIJ(PETSC_COMM_SELF, MatCreateSeqAIJ(PETSC_COMM_SELF,
nRowsDual, nRowsDual, 30, PETSC_NULL, nRowsDual, nRowsDual, 60, PETSC_NULL,
&mat_duals_duals); &mat_duals_duals);
if (fetiPreconditioner == FETI_DIRICHLET) { if (fetiPreconditioner == FETI_DIRICHLET) {
int nRowsInterior = interiorDofMap.getRankDofs(); int nRowsInterior = interiorDofMap.getRankDofs();
MatCreateSeqAIJ(PETSC_COMM_SELF, MatCreateSeqAIJ(PETSC_COMM_SELF,
nRowsInterior, nRowsInterior, 30, PETSC_NULL, nRowsInterior, nRowsInterior, 60, PETSC_NULL,
&mat_interior_interior); &mat_interior_interior);
MatCreateSeqAIJ(PETSC_COMM_SELF, MatCreateSeqAIJ(PETSC_COMM_SELF,
nRowsInterior, nRowsDual, 30, PETSC_NULL, nRowsInterior, nRowsDual, 60, PETSC_NULL,
&mat_interior_duals); &mat_interior_duals);
MatCreateSeqAIJ(PETSC_COMM_SELF, MatCreateSeqAIJ(PETSC_COMM_SELF,
nRowsDual, nRowsInterior, 30, PETSC_NULL, nRowsDual, nRowsInterior, 60, PETSC_NULL,
&mat_duals_interior); &mat_duals_interior);
} }
} }
......
Markdown is supported
0% or .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment