Liebe Gitlab-Nutzerin, lieber Gitlab-Nutzer, es ist nun möglich, sich mittels des ZIH-Logins/LDAP an unserem Dienst anzumelden. Ein Anmelden über dieses Verfahren erzeugt ein neues Konto. Das alte Konto ist über den Reiter "Standard" erreichbar. Die Administratoren

Dear Gitlab user, it is now possible to log in to our service using the ZIH login/LDAP. Logging in via this method will create a new account. The old account can still be accessed via the "Standard" tab. The administrators

Commit f06082db authored by Thomas Witkowski's avatar Thomas Witkowski
Browse files

Work on BDDCML integration, still not working.

parent 10141fab
...@@ -66,27 +66,35 @@ namespace AMDiS { ...@@ -66,27 +66,35 @@ namespace AMDiS {
int nLevel = 1; int nLevel = 2;
int nSubdomains = meshDistributor->getMpiSize(); int nSubdomains[nLevel];
int length = 1; nSubdomains[0] = meshDistributor->getMpiSize();
nSubdomains[1] = 1;
int nSubPerProc = 1; int nSubPerProc = 1;
MPI_Fint c2f = MPI_Comm_c2f(meshDistributor->getMpiComm()); MPI_Fint c2f = MPI_Comm_c2f(meshDistributor->getMpiComm());
int verboseLevel = 2; int verboseLevel = 2;
int numbase = 0; int numbase = 0;
bddcml_init(&nLevel, &nSubdomains, &length, &nSubPerProc, bddcml_init(&nLevel, nSubdomains, &nLevel, &nSubPerProc,
&c2f, &verboseLevel, &numbase); &c2f, &verboseLevel, &numbase);
// global number of elements // global number of elements
int nelem = mesh->getNumberOfLeaves(); int nelem = mesh->getNumberOfLeaves();
mpi::globalAdd(nelem); mpi::globalAdd(nelem);
MSG("nelem = %d\n", nelem);
// global number of nodes // global number of nodes
int nnod = meshDistributor->getNumberOverallDofs(feSpace); int nnod = meshDistributor->getNumberOverallDofs(feSpace);
MSG("nnod = %d\n", nnod);
// global number of dofs // global number of dofs
int ndof = nnod * nComponents; int ndof = nnod * nComponents;
MSG("ndof = %d\n", ndof);
// space dimenstion // space dimenstion
int ndim = 2; int ndim = 2;
...@@ -99,12 +107,16 @@ namespace AMDiS { ...@@ -99,12 +107,16 @@ namespace AMDiS {
// local number of elements // local number of elements
int nelems = nLeafEls; int nelems = nLeafEls;
MSG("nelems = %d\n", nelems);
// local number of nodes // local number of nodes
int nnods = feSpace->getAdmin()->getUsedSize(); int nnods = feSpace->getAdmin()->getUsedSize();
// local number of dofs // local number of dofs
int ndofs = nnods * nComponents; int ndofs = nnods * nComponents;
MSG("local nnods %d ndofs %d\n", nnods, ndofs);
// Length of array inet // Length of array inet
int linet = nelems * 3; int linet = nelems * 3;
...@@ -112,6 +124,9 @@ namespace AMDiS { ...@@ -112,6 +124,9 @@ namespace AMDiS {
int inet[linet]; int inet[linet];
elInfo = stack.traverseFirst(mesh, -1, Mesh::CALL_LEAF_EL); elInfo = stack.traverseFirst(mesh, -1, Mesh::CALL_LEAF_EL);
while (elInfo) { while (elInfo) {
TEST_EXIT_DBG(mapElIndex.count(elInfo->getElement()->getIndex()))
("Should not happen!\n");
int localElIndex = mapElIndex[elInfo->getElement()->getIndex()]; int localElIndex = mapElIndex[elInfo->getElement()->getIndex()];
for (int i = 0; i < 3; i++) for (int i = 0; i < 3; i++)
inet[localElIndex * 3 + i] = elInfo->getElement()->getDof(i, 0); inet[localElIndex * 3 + i] = elInfo->getElement()->getDof(i, 0);
...@@ -135,7 +150,7 @@ namespace AMDiS { ...@@ -135,7 +150,7 @@ namespace AMDiS {
isngn[i] = meshDistributor->mapLocalToGlobal(feSpace, i); isngn[i] = meshDistributor->mapLocalToGlobal(feSpace, i);
// array of indices of subdomain variables in global numbering // array of indices of subdomain variables in global numbering
int isvgvn[ndof]; int isvgvn[ndofs];
for (int j = 0; j < nnods; j++) for (int j = 0; j < nnods; j++)
for (int i = 0; i < nComponents; i++) for (int i = 0; i < nComponents; i++)
isvgvn[j * nComponents + i] = isvgvn[j * nComponents + i] =
...@@ -146,6 +161,7 @@ namespace AMDiS { ...@@ -146,6 +161,7 @@ namespace AMDiS {
int rStartEl, nOverallEl; int rStartEl, nOverallEl;
mpi::getDofNumbering(meshDistributor->getMpiComm(), mpi::getDofNumbering(meshDistributor->getMpiComm(),
nelems, rStartEl, nOverallEl); nelems, rStartEl, nOverallEl);
MSG("rStartEl = %d\n", rStartEl);
for (int i = 0; i < nelems; i++) for (int i = 0; i < nelems; i++)
isegn[i] = rStartEl + i; isegn[i] = rStartEl + i;
...@@ -168,10 +184,12 @@ namespace AMDiS { ...@@ -168,10 +184,12 @@ namespace AMDiS {
// local array of indices denoting dirichlet boundary data // local array of indices denoting dirichlet boundary data
int ifix[ndofs]; int ifix[ndofs];
for (int i = 0; i < ndofs; i++) for (int i = 0; i < ndofs; i++)
ifix[ndofs] = -1; ifix[i] = 0;
// local array of values for dirichlet boundary data // local array of values for dirichlet boundary data
double fixv[ndofs]; double fixv[ndofs];
for (int i = 0; i < ndofs; i++)
fixv[i] = 0.0;
// local rhs data // local rhs data
double rhs[ndofs]; double rhs[ndofs];
...@@ -182,7 +200,7 @@ namespace AMDiS { ...@@ -182,7 +200,7 @@ namespace AMDiS {
} }
// Completenes of the rhs vector on subdomains // Completenes of the rhs vector on subdomains
int is_rhs_complete = 1; int is_rhs_complete = 0;
// Local array with initial solution guess // Local array with initial solution guess
double sol[ndofs]; double sol[ndofs];
...@@ -206,9 +224,11 @@ namespace AMDiS { ...@@ -206,9 +224,11 @@ namespace AMDiS {
// Number of non-zero entries in matrix // Number of non-zero entries in matrix
int la = i_sparse.size(); int la = i_sparse.size();
MSG("LOCAL LA = %d\n", la);
// Matrix is assembled // Matrix is assembled
int is_assembled_int = 1; int is_assembled_int = 0;
bddcml_upload_subdomain_data(&nelem, bddcml_upload_subdomain_data(&nelem,
...@@ -229,7 +249,7 @@ namespace AMDiS { ...@@ -229,7 +249,7 @@ namespace AMDiS {
isngn, isngn,
&nnods, &nnods,
isvgvn, isvgvn,
&ndof, &ndofs,
isegn, isegn,
&nelems, &nelems,
xyz, xyz,
...@@ -255,13 +275,14 @@ namespace AMDiS { ...@@ -255,13 +275,14 @@ namespace AMDiS {
int use_defaults_int = 1; int use_defaults_int = 1;
int parallel_division_int = 1; int parallel_division_int = 1;
int use_arithmetic_int = 1; int use_arithmetic_int = 1;
int use_adaptive_int = 1; int use_adaptive_int = 0;
MSG("BDDC POINT A\n");
bddcml_setup_preconditioner(&matrixtype, bddcml_setup_preconditioner(&matrixtype,
&use_defaults_int, &use_defaults_int,
&parallel_division_int, &parallel_division_int,
&use_arithmetic_int, &use_arithmetic_int,
&use_adaptive_int); &use_adaptive_int);
MSG("BDDC POINT B\n");
int method = 1; int method = 1;
double tol = 1.e-6; double tol = 1.e-6;
...@@ -280,6 +301,8 @@ namespace AMDiS { ...@@ -280,6 +301,8 @@ namespace AMDiS {
&converged_reason, &converged_reason,
&condition_number); &condition_number);
MSG("BDDC POINT C\n");
MSG("BDDCML converged reason: %d within %d iterations \n", MSG("BDDCML converged reason: %d within %d iterations \n",
converged_reason, num_iter); converged_reason, num_iter);
...@@ -294,9 +317,9 @@ namespace AMDiS { ...@@ -294,9 +317,9 @@ namespace AMDiS {
void BddcMlSolver::addDofMatrix(DOFMatrix* dmat, void BddcMlSolver::addDofMatrix(DOFMatrix* dmat,
vector<int> i_sparse, vector<int>& i_sparse,
vector<int> j_sparse, vector<int>& j_sparse,
vector<double> a_sparse, vector<double>& a_sparse,
int nComponents, int nComponents,
int ithRowComponent, int ithRowComponent,
int ithColComponent) int ithColComponent)
......
...@@ -51,9 +51,9 @@ namespace AMDiS { ...@@ -51,9 +51,9 @@ namespace AMDiS {
protected: protected:
void addDofMatrix(DOFMatrix* mat, void addDofMatrix(DOFMatrix* mat,
vector<int> i_sparse, vector<int>& i_sparse,
vector<int> j_sparse, vector<int>& j_sparse,
vector<double> a_sparse, vector<double>& a_sparse,
int nComponents, int nComponents,
int ithRowComponent, int ithRowComponent,
int ithColComponent); int ithColComponent);
......
Markdown is supported
0% or .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment