Commit 9a7aa655 authored by Thomas Witkowski

Reformulation of the solution of the Schur primal matrix in FETI-DP solver.

parent 2eeb5654
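
The change below adds an explicit assembly path for the Schur complement on the primal variables. As a sketch of the intent, in the notation used by the code comments (with K_BB ~ mat_b_b, K_BPi ~ mat_b_primal, K_PiB ~ mat_primal_b, K_PiPi ~ mat_primal_primal):

    S_PiPi = K_PiPi - K_PiB inv(K_BB) K_BPi

In the new else branch, each nonzero column of K_BPi is solved against K_BB with the local solver ksp_b, multiplied by K_PiB, and the result is subtracted from mat_primal_primal via MatAXPY before ksp_schur_primal is created on it.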
@@ -31,7 +31,7 @@ namespace AMDiS {
};
Initfile* Initfile::singlett= NULL;
Initfile* Initfile::singlett = NULL;
std::set<std::string> Initfile::fn_include_list;
@@ -46,7 +46,7 @@ namespace AMDiS {
// initialize global structures using parameters
Global::init();
};
}
/// Fill an initfile from a file with filename fn
@@ -62,7 +62,7 @@ namespace AMDiS {
fn_include_list.insert(fn);
read(inputFile);
}
};
}
/// Fill an initfile from an input stream
@@ -71,7 +71,7 @@ namespace AMDiS {
unsigned line_length = 512;
char swap[line_length];
in.getline(swap, line_length);
while (in.good() || in.gcount()>0) {
while (in.good() || in.gcount() > 0) {
std::string whitespaces = " \t\r\f\n";
std::string sw(swap);
size_t pos0 = sw.find_first_not_of(whitespaces);
@@ -87,16 +87,17 @@ namespace AMDiS {
std::string paramName = variableReplacement(InitfileInternal::trim(parser.name));
std::string paramValue = variableReplacement(InitfileInternal::trim(parser.value));
operator[](paramName) = paramValue;
#ifndef DEBUG
#ifndef DEBUG
int info = 0;
get("parameter information", info, 0);
if (info >= 2)
#endif
#endif
{
std::cout << "read [" << paramName << " => " << paramValue << "]" << std::endl;
}
} else if (sw[pos0] == '#'
&& static_cast<size_t>(sw.find("#include")) == pos0) {
} else if (pos0 != std::string::npos &&
sw[pos0] == '#' &&
static_cast<size_t>(sw.find("#include")) == pos0) {
// include file by '#include "filename"' or '#include <filename>'
bool forceRead = false;
int posAdd = 1;
@@ -127,9 +128,10 @@ namespace AMDiS {
}
read(fn, forceRead);
}
in.getline(swap, line_length);
}
};
}
std::string Initfile::variableReplacement(const std::string& input) const
@@ -157,7 +159,7 @@ namespace AMDiS {
}
return inputSwap;
};
}
void Initfile::readArgv(int argc, char **argv)
@@ -192,7 +194,7 @@ namespace AMDiS {
if (msgInfo == 0)
paramInfo = 0;
};
}
/// print all parameters to std::cout
@@ -201,7 +203,7 @@ namespace AMDiS {
initIntern();
for (Initfile::iterator it = singlett->begin(); it != singlett->end(); it++)
std::cout << (*it).first << " => " << (*it).second << std::endl;
};
}
/// Write data-map to initfile
......
@@ -150,8 +150,6 @@ namespace AMDiS {
DimVec<int> *ind = NULL;
TEST_EXIT(filename != "")("No filename specified!\n");
// TEST_EXIT(filename.length() < static_cast<unsigned int>(127))
// ("can only handle filenames up to 127 characters\n");
FILE *file = fopen(filename.c_str(), "r");
TEST_EXIT(file)("cannot open file %s\n", filename.c_str());
......
@@ -228,6 +228,9 @@ namespace AMDiS {
ParallelDebug::printBoundaryInfo(*this);
#endif
// === Remove neighbourhood relations due to periodic boundary conditions. ===
for (deque<MacroElement*>::iterator it = mesh->firstMacroElement();
it != mesh->endOfMacroElements(); ++it) {
for (int i = 0; i < mesh->getGeo(NEIGH); i++) {
......
@@ -609,8 +609,6 @@ namespace AMDiS {
{
FUNCNAME("PetscSolverFeti::createSchurPrimal()");
MSG("MAT SCHUR PRIMAL SOLVER = %d\n", schurPrimalSolver);
if (schurPrimalSolver == 0) {
schurPrimalData.mat_primal_primal = &mat_primal_primal;
schurPrimalData.mat_primal_b = &mat_primal_b;
@@ -637,18 +635,111 @@ namespace AMDiS {
KSPSetOptionsPrefix(ksp_schur_primal, "solver_sp_");
KSPSetFromOptions(ksp_schur_primal);
} else {
MatDuplicate(mat_primal_primal, MAT_COPY_VALUES, &mat_schur_primal);
TEST_EXIT(ksp_b)("No solver object for local problem!\n");
double wtime = MPI::Wtime();
Vec tmpVecB;
VecCreateMPI(PETSC_COMM_WORLD,
nRankB * nComponents,
nOverallB * nComponents, &tmpVecB);
int nRowsRankPrimal = nRankPrimals * nComponents;
int nRowsOverallPrimal = nOverallPrimals * nComponents;
Vec tmpVecPrimal;
VecCreateMPI(PETSC_COMM_WORLD,
nRowsRankPrimal, nRowsOverallPrimal, &tmpVecPrimal);
Mat matPrimal;
MatCreateMPIAIJ(PETSC_COMM_WORLD,
nRowsRankPrimal, nRowsRankPrimal,
nRowsOverallPrimal, nRowsOverallPrimal,
10, PETSC_NULL, 10, PETSC_NULL, &matPrimal);
Mat tmp0, tmp1;
MatMatSolve(mat_b_b, mat_b_primal, tmp0);
MatMatMult(mat_primal_b, tmp0, MAT_INITIAL_MATRIX, PETSC_DEFAULT, &tmp1);
MatDestroy(&tmp0);
MatDestroy(&tmp1);
PetscInt nCols;
const PetscInt *cols;
const PetscScalar *values;
MSG("EXIT!\n");
int nLocalPrimals = globalPrimalIndex.size() * nComponents;
map<int, vector<pair<int, double> > > mat_b_primal_cols;
for (int i = 0; i < nRankB * nComponents; i++) {
PetscInt row = rStartB * nComponents + i;
MatGetRow(mat_b_primal, row, &nCols, &cols, &values);
for (int j = 0; j < nCols; j++)
if (values[j] != 0.0)
mat_b_primal_cols[cols[j]].push_back(make_pair(i, values[j]));
MatRestoreRow(mat_b_primal, i, &nCols, &cols, &values);
}
int maxLocalPrimal = mat_b_primal_cols.size();
mpi::globalMax(maxLocalPrimal);
TEST_EXIT(mat_b_primal_cols.size() ==
globalPrimalIndex.size() * nComponents)("Should not happen!\n");
for (map<int, vector<pair<int, double> > >::iterator it = mat_b_primal_cols.begin();
it != mat_b_primal_cols.end(); ++it) {
Vec tmpVec;
VecCreateSeq(PETSC_COMM_SELF, nRankB * nComponents, &tmpVec);
for (unsigned int i = 0; i < it->second.size(); i++)
VecSetValue(tmpVec,
it->second[i].first, it->second[i].second, INSERT_VALUES);
VecAssemblyBegin(tmpVec);
VecAssemblyEnd(tmpVec);
KSPSolve(ksp_b, tmpVec, tmpVec);
PetscScalar *tmpValuesB, *tmpValues;
VecGetArray(tmpVecB, &tmpValuesB);
VecGetArray(tmpVec, &tmpValues);
for (int i = 0; i < nRankB * nComponents; i++)
tmpValuesB[i] = tmpValues[i];
VecRestoreArray(tmpVec, &tmpValues);
VecRestoreArray(tmpVecB, &tmpValuesB);
MatMult(mat_primal_b, tmpVecB, tmpVecPrimal);
VecGetArray(tmpVecPrimal, &tmpValues);
for (int i = 0; i < nRowsRankPrimal; i++)
MatSetValue(matPrimal, rStartPrimals * nComponents + i,
it->first, tmpValues[i], ADD_VALUES);
VecRestoreArray(tmpVecPrimal, &tmpValues);
VecDestroy(&tmpVec);
}
for (int i = mat_b_primal_cols.size(); i < maxLocalPrimal; i++)
MatMult(mat_primal_b, tmpVecB, tmpVecPrimal);
VecDestroy(&tmpVecB);
VecDestroy(&tmpVecPrimal);
MatAssemblyBegin(matPrimal, MAT_FINAL_ASSEMBLY);
MatAssemblyEnd(matPrimal, MAT_FINAL_ASSEMBLY);
MatAXPY(mat_primal_primal, -1.0, matPrimal, DIFFERENT_NONZERO_PATTERN);
MatDestroy(&matPrimal);
MatInfo minfo;
MatGetInfo(mat_primal_primal, MAT_GLOBAL_SUM, &minfo);
MSG("Schur primal matrix nnz = %f\n", minfo.nz_used);
KSPCreate(PETSC_COMM_WORLD, &ksp_schur_primal);
KSPSetOperators(ksp_schur_primal, mat_primal_primal,
mat_primal_primal, SAME_NONZERO_PATTERN);
KSPSetOptionsPrefix(ksp_schur_primal, "solver_sp_");
KSPSetFromOptions(ksp_schur_primal);
exit(0);
MSG("Creating Schur primal matrix needed %.5f seconds.\n",
MPI::Wtime() - wtime);
}
}
@@ -657,16 +748,20 @@ namespace AMDiS {
{
FUNCNAME("PetscSolverFeti::destroySchurPrimal()");
schurPrimalData.mat_primal_primal = PETSC_NULL;
schurPrimalData.mat_primal_b = PETSC_NULL;
schurPrimalData.mat_b_primal = PETSC_NULL;
schurPrimalData.fetiSolver = NULL;
if (schurPrimalSolver == 0) {
schurPrimalData.mat_primal_primal = PETSC_NULL;
schurPrimalData.mat_primal_b = PETSC_NULL;
schurPrimalData.mat_b_primal = PETSC_NULL;
schurPrimalData.fetiSolver = NULL;
VecDestroy(&schurPrimalData.tmp_vec_b);
VecDestroy(&schurPrimalData.tmp_vec_primal);
VecDestroy(&schurPrimalData.tmp_vec_b);
VecDestroy(&schurPrimalData.tmp_vec_primal);
MatDestroy(&mat_schur_primal);
KSPDestroy(&ksp_schur_primal);
MatDestroy(&mat_schur_primal);
KSPDestroy(&ksp_schur_primal);
} else {
KSPDestroy(&ksp_schur_primal);
}
}
@@ -676,7 +771,6 @@ namespace AMDiS {
// === Create FETI-DP solver object. ===
fetiData.mat_primal_primal = &mat_primal_primal;
fetiData.mat_primal_b = &mat_primal_b;
fetiData.mat_b_primal = &mat_b_primal;
fetiData.mat_lagrange = &mat_lagrange;
@@ -768,7 +862,6 @@ namespace AMDiS {
// === Destroy FETI-DP solver object. ===
fetiData.mat_primal_primal = PETSC_NULL;
fetiData.mat_primal_b = PETSC_NULL;
fetiData.mat_b_primal = PETSC_NULL;
fetiData.mat_lagrange = PETSC_NULL;
@@ -922,10 +1015,6 @@ namespace AMDiS {
int nRowsInterior = nLocalInterior * nComponents;
int nRowsDual = nLocalDuals * nComponents;
// MatCreateMPIAIJ(PETSC_COMM_WORLD,
// nRowsRankB, nRowsRankB, nRowsOverallB, nRowsOverallB,
// 30, PETSC_NULL, 0, PETSC_NULL, &mat_b_b);
MatCreateSeqAIJ(PETSC_COMM_SELF, nRowsRankB, nRowsRankB, 30, PETSC_NULL,
&mat_b_b);
@@ -942,7 +1031,7 @@ namespace AMDiS {
MatCreateMPIAIJ(PETSC_COMM_WORLD,
nRowsRankPrimal, nRowsRankB,
nRowsOverallPrimal, nRowsOverallB,
30, PETSC_NULL, 30, PETSC_NULL, &mat_primal_b);
10, PETSC_NULL, 10, PETSC_NULL, &mat_primal_b);
// === Create matrices for FETI-DP preconditioner. ===
@@ -1096,7 +1185,7 @@ namespace AMDiS {
}
}
} // for each nnz in row
if (rowPrimal) {
TEST_EXIT_DBG(globalPrimalIndex.count(*cursor))
@@ -1113,16 +1202,18 @@ namespace AMDiS {
TEST_EXIT_DBG(localIndexB.count(*cursor))
("Should not happen!\n");
// int rowIndex = (localIndexB[*cursor] + rStartB) * nComponents + i;
int rowIndex = localIndexB[*cursor] * nComponents + i;
int localRowIndex = localIndexB[*cursor] * nComponents + i;
for (unsigned int k = 0; k < cols.size(); k++)
cols[k] -= rStartB * nComponents;
MatSetValues(mat_b_b, 1, &rowIndex, cols.size(),
MatSetValues(mat_b_b, 1, &localRowIndex, cols.size(),
&(cols[0]), &(values[0]), ADD_VALUES);
if (colsOther.size())
MatSetValues(mat_b_primal, 1, &rowIndex, colsOther.size(),
if (colsOther.size()) {
int globalRowIndex =
(localIndexB[*cursor] + rStartB) * nComponents + i;
MatSetValues(mat_b_primal, 1, &globalRowIndex, colsOther.size(),
&(colsOther[0]), &(valuesOther[0]), ADD_VALUES);
}
}
// === For preconditioner ===
@@ -1214,6 +1305,14 @@ namespace AMDiS {
createMatLagrange();
// === Create solver for the non primal (thus local) variables. ===
KSPCreate(PETSC_COMM_SELF, &ksp_b);
KSPSetOperators(ksp_b, mat_b_b, mat_b_b, SAME_NONZERO_PATTERN);
KSPSetOptionsPrefix(ksp_b, "solver_b_");
KSPSetFromOptions(ksp_b);
// === Create PETSc solver for the Schur complement on primal variables. ===
@@ -1223,13 +1322,6 @@ namespace AMDiS {
// === Create PETSc solver for the FETI-DP operator. ===
createFetiKsp();
// === Create solver for the non primal (thus local) variables. ===
KSPCreate(PETSC_COMM_SELF, &ksp_b);
KSPSetOperators(ksp_b, mat_b_b, mat_b_b, SAME_NONZERO_PATTERN);
KSPSetOptionsPrefix(ksp_b, "solver_b_");
KSPSetFromOptions(ksp_b);
}
@@ -1329,10 +1421,8 @@ namespace AMDiS {
// === Create new rhs ===
// d = L inv(K_BB) f_B - L inv(K_BB) K_BPi inv(S_PiPi) [f_Pi - K_PiB inv(K_BB) f_B]
// vec_rhs = L * inv(K_BB) * f_B
solveLocalProblem(f_b, tmp_b0);
MatMult(mat_lagrange, tmp_b0, vec_rhs);
// tmp_primal0 = M_PiB * inv(K_BB) * f_B
@@ -1358,11 +1448,10 @@ namespace AMDiS {
// === Solve with FETI-DP operator. ===
first = MPI::Wtime();
KSPSolve(ksp_feti, vec_rhs, vec_rhs);
fetiStatistics.timeFetiApply = MPI::Wtime() -first;
// === Solve for u_primals. ===
VecCopy(f_primal, tmp_primal0);
@@ -1446,9 +1535,9 @@ namespace AMDiS {
int debug = 0;
Parameters::get("parallel->debug feti", debug);
resetStatistics();
// resetStatistics();
solveReducedFetiMatrix(vec);
printStatistics();
// printStatistics();
MeshDistributor::globalMeshDistributor->synchVector(vec);
}
@@ -1469,7 +1558,7 @@ namespace AMDiS {
void PetscSolverFeti::printStatistics()
{
MSG("PetscSolverFeti::printStatistics()");
FUNCNAME("PetscSolverFeti::printStatistics()");
if (MPI::COMM_WORLD.Get_rank() == 0)
MSG("FETI-STATISTICS: %d %f %d %f %d %f %d %f\n",
......
@@ -62,9 +62,6 @@ namespace AMDiS {
* \ref petscMultMatFeti
*/
struct FetiData {
/// Pointers to the matrix containing the primal variables.
Mat *mat_primal_primal;
/// Coupling matrices between the primal and the B variables.
Mat *mat_primal_b, *mat_b_primal;
......
@@ -389,20 +389,20 @@ namespace AMDiS {
vector<int> sendBuffers, recvBuffers;
int requestCounter = 0;
int i = 0;
for (typename map<int, int>::iterator sendIt = sendDataSize.begin();
sendIt != sendDataSize.end(); ++sendIt) {
sendBuffers.push_back(sendIt->second);
request[requestCounter++] =
mpiComm.Isend(&(sendBuffers[i]), 1, MPI_INT, sendIt->first, 0);
i++;
request[requestCounter] =
mpiComm.Isend(&(sendBuffers[requestCounter]), 1,
MPI_INT, sendIt->first, 0);
requestCounter++;
}
for (map<int, int>::iterator recvIt = recvDataSize.begin();
recvIt != recvDataSize.end(); ++recvIt)
request[requestCounter++] =
mpiComm.Irecv(&(recvIt->second), 1, MPI_INT, recvIt->first, 0);
MPI::Request::Waitall(requestCounter, request);
}
......
@@ -166,7 +166,7 @@ namespace AMDiS {
i, adaptInfo->getTimeEstSum(i));
if (errorEst > timeTol)
WARNING("Accepted timestep but tolerance not reached \n" );
MSG("Accepted timestep but tolerance not reached \n");
} else {
for (int i = 0; i < adaptInfo->getSize(); i++)
......