Commit e6efd667 authored by Thomas Witkowski

UI, IT WORKS.....

parent 1f9daa76
@@ -43,6 +43,7 @@ namespace AMDiS {
inserter(NULL)
{}
DOFMatrix::DOFMatrix(const FiniteElemSpace* rowSpace,
const FiniteElemSpace* colSpace,
std::string n)
@@ -74,6 +75,7 @@ namespace AMDiS {
applyDBCs.clear();
}
DOFMatrix::DOFMatrix(const DOFMatrix& rhs)
: name(rhs.name + "copy")
{
@@ -87,6 +89,7 @@ namespace AMDiS {
inserter = 0;
}
DOFMatrix::~DOFMatrix()
{
FUNCNAME("DOFMatrix::~DOFMatrix()");
@@ -99,6 +102,7 @@ namespace AMDiS {
delete inserter;
}
void DOFMatrix::print() const
{
FUNCNAME("DOFMatrix::print()");
@@ -107,6 +111,7 @@ namespace AMDiS {
inserter->print();
}
bool DOFMatrix::symmetric()
{
FUNCNAME("DOFMatrix::symmetric()");
@@ -132,6 +137,7 @@ namespace AMDiS {
return true;
}
void DOFMatrix::test()
{
FUNCNAME("DOFMatrix::test()");
@@ -144,6 +150,7 @@ namespace AMDiS {
MSG("Matrix `%s' is symmetric.\n", name.data());
}
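// --- Illustrative sketch (not part of this commit) ---------------------------
// symmetric() above checks the assembled matrix for symmetry and test() only
// reports the result; the body of the check is elided by the diff. The sketch
// below shows the generic idea -- compare A(i,j) with A(j,i) up to a small
// tolerance -- on a plain dense square matrix, not on the MTL matrix that
// DOFMatrix actually wraps.
#include <cmath>
#include <vector>

typedef std::vector<std::vector<double> > DenseMatrix;

bool isSymmetric(const DenseMatrix& a, double tol = 1e-5)
{
  for (unsigned int i = 0; i < a.size(); i++)
    for (unsigned int j = i + 1; j < a.size(); j++)
      if (std::fabs(a[i][j] - a[j][i]) > tol)
        return false;   // found an entry pair that violates symmetry
  return true;
}
// -----------------------------------------------------------------------------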
DOFMatrix& DOFMatrix::operator=(const DOFMatrix& rhs)
{
rowFeSpace = rhs.rowFeSpace;
@@ -168,6 +175,7 @@ namespace AMDiS {
return *this;
}
void DOFMatrix::addElementMatrix(const ElementMatrix& elMat,
const BoundaryType *bound,
ElInfo* rowElInfo,
@@ -232,6 +240,8 @@ namespace AMDiS {
for (int j = 0; j < nCol; j++) {
DegreeOfFreedom col = colIndices[j];
ins[row][col] += elMat[i][j];
// MSG("El %d DOFs %d %d value %f\n", rowElInfo->getElement()->getIndex(), row, col, elMat[i][j]);
}
}
}
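// --- Illustrative sketch (not part of this commit) ---------------------------
// addElementMatrix() above accumulates every entry of the local element matrix
// into the global matrix at (rowIndices[i], colIndices[j]) through the MTL
// inserter: ins[row][col] += elMat[i][j]. A minimal stand-alone version of that
// accumulation pattern, using a std::map based sparse matrix and simplified
// stand-in types instead of the AMDiS/MTL ones:
#include <map>
#include <utility>
#include <vector>

typedef int Dof;
typedef std::vector<std::vector<double> > ElMat;
typedef std::map<std::pair<Dof, Dof>, double> SparseMat;

void accumulateElementMatrix(SparseMat& mat, const ElMat& elMat,
                             const std::vector<Dof>& rowIndices,
                             const std::vector<Dof>& colIndices)
{
  for (unsigned int i = 0; i < rowIndices.size(); i++)
    for (unsigned int j = 0; j < colIndices.size(); j++)
      mat[std::make_pair(rowIndices[i], colIndices[j])] += elMat[i][j];
}
// -----------------------------------------------------------------------------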
......
@@ -611,8 +611,7 @@ namespace AMDiS {
// === Update periodic mapping, if there are periodic boundaries. ===
createPeriodicMap();
INFO(info, 8)("Parallel mesh adaption needed %.5f seconds\n",
MPI::Wtime() - first);
@@ -1429,10 +1428,10 @@ namespace AMDiS {
mesh->getDofIndexCoords(it->first.first, feSpace, c0);
mesh->getDofIndexCoords(it->first.second, feSpace, c1);
// MSG("CREATE BOUNDARY FOR DOF MAP: %d (%.3f %.3f %.3f)<-> %d (%.3f %.3f %.3f)\n", it->first.first,
// c0[0], c0[1], c0[2], it->first.second, c1[0], c1[1], c1[2]);
// MSG("CREATE BOUNDARY FOR DOF MAP: %d (%.3f %.3f %.3f)<-> %d (%.3f %.3f %.3f)\n", it->first.first,
// c0[0], c0[1], c0[2], it->first.second, c1[0], c1[1], c1[2]);
ElementObjectData& perDofEl0 = elObjects.getElementsInRank(it->first.first)[mpiRank];
// MSG("DATA: %d %d %d\n", perDofEl0.elIndex, VERTEX, perDofEl0.ithObject);
// MSG("DATA: %d %d %d\n", perDofEl0.elIndex, VERTEX, perDofEl0.ithObject);
for (map<int, ElementObjectData>::iterator elIt = elObjects.getElementsInRank(it->first.second).begin();
elIt != elObjects.getElementsInRank(it->first.second).end(); ++elIt) {
@@ -1629,9 +1628,9 @@ namespace AMDiS {
periodicBoundary.boundary[rankIt->first][j].neighObj.ithObj);
}
}
*/
for (RankToBoundMap::iterator rankIt = periodicBoundary.boundary.begin();
rankIt != periodicBoundary.boundary.end(); ++rankIt) {
@@ -1834,28 +1833,10 @@ namespace AMDiS {
ParallelDebug::testGlobalIndexByCoords(*this);
ParallelDebug::writeDebugFile(*this, debugOutputDir + "mpi-dbg", "dat");
#if 1
MSG("------------- Debug information -------------\n");
MSG("nRankDofs = %d\n", nRankDofs);
MSG("nOverallDofs = %d\n", nOverallDofs);
MSG("rstart %d\n", rstart);
/*
for (RankToDofContainer::iterator it = sendDofs.begin();
it != sendDofs.end(); ++it) {
MSG("send %d DOFs to rank %d\n", it->second.size(), it->first);
for (unsigned int i = 0; i < it->second.size(); i++)
MSG("%d send DOF: %d\n", i, *(it->second[i]));
}
for (RankToDofContainer::iterator it = recvDofs.begin();
it != recvDofs.end(); ++it) {
MSG("recv %d DOFs from rank %d\n", it->second.size(), it->first);
for (unsigned int i = 0; i < it->second.size(); i++)
MSG("%d recv DOF: %d\n", i, *(it->second[i]));
}
*/
#endif
#endif
}
@@ -1882,7 +1863,7 @@ namespace AMDiS {
for (RankToBoundMap::iterator it = periodicBoundary.boundary.begin();
it != periodicBoundary.boundary.end(); ++it) {
// MSG("------------- WITH RANK %d ------------------\n", it->first);
// MSG("------------- WITH RANK %d ------------------\n", it->first);
if (it->first == mpiRank) {
TEST_EXIT_DBG(it->second.size() % 2 == 0)("Should not happen!\n");
@@ -1895,9 +1876,9 @@ namespace AMDiS {
bound.neighObj.el->getVertexDofs(feSpace, bound.neighObj, dofs1);
bound.neighObj.el->getNonVertexDofs(feSpace, bound.neighObj, dofs1);
// MSG("BOUND-I %d-%d-%d WITH %d-%d-%d\n",
// bound.rankObj.elIndex, bound.rankObj.subObj, bound.rankObj.ithObj,
// bound.neighObj.elIndex, bound.neighObj.subObj, bound.neighObj.ithObj);
// MSG("BOUND-I %d-%d-%d WITH %d-%d-%d\n",
// bound.rankObj.elIndex, bound.rankObj.subObj, bound.rankObj.ithObj,
// bound.neighObj.elIndex, bound.neighObj.subObj, bound.neighObj.ithObj);
TEST_EXIT_DBG(dofs0.size() == dofs1.size())("Should not happen!\n");
@@ -1911,7 +1892,7 @@ namespace AMDiS {
periodicDof[type][globalDof0] = globalDof1;
periodicDofAssociations[globalDof0].insert(type);
// MSG("SET(A TYPE %d) DOF %d -> %d\n", type, globalDof0, globalDof1);
// MSG("SET(A TYPE %d) DOF %d -> %d\n", type, globalDof0, globalDof1);
}
}
}
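// --- Illustrative sketch (not part of this commit) ---------------------------
// The assignments above (periodicDof[type][globalDof0] = globalDof1 together
// with periodicDofAssociations[globalDof0].insert(type)) register rank-internal
// periodic DOF pairs: the DOFs collected on the two sides of a periodic
// boundary are matched position by position. A stand-alone sketch of that
// registration step, with simplified typedefs in place of the AMDiS types:
#include <cassert>
#include <map>
#include <set>
#include <vector>

typedef int BoundaryType;
typedef std::map<int, int> DofMapping;                     // global DOF -> periodic partner
typedef std::map<BoundaryType, DofMapping> PeriodicDofMap;

void registerPeriodicPairs(BoundaryType type,
                           const std::vector<int>& dofs0,  // global DOFs, own side
                           const std::vector<int>& dofs1,  // global DOFs, periodic side
                           PeriodicDofMap& periodicDof,
                           std::map<int, std::set<BoundaryType> >& periodicDofAssociations)
{
  assert(dofs0.size() == dofs1.size());                    // the code above checks this, too
  for (unsigned int i = 0; i < dofs0.size(); i++) {
    periodicDof[type][dofs0[i]] = dofs1[i];
    periodicDofAssociations[dofs0[i]].insert(type);
  }
}
// -----------------------------------------------------------------------------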
@@ -1924,19 +1905,19 @@ namespace AMDiS {
int nDofs = dofs.size();
// MSG("BOUND-R (T = %d) %d-%d-%d WITH %d-%d-%d\n",
// boundIt->type,
// boundIt->rankObj.elIndex, boundIt->rankObj.subObj, boundIt->rankObj.ithObj,
// boundIt->neighObj.elIndex, boundIt->neighObj.subObj, boundIt->neighObj.ithObj);
// MSG("BOUND-R (T = %d) %d-%d-%d WITH %d-%d-%d\n",
// boundIt->type,
// boundIt->rankObj.elIndex, boundIt->rankObj.subObj, boundIt->rankObj.ithObj,
// boundIt->neighObj.elIndex, boundIt->neighObj.subObj, boundIt->neighObj.ithObj);
boundIt->rankObj.el->getVertexDofs(feSpace, boundIt->rankObj, dofs);
boundIt->rankObj.el->getNonVertexDofs(feSpace, boundIt->rankObj, dofs);
for (unsigned int i = 0; i < (dofs.size() - nDofs); i++) {
// WorldVector<double> c;
// mesh->getDofIndexCoords(*(dofs[nDofs + i]), feSpace, c);
// MSG(" SEND i = %d DOF = %d (%f %f %f)\n", nDofs + i, mapLocalGlobalDofs[*(dofs[nDofs + i])], c[0], c[1], c[2]);
rankToDofType[it->first].push_back(boundIt->type);
}
}
@@ -1964,7 +1945,7 @@ namespace AMDiS {
for (RankToBoundMap::iterator it = periodicBoundary.boundary.begin();
it != periodicBoundary.boundary.end(); ++it) {
// MSG("------------- WITH RANK %d ------------------\n", it->first);
// MSG("------------- WITH RANK %d ------------------\n", it->first);
DofContainer& dofs = rankPeriodicDofs[it->first];
vector<int>& types = rankToDofType[it->first];
@@ -1977,17 +1958,16 @@ namespace AMDiS {
int mapGlobalDofIndex = stdMpi.getRecvData(it->first)[i];
BoundaryType type = types[i];
// WorldVector<double> c;
// mesh->getDofIndexCoords(*(dofs[i]), feSpace, c);
// MSG(" RECV i = %d DOF = %d (%f %f %f)\n", i, *(dofs[i]), c[0], c[1], c[2]);
// Check that this global DOF with the corresponding boundary type has not
// already been added by a periodic boundary from another rank.
if (periodicDofAssociations[globalDofIndex].count(type) == 0) {
// MSG("SET(B-%d TYPE %d) DOF %d -> %d\n", i, type, globalDofIndex, mapGlobalDofIndex);
// MSG("SET(B-%d TYPE %d) DOF %d -> %d\n", i, type, globalDofIndex, mapGlobalDofIndex);
periodicDof[type][globalDofIndex] = mapGlobalDofIndex;
periodicDofAssociations[globalDofIndex].insert(type);
......
@@ -113,6 +113,11 @@ namespace AMDiS {
return name;
}
inline Mesh* getMesh()
{
return mesh;
}
/// Returns \ref feSpace.
inline const FiniteElemSpace* getFeSpace()
{
@@ -131,6 +136,11 @@ namespace AMDiS {
return nOverallDofs;
}
inline DofMapping& getMapLocalGlobalDofs()
{
return mapLocalGlobalDofs;
}
/// Maps a local dof to its global index.
inline DegreeOfFreedom mapLocalToGlobal(DegreeOfFreedom dof)
{
@@ -143,11 +153,20 @@ namespace AMDiS {
return mapLocalDofIndex[dof];
}
/// Returns the periodic mapping for all boundary DOFs in rank.
inline PeriodicDofMap& getPeriodicMapping()
{
return periodicDof;
}
/// Returns for a global dof index its periodic mapping for a given boundary type.
inline int getPeriodicMapping(BoundaryType type, int globalDofIndex)
{
FUNCNAME("MeshDistributor::getPeriodicMapping()");
TEST_EXIT_DBG(periodicDof[type].count(globalDofIndex) == 1)
("There is no periodic association for global DOF %d for boundary type %d!\n",
globalDofIndex, type);
return periodicDof[type][globalDofIndex];
}
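// --- Illustrative sketch (not part of this commit) ---------------------------
// The accessors added in this header (getMesh(), getMapLocalGlobalDofs(),
// getMpiSize() and the two getPeriodicMapping() variants) expose rank-local
// data to solver code such as PetscSolver. A hedged usage sketch; the helper
// name mapPeriodic is made up, and it relies on the surrounding declarations
// of MeshDistributor and BoundaryType:
inline int mapPeriodic(MeshDistributor* meshDistributor,
                       BoundaryType type, int globalDof)
{
  // getPeriodicMapping(type, dof) aborts if no association exists, so guard
  // the call, as the PetscSolver code does with isPeriodicDof().
  if (meshDistributor->isPeriodicDof(globalDof, type))
    return meshDistributor->getPeriodicMapping(type, globalDof);
  return globalDof;   // DOFs without a periodic association map to themselves
}
// -----------------------------------------------------------------------------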
@@ -194,6 +213,11 @@ namespace AMDiS {
return mpiRank;
}
inline int getMpiSize()
{
return mpiSize;
}
inline MPI::Intracomm& getMpiComm()
{
return mpiComm;
......
@@ -10,6 +10,9 @@
// See also license.opensource.txt in the distribution.
#include <vector>
#include <set>
#include "parallel/PetscSolver.h"
#include "parallel/StdMpi.h"
#include "parallel/ParallelDebug.h"
@@ -53,6 +56,52 @@ namespace AMDiS {
TEST_EXIT(mat)("No DOFMatrix!\n");
typedef map<DegreeOfFreedom, DegreeOfFreedom> DofMapping;
typedef map<BoundaryType, DofMapping> PeriodicDofMap;
StdMpi<PeriodicDofMap> stdMpi(meshDistributor->getMpiComm());
if (meshDistributor->getMpiRank() == 0) {
for (int i = 1; i < meshDistributor->getMpiSize(); i++)
stdMpi.recv(i);
} else {
stdMpi.send(0, meshDistributor->getPeriodicMapping());
}
stdMpi.startCommunication<int>(MPI_INT);
StdMpi<PeriodicDofMap> stdMpi2(meshDistributor->getMpiComm());
PeriodicDofMap overallDofMap;
if (meshDistributor->getMpiRank() == 0) {
overallDofMap = meshDistributor->getPeriodicMapping();
for (int i = 1; i < meshDistributor->getMpiSize(); i++) {
PeriodicDofMap &rankDofMap = stdMpi.getRecvData(i);
for (PeriodicDofMap::iterator it = rankDofMap.begin(); it != rankDofMap.end(); ++it) {
for (DofMapping::iterator dofIt = it->second.begin(); dofIt != it->second.end(); ++dofIt) {
if (overallDofMap[it->first].count(dofIt->first) == 1) {
TEST_EXIT_DBG(overallDofMap[it->first][dofIt->first] == dofIt->second)
("Should not happen!\n");
} else {
overallDofMap[it->first][dofIt->first] = dofIt->second;
}
}
}
}
for (int i = 1; i < meshDistributor->getMpiSize(); i++)
stdMpi2.send(i, overallDofMap);
} else {
stdMpi2.recv(0);
}
stdMpi2.startCommunication<int>(MPI_INT);
if (meshDistributor->getMpiRank() > 0)
overallDofMap = stdMpi2.getRecvData(0);
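// --- Illustrative sketch (not part of this commit) ---------------------------
// The block above gathers every rank's periodic DOF map on rank 0, merges the
// maps into overallDofMap (checking that an association which is already known
// agrees with the incoming one), and sends the merged map back to all other
// ranks. The merge step in isolation, written as a stand-alone helper that
// reuses the DofMapping/PeriodicDofMap typedefs defined above (the function
// name is made up):
bool mergePeriodicDofMap(PeriodicDofMap& overall, const PeriodicDofMap& rankMap)
{
  for (PeriodicDofMap::const_iterator it = rankMap.begin();
       it != rankMap.end(); ++it) {
    for (DofMapping::const_iterator dofIt = it->second.begin();
         dofIt != it->second.end(); ++dofIt) {
      DofMapping& typeMap = overall[it->first];
      if (typeMap.count(dofIt->first) == 1) {
        if (typeMap[dofIt->first] != dofIt->second)
          return false;                         // ranks disagree -> error case above
      } else {
        typeMap[dofIt->first] = dofIt->second;  // new association
      }
    }
  }
  return true;
}
// -----------------------------------------------------------------------------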
using mtl::tag::row; using mtl::tag::nz; using mtl::begin; using mtl::end;
namespace traits= mtl::traits;
typedef DOFMatrix::base_matrix_type Matrix;
@@ -76,107 +125,123 @@ namespace AMDiS {
for (cursor_type cursor = begin<row>(mat->getBaseMatrix()),
cend = end<row>(mat->getBaseMatrix()); cursor != cend; ++cursor) {
cols.clear();
values.clear();
// Global index of the current row dof.
int globalRowDof = meshDistributor->mapLocalToGlobal(*cursor);
// Test if the current row dof is a periodic dof.
bool periodicRow = meshDistributor->isPeriodicDof(globalRowDof);
// Calculate petsc row index.
int rowIndex = globalRowDof * dispMult + dispAddRow;
if (!periodicRow) {
// === Traverse all non zero entries of the row and produce vector cols ===
// === with the column indices of all row entries and vector values ===
// === with the corresponding values. ===
for (icursor_type icursor = begin<nz>(cursor), icend = end<nz>(cursor);
icursor != icend; ++icursor) {
// Global index of the current column DOF.
int globalColDof = meshDistributor->mapLocalToGlobal(col(*icursor));
// Calculate the exact position of the column index in the petsc matrix.
int colIndex = globalColDof * dispMult + dispAddCol;
// Set only non zero values.
if (value(*icursor) != 0.0 || rowIndex == colIndex) {
// If the current row is not periodic but the current column DOF is periodic,
// the value has to be duplicated to the corresponding periodic columns.
if (!periodicRow && meshDistributor->isPeriodicDof(globalColDof)) {
// The value is assigned to n matrix entries; therefore, every entry
// carries only 1/n of the original value.
std::set<int>& perAsc = meshDistributor->getPerDofAssociations(globalColDof);
double scalFactor = 1.0 / (perAsc.size() + 1.0);
// Insert original entry.
cols.push_back(colIndex);
values.push_back(value(*icursor) * scalFactor);
// Insert the periodic entries.
for (std::set<int>::iterator perIt = perAsc.begin(); perIt != perAsc.end(); ++perIt) {
int mappedDof = meshDistributor->getPeriodicMapping(*perIt, globalColDof);
cols.push_back(mappedDof * dispMult + dispAddCol);
values.push_back(value(*icursor) * scalFactor);
}
} else {
// The column DOF is not periodic, so simply add the entry.
cols.push_back(colIndex);
values.push_back(value(*icursor));
globalCols.push_back(globalColDof);
}
}
}
// Global index of the current column DOF.
int globalColDof = meshDistributor->mapLocalToGlobal(col(*icursor));
// Test if the current col dof is a periodic dof.
bool periodicCol = meshDistributor->isPeriodicDof(globalColDof);
if (!periodicCol) {
// Calculate the exact position of the column index in the petsc matrix.
int colIndex = globalColDof * dispMult + dispAddCol;
MatSetValue(petscMatrix, rowIndex, colIndex, value(*icursor), ADD_VALUES);
if (rowIndex == 0 && colIndex == 0) {
MSG("ADDED-C ON ZERO: %f (with no scal)\n", value(*icursor));
}
// === Up to now we have assembled one row. Now, the row must be sent to the ===
// === corresponding rows in the petsc matrix. ===
if (periodicRow) {
// The row DOF is periodic, so send the DOF to all corresponding rows.
std::set<int>& perAsc = meshDistributor->getPerDofAssociations(globalRowDof);
} else {
std::set<int>& perColAsc = meshDistributor->getPerDofAssociations(globalColDof);
std::set<int> perAsc;
for (std::set<int>::iterator it = perColAsc.begin(); it != perColAsc.end(); ++it)
if (*it >= -3)
perAsc.insert(*it);
double scaledValue = value(*icursor) * std::pow(0.5, static_cast<double>(perAsc.size()));
std::vector<int> newCols;
newCols.push_back(globalColDof);
for (std::set<int>::iterator it = perAsc.begin(); it != perAsc.end(); ++it) {
int nCols = static_cast<int>(newCols.size());
for (int i = 0; i < nCols; i++) {
TEST_EXIT(overallDofMap[*it].count(newCols[i]) > 0)("Should not happen: %d %d\n", *it, newCols[i]);
double scalFactor = 1.0 / (perAsc.size() + 1.0);
for (unsigned int i = 0; i < values.size(); i++)
values[i] *= scalFactor;
newCols.push_back(overallDofMap[*it][newCols[i]]);
}
}
// Send the main row to the petsc matrix.
MatSetValues(petscMatrix, 1, &rowIndex, cols.size(),
&(cols[0]), &(values[0]), ADD_VALUES);
for (std::set<int>::iterator perIt = perAsc.begin();
perIt != perAsc.end(); ++perIt) {
std::vector<int> perCols;
perCols.reserve(300);
std::vector<double> perValues;
perValues.reserve(300);
for (unsigned int i = 0; i < cols.size(); i++) {
int tmp = (cols[i] - dispAddCol) / dispMult;
if (meshDistributor->isPeriodicDof(tmp, *perIt)) {
perCols.push_back((meshDistributor->getPeriodicMapping(*perIt, tmp) * dispMult) + dispAddCol);
} else {
perCols.push_back(cols[i]);
for (int i = 0; i < newCols.size(); i++) {
int colIndex = newCols[i] * dispMult + dispAddCol;
MatSetValue(petscMatrix, rowIndex, colIndex, scaledValue, ADD_VALUES);
if (rowIndex == 0 && colIndex == 0) {
MSG("ADDED-B ON ZERO: %f (with scal %f)\n", scaledValue, std::pow(0.5, static_cast<double>(perAsc.size())));
}
}
perValues.push_back(values[i]);
}
int perRowIndex = (meshDistributor->getPeriodicMapping(*perIt, globalRowDof) * dispMult) + dispAddRow;
MatSetValues(petscMatrix, 1, &perRowIndex, perCols.size(),
&(perCols[0]), &(perValues[0]), ADD_VALUES);
}
} else {
// The row dof is not periodic, simply send the row to the petsc matrix.
MatSetValues(petscMatrix, 1, &rowIndex, cols.size(),
&(cols[0]), &(values[0]), ADD_VALUES);
}
for (icursor_type icursor = begin<nz>(cursor), icend = end<nz>(cursor);
icursor != icend; ++icursor) {
// Global index of the current column DOF.
int globalColDof = meshDistributor->mapLocalToGlobal(col(*icursor));
std::set<int>& perColAsc = meshDistributor->getPerDofAssociations(globalColDof);
std::set<int> perAsc;
for (std::set<int>::iterator it = perColAsc.begin(); it != perColAsc.end(); ++it)
if (*it >= -3)
perAsc.insert(*it);
std::set<int>& perRowAsc = meshDistributor->getPerDofAssociations(globalRowDof);
for (std::set<int>::iterator it = perRowAsc.begin(); it != perRowAsc.end(); ++it)
if (*it >= -3)
perAsc.insert(*it);
double scaledValue = value(*icursor) * std::pow(0.5, static_cast<double>(perAsc.size()));
std::vector<std::pair<int, int> > entry;
entry.push_back(std::make_pair(globalRowDof, globalColDof));
for (std::set<int>::iterator it = perAsc.begin(); it != perAsc.end(); ++it) {
int nEntry = static_cast<int>(entry.size());
for (int i = 0; i < nEntry; i++) {
// TEST_EXIT(overallDofMap[*it].count(entry[i].first) > 0)
// ("Should not happen A: %d %d\n", *it, entry[i].first);
// TEST_EXIT(overallDofMap[*it].count(entry[i].second) > 0)
// ("Should not happen B: %d %d\n", *it, entry[i].second);
int rd;
if (overallDofMap[*it].count(entry[i].first) > 0)
rd = overallDofMap[*it][entry[i].first];
else
rd = entry[i].first;
int cd;
if (overallDofMap[*it].count(entry[i].second) > 0)
cd = overallDofMap[*it][entry[i].second];
else
cd = entry[i].second;
entry.push_back(std::make_pair(rd, cd));
}
}
int ccc = 0;
for (std::vector<std::pair<int, int> >::iterator eIt = entry.begin(); eIt != entry.end(); ++eIt) {
// Calculate petsc row index.
int rowIndex = eIt->first * dispMult + dispAddRow;
int colIndex = eIt->second * dispMult + dispAddCol;
MatSetValue(petscMatrix, rowIndex, colIndex, scaledValue, ADD_VALUES);
if (rowIndex == 0 && colIndex == 0) {
MSG("ADDED-A ON ZERO: %f (with scal %f not scaled %f) org DOF/COL: %d %d TEST %d\n",
scaledValue, std::pow(0.5, static_cast<double>(perAsc.size())), value(*icursor),
globalRowDof, globalColDof, ccc);
}
ccc++;
}
}
}
}
}
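// --- Illustrative sketch (not part of this commit) ---------------------------
// When the row and/or column DOF of an entry is periodic, the code above
// duplicates the entry: every periodic association doubles the list of
// (row, col) target pairs by mapping the indices through overallDofMap, and
// each copy is written with value * 0.5^n (n = number of associations), so
// that all copies together add up to the original contribution. A stand-alone
// sketch of that pair expansion, with simplified typedefs and a made-up
// function name:
#include <map>
#include <set>
#include <utility>
#include <vector>

typedef int BoundaryType;
typedef std::map<int, int> DofMapping;
typedef std::map<BoundaryType, DofMapping> PeriodicDofMap;

std::vector<std::pair<int, int> >
expandPeriodicEntry(int row, int col,
                    const std::set<BoundaryType>& perAsc,
                    PeriodicDofMap& overallDofMap)
{
  std::vector<std::pair<int, int> > entry;
  entry.push_back(std::make_pair(row, col));

  for (std::set<BoundaryType>::const_iterator it = perAsc.begin();
       it != perAsc.end(); ++it) {
    DofMapping& m = overallDofMap[*it];
    int nEntry = static_cast<int>(entry.size());
    for (int i = 0; i < nEntry; i++) {
      // Map row/column through this association if a mapping exists,
      // otherwise keep the original index.
      int rd = m.count(entry[i].first)  ? m[entry[i].first]  : entry[i].first;
      int cd = m.count(entry[i].second) ? m[entry[i].second] : entry[i].second;
      entry.push_back(std::make_pair(rd, cd));
    }
  }
  // The caller adds every pair with value * std::pow(0.5, (double) perAsc.size()).
  return entry;
}
// -----------------------------------------------------------------------------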
......
@@ -92,8 +92,6 @@ namespace AMDiS {
size += 2 + it->second.size() * 2;
}
MSG("RET SIZE = %d\n", size);
return size;
}
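// --- Illustrative sketch (not part of this commit) ---------------------------
// The size computed above (2 + it->second.size() * 2 per map entry) matches a
// flat int buffer that stores, for every boundary type of the PeriodicDofMap
// this commit sends via StdMpi, two header values -- assumed here to be the
// type itself and the number of DOF pairs -- followed by two ints per pair.
// This layout is an assumption for illustration only, not taken from the
// actual StdMpi implementation:
#include <map>
#include <vector>

typedef int BoundaryType;
typedef std::map<int, int> DofMapping;
typedef std::map<BoundaryType, DofMapping> PeriodicDofMap;

std::vector<int> serializePeriodicDofMap(const PeriodicDofMap& data)
{
  std::vector<int> buf;
  for (PeriodicDofMap::const_iterator it = data.begin(); it != data.end(); ++it) {
    buf.push_back(it->first);                            // boundary type
    buf.push_back(static_cast<int>(it->second.size()));  // number of DOF pairs
    for (DofMapping::const_iterator d = it->second.begin();
         d != it->second.end(); ++d) {
      buf.push_back(d->first);                           // DOF index
      buf.push_back(d->second);                          // its periodic partner
    }
  }
  return buf;   // buf.size() == sum over types of (2 + 2 * number of pairs)
}
// -----------------------------------------------------------------------------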
......