Commit 66a86905 authored by Thomas Witkowski's avatar Thomas Witkowski

Blub

parent 41a2b884
......@@ -438,9 +438,12 @@ namespace AMDiS {
file << "</VTKFile>\n";
#if HAVE_PARALLEL_DOMAIN_AMDIS
if (MPI::COMM_WORLD.Get_rank() == 0)
if (MPI::COMM_WORLD.Get_rank() == 0) {
vector<string> componentNames;
componentNames.push_back("elvalue");
VtkWriter::writeParallelFile(fname + ".pvtu", MPI::COMM_WORLD.Get_size(),
fname, ".vtu", 1);
fname, ".vtu", componentNames);
}
#endif
}
}
......
......@@ -140,11 +140,17 @@ namespace AMDiS {
vtkWriter.writeFile(fn + paraviewFileExt);
#if HAVE_PARALLEL_DOMAIN_AMDIS
if (MPI::COMM_WORLD.Get_rank() == 0)
if (MPI::COMM_WORLD.Get_rank() == 0) {
vector<string> componentNames;
for (unsigned int i = 0; i < dataCollectors.size(); i++)
componentNames.push_back(dataCollectors[i]->getValues()->getName());
vtkWriter.writeParallelFile(paraFilename + paraviewParallelFileExt,
MPI::COMM_WORLD.Get_size(),
filename, postfix,
dataCollectors.size());
filename,
postfix,
componentNames);
}
#endif
MSG("ParaView file written to %s\n", (fn + paraviewFileExt).c_str());
......
......@@ -68,7 +68,7 @@ namespace AMDiS {
void VtkWriter::writeParallelFile(string name, int nRanks,
string fnPrefix, string fnPostfix,
int nComponents)
vector<string> &componentNames)
{
FUNCNAME("VtkWriter::writeParallelFile()");
......@@ -94,9 +94,10 @@ namespace AMDiS {
<< " </PCells>\n";
file << " <PPointData>\n";
for (int i = 0; i < nComponents; i++)
file << " <PDataArray type=\"Float32\" Name=\"value"
<< i << "\" format=\"ascii\"/>\n";
for (unsigned int i = 0; i < componentNames.size(); i++)
file << " <PDataArray type=\"Float32\" Name=\""
<< componentNames[i]
<< "\" format=\"ascii\"/>\n";
file << " </PPointData>\n";
......@@ -235,11 +236,16 @@ namespace AMDiS {
TEST_EXIT(sPos >= 0)("Failed to find file postfix!\n");
string name = filename.substr(0, sPos);
if (MPI::COMM_WORLD.Get_rank() == 0)
if (MPI::COMM_WORLD.Get_rank() == 0) {
vector<string> componentNames;
for (unsigned int i = 0; i < dcList.size(); i++)
componentNames.push_back(dcList[i]->getValues()->getName());
writer.writeParallelFile(name + ".pvtu",
MPI::COMM_WORLD.Get_size(),
name, ".vtu",
static_cast<int>(dcList.size()));
componentNames);
}
filename = name + "-p" + lexical_cast<string>(MPI::COMM_WORLD.Get_rank()) + "-.vtu";
}
......
......@@ -55,7 +55,7 @@ namespace AMDiS {
/// Writes a pvtu file, which contains the links to all the rank files.
static void writeParallelFile(string name, int nRanks,
string fnPrefix, string fnPostfix,
int nComponents);
vector<string> &componentNames);
/// May be used to simply write ParaView files.
static void writeFile(DOFVector<double> *values,
......
......@@ -178,37 +178,41 @@ namespace AMDiS {
inline Mat& getMatIntInt()
{
return matIntInt;
return mat[0][0];
// return matIntInt;
}
inline Mat& getMatCoarseCoarse()
{
FUNCNAME("PetscSolver::getMatCoarseCoarse()");
TEST_EXIT_DBG(coarseSpaceMap.size())
TEST_EXIT_DBG(coarseSpaceMap.size() && mat.size() > 1)
("Subdomain solver does not contain a coarse space!\n");
return matCoarseCoarse;
return mat[1][1];
// return matCoarseCoarse;
}
inline Mat& getMatIntCoarse()
{
FUNCNAME("PetscSolver::getMatIntCoarse()");
TEST_EXIT_DBG(coarseSpaceMap.size())
TEST_EXIT_DBG(coarseSpaceMap.size() && mat.size() > 1)
("Subdomain solver does not contain a coarse space!\n");
return matIntCoarse;
return mat[0][1];
// return matIntCoarse;
}
inline Mat& getMatCoarseInt()
{
FUNCNAME("PetscSolver::getMatCoarseInt()");
TEST_EXIT_DBG(coarseSpaceMap.size())
TEST_EXIT_DBG(coarseSpaceMap.size() && mat.size() > 1)
("Subdomain solver does not contain a coarse space!\n");
return matCoarseInt;
return mat[1][0];
// return matCoarseInt;
}
protected:
......@@ -251,7 +255,7 @@ namespace AMDiS {
/// Parallel DOF mapping of the (optional) coarse space. Allows to define
/// different coarse spaces for different components.
std::map<int, ParallelDofMapping*> coarseSpaceMap;
map<int, ParallelDofMapping*> coarseSpaceMap;
int mpiRank;
......@@ -260,7 +264,9 @@ namespace AMDiS {
MPI::Intracomm mpiCommLocal;
/// Petsc's matrix structure.
Mat matIntInt, matCoarseCoarse, matIntCoarse, matCoarseInt;
// Mat matIntInt, matCoarseCoarse, matIntCoarse, matCoarseInt;
vector<vector<Mat> > mat;
/// PETSc's vector structures for the rhs vector, the solution vector and a
/// temporary vector for calculating the final residuum.
......
......@@ -338,14 +338,14 @@ namespace AMDiS {
for (unsigned int i = 0; i < meshDistributor->getFeSpaces().size(); i++) {
const FiniteElemSpace *feSpace = meshDistributor->getFeSpace(i);
createPrimals(feSpace);
createDuals(feSpace);
createInterfaceNodes(feSpace);
createIndexB(feSpace);
createIndexB(feSpace);
}
primalDofMap.update();
......@@ -1352,11 +1352,12 @@ namespace AMDiS {
// === Create all sets and indices. ===
vector<const FiniteElemSpace*> feSpaces = getFeSpaces(mat);
initialize(feSpaces);
createFetiData();
// === Create matrices for the FETI-DP method. ===
if (printTimings) {
......
......@@ -199,7 +199,10 @@ namespace AMDiS {
inline bool isInterface(const FiniteElemSpace *feSpace,
DegreeOfFreedom dof)
{
return interfaceDofMap[feSpace].isSet(dof);
if (feSpace == fullInterface)
return interfaceDofMap[feSpace].isSet(dof);
return false;
}
protected:
......
......@@ -16,17 +16,22 @@
namespace AMDiS {
void PetscSolverGlobalBlockMatrix::fillPetscMatrix(Matrix<DOFMatrix*> *mat)
void PetscSolverGlobalBlockMatrix::fillPetscMatrix(Matrix<DOFMatrix*> *seqMat)
{
FUNCNAME("PetscSolverGlobalBlockMatrix::fillPetscMatrix()");
TEST_EXIT_DBG(meshDistributor)("No mesh distributor object defined!\n");
TEST_EXIT_DBG(interiorMap)("No parallel mapping object defined!\n");
TEST_EXIT_DBG(mat)("No DOF matrix defined!\n");
TEST_EXIT_DBG(seqMat)("No DOF matrix defined!\n");
mat.resize(1);
mat[0].resize(1);
Mat &matIntInt = mat[0][0];
double wtime = MPI::Wtime();
const FiniteElemSpace *feSpace = meshDistributor->getFeSpace(0);
nComponents = mat->getNumRows();
nComponents = seqMat->getNumRows();
int nRankRows = (*interiorMap)[feSpace].nRankDofs;
int nOverallRows = (*interiorMap)[feSpace].nOverallDofs;
......@@ -63,9 +68,9 @@ namespace AMDiS {
for (int i = 0; i < nComponents; i++)
for (int j = 0; j < nComponents; j++)
if ((*mat)[i][j]) {
if ((*seqMat)[i][j]) {
int idx = componentInBlock[i] * nBlocks + componentInBlock[j];
setDofMatrix(nestMat[idx], (*mat)[i][j],
setDofMatrix(nestMat[idx], (*seqMat)[i][j],
compNthInBlock[i], compNthInBlock[j]);
}
......@@ -178,7 +183,7 @@ namespace AMDiS {
if (nestMat[i] != PETSC_NULL)
MatDestroy(&(nestMat[i]));
MatDestroy(&matIntInt);
MatDestroy(&mat[0][0]);
KSPDestroy(&kspInterior);
}
......@@ -196,14 +201,14 @@ namespace AMDiS {
void PetscSolverGlobalBlockMatrix::setDofMatrix(Mat& petscMat,
DOFMatrix* mat,
DOFMatrix* seqMat,
int dispRowBlock,
int dispColBlock)
{
FUNCNAME("PetscSolverGlobalBlockMatrix::setDofMatrix()");
TEST_EXIT(mat)("No DOFMatrix!\n");
TEST_EXIT(petscMat)("No PETSc matrix!\n");
TEST_EXIT(seqMat)("No DOFMatrix!\n");
const FiniteElemSpace *feSpace = meshDistributor->getFeSpace(0);
......@@ -211,8 +216,8 @@ namespace AMDiS {
namespace traits = mtl::traits;
typedef DOFMatrix::base_matrix_type Matrix;
traits::col<Matrix>::type col(mat->getBaseMatrix());
traits::const_value<Matrix>::type value(mat->getBaseMatrix());
traits::col<Matrix>::type col(seqMat->getBaseMatrix());
traits::const_value<Matrix>::type value(seqMat->getBaseMatrix());
typedef traits::range_generator<row, Matrix>::type cursor_type;
typedef traits::range_generator<nz, cursor_type>::type icursor_type;
......@@ -228,8 +233,8 @@ namespace AMDiS {
// === Traverse all rows of the dof matrix and insert row wise the values ===
// === to the PETSc matrix. ===
for (cursor_type cursor = begin<row>(mat->getBaseMatrix()),
cend = end<row>(mat->getBaseMatrix()); cursor != cend; ++cursor) {
for (cursor_type cursor = begin<row>(seqMat->getBaseMatrix()),
cend = end<row>(seqMat->getBaseMatrix()); cursor != cend; ++cursor) {
// Global index of the current row DOF.
int rowIndex = (*interiorMap)[feSpace][*cursor].global + dispRowIndex;
......
......@@ -176,12 +176,16 @@ namespace AMDiS {
}
void PetscSolverSchur::fillPetscMatrix(Matrix<DOFMatrix*> *mat)
void PetscSolverSchur::fillPetscMatrix(Matrix<DOFMatrix*> *seqMat)
{
FUNCNAME("PetscSolverSchur::fillPetscMatrix()");
mat.resize(1);
mat[0].resize(1);
Mat &matIntInt = mat[0][0];
const FiniteElemSpace *feSpace = meshDistributor->getFeSpace(0);
int nComponents = mat->getNumRows();
int nComponents = seqMat->getNumRows();
updateDofData(nComponents);
int nInteriorRows = nInteriorDofs * nComponents;
......@@ -213,8 +217,8 @@ namespace AMDiS {
for (int i = 0; i < nComponents; i++)
for (int j = 0; j < nComponents; j++)
if ((*mat)[i][j])
setDofMatrix((*mat)[i][j], nComponents, i, j);
if ((*seqMat)[i][j])
setDofMatrix((*seqMat)[i][j], nComponents, i, j);
MatAssemblyBegin(matA11, MAT_FINAL_ASSEMBLY);
MatAssemblyEnd(matA11, MAT_FINAL_ASSEMBLY);
......@@ -280,7 +284,7 @@ namespace AMDiS {
KSPCreate(mpiCommGlobal, &kspInterior);
KSPSetOperators(kspInterior, matIntInt, matIntInt, SAME_NONZERO_PATTERN);
KSPSetOperators(kspInterior, mat[0][0], mat[0][0], SAME_NONZERO_PATTERN);
KSPSetTolerances(kspInterior, 0.0, 1e-8, PETSC_DEFAULT, PETSC_DEFAULT);
KSPSetFromOptions(kspInterior);
......@@ -331,26 +335,26 @@ namespace AMDiS {
MatDestroy(&matA12);
MatDestroy(&matA21);
MatDestroy(&matA22);
MatDestroy(&matIntInt);
MatDestroy(&mat[0][0]);
KSPDestroy(&kspInterior);
}
void PetscSolverSchur::setDofMatrix(DOFMatrix* mat, int dispMult,
void PetscSolverSchur::setDofMatrix(DOFMatrix* seqMat, int dispMult,
int dispAddRow, int dispAddCol)
{
FUNCNAME("PetscSolverSchur::setDofMatrix()");
TEST_EXIT(mat)("No DOFMatrix!\n");
TEST_EXIT(seqMat)("No DOFMatrix!\n");
const FiniteElemSpace* feSpace = meshDistributor->getFeSpace(0);
using mtl::tag::row; using mtl::tag::nz; using mtl::begin; using mtl::end;
namespace traits= mtl::traits;
typedef DOFMatrix::base_matrix_type Matrix;
traits::col<Matrix>::type col(mat->getBaseMatrix());
traits::const_value<Matrix>::type value(mat->getBaseMatrix());
traits::col<Matrix>::type col(seqMat->getBaseMatrix());
traits::const_value<Matrix>::type value(seqMat->getBaseMatrix());
typedef traits::range_generator<row, Matrix>::type cursor_type;
typedef traits::range_generator<nz, cursor_type>::type icursor_type;
......@@ -362,8 +366,8 @@ namespace AMDiS {
valuesBoundary.reserve(300);
valuesInterior.reserve(300);
for (cursor_type cursor = begin<row>(mat->getBaseMatrix()),
cend = end<row>(mat->getBaseMatrix()); cursor != cend; ++cursor) {
for (cursor_type cursor = begin<row>(seqMat->getBaseMatrix()),
cend = end<row>(seqMat->getBaseMatrix()); cursor != cend; ++cursor) {
// Global index of the current row DOF.
int globalRowDof = (*interiorMap)[feSpace][*cursor].global;
......
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment