diff --git a/AMDiS/CMakeLists.txt b/AMDiS/CMakeLists.txt index 15cb4acfafc663b394420715d68140412daaed9e..1d00e569d41fc96e5bb053eabcb0399de58face9 100644 --- a/AMDiS/CMakeLists.txt +++ b/AMDiS/CMakeLists.txt @@ -1,3 +1,4 @@ +set(CMAKE_LEGACY_CYGWIN_WIN32 0) cmake_minimum_required(VERSION 2.8 FATAL_ERROR) project(AMDiS) @@ -5,8 +6,8 @@ if (USE_NEW_CMAKE AND ${CMAKE_VERSION} VERSION_GREATER "3.0") # Experimental implementation of new CMakeLists.txt # Can be selected by calling cmake -DUSE_NEW_CMAKE=ON . # Use only if you know what you are doing :-) - include(${AMDiS_SOURCE_DIR}/cmake3/CMakeLists_3.0.txt) + include(${AMDiS_SOURCE_DIR}/cmake3/CMakeLists.txt) else () # By defualt the classical CMakeLists file ist used - include(${AMDiS_SOURCE_DIR}/cmake/CMakeLists_2.8.txt) + include(${AMDiS_SOURCE_DIR}/cmake/CMakeLists.txt) endif () diff --git a/AMDiS/cmake/CMakeLists_2.8.txt b/AMDiS/cmake/CMakeLists.txt similarity index 100% rename from AMDiS/cmake/CMakeLists_2.8.txt rename to AMDiS/cmake/CMakeLists.txt diff --git a/AMDiS/cmake3/AMDISConfig.cmake.in b/AMDiS/cmake3/AMDISConfig.cmake.in index 58ce23b3470e64e771c7e95de38b569dcb23c12e..77150dda7b9be65f16bac7784ab6a1532a5ac2aa 100644 --- a/AMDiS/cmake3/AMDISConfig.cmake.in +++ b/AMDiS/cmake3/AMDISConfig.cmake.in @@ -20,19 +20,22 @@ endif () list(APPEND CMAKE_MODULE_PATH ${AMDIS_DIR}) +set(IS_AMDISCONFIG true) + set(AMDIS_NEED_ZOLTAN @ENABLE_ZOLTAN@) set(AMDIS_NEED_CXX11 @ENABLE_CXX11@) set(AMDIS_NEED_UMFPACK @ENABLE_UMFPACK@) set(AMDIS_HAS_PARALLEL_DOMAIN @ENABLE_PARALLEL_DOMAIN@) set(AMDIS_NEED_COMPRESSION @ENABLE_COMPRESSION@) +set(BUILD_SHARED_LIBS @BUILD_SHARED_LIBS@) add_library(amdis_base INTERFACE) add_library(AMDiS ALIAS amdis_base) target_compile_definitions(amdis_base INTERFACE - $<$<CONFIG:Debug>:DEBUG=1> - $<$<CONFIG:Release>:DEBUG=0>) + $<$<CONFIG:Release>:DEBUG=0> + $<$<NOT:$<CONFIG:Release>>:DEBUG=1>) if (AMDIS_NEED_CXX11) target_compile_features(amdis_base INTERFACE @@ -55,7 +58,7 @@ find_file(_AMDIS_H AMDiS.h PATHS ${AMDIS_DIR}/../../include/amdis/) if (_AMDIS_H) get_filename_component(AMDIS_INCLUDE_DIR ${_AMDIS_H} PATH CACHE) target_include_directories(amdis_base INTERFACE ${AMDIS_INCLUDE_DIR}) - target_include_directories(amdis_base INTERFACE ${AMDIS_INCLUDE_DIR}/mtl4) + include(find_mtl4) else () message(ERROR "Could not detect the AMDiS include directory. 
Please set the variable AMDIS_INCLUDE_DIR to the directory containing the AMDiS headers.") endif () @@ -75,80 +78,51 @@ unset(_AMDIS_LIB CACHE) # Boost libraries # --------------- -set(BOOST_LIBS_REQUIRED system iostreams filesystem program_options date_time) -if (WIN32) - list(APPEND BOOST_LIBS_REQUIRED zlib) - if (AMDIS_NEED_COMPRESSION) - list(APPEND BOOST_LIBS_REQUIRED bzip2) - endif () -endif () - -set(BOOST_VERSION "1.48") -find_package(Boost ${BOOST_VERSION} REQUIRED ${BOOST_LIBS_REQUIRED}) - -if (Boost_FOUND) - target_include_directories(amdis_base INTERFACE ${Boost_INCLUDE_DIR}) - target_link_libraries(amdis_base INTERFACE ${Boost_LIBRARIES}) -else () - message(ERROR "Boost libraries not found") -endif(Boost_FOUND) - +if (NOT BOOST_ROOT) + set(BOOST_ROOT @BOOST_ROOT@) +endif (NOT BOOST_ROOT) +if (NOT BOOST_LIBRARYDIR) + set(BOOST_LIBRARYDIR @BOOST_LIBRARYDIR@) +endif (NOT BOOST_LIBRARYDIR) +include(find_boost) # umfpack library # -------------------- if (AMDIS_NEED_UMFPACK) + set(AMDIS_SuiteSparse_DIR @SuiteSparse_DIR@) set(AMDIS_UMFPACK_INCLUDE_DIR @UMFPACK_INCLUDE_DIRS@) - set(AMDIS_UMFPACK_LIB_DIR ${AMDIS_UMFPACK_INCLUDE_DIR}/../lib) - find_file(UMFPACK_H umfpack.h HINTS ${AMDIS_UMFPACK_INCLUDE_DIR}) - find_library(UMFPACK_LIBRARY umfpack HINTS ${AMDIS_UMFPACK_LIB_DIR}) - if (UMFPACK_H AND UMFPACK_LIBRARY) - get_filename_component(UMFPACK_INCLUDE_DIRS ${UMFPACK_H} PATH) - target_include_directories(amdis_base INTERFACE ${UMFPACK_INCLUDE_DIRS}) - - find_library(AMD_LIBRARY amd HINTS ${AMDIS_UMFPACK_LIB_DIR}) - find_library(BLAS_LIBRARY blas HINTS ${AMDIS_UMFPACK_LIB_DIR}) - if (AMD_LIBRARY AND BLAS_LIBRARY) - target_link_libraries(amdis_base INTERFACE - ${UMFPACK_LIBRARY} - ${AMD_LIBRARY} - ${BLAS_LIBRARY}) - # TODO: add test whether umfpack is configured successfully - endif() - else() - message(FATAL_ERROR "Could not find the UMFPACK library") - endif() + set(AMDIS_UMFPACK_LIB_DIR ${AMDIS_UMFPACK_INCLUDE_DIR}/../include) + include(find_umfpack) endif (AMDIS_NEED_UMFPACK) # Parallel amdis # -------------- if (AMDIS_HAS_PARALLEL_DOMAIN) + target_compile_definitions(amdis_base INTERFACE + HAVE_PARALLEL_DOMAIN_AMDIS=1) + find_package(MPI REQUIRED) if (MPI_FOUND) - target_include_directories(amdis_base INTERFACE ${MPI_INCLUDE_PATH}) - target_compile_options(amdis_base INTERFACE ${MPI_COMPILE_FLAGS}) - target_link_libraries(amdis_base INTERFACE ${MPI_LIBRARIES}) + target_include_directories(amdis_base INTERFACE ${MPI_INCLUDE_PATH}) + target_compile_options(amdis_base INTERFACE ${MPI_COMPILE_FLAGS}) + target_link_libraries(amdis_base INTERFACE ${MPI_LIBRARIES}) endif (MPI_FOUND) set(PETSC_EXECUTABLE_RUNS ON) set(AMDIS_PETSC_DIR @PETSC_DIR@) include(find_petsc) if (PETSc_FOUND) - target_include_directories(amdis_base INTERFACE ${PETSC_INCLUDES}) - target_link_libraries(amdis_base INTERFACE - ${PETSC_LIBRARY_SYS} - ${PETSC_LIBRARIES}) + target_include_directories(amdis_base INTERFACE ${PETSC_INCLUDES}) + target_link_libraries(amdis_base INTERFACE + ${PETSC_LIBRARY_SYS} + ${PETSC_LIBRARIES} + blas lapack) + target_compile_definitions(amdis_base INTERFACE + HAVE_PARALLEL_PETSC=1) else() - message(FATAL_ERROR "Could not find PETSc!") + message(FATAL_ERROR "Could not find PETSc!") endif (PETSc_FOUND) - - find_library(_AMDIS_PARALLEL_LIB amdis_parallel PATHS ${AMDIS_DIR}/../../lib/amdis/) - if (_AMDIS_PARALLEL_LIB) - target_link_libraries(amdis_base INTERFACE ${_AMDIS_PARALLEL_LIB}) - else () - message(ERROR "Could not detect the AMDiS::parallel library. 
Please set the variable _AMDIS_PARALLEL_LIB to this library.") - endif () - unset(_AMDIS_PARALLEL_LIB CACHE) endif (AMDIS_HAS_PARALLEL_DOMAIN) @@ -187,6 +161,7 @@ else () endif () unset(_MUPARSER_LIB CACHE) + # Zoltan library # -------------- if (AMDIS_NEED_ZOLTAN) diff --git a/AMDiS/cmake3/CMakeLists_3.0.txt b/AMDiS/cmake3/CMakeLists.txt similarity index 81% rename from AMDiS/cmake3/CMakeLists_3.0.txt rename to AMDiS/cmake3/CMakeLists.txt index 1431a75a9d241f6b74df343e29e541b3529cd865..eebe5204f47683a9865087c40ac66d88767fcd3a 100644 --- a/AMDiS/cmake3/CMakeLists_3.0.txt +++ b/AMDiS/cmake3/CMakeLists.txt @@ -1,24 +1,37 @@ cmake_minimum_required(VERSION 3.2 FATAL_ERROR) project(AMDiS VERSION 0.9 LANGUAGES CXX) +message(WARNING "This is an experimental CMakeLists.txt file.") + set(BASE_DIR ${AMDiS_SOURCE_DIR}) set(SOURCE_DIR ${BASE_DIR}/src) list(APPEND CMAKE_MODULE_PATH ${BASE_DIR}/cmake3) -option(BUILD_SHARED_LIBS "Build all libraries as shared or static, default: shared" true) option(ENABLE_CXX11 "Enable C++11 compiler features" false) option(ENABLE_PARALLEL_DOMAIN "Use parallel domain decomposition" false) -option(ENABLE_UMFPACK "Use of UMFPACK solver" false) -option(ENABLE_COMPRESSION "Use output compression" false) +option(ENABLE_UMFPACK "Enable support for the UMFPACK solver" false) +option(ENABLE_COMPRESSION "Use output compression for vtu and arh files" false) +option(ENABLE_EXTENSIONS "Use extensions for AMDiS" false) +option(BUILD_SHARED_LIBS "Build all libraries as shared or static, default: shared" ON) +if (MSVC) +# if (${CMAKE_VERSION} VERSION_LESS "3.4") + set(BUILD_SHARED_LIBS OFF) + mark_as_advanced(BUILD_SHARED_LIBS) + message(WARNING "Currently we can only build static libraries wiht Visual Studio") +# elseif (BUILD_SHARED_LIBS) +# set(CMAKE_WINDOWS_EXPORT_ALL_SYMBOLS true) +# set(MSVC_SHARED_LIBS true) +# endif () +endif (MSVC) add_library(amdis_base INTERFACE) add_library(AMDiS::base ALIAS amdis_base) target_include_directories(amdis_base INTERFACE ${SOURCE_DIR}) target_compile_definitions(amdis_base INTERFACE - $<$<CONFIG:Debug>:DEBUG=1> - $<$<CONFIG:Release>:DEBUG=0>) + $<$<CONFIG:Release>:DEBUG=0> + $<$<NOT:$<CONFIG:Release>>:DEBUG=1>) if (ENABLE_CXX11) target_compile_features(amdis_base INTERFACE @@ -37,7 +50,10 @@ endif (ENABLE_CXX11) # ------------------------------------------------------------------------------ include(find_mtl4) # -> target mtl4 +install(FILES ${BASE_DIR}/cmake3/find_mtl4.cmake DESTINATION share/amdis/) + include(find_boost) # -> target boost +install(FILES ${BASE_DIR}/cmake3/find_boost.cmake DESTINATION share/amdis/) # ------------------------------------------------------------------------------ @@ -65,7 +81,6 @@ add_library(amdis ${SOURCE_DIR}/DOFIndexed.cc ${SOURCE_DIR}/DOFMatrix.cc ${SOURCE_DIR}/DOFVector.cc - ${SOURCE_DIR}/Debug.cc ${SOURCE_DIR}/DirichletBC.cc ${SOURCE_DIR}/DualTraverse.cc ${SOURCE_DIR}/ElInfo.cc @@ -82,8 +97,6 @@ add_library(amdis ${SOURCE_DIR}/FirstOrderTerm.cc ${SOURCE_DIR}/FixVec.cc ${SOURCE_DIR}/Global.cc - ${SOURCE_DIR}/GlobalDOFNumbering.cc - ${SOURCE_DIR}/GlobalElementNumbering.cc ${SOURCE_DIR}/Initfile.cc ${SOURCE_DIR}/Lagrange.cc ${SOURCE_DIR}/LeafData.cc @@ -100,7 +113,6 @@ add_library(amdis ${SOURCE_DIR}/ProblemInstat.cc ${SOURCE_DIR}/ProblemInterpol.cc ${SOURCE_DIR}/ProblemStat.cc - ${SOURCE_DIR}/ProblemStatDbg.cc ${SOURCE_DIR}/Projection.cc ${SOURCE_DIR}/QPInfo.cc ${SOURCE_DIR}/QPsiPhi.cc @@ -167,29 +179,39 @@ add_library(amdis ${SOURCE_DIR}/time/RosenbrockAdaptInstationary.cc 
${SOURCE_DIR}/time/RosenbrockMethod.cc ${SOURCE_DIR}/time/RosenbrockStationary.cc - ) +) + +add_library(amdis_debug INTERFACE) +target_sources(amdis_debug INTERFACE + ${SOURCE_DIR}/Debug.cc + ${SOURCE_DIR}/GlobalDOFNumbering.cc # not used by any other class + ${SOURCE_DIR}/GlobalElementNumbering.cc # not used by any other class + ${SOURCE_DIR}/ProblemStatDbg.cc +) if (ENABLE_COMPRESSION) target_compile_definitions(amdis PRIVATE HAVE_COMPRESSION=1) endif (ENABLE_COMPRESSION) + +if (ENABLE_UMFPACK) + target_compile_definitions(amdis PRIVATE HAVE_UMFPACK=1) +endif (ENABLE_UMFPACK) -include(amdis_parallel) # -> target AMDiS::parallel +include(amdis_parallel) # adds sources to amdis include(amdis_reinit) # -> target AMDiS::reinit include(amdis_compositeFEM) # -> target AMDiS::compositeFEM +include(amdis_extensions) # -> target AMDiS::extensions include(muparser) # -> target muparser if (MSVC) target_compile_definitions(amdis PRIVATE - _SCL_SECURE_NO_WARNINGS - _CRT_SECURE_NO_WARNINGS) + _SCL_SECURE_NO_WARNINGS + _CRT_SECURE_NO_WARNINGS) endif (MSVC) -target_link_libraries(amdis AMDiS::base AMDiS::reinit AMDiS::compositeFEM muparser) -if (ENABLE_PARALLEL_DOMAIN) - target_link_libraries(amdis AMDiS::parallel) -endif (ENABLE_PARALLEL_DOMAIN) - +target_link_libraries(amdis AMDiS::base muparser + $<$<CONFIG:Debug>:amdis_debug>) # specify how to install this target: # ----------------------------------- @@ -213,7 +235,3 @@ configure_file(${BASE_DIR}/cmake3/AMDISConfig.cmake.in ) install(FILES ${AMDiS_BINARY_DIR}/AMDISConfig.cmake DESTINATION share/amdis/) - -# file(GLOB CMAKE_FILES "${BASE_DIR}/cmake3/*.cmake") -# install(FILES ${CMAKE_FILES} DESTINATION share/amdis/) - diff --git a/AMDiS/cmake3/amdis_compositeFEM.cmake b/AMDiS/cmake3/amdis_compositeFEM.cmake index 33a6e3406e96415521f30fa9be0f1e357582c201..76cfebac2bec2bd29f3ab26519680c847c332621 100644 --- a/AMDiS/cmake3/amdis_compositeFEM.cmake +++ b/AMDiS/cmake3/amdis_compositeFEM.cmake @@ -16,7 +16,7 @@ add_library(AMDiS::compositeFEM ALIAS amdis_compositeFEM) target_compile_definitions(amdis_compositeFEM PUBLIC HAVE_COMPOSITE_FEM=1) target_include_directories(amdis_compositeFEM PRIVATE ${COMPOSITE_SOURCE_DIR}) -target_link_libraries(amdis_compositeFEM amdis_base) +target_link_libraries(amdis_compositeFEM amdis) # specify how to install this target: # ----------------------------------- diff --git a/AMDiS/cmake3/amdis_extensions.cmake b/AMDiS/cmake3/amdis_extensions.cmake new file mode 100644 index 0000000000000000000000000000000000000000..9bd76608f1fdbbdacecc4a948ce7626fab522420 --- /dev/null +++ b/AMDiS/cmake3/amdis_extensions.cmake @@ -0,0 +1,97 @@ + +if (ENABLE_EXTENSIONS) + option(ENABLE_BASE_PROBLEMS "Use base_problems" true) + + find_path(EXTENSIONS_DIR NAMES Helpers.h + HINTS ${BASE_DIR}/../extensions + DOC "Path to AMDiS extensions.") + if (EXTENSIONS_DIR) + if (NOT EXISTS ${EXTENSIONS_DIR}/Helpers.h OR NOT EXISTS ${EXTENSIONS_DIR}/ExtendedProblemStat.h) + message(FATAL_ERROR "Wrong extensions directory! 
Directory must contain the files 'Helpers.h' and 'ExtendedProblemStat.h'") + endif () + + add_library(amdis_extensions + ${EXTENSIONS_DIR}/Helpers.cc + ${EXTENSIONS_DIR}/BackgroundMesh.cc + ${EXTENSIONS_DIR}/GeometryTools.cc + ${EXTENSIONS_DIR}/POperators.cc + ${EXTENSIONS_DIR}/SingularDirichletBC2.cc + ${EXTENSIONS_DIR}/time/ExtendedRosenbrockStationary.cc + ${EXTENSIONS_DIR}/pugixml/src/pugixml.cpp + ) + add_library(AMDiS::extensions ALIAS amdis_extensions) + + target_compile_definitions(amdis_extensions PRIVATE HAVE_EXTENSIONS=1) + target_include_directories(amdis_extensions PRIVATE + ${EXTENSIONS_DIR} + ${EXTENSIONS_DIR}/time + ${EXTENSIONS_DIR}/nanoflann + ${EXTENSIONS_DIR}/pugixml/src + ) + + set(INSTALL_SUBDIRS . time preconditioner nanoflann) + + if (ENABLE_SEQ_PETSC) + target_sources(amdis_extensions PRIVATE + ${EXTENSIONS_DIR}/preconditioner/PetscPreconPfc.cc + ${EXTENSIONS_DIR}/preconditioner/PetscPreconPfcDiag.cc + ${EXTENSIONS_DIR}/preconditioner/PetscPreconCahnHilliard.cc) + endif (ENABLE_SEQ_PETSC) + + if (ENABLE_PARALLEL_DOMAIN) + target_sources(amdis_extensions PRIVATE + ${EXTENSIONS_DIR}/preconditioner/PetscSolverPfc.cc + ${EXTENSIONS_DIR}/preconditioner/PetscSolverPfc_diag.cc) + target_link_libraries(amdis_extensions amdis_parallel) + endif (ENABLE_PARALLEL_DOMAIN) + + if (ENABLE_BASE_PROBLEMS) + target_sources(amdis_extensions PRIVATE + ${EXTENSIONS_DIR}/base_problems/CahnHilliard.cc + ${EXTENSIONS_DIR}/base_problems/CahnHilliard_RB.cc + ${EXTENSIONS_DIR}/base_problems/CahnHilliardNavierStokes.cc + ${EXTENSIONS_DIR}/base_problems/CahnHilliardNavierStokes_RB.cc + ${EXTENSIONS_DIR}/base_problems/CahnHilliardNavierStokes_TwoPhase.cc + ${EXTENSIONS_DIR}/base_problems/CahnHilliardNavierStokes_TwoPhase_RB.cc + ${EXTENSIONS_DIR}/base_problems/DiffuseDomainFsi.cc + ${EXTENSIONS_DIR}/base_problems/LinearElasticity.cc + ${EXTENSIONS_DIR}/base_problems/LinearElasticityPhase.cc + ${EXTENSIONS_DIR}/base_problems/NavierStokesCahnHilliard.cc + ${EXTENSIONS_DIR}/base_problems/NavierStokesPhase_TaylorHood.cc + ${EXTENSIONS_DIR}/base_problems/NavierStokes_TaylorHood.cc + ${EXTENSIONS_DIR}/base_problems/NavierStokes_TaylorHood_RB.cc + ${EXTENSIONS_DIR}/base_problems/NavierStokes_TH_MultiPhase.cc + ${EXTENSIONS_DIR}/base_problems/NavierStokes_TH_MultiPhase_RB.cc + ${EXTENSIONS_DIR}/base_problems/PhaseFieldCrystal.cc + ${EXTENSIONS_DIR}/base_problems/PhaseFieldCrystal_Phase.cc + ${EXTENSIONS_DIR}/base_problems/PhaseFieldCrystal_RB.cc + ${EXTENSIONS_DIR}/base_problems/PolarizationField.cc + ${EXTENSIONS_DIR}/base_problems/QuasiCrystal.cc + ${EXTENSIONS_DIR}/base_problems/QuasiCrystal_RB.cc) + # ${EXTENSIONS_DIR}/base_problems/NavierStokes_Chorin.cc + # ${EXTENSIONS_DIR}/base_problems/NavierStokesPhase_Chorin.cc + # ${EXTENSIONS_DIR}/base_problems/VacancyPhaseFieldCrystal.cc + + target_compile_definitions(amdis_extensions PRIVATE HAVE_BASE_PROBLEMS=1) + target_include_directories(amdis_extensions PRIVATE ${EXTENSIONS_DIR}/base_problems) + + list(APPEND INSTALL_SUBDIRS base_problems) + endif (ENABLE_BASE_PROBLEMS) + + target_link_libraries(amdis_extensions amdis amdis_reinit) + + # specify how to install this target: + # ----------------------------------- + + foreach (SUBDIR ${INSTALL_SUBDIRS}) + file(GLOB HEADERS "${EXTENSIONS_DIR}/${SUBDIR}/*.h*") + install(FILES ${HEADERS} DESTINATION include/amdis/extensions/${SUBDIR}/) + endforeach () + + file(GLOB HEADERS "${EXTENSIONS_DIR}/pugixml/src/*.hpp") + install(FILES ${HEADERS} DESTINATION include/amdis/extensions/pugixml/) + + install(TARGETS
amdis_extensions DESTINATION lib/amdis/) + + endif (EXTENSIONS_DIR) +endif (ENABLE_EXTENSIONS) diff --git a/AMDiS/cmake3/amdis_parallel.cmake b/AMDiS/cmake3/amdis_parallel.cmake index 60c9a588901420847bcd09112c2c79bf9fa19467..f6f6a5199da3c44f787c156472b8c2357681f6fd 100644 --- a/AMDiS/cmake3/amdis_parallel.cmake +++ b/AMDiS/cmake3/amdis_parallel.cmake @@ -2,99 +2,112 @@ if (ENABLE_PARALLEL_DOMAIN) option(ENABLE_ZOLTAN "Add support for the Parallel Partitioning suite Zoltan" false) option(ENABLE_PARALLEL_SOLVERS "Add some problem dependent solver, e.g. Feti, Navier-Stokes and Cahn-Hilliard" false) - add_library(amdis_parallel - ${SOURCE_DIR}/parallel/BddcMlSolver.cc - ${SOURCE_DIR}/parallel/DofComm.cc - ${SOURCE_DIR}/parallel/CheckerPartitioner.cc - ${SOURCE_DIR}/parallel/ElementObjectDatabase.cc - ${SOURCE_DIR}/parallel/InteriorBoundary.cc - ${SOURCE_DIR}/parallel/MeshDistributor.cc - ${SOURCE_DIR}/parallel/MeshLevelData.cc - ${SOURCE_DIR}/parallel/MeshManipulation.cc - ${SOURCE_DIR}/parallel/MeshPartitioner.cc - ${SOURCE_DIR}/parallel/MpiHelper.cc - ${SOURCE_DIR}/parallel/ParallelDebug.cc - ${SOURCE_DIR}/parallel/ParallelDofMapping.cc - ${SOURCE_DIR}/parallel/ParallelProblemStat.cc - ${SOURCE_DIR}/parallel/ParallelSolver.cc - ${SOURCE_DIR}/parallel/PeriodicMap.cc - ${SOURCE_DIR}/parallel/ParMetisPartitioner.cc - ${SOURCE_DIR}/parallel/StdMpi.cc - ${SOURCE_DIR}/parallel/ZoltanPartitioner.cc + add_library(amdis_parallel INTERFACE) + target_sources(amdis PRIVATE + ${SOURCE_DIR}/parallel/DofComm.cc + ${SOURCE_DIR}/parallel/CheckerPartitioner.cc + ${SOURCE_DIR}/parallel/ElementObjectDatabase.cc + ${SOURCE_DIR}/parallel/InteriorBoundary.cc + ${SOURCE_DIR}/parallel/MeshDistributor.cc + ${SOURCE_DIR}/parallel/MeshLevelData.cc + ${SOURCE_DIR}/parallel/MeshManipulation.cc + ${SOURCE_DIR}/parallel/MeshPartitioner.cc + ${SOURCE_DIR}/parallel/MpiHelper.cc + ${SOURCE_DIR}/parallel/ParallelDofMapping.cc + ${SOURCE_DIR}/parallel/ParallelProblemStat.cc + ${SOURCE_DIR}/parallel/ParallelSolver.cc + ${SOURCE_DIR}/parallel/PeriodicMap.cc + ${SOURCE_DIR}/parallel/ParMetisPartitioner.cc + ${SOURCE_DIR}/parallel/StdMpi.cc + ) + + target_sources(amdis_debug INTERFACE + ${SOURCE_DIR}/parallel/ParallelDebug.cc ) - add_library(AMDiS::parallel ALIAS amdis_parallel) - target_compile_definitions(amdis_parallel PUBLIC - HAVE_PARALLEL_DOMAIN_AMDIS=1) - target_link_libraries(amdis_parallel amdis_base) + target_compile_definitions(amdis_parallel INTERFACE + HAVE_PARALLEL_DOMAIN_AMDIS=1) # MPI is required find_package(MPI REQUIRED) if (MPI_FOUND) - target_include_directories(amdis_parallel PUBLIC - ${MPI_INCLUDE_PATH}) - target_compile_options(amdis_parallel PUBLIC - ${MPI_COMPILE_FLAGS}) + target_include_directories(amdis_parallel INTERFACE + ${MPI_INCLUDE_PATH}) + target_compile_options(amdis_parallel INTERFACE + ${MPI_COMPILE_FLAGS}) endif (MPI_FOUND) # PETSc library is required set(PETSC_EXECUTABLE_RUNS ON) include(find_petsc) if (PETSc_FOUND) - target_include_directories(amdis_parallel PUBLIC - ${PETSC_DIR}/include - ${PETSC_DIR}/${PETSC_ARCH}/include) - - # parmetis is required - find_file(PARMETIS_HEADER_FILE "parmetis.h" HINTS ${PETSC_DIR}/include ) - if (PARMETIS_HEADER_FILE) - get_filename_component(PARMETIS_INCLUDE_PATH "${PARMETIS_HEADER_FILE}" PATH CACHE) - target_include_directories(amdis_parallel PUBLIC ${PARMETIS_INCLUDE_PATH}) - else() - message(FATAL_ERROR "Could not find ParMetis header file 'parmetis.h'!") - endif (PARMETIS_HEADER_FILE) - - # add support for the zoltan library - if (ENABLE_ZOLTAN) - 
find_file(ZOLTAN_HEADER_FILE "zoltan_cpp.h" HINTS ${PETSC_DIR}/include) - if (ZOLTAN_HEADER_FILE) - get_filename_component(ZOLTAN_HEADER_DIR "${ZOLTAN_HEADER_FILE}" PATH CACHE) - target_include_directories(amdis_parallel PRIVATE ${ZOLTAN_HEADER_DIR}) - target_compile_definitions(amdis_parallel PRIVATE HAVE_ZOLTAN=1) - else() - message(FATAL_ERROR "Could not find Zoltan include file 'zoltan_cpp.h'!") - endif(ZOLTAN_HEADER_FILE) - endif (ENABLE_ZOLTAN) - - # add some more source-files that need petsc - target_sources(amdis_parallel PRIVATE - ${SOURCE_DIR}/parallel/MatrixNnzStructure.cc - ${SOURCE_DIR}/parallel/ParallelCoarseSpaceSolver.cc - ${SOURCE_DIR}/parallel/PetscHelper.cc - ${SOURCE_DIR}/parallel/PetscSolver.cc - ${SOURCE_DIR}/parallel/PetscSolverGlobalMatrix.cc - ${SOURCE_DIR}/parallel/PetscSolverGlobalBlockMatrix.cc - ${SOURCE_DIR}/solver/PetscTypes.cc - ) - - if (ENABLE_PARALLEL_SOLVERS) - target_sources(amdis_parallel PRIVATE - ${SOURCE_DIR}/parallel/PetscSolverFeti.cc - ${SOURCE_DIR}/parallel/PetscSolverFetiDebug.cc - ${SOURCE_DIR}/parallel/PetscSolverFetiMonitor.cc - ${SOURCE_DIR}/parallel/PetscSolverFetiOperators.cc - ${SOURCE_DIR}/parallel/PetscSolverFetiTimings.cc - ${SOURCE_DIR}/parallel/PetscSolverNavierStokes.cc - ${SOURCE_DIR}/parallel/PetscSolverNSCH.cc - ${SOURCE_DIR}/parallel/PetscSolverCahnHilliard2.cc - ${SOURCE_DIR}/parallel/PetscSolverCahnHilliard.cc - ${SOURCE_DIR}/parallel/PetscSolverSchur.cc - ) - endif (ENABLE_PARALLEL_SOLVERS) - - target_compile_definitions(amdis_parallel PUBLIC - HAVE_PARALLEL_PETSC=1 - PETSC_VERSION=${PETSC_VERSION}) + target_include_directories(amdis_parallel INTERFACE + ${PETSC_DIR}/include + ${PETSC_DIR}/${PETSC_ARCH}/include) + + # parmetis is required + find_file(PARMETIS_HEADER_FILE "parmetis.h" HINTS ${PETSC_DIR}/include ) + if (PARMETIS_HEADER_FILE) + get_filename_component(PARMETIS_INCLUDE_PATH "${PARMETIS_HEADER_FILE}" PATH CACHE) + target_include_directories(amdis_parallel INTERFACE ${PARMETIS_INCLUDE_PATH}) + else() + message(FATAL_ERROR "Could not find ParMetis header file 'parmetis.h'!") + endif (PARMETIS_HEADER_FILE) + + # add support for the zoltan library + if (ENABLE_ZOLTAN) + find_file(ZOLTAN_HEADER_FILE "zoltan_cpp.h" HINTS ${PETSC_DIR}/include) + if (ZOLTAN_HEADER_FILE) + get_filename_component(ZOLTAN_HEADER_DIR "${ZOLTAN_HEADER_FILE}" PATH CACHE) + target_include_directories(amdis_parallel INTERFACE ${ZOLTAN_HEADER_DIR}) + else() + message(FATAL_ERROR "Could not find Zoltan include file 'zoltan_cpp.h'!") + endif(ZOLTAN_HEADER_FILE) + + target_compile_definitions(amdis_parallel INTERFACE HAVE_ZOLTAN=1) + target_sources(amdis PRIVATE + ${SOURCE_DIR}/parallel/ZoltanPartitioner.cc) + endif (ENABLE_ZOLTAN) + + if (ENABLE_BDDCML) + target_compile_definitions(amdis_parallel INTERFACE HAVE_BDDCML=1) + target_sources(amdis PRIVATE + ${SOURCE_DIR}/parallel/BddcMlSolver.cc) + endif (ENABLE_BDDCML) + + # add some more source-files that need petsc + target_sources(amdis PRIVATE + ${SOURCE_DIR}/parallel/MatrixNnzStructure.cc + ${SOURCE_DIR}/parallel/ParallelCoarseSpaceSolver.cc + ${SOURCE_DIR}/parallel/PetscHelper.cc + ${SOURCE_DIR}/parallel/PetscSolver.cc + ${SOURCE_DIR}/parallel/PetscSolverGlobalMatrix.cc + ${SOURCE_DIR}/parallel/PetscSolverGlobalBlockMatrix.cc + ${SOURCE_DIR}/solver/PetscTypes.cc + ) + + if (ENABLE_PARALLEL_SOLVERS) + target_sources(amdis PRIVATE + ${SOURCE_DIR}/parallel/PetscSolverFeti.cc + ${SOURCE_DIR}/parallel/PetscSolverFetiDebug.cc + ${SOURCE_DIR}/parallel/PetscSolverFetiMonitor.cc +
${SOURCE_DIR}/parallel/PetscSolverFetiOperators.cc + ${SOURCE_DIR}/parallel/PetscSolverFetiTimings.cc + ${SOURCE_DIR}/parallel/PetscSolverNavierStokes.cc + ${SOURCE_DIR}/parallel/PetscSolverNSCH.cc + ${SOURCE_DIR}/parallel/PetscSolverCahnHilliard2.cc + ${SOURCE_DIR}/parallel/PetscSolverCahnHilliard.cc + ${SOURCE_DIR}/parallel/PetscSolverSchur.cc + ) + target_compile_definitions(amdis_parallel INTERFACE + HAVE_PARALLEL_SOLVERS=1) + endif (ENABLE_PARALLEL_SOLVERS) + + target_compile_definitions(amdis_parallel INTERFACE + HAVE_PARALLEL_PETSC=1 + PETSC_VERSION=${PETSC_VERSION}) + + target_link_libraries(amdis amdis_parallel ${PETSC_LIBRARIES} blas lapack) endif (PETSc_FOUND) # specify how to install this target: @@ -102,11 +115,10 @@ if (ENABLE_PARALLEL_DOMAIN) file(GLOB AMDIS_PARALLEL_HEADERS "${SOURCE_DIR}/parallel/*.h") install(FILES ${AMDIS_PARALLEL_HEADERS} DESTINATION include/amdis/parallel/) - install(TARGETS amdis_parallel DESTINATION lib/amdis/ ) install(FILES - ${BASE_DIR}/cmake3/ResolveCompilerPaths.cmake - ${BASE_DIR}/cmake3/FindPackageMultipass.cmake - ${BASE_DIR}/cmake3/find_petsc.cmake - DESTINATION share/amdis/) + ${BASE_DIR}/cmake3/ResolveCompilerPaths.cmake + ${BASE_DIR}/cmake3/FindPackageMultipass.cmake + ${BASE_DIR}/cmake3/find_petsc.cmake + DESTINATION share/amdis/) endif (ENABLE_PARALLEL_DOMAIN) diff --git a/AMDiS/cmake3/amdis_reinit.cmake b/AMDiS/cmake3/amdis_reinit.cmake index 862cb62abc90800781f6d864d5d6ac240f4f3848..7e94a0ab62ea9c983623d17a5366fe5849b83284 100644 --- a/AMDiS/cmake3/amdis_reinit.cmake +++ b/AMDiS/cmake3/amdis_reinit.cmake @@ -8,8 +8,8 @@ add_library(AMDiS::reinit ALIAS amdis_reinit) target_compile_definitions(amdis_reinit PUBLIC HAVE_REINIT=1) -target_include_directories(amdis_reinit PRIVATE ${REINIT_SOURCE_DIR}) -target_link_libraries(amdis_reinit amdis_base) +target_include_directories(amdis_reinit PUBLIC ${REINIT_SOURCE_DIR}) +target_link_libraries(amdis_reinit amdis) # specify how to install this target: # ----------------------------------- diff --git a/AMDiS/cmake3/find_boost.cmake b/AMDiS/cmake3/find_boost.cmake index 19eb347e2cf982810eb1fd38005497f7eb5d8d4d..887976c155f7d6c44c9af312df31d6fa5e754cf0 100644 --- a/AMDiS/cmake3/find_boost.cmake +++ b/AMDiS/cmake3/find_boost.cmake @@ -1,7 +1,25 @@ -find_package(Boost 1.44 REQUIRED) +set(BOOST_VERSION "1.48") +set(BOOST_LIBS_REQUIRED system iostreams filesystem program_options date_time) +if (WIN32) + list(APPEND BOOST_LIBS_REQUIRED zlib) + if (ENABLE_COMPRESSION OR AMDIS_NEED_COMPRESSION) + list(APPEND BOOST_LIBS_REQUIRED bzip2) + endif (ENABLE_COMPRESSION OR AMDIS_NEED_COMPRESSION) +endif (WIN32) + +if (NOT BUILD_SHARED_LIBS) + set(Boost_USE_STATIC_LIBS ON) +endif (NOT BUILD_SHARED_LIBS) +find_package(Boost ${BOOST_VERSION} REQUIRED ${BOOST_LIBS_REQUIRED}) if (Boost_FOUND) - add_library(boost INTERFACE) - target_include_directories(boost INTERFACE ${Boost_INCLUDE_DIR}) - - target_link_libraries(amdis_base INTERFACE boost) + add_library(boost INTERFACE) + target_include_directories(boost INTERFACE ${Boost_INCLUDE_DIR}) + target_link_libraries(boost INTERFACE ${Boost_LIBRARIES}) + + target_link_libraries(amdis_base INTERFACE boost) + + if (MSVC_SHARED_LIBS) + link_directories(${Boost_LIBRARY_DIRS}) + target_compile_definitions(amdis_base INTERFACE ${Boost_LIB_DIAGNOSTIC_DEFINITIONS}) + endif (MSVC_SHARED_LIBS) endif (Boost_FOUND) diff --git a/AMDiS/cmake3/find_mtl4.cmake b/AMDiS/cmake3/find_mtl4.cmake index bf5b4867926874fd325a7ea9f16905ae45ce26b9..f273dc1a66fdfcda99b772d636cc63d45e012102 100644 --- 
a/AMDiS/cmake3/find_mtl4.cmake +++ b/AMDiS/cmake3/find_mtl4.cmake @@ -1,48 +1,50 @@ -find_package(MTL REQUIRED HINTS ${BASE_DIR}/lib/mtl4) -# hide options of MTL4 and use the AMDiS replacement -mark_as_advanced(ENABLE_CXX_ELEVEN) -mark_as_advanced(ENABLE_SHORT_ELE_PROD) -mark_as_advanced(ENABLE_VAMPIR) -if (MTL_FOUND) - add_library(mtl4 INTERFACE) +add_library(mtl4 INTERFACE) + +if (IS_AMDISCONFIG) + target_include_directories(mtl4 INTERFACE ${AMDIS_INCLUDE_DIR}/mtl4) +else () + target_include_directories(mtl4 INTERFACE ${BASE_DIR}/lib/mtl4) +endif (IS_AMDISCONFIG) + +if (ENABLE_CXX11) + target_compile_features(mtl4 INTERFACE + cxx_rvalue_references + cxx_auto_type + cxx_range_for + cxx_generalized_initializers + cxx_static_assert + cxx_defaulted_functions) - target_include_directories(mtl4 INTERFACE - $<BUILD_INTERFACE:${MTL_INCLUDE_DIRS}> - $<INSTALL_INTERFACE:include/amdis/mtl4>) - - if (ENABLE_CXX11) - string (REPLACE "-std=c++0x" "" MTL_CXX_DEFINITIONS "${MTL_CXX_DEFINITIONS}") - string (REPLACE "-std=c++11" "" MTL_CXX_DEFINITIONS "${MTL_CXX_DEFINITIONS}") - string (REPLACE "-std=gnu++0x" "" MTL_CXX_DEFINITIONS "${MTL_CXX_DEFINITIONS}") - string (REPLACE "-std=gnu++11" "" MTL_CXX_DEFINITIONS "${MTL_CXX_DEFINITIONS}") - target_compile_features(mtl4 INTERFACE - cxx_rvalue_references - cxx_auto_type - cxx_range_for - cxx_generalized_initializers - cxx_static_assert - cxx_defaulted_functions) - set(ENABLE_CXX_ELEVEN true) - else () - set(ENABLE_CXX_ELEVEN false) - endif () - - if (ENABLE_UMFPACK) - target_compile_definitions(mtl4 INTERFACE HAVE_UMFPACK=1) - endif (ENABLE_UMFPACK) - - target_compile_options(mtl4 INTERFACE ${MTL_CXX_DEFINITIONS}) - target_link_libraries(mtl4 INTERFACE ${MTL_LIBRARIES}) - + set (CXX_ELEVEN_FEATURE_LIST "MOVE" "AUTO" "RANGEDFOR" "INITLIST" "STATICASSERT" "DEFAULTIMPL") + foreach (feature ${CXX_ELEVEN_FEATURE_LIST}) + target_compile_definitions(mtl4 INTERFACE MTL_WITH_${feature}) + endforeach () +endif (ENABLE_CXX11) + + +if (ENABLE_OPENMP) + find_package(OpenMP REQUIRED) + if (OPENMP_FOUND) + target_compile_definitions(mtl4 INTERFACE MTL_WITH_OPENMP) + target_compile_options(mtl4 INTERFACE ${OpenMP_CXX_FLAGS}) + else () + message(FATAL_ERROR "OpenMP not found") + endif (OPENMP_FOUND) +endif (ENABLE_OPENMP) + + +if (NOT IS_AMDISCONFIG) + include(find_umfpack) + # specify how to install this target: # ----------------------------------- - install(DIRECTORY ${MTL_DIR}/ - DESTINATION include/amdis/mtl4/ - FILES_MATCHING PATTERN "*.hpp" - PATTERN ".svn" EXCLUDE - PATTERN ".svn/*" EXCLUDE - PATTERN "mtl4/libs" EXCLUDE - PATTERN "mtl4/extern" EXCLUDE) - - target_link_libraries(amdis_base INTERFACE mtl4) -endif (MTL_FOUND) + install(DIRECTORY ${BASE_DIR}/lib/mtl4/ + DESTINATION include/amdis/mtl4/ + FILES_MATCHING PATTERN "*.hpp" + PATTERN ".svn" EXCLUDE + PATTERN ".svn/*" EXCLUDE + PATTERN "mtl4/libs" EXCLUDE + PATTERN "mtl4/extern" EXCLUDE) +endif (NOT IS_AMDISCONFIG) + +target_link_libraries(amdis_base INTERFACE mtl4) diff --git a/AMDiS/cmake3/find_petsc.cmake b/AMDiS/cmake3/find_petsc.cmake index 9b1683844459df05971a43ca0215dbb6d7bc5e29..3bef0fb85d1f2652d7606cd178a2e33a851e4dba 100644 --- a/AMDiS/cmake3/find_petsc.cmake +++ b/AMDiS/cmake3/find_petsc.cmake @@ -1,6 +1,6 @@ +# source: https://github.com/jedbrown/cmake-modules/blob/master/FindPETSc.cmake # - Try to find PETSc # Once done this will define -# source: https://github.com/jedbrown/cmake-modules/blob/master/FindPETSc.cmake # # PETSC_FOUND - system has PETSc # PETSC_INCLUDES - the PETSc include directories 
@@ -76,8 +76,13 @@ find_package_multipass (PETSc petsc_config_current # Determine whether the PETSc layout is old-style (through 2.3.3) or # new-style (>= 3.0.0) if (EXISTS "${PETSC_DIR}/${PETSC_ARCH}/include/petscconf.h") # > 2.3.3 - set (petsc_conf_rules "${PETSC_DIR}/conf/rules") - set (petsc_conf_variables "${PETSC_DIR}/conf/variables") + if (EXISTS "${PETSC_DIR}/conf/rules") + set (petsc_conf_rules "${PETSC_DIR}/conf/rules") + set (petsc_conf_variables "${PETSC_DIR}/conf/variables") + else () + set (petsc_conf_rules "${PETSC_DIR}/lib/petsc/conf/rules") + set (petsc_conf_variables "${PETSC_DIR}/lib/petsc/conf/variables") + endif () elseif (EXISTS "${PETSC_DIR}/bmake/${PETSC_ARCH}/petscconf.h") # <= 2.3.3 set (petsc_conf_rules "${PETSC_DIR}/bmake/common/rules") set (petsc_conf_variables "${PETSC_DIR}/bmake/common/variables") diff --git a/AMDiS/cmake3/find_umfpack.cmake b/AMDiS/cmake3/find_umfpack.cmake new file mode 100644 index 0000000000000000000000000000000000000000..1e82d293d1c4121324a26a51c37e1d4bccac9a9e --- /dev/null +++ b/AMDiS/cmake3/find_umfpack.cmake @@ -0,0 +1,84 @@ +if (ENABLE_UMFPACK OR AMDIS_NEED_UMFPACK) + if (WIN32) + set(SuiteSparse_USE_LAPACK_BLAS ON) + endif (WIN32) + + # try to use a cmake-package of suitesparse + find_package(SuiteSparse QUIET HINTS ${AMDIS_SuiteSparse_DIR}) + if (SuiteSparse_FOUND) + include(${USE_SuiteSparse}) + target_link_libraries(amdis_base INTERFACE ${SuiteSparse_LIBRARIES}) + target_include_directories(amdis_base INTERFACE ${SuiteSparse_INCLUDE_DIR} ${SuiteSparse_METIS_INCLUDE_DIR}) + set(FOUND_SUITESPARSE_LIBS ${SuiteSparse_LIBRARIES}) + else (SuiteSparse_FOUND) + # find umfpack manually by searching for umfpack.h header file + find_library(UMFPACK_LIBRARY umfpack + HINTS ${AMDIS_UMFPACK_LIB_DIR} + DOC "Library file for UMFPACK") + find_file(UMFPACK_H umfpack.h + HINTS ${AMDIS_UMFPACK_INCLUDE_DIR} ENV CPATH /usr/include /usr/include/suitesparse /usr/include/ufsparse + DOC "Headerfile umfpack.h for UMFPACK") + + if (UMFPACK_H AND UMFPACK_LIBRARY) + get_filename_component(UMFPACK_PATH ${UMFPACK_H} PATH) + get_filename_component(UMFPACK_LIB_PATH ${UMFPACK_LIBRARY} PATH) + set(FOUND_SUITESPARSE_LIBS ${UMFPACK_LIBRARY}) + + # find all connected libraries + find_library(AMD_LIBRARY amd HINTS ${UMFPACK_LIB_PATH}) + find_library(BLAS_LIBRARY NAMES blas openblas HINTS ${UMFPACK_LIB_PATH} /usr/lib /usr/lib/openblas-base) + find_library(CHOLMOD_LIBRARY cholmod HINTS ${UMFPACK_LIB_PATH}) + find_library(COLAMD_LIBRARY colamd HINTS ${UMFPACK_LIB_PATH}) + find_library(SUITESPARSECONFIG_LIBRARY suitesparseconfig HINTS ${UMFPACK_LIB_PATH}) + if (AMD_LIBRARY AND BLAS_LIBRARY) + list(APPEND FOUND_SUITESPARSE_LIBS ${AMD_LIBRARY} ${BLAS_LIBRARY}) + endif (AMD_LIBRARY AND BLAS_LIBRARY) + if (CHOLMOD_LIBRARY) + list(APPEND FOUND_SUITESPARSE_LIBS ${CHOLMOD_LIBRARY}) + endif (CHOLMOD_LIBRARY) + if (COLAMD_LIBRARY) + list(APPEND FOUND_SUITESPARSE_LIBS ${COLAMD_LIBRARY}) + endif (COLAMD_LIBRARY) + if (SUITESPARSECONFIG_LIBRARY) + list(APPEND FOUND_SUITESPARSE_LIBS ${SUITESPARSECONFIG_LIBRARY}) + endif (SUITESPARSECONFIG_LIBRARY) + + target_include_directories(amdis_base INTERFACE ${UMFPACK_PATH}) + target_link_libraries(amdis_base INTERFACE ${FOUND_SUITESPARSE_LIBS}) + else() + message(FATAL_ERROR "Could not find the UMFPACK header umfpack.h.") + endif (UMFPACK_H AND UMFPACK_LIBRARY) + endif (SuiteSparse_FOUND) + + + # Check for clock_gettime in librt + if (NOT WIN32) + include(CheckLibraryExists) + check_library_exists(rt clock_gettime "time.h" HAVE_CLOCK_GETTIME) 
+ if (HAVE_CLOCK_GETTIME) + target_link_libraries(amdis_base INTERFACE rt) + else () + check_library_exists(c clock_gettime "" HAVE_CLOCK_GETTIME) + endif (HAVE_CLOCK_GETTIME) + endif (NOT WIN32) + + + # collect informations about umfpack version and found libraries + if (FOUND_SUITESPARSE_LIBS) + find_file(_UMFPACK_H umfpack.h HINTS ${SuiteSparse_INCLUDE_DIR} ${UMFPACK_PATH}) + file(STRINGS ${_UMFPACK_H} UMFPACK_VERSION_LINE REGEX "#define UMFPACK_VERSION") + string(REGEX MATCH "\"UMFPACK V?([^\"]+)\"" UMFPACK_VERSION_REGEX ${UMFPACK_VERSION_LINE}) + set(UMFPACK_VERSION ${CMAKE_MATCH_1}) + + message(STATUS "UMFPACK version: ${UMFPACK_VERSION}") + message(STATUS "Found the following SuiteSparse libraries:") + foreach (lib ${FOUND_SUITESPARSE_LIBS}) + message(STATUS " ${lib}") + endforeach () + endif (FOUND_SUITESPARSE_LIBS) + + target_compile_definitions(amdis_base INTERFACE HAVE_UMFPACK=1) + target_compile_definitions(amdis_base INTERFACE MTL_HAS_UMFPACK=1) + + install(FILES ${BASE_DIR}/cmake3/find_umfpack.cmake DESTINATION share/amdis/) +endif (ENABLE_UMFPACK OR AMDIS_NEED_UMFPACK) \ No newline at end of file diff --git a/AMDiS/src/Expressions.h b/AMDiS/src/Expressions.h index 284cf63a1ad7513ee2ec46826216bd393b8d3788..01c24a0d08cb3ff4c274830b15d2f65184000f4e 100644 --- a/AMDiS/src/Expressions.h +++ b/AMDiS/src/Expressions.h @@ -428,7 +428,7 @@ struct GenericSecondOrderTerm_ij : public GenericOperatorTerm<Term, 2> void weakEval(const std::vector<WorldVector<double> > &grdUhAtQP, std::vector<WorldVector<double> > &result) { - int nPoints = grdUhAtQP.size(); + int nPoints = (int)grdUhAtQP.size(); for (int iq = 0; iq < nPoints; iq++) result[iq][row] += grdUhAtQP[iq][col] * this->term(iq); } diff --git a/AMDiS/src/config/Config_msc.h b/AMDiS/src/config/Config_msc.h index dd8428fd712e9efed2294204282012f5605ca2c6..2579fedce7d7a9c88887595be5518c8cc807906e 100644 --- a/AMDiS/src/config/Config_msc.h +++ b/AMDiS/src/config/Config_msc.h @@ -32,6 +32,7 @@ #define COMPILER_NAME "msc" #define COMPILER_VERSION MSC_VERSION +#define COMPILER_IS_MSVC // alignement specification // ------------------------ diff --git a/AMDiS/src/parallel/MeshDistributor.cc b/AMDiS/src/parallel/MeshDistributor.cc index 8f35318d210c361a4ee063f6ad46907f598aa4b6..3bc8877063c23bbf56ae8488588299e3e6c68b77 100644 --- a/AMDiS/src/parallel/MeshDistributor.cc +++ b/AMDiS/src/parallel/MeshDistributor.cc @@ -29,7 +29,9 @@ #include "parallel/MeshDistributor.h" #include "parallel/MeshManipulation.h" +#if (DEBUG != 0) #include "parallel/ParallelDebug.h" +#endif #include "parallel/StdMpi.h" #include "parallel/MeshPartitioner.h" #include "parallel/ParMetisPartitioner.h" @@ -61,7 +63,9 @@ #include "ProblemStat.h" #include "ProblemInstat.h" #include "RefinementManager3d.h" +#if (DEBUG != 0) #include "Debug.h" +#endif #include "Timer.h" #include "io/MacroReader.h" @@ -314,14 +318,9 @@ namespace AMDiS { namespace Parallel { for (size_t i = 0; i < meshes.size(); i++) { debug::createSortedDofs(meshes[i], elMap[i]); } -#endif if (mpiRank == 0) { -#if (DEBUG != 0) int writePartMesh = 1; -#else - int writePartMesh = 0; -#endif Parameters::get("parallel->debug->write mesh partitioning", writePartMesh); if (writePartMesh > 0) { @@ -329,6 +328,7 @@ namespace AMDiS { namespace Parallel { ParallelDebug::writePartitioning(*this, debugOutputDir + "part"); } } +#endif // Create interior boundary information. 
createInteriorBoundary(true); @@ -1637,14 +1637,12 @@ namespace AMDiS { namespace Parallel { for (size_t i = 0; i < meshes.size(); i++) ParallelDebug::testDoubleDofs(meshes[i]); int writePartMesh = 1; -#else - int writePartMesh = 0; -#endif Parameters::get("parallel->debug->write part mesh", writePartMesh); if (writePartMesh > 0 && repartitioningCounter == 0) ParallelDebug::writePartitioningFile(debugOutputDir + "partitioning", repartitioningCounter, feSpaces[0]); +#endif repartitioningCounter++; @@ -2262,8 +2260,10 @@ namespace AMDiS { namespace Parallel { intBoundary.create(levelData, elObjDb); +#if (DEBUG != 0) for (int level = 0; level < levelData.getNumberOfLevels(); level++) ParallelDebug::printBoundaryInfo(intBoundary[level]); +#endif if (firstCall) { int tmpSend = static_cast<int>(intBoundary[0].hasPeriodic()); @@ -2436,15 +2436,15 @@ namespace AMDiS { namespace Parallel { Timer t; #if (DEBUG != 0) bool printInfo = true; -#else - bool printInfo = false; -#endif Parameters::get("parallel->print dofmap info", printInfo); +#endif for (size_t i = 0; i < dofMaps.size(); i++) { dofMaps[i]->update(); +#if (DEBUG != 0) if (printInfo) dofMaps[i]->printInfo(); +#endif } // === Create periodic DOF maps, if there are periodic boundaries. === @@ -2456,6 +2456,7 @@ namespace AMDiS { namespace Parallel { dofMaps[i]->updateMatIndex(); } +#if (DEBUG != 0) if (printInfo) { int test = 0; Parameters::get("parallel->remove periodic boundary", test); @@ -2492,7 +2493,9 @@ namespace AMDiS { namespace Parallel { ParallelDebug::testGlobalIndexByCoords(*this, mesh); } - } else { + } else +#else + { // int tmp = 0; // Parameters::get(name + "->write parallel debug file", tmp); @@ -2520,6 +2523,7 @@ namespace AMDiS { namespace Parallel { // debugOutputDir + "mpi-dbg", "dat"); } } +#endif MPI::COMM_WORLD.Barrier(); MSG("Update dof mapping needed %.5f seconds\n", t.elapsed()); diff --git a/AMDiS/src/parallel/MeshDistributor.h b/AMDiS/src/parallel/MeshDistributor.h index 26ae91942ade665dd9cd27ee81328cbe599c98d1..e33c4ede9110ecd2b4e2a6bec5f52f62905491ab 100644 --- a/AMDiS/src/parallel/MeshDistributor.h +++ b/AMDiS/src/parallel/MeshDistributor.h @@ -283,69 +283,69 @@ namespace AMDiS { namespace Parallel { } -/** \brief - * Synchronize \p vec using indicator vector \p additionalVecs, e.g. let - * additionalVecs[0] be 1 on all dofs the value should be taken from -my rank - * and 0 elsewhere. - */ -// op(std::vector<T>& out, std::vector<T> const& in) -template<typename T, typename Operator> -void synchMultiVector(DOFVector<T> &vec, std::vector<DOFVector<T>*> additionalVecs, Operator op) -{ - // get FE space and check equal FE space - const FiniteElemSpace *fe = vec.getFeSpace(); - MultiLevelDofComm& dofComm = dofComms[fe->getMesh()]; - - typedef typename std::vector<DOFVector<T>*>::iterator Iterator; - - int nLevels = levelData.getNumberOfLevels(); - for (int level = nLevels - 1; level >= 0; level--) - { - StdMpi < std::vector<std::vector<T> > > - stdMpi(levelData.getMpiComm(level)); + /** \brief + * Synchronize \p vec using indicator vector \p additionalVecs, e.g. let + * additionalVecs[0] be 1 on all dofs the value should be taken from + my rank + * and 0 elsewhere. 
+ */ + // op(std::vector<T>& out, std::vector<T> const& in) + template<typename T, typename Operator> + void synchMultiVector(DOFVector<T> &vec, std::vector<DOFVector<T>*> additionalVecs, Operator op) + { + // get FE space and check equal FE space + const FiniteElemSpace *fe = vec.getFeSpace(); + MultiLevelDofComm& dofComm = dofComms[fe->getMesh()]; - for (DofComm::Iterator it(dofComm[level].getRecvDofs(), fe); - !it.end(); it.nextRank()) + typedef typename std::vector<DOFVector<T>*>::iterator Iterator; + + int nLevels = levelData.getNumberOfLevels(); + for (int level = nLevels - 1; level >= 0; level--) { - std::vector<std::vector<T> > dofs; - dofs.reserve(it.getDofs().size()); - for (; !it.endDofIter(); it.nextDof()) - { - std::vector<T> values; - values.reserve(additionalVecs.size() + 1); - values.push_back( vec[it.getDofIndex()] ); - for (Iterator vecIt = additionalVecs.begin(); vecIt != - additionalVecs.end(); ++vecIt ) - values.push_back( (**vecIt)[it.getDofIndex()] ); - dofs.push_back( values ); - } - stdMpi.send(it.getRank(), dofs); - } + StdMpi < std::vector<std::vector<T> > > + stdMpi(levelData.getMpiComm(level)); + + for (DofComm::Iterator it(dofComm[level].getRecvDofs(), fe); + !it.end(); it.nextRank()) + { + std::vector<std::vector<T> > dofs; + dofs.reserve(it.getDofs().size()); + for (; !it.endDofIter(); it.nextDof()) + { + std::vector<T> values; + values.reserve(additionalVecs.size() + 1); + values.push_back( vec[it.getDofIndex()] ); + for (Iterator vecIt = additionalVecs.begin(); vecIt != + additionalVecs.end(); ++vecIt ) + values.push_back( (**vecIt)[it.getDofIndex()] ); + dofs.push_back( values ); + } + stdMpi.send(it.getRank(), dofs); + } - for (DofComm::Iterator it(dofComm[level].getSendDofs(), fe); - !it.end(); it.nextRank()) - stdMpi.recv(it.getRank()); + for (DofComm::Iterator it(dofComm[level].getSendDofs(), fe); + !it.end(); it.nextRank()) + stdMpi.recv(it.getRank()); - stdMpi.startCommunication(); + stdMpi.startCommunication(); - for (DofComm::Iterator it(dofComm[level].getSendDofs(), fe); - !it.end(); it.nextRank()) { - for (; !it.endDofIter(); it.nextDof()) { - std::vector<T> values; - values.reserve(additionalVecs.size() + 1); - values.push_back( vec[it.getDofIndex()] ); - for (Iterator vecIt = additionalVecs.begin(); vecIt != - additionalVecs.end(); ++vecIt ) - values.push_back( (**vecIt)[it.getDofIndex()] ); + for (DofComm::Iterator it(dofComm[level].getSendDofs(), fe); + !it.end(); it.nextRank()) { + for (; !it.endDofIter(); it.nextDof()) { + std::vector<T> values; + values.reserve(additionalVecs.size() + 1); + values.push_back( vec[it.getDofIndex()] ); + for (Iterator vecIt = additionalVecs.begin(); vecIt != + additionalVecs.end(); ++vecIt ) + values.push_back( (**vecIt)[it.getDofIndex()] ); - op(values, stdMpi.getRecvData(it.getRank())[it.getDofCounter()]); - } - } - } + op(values, stdMpi.getRecvData(it.getRank())[it.getDofCounter()]); + } + } + } - synchVector(vec); -} + synchVector(vec); + } @@ -491,65 +491,6 @@ void synchMultiVector(DOFVector<T> &vec, std::vector<DOFVector<T>*> additionalVe synchVector(allDOFVectors); } - - /** \brief - * Synchronize \p vec using indicator vector \p additionalVecs, e.g. let - * additionalVecs[0] be 1 on all dofs the value should be taken from my rank - * and 0 elsewhere. 
- */ - // op(std::vector<T>& out, std::vector<T> const& in) - template<typename T, typename Operator> - void synchMultiVector(DOFVector<T> &vec, std::vector<DOFVector<T>*> additionalVecs, Operator op) - { - // get FE space and check equal FE space - const FiniteElemSpace *fe = vec.getFeSpace(); - MultiLevelDofComm& dofComm = dofComms[fe->getMesh()]; - - typedef typename std::vector<DOFVector<T>*>::iterator Iterator; - - int nLevels = levelData.getNumberOfLevels(); - for (int level = nLevels - 1; level >= 0; level--) - { - StdMpi < std::vector<std::vector<T> > > stdMpi(levelData.getMpiComm(level)); - - for (DofComm::Iterator it(dofComm[level].getRecvDofs(), fe); !it.end(); it.nextRank()) - { - std::vector<std::vector<T> > dofs; - dofs.reserve(it.getDofs().size()); - for (; !it.endDofIter(); it.nextDof()) - { - std::vector<T> values; - values.reserve(additionalVecs.size() + 1); - values.push_back( vec[it.getDofIndex()] ); - for (Iterator vecIt = additionalVecs.begin(); vecIt != additionalVecs.end(); ++vecIt ) - values.push_back( (**vecIt)[it.getDofIndex()] ); - dofs.push_back( values ); - } - stdMpi.send(it.getRank(), dofs); - } - - for (DofComm::Iterator it(dofComm[level].getSendDofs(), fe); !it.end(); it.nextRank()) - stdMpi.recv(it.getRank()); - - stdMpi.startCommunication(); - - for (DofComm::Iterator it(dofComm[level].getSendDofs(), fe); - !it.end(); it.nextRank()) { - for (; !it.endDofIter(); it.nextDof()) { - std::vector<T> values; - values.reserve(additionalVecs.size() + 1); - values.push_back( vec[it.getDofIndex()] ); - for (Iterator vecIt = additionalVecs.begin(); vecIt != additionalVecs.end(); ++vecIt ) - values.push_back( (**vecIt)[it.getDofIndex()] ); - - op(values, stdMpi.getRecvData(it.getRank())[it.getDofCounter()]); - } - } - } - - synchVector(vec); - } - /** \brief * This function must be used if the values of a DOFVector must be * synchronised over all ranks. 
That means, that each rank sends the @@ -1037,6 +978,7 @@ void synchMultiVector(DOFVector<T> &vec, std::vector<DOFVector<T>*> additionalVe static MeshDistributor *globalMeshDistributor; friend class ParallelDebug; + }; } } diff --git a/AMDiS/src/parallel/ParallelProblemStat.cc b/AMDiS/src/parallel/ParallelProblemStat.cc index f264e451650988fe8659fc6a015720ed7c0d4cdb..1196a378e555f5ef8255cf0c93bc3b552ab4ef59 100644 --- a/AMDiS/src/parallel/ParallelProblemStat.cc +++ b/AMDiS/src/parallel/ParallelProblemStat.cc @@ -19,29 +19,34 @@ ******************************************************************************/ -#include "parallel/ParallelProblemStat.h" -#include "parallel/ParallelSolver.h" -#include "parallel/MeshDistributor.h" -#include "parallel/MpiHelper.h" +#include <parallel/ParallelProblemStat.h> +#include <parallel/ParallelSolver.h> +#include <parallel/MeshDistributor.h> +#include <parallel/MpiHelper.h> -#include "parallel/ParallelMapper.h" -#include "solver/LinearSolverInterface.h" +#include <parallel/ParallelMapper.h> +#include <solver/LinearSolverInterface.h> #ifdef HAVE_PARALLEL_MTL4 - #include "parallel/PITL_Solver.h" - #include "solver/KrylovPreconditioner.h" + #include <parallel/PITL_Solver.h> + #include <solver/KrylovPreconditioner.h> #elif defined HAVE_PARALLEL_PETSC - #include "parallel/PetscSolverFeti.h" - #include "parallel/PetscSolverSchur.h" - #include "parallel/PetscSolverGlobalBlockMatrix.h" - #include "parallel/PetscSolverGlobalMatrix.h" - #include "parallel/PetscSolverNavierStokes.h" + #include <parallel/PetscSolverGlobalMatrix.h> + #include <parallel/PetscSolverGlobalBlockMatrix.h> + #if defined HAVE_PARALLEL_SOLVERS + #include <parallel/PetscSolverFeti.h> + #include <parallel/PetscSolverSchur.h> + #include <parallel/PetscSolverNavierStokes.h> + #include <parallel/PetscSolverNSCH.h> + #include <parallel/PetscSolverCahnHilliard.h> + #include <parallel/PetscSolverCahnHilliard2.h> + #endif #endif -#include "Global.h" +#include <Global.h> #if defined HAVE_PARALLEL_PETSC || defined HAVE_SEQ_PETSC - #include "solver/PetscTypes.h" + #include <solver/PetscTypes.h> #endif namespace AMDiS { namespace Parallel { @@ -179,12 +184,13 @@ namespace AMDiS { namespace Parallel { CreatorMap< LinearSolverInterface >::addCreator("p_petsc_" + it->first, creator); } - creator = new PetscSolverSchur::Creator; - CreatorMap< LinearSolverInterface >::addCreator("p_petsc_petsc-schur", creator); - creator = new PetscSolverGlobalBlockMatrix::Creator; CreatorMap< LinearSolverInterface >::addCreator("p_petsc_petsc-block", creator); +#if defined HAVE_PARALLEL_SOLVERS + creator = new PetscSolverSchur::Creator; + CreatorMap< LinearSolverInterface >::addCreator("p_petsc_petsc-schur", creator); + creator = new PetscSolverFeti::Creator; CreatorMap< LinearSolverInterface >::addCreator("p_petsc_petsc-feti", creator); @@ -192,6 +198,12 @@ namespace AMDiS { namespace Parallel { CreatorMap< LinearSolverInterface >::addCreator("p_petsc_petsc-navierstokes", creator); CreatorMap< LinearSolverInterface >::addCreator("petsc-navierstokes", creator); + CreatorMap< LinearSolverInterface >::addCreator("p_petsc-ch", new PetscSolverCahnHilliard::Creator); + CreatorMap< LinearSolverInterface >::addCreator("p_petsc-ch2", new PetscSolverCahnHilliard2::Creator); + + CreatorMap< LinearSolverInterface >::addCreator("p_petsc-nsch", new PetscSolverNSCH::Creator); +#endif + #elif defined HAVE_BDDC_ML creator = new BddcMlSolver::Creator; CreatorMap< LinearSolverInterface >::addCreator("bddcml", creator); diff --git 
a/AMDiS/src/parallel/PetscSolverFeti.cc b/AMDiS/src/parallel/PetscSolverFeti.cc index 6d6767cef8ad7359281fa7c8dbfb4715d5b8fa82..0bfe062a0b29563d1027baf5ea78a5cc4b4f28f3 100644 --- a/AMDiS/src/parallel/PetscSolverFeti.cc +++ b/AMDiS/src/parallel/PetscSolverFeti.cc @@ -1505,10 +1505,17 @@ namespace AMDiS { namespace Parallel { VecCreateMPI(meshDistributor->getMeshLevelData().getMpiComm(meshLevel), localDofMap.getRankDofs(), nGlobalOverallInterior, &(lumpedData->tmp_vec_b0)); +#if (PETSC_VERSION_MINOR >= 6) + MatCreateVecs(mat_duals_duals, PETSC_NULL, + &(lumpedData->tmp_vec_duals0)); + MatCreateVecs(mat_duals_duals, PETSC_NULL, + &(lumpedData->tmp_vec_duals1)); +#else MatGetVecs(mat_duals_duals, PETSC_NULL, &(lumpedData->tmp_vec_duals0)); MatGetVecs(mat_duals_duals, PETSC_NULL, &(lumpedData->tmp_vec_duals1)); +#endif for (int component = 0; component < static_cast<int>(componentSpaces.size()); component++) { @@ -1628,12 +1635,21 @@ namespace AMDiS { namespace Parallel { VecCreateMPI(meshDistributor->getMeshLevelData().getMpiComm(meshLevel), localDofMap.getRankDofs(), nGlobalOverallInterior, &(fetiDirichletPreconData.tmp_vec_b)); +#if (PETSC_VERSION_MINOR >= 6) + MatCreateVecs(mat_duals_duals, PETSC_NULL, + &(fetiDirichletPreconData.tmp_vec_duals0)); + MatCreateVecs(mat_duals_duals, PETSC_NULL, + &(fetiDirichletPreconData.tmp_vec_duals1)); + MatCreateVecs(mat_interior_interior, PETSC_NULL, + &(fetiDirichletPreconData.tmp_vec_interior)); +#else MatGetVecs(mat_duals_duals, PETSC_NULL, &(fetiDirichletPreconData.tmp_vec_duals0)); MatGetVecs(mat_duals_duals, PETSC_NULL, &(fetiDirichletPreconData.tmp_vec_duals1)); MatGetVecs(mat_interior_interior, PETSC_NULL, &(fetiDirichletPreconData.tmp_vec_interior)); +#endif TEST_EXIT_DBG(subDomainIsLocal) ("Should not happen, check usage of localDofMap!\n"); @@ -2483,14 +2499,23 @@ namespace AMDiS { namespace Parallel { createVec(primalDofMap, tmp_primal1); Vec tmp_lagrange; +#if (PETSC_VERSION_MINOR >= 6) + MatCreateVecs(mat_lagrange, PETSC_NULL, &tmp_lagrange); +#else MatGetVecs(mat_lagrange, PETSC_NULL, &tmp_lagrange); +#endif // === Create RHS and solution vectors. 
=== Vec vecRhs, vecSol; Vec vecRhsLagrange, vecSolLagrange; +#if (PETSC_VERSION_MINOR >= 6) + MatCreateVecs(mat_lagrange, PETSC_NULL, &vecRhsLagrange); + MatCreateVecs(mat_lagrange, PETSC_NULL, &vecSolLagrange); +#else MatGetVecs(mat_lagrange, PETSC_NULL, &vecRhsLagrange); MatGetVecs(mat_lagrange, PETSC_NULL, &vecSolLagrange); +#endif vecRhs = vecRhsLagrange; vecSol = vecSolLagrange; diff --git a/AMDiS/src/parallel/PetscSolverFetiOperators.cc b/AMDiS/src/parallel/PetscSolverFetiOperators.cc index 19a96e751ca7f527fd5f73d6345e25111d886901..963f5549e79af28a52bb339f421162f9b40eb449 100644 --- a/AMDiS/src/parallel/PetscSolverFetiOperators.cc +++ b/AMDiS/src/parallel/PetscSolverFetiOperators.cc @@ -404,7 +404,11 @@ namespace AMDiS { namespace Parallel { FetiData* data = static_cast<FetiData*>(ctx); Vec vec_mu0, vec_mu1; +#if (PETSC_VERSION_MINOR >= 6) + MatCreateVecs(*(data->mat_augmented_lagrange), PETSC_NULL, &vec_mu0); +#else MatGetVecs(*(data->mat_augmented_lagrange), PETSC_NULL, &vec_mu0); +#endif VecDuplicate(vec_mu0, &vec_mu1); MatMultTranspose(*(data->mat_lagrange), x, data->tmp_vec_b0); diff --git a/AMDiS/src/parallel/PetscSolverGlobalMatrix.cc b/AMDiS/src/parallel/PetscSolverGlobalMatrix.cc index 11c031a3b442b8c8176caf22278514bf34c21109..1e01f1fb21912e60853dbb654d79a7b90c0ed5e3 100644 --- a/AMDiS/src/parallel/PetscSolverGlobalMatrix.cc +++ b/AMDiS/src/parallel/PetscSolverGlobalMatrix.cc @@ -940,7 +940,11 @@ namespace AMDiS { namespace Parallel { if (test) { Vec tmp; +#if (PETSC_VERSION_MINOR >= 6) + MatCreateVecs(getMatInterior(), &tmp, PETSC_NULL); +#else MatGetVecs(getMatInterior(), &tmp, PETSC_NULL); +#endif MatMult(getMatInterior(), nullSpaceBasis, tmp); PetscReal n; VecNorm(tmp, NORM_2, &n); diff --git a/AMDiS/src/parallel/PetscSolverNSCH.cc b/AMDiS/src/parallel/PetscSolverNSCH.cc index de5cec017f23c536f749a89a780e3cbe6357fb60..eac2b70d551a5137d709c665a3697b998840c66b 100644 --- a/AMDiS/src/parallel/PetscSolverNSCH.cc +++ b/AMDiS/src/parallel/PetscSolverNSCH.cc @@ -37,7 +37,7 @@ namespace AMDiS { namespace Parallel { void *ctx; PCShellGetContext(pc, &ctx); - CahnHilliardData2* data = static_cast<CahnHilliardData2*>(ctx); + NSCHData* data = static_cast<NSCHData*>(ctx); /// extract vectors Vec b1, b2, b34, b5, x1, x2, x34, x5; diff --git a/AMDiS/src/parallel/PetscSolverNSCH.h b/AMDiS/src/parallel/PetscSolverNSCH.h index ebe44f2fdd92771e39eaafefbea00ac77e331ff7..b0db63f98df865d89eb984959cf7fc68b8365eaa 100644 --- a/AMDiS/src/parallel/PetscSolverNSCH.h +++ b/AMDiS/src/parallel/PetscSolverNSCH.h @@ -26,7 +26,7 @@ namespace AMDiS { namespace Parallel { - struct CahnHilliardData2 { + struct NSCHData { KSP kspMassCH, kspLaplaceCH, kspVelocity, kspLaplace, kspMass; Mat matMassCH, matMinusDeltaK, matGrad, matDiv, matConDif, matSchur, velocityMat; PetscSolverGlobalMatrix *globalMatrixSolver; @@ -169,7 +169,7 @@ namespace AMDiS { namespace Parallel { PetscSolver *massMatrixSolverCH, *laplaceMatrixSolverCH, *deltaKMatrixSolver; - CahnHilliardData2 matShellContext; + NSCHData matShellContext; double *eps, *delta; diff --git a/demo/CMakeLists.txt b/demo/CMakeLists.txt index 3c5aee9811fe8401bad4b94bfd015824cecc9884..b78d91f5c1e3fce397e2d7b178313302955aece2 100644 --- a/demo/CMakeLists.txt +++ b/demo/CMakeLists.txt @@ -4,13 +4,11 @@ cmake_minimum_required(VERSION 2.8) #find_package(AMDIS REQUIRED COMPONENTS umfpack ) find_package(AMDIS REQUIRED) -if (AMDIS_FOUND) - message(STATUS "AMDiS was found.") -# include(${AMDIS_USE_FILE}) - set(BASIS_LIBS AMDiS) - get_target_property(AMDIS_INCLUDE_DIRS 
AMDiS INTERFACE_INCLUDE_DIRECTORIES) - message("AMDiS-Include-Dirs: ${AMDIS_INCLUDE_DIRS}") -endif (AMDIS_FOUND) +if(AMDIS_FOUND) + message(STATUS "AMDiS was found.") + include(${AMDIS_USE_FILE}) + SET(BASIS_LIBS ${AMDIS_LIBRARIES}) +endif(AMDIS_FOUND) set(ball src/ball.cc) set(bunny src/bunny.cc)
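
Editor's note (not part of the patch): a minimal sketch of a downstream CMakeLists.txt consuming an AMDiS installation built with the new cmake3 files above. The project name "myapp" and its source file are placeholders; the sketch assumes find_package(AMDIS) loads the AMDISConfig.cmake that this patch installs to share/amdis/, which defines the INTERFACE target AMDiS (an alias of amdis_base) carrying the include directories, DEBUG/HAVE_* definitions, Boost and MTL4 usage requirements.

# Hypothetical downstream CMakeLists.txt (sketch only, not shipped with AMDiS)
cmake_minimum_required(VERSION 3.2)
project(myapp CXX)

# Assumption: AMDIS_DIR points at <install-prefix>/share/amdis, where
# AMDISConfig.cmake and the find_*.cmake helpers are installed by this patch.
find_package(AMDIS REQUIRED)

add_executable(myapp src/myapp.cc)

# The cmake3 config exports the INTERFACE target 'AMDiS', so linking that single
# target pulls in all usage requirements. With the classical config one would
# instead use include(${AMDIS_USE_FILE}) and ${AMDIS_LIBRARIES}, as the updated
# demo/CMakeLists.txt does.
target_link_libraries(myapp AMDiS)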