Commit a30328a6 authored by Simon Praetorius

CMake installation; some corrections in send and recv with std::vector

parent 92c31401
project("mpi14")
+cmake_minimum_required(VERSION 3.1)
+set(mpi14_VERSION 0.1.0)
find_package(MPI REQUIRED)
+find_package(Boost 1.56 REQUIRED iostreams serialization)
# the parameter --enable-new-dtags causes a linker problem, i.e. some libraries are
# linked without an rpath (or any other path information) and can not be found while
@@ -9,9 +12,25 @@ find_package(MPI REQUIRED)
string(REPLACE "-Wl,--enable-new-dtags" "" MY_MPI_CXX_LINK_FLAGS " ${MPI_CXX_LINK_FLAGS} ")
string(STRIP "${MY_MPI_CXX_LINK_FLAGS}" MY_MPI_CXX_LINK_FLAGS)
-find_package(Boost 1.56 REQUIRED iostreams serialization)
-add_executable(test_mpi test/test_mpi.cc)
-target_include_directories(test_mpi PRIVATE src/ ${MPI_CXX_INCLUDE_PATH} ${Boost_INCLUDE_DIR})
-target_compile_options(test_mpi PRIVATE ${MPI_CXX_COMPILE_FLAGS} -std=c++14 -DHAS_COMPRESSION=1)
-target_link_libraries(test_mpi PRIVATE ${MY_MPI_CXX_LINK_FLAGS} ${MPI_CXX_LIBRARIES} ${Boost_LIBRARIES})
+set(MPI14_INCLUDE_DIRS ${CMAKE_SOURCE_DIR}/src/ ${MPI_CXX_INCLUDE_PATH} ${Boost_INCLUDE_DIR})
+set(MPI14_CXX_FLAGS ${MPI_CXX_COMPILE_FLAGS} -std=c++14 -DHAS_COMPRESSION=1)
+set(MPI14_LIBRARIES ${MY_MPI_CXX_LINK_FLAGS} ${MPI_CXX_LIBRARIES} ${Boost_LIBRARIES})
+add_subdirectory(src)
+add_subdirectory(test)
+set(INCLUDE_INSTALL_DIR include/)
+include(CMakePackageConfigHelpers)
+configure_package_config_file(mpi14-config.cmake.in
+  ${CMAKE_CURRENT_BINARY_DIR}/mpi14-config.cmake
+  INSTALL_DESTINATION share/cmake/
+  PATH_VARS INCLUDE_INSTALL_DIR)
+write_basic_package_version_file(
+  ${CMAKE_CURRENT_BINARY_DIR}/mpi14-config-version.cmake
+  VERSION ${mpi14_VERSION}
+  COMPATIBILITY SameMajorVersion)
+install(FILES ${CMAKE_CURRENT_BINARY_DIR}/mpi14-config.cmake
+  ${CMAKE_CURRENT_BINARY_DIR}/mpi14-config-version.cmake
+  DESTINATION share/cmake)
set(mpi14_VERSION 0.1.0)
@PACKAGE_INIT@
if (${CMAKE_VERSION} VERSION_LESS "3.1")
message(FATAL_ERROR "This configuration file requires CMake version >= 3.1")
endif ()
set_and_check(MPI14_INCLUDE_DIR "@PACKAGE_INCLUDE_INSTALL_DIR@")
add_library(mpi14 INTERFACE)
set(CMAKE_CXX_STANDARD 14)
set(CMAKE_CXX_STANDARD_REQUIRED ON)
find_package(MPI REQUIRED)
find_package(Boost 1.56 REQUIRED iostreams serialization)
# the parameter --enable-new-dtags causes a linker problem, i.e. some libraries are
# linked without an rpath (or any other path information) and can not be found while
# running the executable. The hack below removes this flag manually from the linker flags.
string(REPLACE "-Wl,--enable-new-dtags" "" MY_MPI_CXX_LINK_FLAGS " ${MPI_CXX_LINK_FLAGS} ")
string(STRIP "${MY_MPI_CXX_LINK_FLAGS}" MY_MPI_CXX_LINK_FLAGS)
target_include_directories(mpi14 INTERFACE ${MPI14_INCLUDE_DIR} ${MPI_CXX_INCLUDE_PATH} ${Boost_INCLUDE_DIR})
target_compile_options(mpi14 INTERFACE ${MPI_CXX_COMPILE_FLAGS} -DHAS_COMPRESSION=1)
target_link_libraries(mpi14 INTERFACE ${MY_MPI_CXX_LINK_FLAGS} ${MPI_CXX_LIBRARIES} ${Boost_LIBRARIES})
add_subdirectory(mpi14)
#pragma once
#include <chrono>
#include <future>
namespace mpi14
{
class FutureBase
{
public:
virtual ~FutureBase() {}
virtual bool test() = 0;
void wait()
{
while( !test() ) ; // busy-wait until the operation completes
}
template <class Rep, class Period>
std::future_status wait_for(std::chrono::duration<Rep,Period> const& timeout_duration)
{
std::chrono::system_clock::time_point t = std::chrono::system_clock::now();
while( !test() ) {
if ((std::chrono::system_clock::now() - t) >= timeout_duration)
break;
}
if (test())
return std::future_status::ready;
else
return std::future_status::timeout;
}
template <class Clock, class Duration>
std::future_status wait_until(std::chrono::time_point<Clock,Duration> const& timeout_time)
{
return wait_for(timeout_time - std::chrono::system_clock::now());
}
};
} // end namespace mpi14
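A short usage sketch of this polling interface (illustrative only: it assumes the Communicator, Environment, and irecv from the other headers in this commit, and the message size is made up):

#include <chrono>
#include <iostream>
#include <vector>
#include <mpi14/Environment.hpp>
#include <mpi14/Communicator.hpp>

int main(int argc, char** argv)
{
  mpi14::Environment env(argc, argv);
  mpi14::Communicator world;
  std::vector<int> data;
  if (world.rank() == 0) {
    auto future = world.irecv(data, /*from=*/1);  // FutureBase-derived future (FutureVector<int>)
    // poll with a timeout instead of blocking unconditionally
    while (future.wait_for(std::chrono::milliseconds(10)) != std::future_status::ready)
      ; // useful work could be overlapped here
    std::cout << "received " << data.size() << " ints\n";
  } else if (world.rank() == 1) {
    world.send(std::vector<int>(5, 42), 0);
  }
}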
install(FILES
Collective.hpp
Common.hpp
Communicator.hpp
Environment.hpp
FutureBase.hpp
FutureSerialization.hpp
FutureVector.hpp
Request.hpp
RequestChain.hpp
Serialization.hpp
Type_Traits.hpp
DESTINATION include/mpi14/
)
add_subdirectory(impl)
#pragma once
#include "Common.hpp"
#include "Communicator.hpp"
#include "Type_Traits.hpp"
namespace mpi14
{
template <class T, class Operation,
REQUIRES( is_mpi_type<T> )>
void all_reduce(Communicator& comm, T const& in, T& out, Operation)
{
MPI_Allreduce(&in, &out, 1, type_to_mpi<T>, op_to_mpi<Operation>, comm);
}
template <class T, class Operation,
REQUIRES( is_mpi_type<T> )>
void all_reduce(Communicator& comm, T& inout, Operation)
{
MPI_Allreduce(MPI_IN_PLACE, &inout, 1, type_to_mpi<T>, op_to_mpi<Operation>, comm);
}
} // end namespace mpi14
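For example, combining the two overloads with the functor tags from Type_Traits.hpp (a sketch, assuming a Communicator named world as in the tests below):

int local = world.rank(), min_rank = 0;
mpi14::all_reduce(world, local, min_rank, mpi14::minimum{}); // out-of-place: min over all ranks
mpi14::all_reduce(world, local, std::plus<>{});              // in-place: local becomes the global sum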
@@ -45,30 +45,30 @@ namespace mpi14
// send mpi datatype
template <class Data,
REQUIRES( is_mpi_type<Data> )>
-MPI_Status send(Data const& data, int to, int tag = 0) const;
+void send(Data const& data, int to, int tag = 0) const;
// send array of mpi datatypes
template <class T,
REQUIRES( is_mpi_type<T> )>
-MPI_Status send(T const* data, std::size_t size, int to, int tag = 0) const;
+void send(T const* data, std::size_t size, int to, int tag = 0) const;
template <class T, std::size_t N,
REQUIRES( is_mpi_type<T> )>
-MPI_Status send(T const (&data)[N], int to, int tag = 0) const
+void send(T const (&data)[N], int to, int tag = 0) const
{
send(data, N, to, tag);
}
template <class T, std::size_t N,
REQUIRES( is_mpi_type<T> )>
-MPI_Status send(std::array<T,N> const& array, int to, int tag = 0) const
+void send(std::array<T,N> const& array, int to, int tag = 0) const
{
send(array.data(), N, to, tag);
}
template <class T,
-REQUIRES( not is_mpi_type<T> )>
-MPI_Status send(std::vector<T> const& data, int to, int tag = 0) const;
+REQUIRES( is_mpi_type<T> )>
+void send(std::vector<T> const& vec, int to, int tag = 0) const;
// send complex datatype:
// 1. create a binary representation of data, store it in a buffer
@@ -76,7 +76,7 @@
// 3. send buffer
template <class Data,
REQUIRES( not is_mpi_type<Data> )>
-MPI_Status send(Data const& data, int to, int tag = 0) const;
+void send(Data const& data, int to, int tag = 0) const;
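The matching blocking receive is outside this hunk; a plausible counterpart mirrors the three steps in reverse (probe for the message, receive the byte buffer, deserialize). In this sketch, serialization::restore is an assumed inverse of serialization::store:

template <class Data,
          REQUIRES( not is_mpi_type<Data> )>
MPI_Status recv(Data& data, int from, int tag = 0) const
{
  MPI_Status status;
  MPI_Probe(from, tag, comm_, &status);      // wait until a matching message arrives
  int size = 0;
  MPI_Get_count(&status, MPI_BYTE, &size);   // length of the serialized buffer
  std::vector<char> buffer(size);
  MPI_Recv(buffer.data(), size, MPI_BYTE, from, tag, comm_, MPI_STATUS_IGNORE);
  serialization::restore(buffer, data);      // assumed inverse of serialization::store
  return status;
}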
// -------------------------------------------------------------------------------------
@@ -167,7 +167,7 @@ namespace mpi14
// 3. receive data into vector
template <class T,
REQUIRES( is_mpi_type<T> )>
-FutureVector<T> irecv(std::vector<T>& data, int from, int tag) const
+FutureVector<T> irecv(std::vector<T>& data, int from, int tag = 0) const
{
return {data, from, tag, comm_};
}
@@ -176,7 +176,7 @@ namespace mpi14
// Wait for data, by calling Future::test() or Future::wait().
template <class Data,
REQUIRES( not is_mpi_type<Data> )>
-FutureSerialization<Data> irecv(Data& data, int from, int tag) const
+FutureSerialization<Data> irecv(Data& data, int from, int tag = 0) const
{
return {data, from, tag, comm_};
}
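For example (a sketch; std::list<double> takes the serialization path, as in test_mpi.cc):

std::list<double> values;
auto future = world.irecv(values, /*from=*/1); // returns a FutureSerialization<std::list<double>>
// ... other work can overlap with the transfer here ...
future.wait();                                 // block until values is received and deserialized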
@@ -200,4 +200,4 @@ namespace mpi14
} // end namespace mpi14
#include "impl/Communicator.hpp"
#include "impl/Communicator.impl.hpp"
#pragma once
#include <chrono>
#include <list>
#include <future>
namespace mpi14
{
class FutureBase
{
public:
virtual ~FutureBase() {}
virtual bool test() = 0;
void wait()
{
while( !test() ) ; // busy-wait until the operation completes
}
template <class Rep, class Period>
std::future_status wait_for(std::chrono::duration<Rep,Period> const& timeout_duration)
{
std::chrono::system_clock::time_point t = std::chrono::system_clock::now();
while( !test() ) {
if ((std::chrono::system_clock::now() - t) >= timeout_duration)
break;
}
if (test())
return std::future_status::ready;
else
return std::future_status::timeout;
}
template <class Clock, class Duration>
std::future_status wait_until(std::chrono::time_point<Clock,Duration> const& timeout_time)
{
return wait_for(timeout_time - std::chrono::system_clock::now());
}
};
// Blocks until all communication operations associated with active handles in the range complete.
template <class FutureIter>
void wait_all(FutureIter first, FutureIter last)
......
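The body of wait_all is collapsed in this view. A simple implementation consistent with FutureBase::test() (and with the #include <list> added above) could poll the handles round-robin; the following is a sketch, not necessarily the committed code:

template <class FutureIter>
void wait_all(FutureIter first, FutureIter last)
{
  std::list<FutureIter> pending;
  for (auto it = first; it != last; ++it)
    pending.push_back(it);
  // repeatedly test the remaining futures until every one has completed
  while (!pending.empty())
    pending.remove_if([](FutureIter it) { return it->test(); });
}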
#pragma once
+#include <functional>
+#include <type_traits>
#include "Common.hpp"
namespace mpi14
@@ -58,5 +61,34 @@ namespace mpi14
template <class T>
constexpr MPI_Datatype type_to_mpi = aux::type_to_mpi<T>::value;
+// additional functor-tags
+struct minimum {};
+struct maximum {};
+namespace aux
+{
+template <class T> using always_false = std::false_type;
+template <class Op>
+struct op_to_mpi
+{
+static_assert( always_false<Op>::value, "Op is not an MPI operation!" );
+};
+template <> struct op_to_mpi<minimum> { static constexpr MPI_Op value = MPI_MIN; };
+template <> struct op_to_mpi<maximum> { static constexpr MPI_Op value = MPI_MAX; };
+template <> struct op_to_mpi<std::plus<>> { static constexpr MPI_Op value = MPI_SUM; };
+template <> struct op_to_mpi<std::multiplies<>> { static constexpr MPI_Op value = MPI_PROD; };
+template <> struct op_to_mpi<std::logical_and<>> { static constexpr MPI_Op value = MPI_LAND; };
+template <> struct op_to_mpi<std::logical_or<>> { static constexpr MPI_Op value = MPI_LOR; };
+template <> struct op_to_mpi<std::bit_and<>> { static constexpr MPI_Op value = MPI_BAND; };
+template <> struct op_to_mpi<std::bit_or<>> { static constexpr MPI_Op value = MPI_BOR; };
+template <> struct op_to_mpi<std::bit_xor<>> { static constexpr MPI_Op value = MPI_BXOR; };
+} // end namespace aux
+template <class T>
+constexpr MPI_Op op_to_mpi = aux::op_to_mpi<T>::value;
} // end namespace mpi14
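The static_assert in the primary template turns an unsupported functor into a readable compile-time error; for example (my_op is hypothetical):

struct my_op {};
MPI_Op sum = mpi14::op_to_mpi<std::plus<>>; // ok: MPI_SUM
// MPI_Op bad = mpi14::op_to_mpi<my_op>;    // error: "Op is not an MPI operation!"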
install(FILES
Communicator.impl.hpp
DESTINATION include/mpi14/impl/
)
@@ -5,32 +5,26 @@ namespace mpi14 {
// send mpi datatype
template <class Data,
REQUIRES( is_mpi_type<Data> )>
-MPI_Status Communicator::send(Data const& data, int to, int tag) const
+void Communicator::send(Data const& data, int to, int tag) const
{
-MPI_Status status;
-MPI_Send(&data, 1, type_to_mpi<Data>, to, tag, comm_, &status);
-return status;
+MPI_Send(&data, 1, type_to_mpi<Data>, to, tag, comm_);
}
// send array of mpi datatypes
template <class Data,
REQUIRES( is_mpi_type<Data> )>
-MPI_Status Communicator::send(Data const* data, std::size_t size, int to, int tag) const
+void Communicator::send(Data const* data, std::size_t size, int to, int tag) const
{
-MPI_Status status;
-MPI_Send(data, int(size), type_to_mpi<Data>, to, tag, comm_, &status);
-return status;
+MPI_Send(data, int(size), type_to_mpi<Data>, to, tag, comm_);
}
template <class T,
-REQUIRES( not is_mpi_type<T> )>
-MPI_Status Communicator::send(std::vector<T> const& vec, int to, int tag) const
+REQUIRES( is_mpi_type<T> )>
+void Communicator::send(std::vector<T> const& vec, int to, int tag) const
{
-MPI_Status status;
-MPI_Send(vec.data(), int(vec.size()), type_to_mpi<T>, to, tag, comm_, &status);
-return status;
+MPI_Send(vec.data(), int(vec.size()), type_to_mpi<T>, to, tag, comm_);
}
@@ -40,13 +34,10 @@ MPI_Status Communicator::send(std::vector<T> const& vec, int to, int tag) const
// 3. send buffer
template <class Data,
REQUIRES( not is_mpi_type<Data> )>
-MPI_Status Communicator::send(Data const& data, int to, int tag) const
+void Communicator::send(Data const& data, int to, int tag) const
{
-auto buffer = store(data);
-MPI_Status status;
-MPI_Send(buffer.data(), int(buffer.size()), MPI_BYTE, to, tag, comm_, &status);
-return status;
+auto buffer = serialization::store(data);
+MPI_Send(buffer.data(), int(buffer.size()), MPI_BYTE, to, tag, comm_);
}
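serialization::store itself is not part of this diff (it lives in Serialization.hpp). Given the Boost.Serialization headers used by the tests, a plausible shape is sketched below; the HAS_COMPRESSION=1 flag in the CMake files suggests the real version may additionally pipe the archive through Boost.Iostreams filters:

#include <sstream>
#include <string>
#include <vector>
#include <boost/archive/binary_oarchive.hpp>

namespace mpi14 { namespace serialization {

// Sketch only: turn an arbitrary Boost-serializable object into a byte buffer.
template <class Data>
std::vector<char> store(Data const& data)
{
  std::ostringstream out(std::ios::binary);
  boost::archive::binary_oarchive ar(out);
  ar << data;                   // write a binary representation of data
  std::string s = out.str();
  return {s.begin(), s.end()};  // contiguous bytes, ready for MPI_Send as MPI_BYTE
}

}} // end namespaces serialization, mpi14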
// -------------------------------------------------------------------------------------
@@ -153,7 +144,7 @@ MPI_Status Communicator::recv(Data* data, std::size_t size, int from, int tag) const
// receive array of mpi datatypes
template <class T,
REQUIRES( is_mpi_type<T> )>
-MPI_Status Communicator::recv(std::vector<T>& data, int from, int tag) const
+MPI_Status Communicator::recv(std::vector<T>& vec, int from, int tag) const
{
MPI_Status status;
MPI_Probe(from, tag, comm_, &status);
@@ -161,8 +152,8 @@ MPI_Status Communicator::recv(std::vector<T>& data, int from, int tag) const
int size = 0;
MPI_Get_count(&status, type_to_mpi<T>, &size);
-data.resize(size);
-MPI_Recv(data.data(), size, type_to_mpi<T>, from, tag, comm_, MPI_STATUS_IGNORE);
+vec.resize(size);
+MPI_Recv(vec.data(), size, type_to_mpi<T>, from, tag, comm_, MPI_STATUS_IGNORE);
return status;
}
......
add_executable(test_mpi test_mpi.cc)
target_include_directories(test_mpi PRIVATE ${MPI14_INCLUDE_DIRS})
target_compile_options(test_mpi PRIVATE ${MPI14_CXX_FLAGS})
target_link_libraries(test_mpi PRIVATE ${MPI14_LIBRARIES})
add_executable(test_verlet test_verlet.cc)
target_include_directories(test_verlet PRIVATE ${MPI14_INCLUDE_DIRS})
target_compile_options(test_verlet PRIVATE ${MPI14_CXX_FLAGS})
target_link_libraries(test_verlet PRIVATE ${MPI14_LIBRARIES})
add_executable(test_verlet2 test_verlet2.cc)
target_include_directories(test_verlet2 PRIVATE ${MPI14_INCLUDE_DIRS})
target_compile_options(test_verlet2 PRIVATE ${MPI14_CXX_FLAGS})
target_link_libraries(test_verlet2 PRIVATE ${MPI14_LIBRARIES})
@@ -5,9 +5,9 @@
#include <boost/serialization/vector.hpp>
#include <boost/serialization/list.hpp>
#include "Environment.hpp"
#include "Communicator.hpp"
#include "Collective.hpp"
#include <mpi14/Environment.hpp>
#include <mpi14/Communicator.hpp>
#include <mpi14/Collective.hpp>
int main(int argc, char** argv)
{
@@ -28,6 +28,8 @@ int main(int argc, char** argv)
std::cout << "[" << rank << "]: in.size = " << in.size() << ", out.size = " << out.size() << "\n";
+int result = rank;
+mpi14::all_reduce(world, result, mpi14::minimum{});
std::list<double> in2;
for (int i = 0; i < rank+5; ++i)
......
#include <iostream>
#include <vector>
#include <list>
#include <algorithm>
#include <numeric>
#include <boost/serialization/vector.hpp>
#include <boost/serialization/list.hpp>
#include <mpi14/Environment.hpp>
#include <mpi14/Communicator.hpp>
#include <mpi14/Collective.hpp>
template <class T>
bool compare(T lhs, T rhs) { return lhs == rhs; }
template <class T, class A>
bool compare(std::vector<T,A> const& lhs, std::vector<T,A> const& rhs)
{
return lhs.size() == rhs.size() && std::inner_product(lhs.begin(), lhs.end(), rhs.begin(), true,
[](bool l, bool r) { return l && r; }, [](T const& l, T const& r) { return compare(l, r); });
}
int main(int argc, char** argv)
{
mpi14::Environment env(argc, argv);
mpi14::Communicator world;
int rank = world.rank();
std::vector<int> in;
in.resize(10);
std::fill(in.begin(), in.end(), 42);
std::vector<int> out;
if (rank == 0) {
out = in;
for (int r = 1; r < world.size(); ++r) {
auto req = world.isend(in, r);
MPI_Request_free(req.get());
}
} else {
world.recv(out, 0);
}
std::cout << "[" << rank << "] in == out ? ---> " << compare(in,out) << "\n";
}
#include <iostream>
#include <vector>
#include <list>
#include <algorithm>
#include <numeric>
#include <mpi14/Environment.hpp>
#include <mpi14/Communicator.hpp>
#include <mpi14/Collective.hpp>
int main(int argc, char** argv)
{
mpi14::Environment env(argc, argv);
mpi14::Communicator world;
int rank = world.rank();
std::vector<int> in;
in.resize(10);
std::fill(in.begin(), in.end(), rank + 42);
std::vector<std::vector<int>> outs(world.size());
if (rank == 0) {
outs[0] = in;
using Future = mpi14::FutureVector<int>;
std::vector<Future> requests;
for (int r = 1; r < world.size(); ++r)
requests.emplace_back( world.irecv(outs[r], r) );
mpi14::wait_all(requests.begin(), requests.end());
} else {
world.send(in, 0);
}
if (rank == 0) {
std::cout << "[" << rank << "] sizes = {" << outs[0].size();
for (std::size_t i = 1; i < outs.size(); ++i)
std::cout << ", " << outs[i].size();
std::cout << "}\n";
}
}