Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Run triqs without MPI #6

Merged
9 commits merged on Apr 21, 2021
146 changes: 119 additions & 27 deletions c++/mpi/mpi.hpp
Original file line number Diff line number Diff line change
Expand Up @@ -31,6 +31,17 @@ namespace mpi {

// ------------------------------------------------------------

/* Helper flag: true iff an MPI runtime environment was detected at startup.
 * Detection is via environment variables set by common launchers:
 *   - OMPI_COMM_WORLD_RANK : OpenMPI
 *   - PMI_RANK             : MPICH and Intel MPI (Cray MPICH is MPICH-based,
 *                            so it should be covered as well)
 */
static const bool has_env = []() {
  // Return the condition directly instead of if/else returning true/false.
  return std::getenv("OMPI_COMM_WORLD_RANK") != nullptr or std::getenv("PMI_RANK") != nullptr;
}();

/// Environment must be initialized in C++
struct environment {

Expand All @@ -56,32 +67,49 @@ namespace mpi {
[[nodiscard]] MPI_Comm get() const noexcept { return _com; }

/// Rank of the calling process in this communicator.
/// Returns 0 when no MPI runtime environment was detected (serial fallback).
[[nodiscard]] int rank() const {
  if (has_env) {
    int num;
    MPI_Comm_rank(_com, &num);
    return num;
  } else
    return 0;
}

/// Number of processes in this communicator.
/// Returns 1 when no MPI runtime environment was detected (serial fallback).
[[nodiscard]] int size() const {
  if (has_env) {
    int num;
    MPI_Comm_size(_com, &num);
    return num;
  } else
    return 1;
}

/// Split this communicator by color/key (MPI_Comm_split semantics).
/// Without an MPI runtime a default-constructed communicator is returned.
[[nodiscard]] communicator split(int color, int key = 0) const {
  if (has_env) {
    communicator c;
    MPI_Comm_split(_com, color, key, &c._com);
    return c;
  } else
    // TODO split should not be done without MPI?
    // Use the default constructor; `return 0;` would rely on a nonportable
    // implicit conversion (MPI_Comm is a pointer type under OpenMPI).
    return {};
}

/// Terminate all processes with the given error code.
/// Falls back to std::abort() when no MPI runtime is present.
void abort(int error_code) {
  if (has_env)
    MPI_Abort(_com, error_code);
  else
    std::abort();
}

#ifdef BOOST_MPI_HPP
// Conversion to and from boost communicator, Keep for backward compatibility
inline operator boost::mpi::communicator() const { return boost::mpi::communicator(_com, boost::mpi::comm_duplicate); }
inline communicator(boost::mpi::communicator c) : _com(c) {}
#endif

/// Synchronize all processes in the communicator; no-op without an MPI runtime.
void barrier() const {
  if (has_env) { MPI_Barrier(_com); }
}
};

// ----------------------------------------
Expand All @@ -104,53 +132,113 @@ namespace mpi {
MPI_Op op{};
};

// Trait: true iff T is a lazy<Tag, U> deferred-MPI-operation type.
// NOTE(review): the PR moves this trait into namespace details below —
// confirm this top-level copy is the removed side of the diff.
template <typename T>
inline constexpr bool is_mpi_lazy = false;

template <typename Tag, typename T>
inline constexpr bool is_mpi_lazy<lazy<Tag, T>> = true;

// ----------------------------------------
// ------- general functions -------
// ----------------------------------------

template <typename T>
[[gnu::always_inline]] inline decltype(auto) broadcast(T &&x, communicator c = {}, int root = 0) {
return mpi_broadcast(std::forward<T>(x), c, root);
[[gnu::always_inline]] inline void broadcast(T &x, communicator c = {}, int root = 0) {
static_assert(not std::is_const_v<T>, "mpi::broadcast cannot be called on const objects");
if (has_env) mpi_broadcast(x, c, root);
}

namespace details {

  // Trait: detects whether a type is a lazy<Tag, T> deferred MPI expression.
  template <typename T>
  inline constexpr bool is_mpi_lazy = false;

  template <typename Tag, typename T>
  inline constexpr bool is_mpi_lazy<lazy<Tag, T>> = true;

  // Trait: detects whether a type is a specialization of std::vector.
  template <typename T>
  inline constexpr bool is_std_vector = false;

  template <typename T>
  inline constexpr bool is_std_vector<std::vector<T>> = true;

  // Turn a value of type V into a T: vectors are converted element by
  // element (recursively), every other type is constructed directly.
  template <typename T, typename V>
  T convert(V v) {
    if constexpr (is_std_vector<T>) {
      T result;
      result.reserve(v.size());
      for (auto &item : v) result.emplace_back(convert<typename T::value_type>(std::move(item)));
      return result;
    } else
      return T{std::move(v)};
  }
} // namespace details

/// Reduce x towards root (or all ranks if all == true) with operation op.
/// Lazy result types are forwarded untouched; otherwise, when no MPI runtime
/// is present, x is simply converted to the reduction's result type.
template <typename T>
[[gnu::always_inline]] inline decltype(auto) reduce(T &&x, communicator c = {}, int root = 0, bool all = false, MPI_Op op = MPI_SUM) {
  using r_t = decltype(mpi_reduce(std::forward<T>(x), c, root, all, op));

  if constexpr (details::is_mpi_lazy<r_t>) {
    return mpi_reduce(std::forward<T>(x), c, root, all, op);
  } else {
    if (has_env)
      return mpi_reduce(std::forward<T>(x), c, root, all, op);
    else
      return details::convert<r_t>(std::forward<T>(x));
  }
}

template <typename T>
[[gnu::always_inline]] inline void reduce_in_place(T &&x, communicator c = {}, int root = 0, bool all = false, MPI_Op op = MPI_SUM) {
return mpi_reduce_in_place(std::forward<T>(x), c, root, all, op);
[[gnu::always_inline]] inline void reduce_in_place(T &x, communicator c = {}, int root = 0, bool all = false, MPI_Op op = MPI_SUM) {
static_assert(not std::is_const_v<T>, "In-place mpi functions cannot be called on const objects");
if (has_env) mpi_reduce_in_place(x, c, root, all, op);
}

/// Scatter x from root over communicator c.
/// Lazy result types are forwarded untouched; otherwise, when no MPI runtime
/// is present, x is simply converted to the scatter's result type.
template <typename T>
[[gnu::always_inline]] inline decltype(auto) scatter(T &&x, mpi::communicator c = {}, int root = 0) {
  using r_t = decltype(mpi_scatter(std::forward<T>(x), c, root));

  if constexpr (details::is_mpi_lazy<r_t>) {
    return mpi_scatter(std::forward<T>(x), c, root);
  } else {
    // if it does not have a mpi lazy type, check manually if triqs is run with MPI
    if (has_env)
      return mpi_scatter(std::forward<T>(x), c, root);
    else
      return details::convert<r_t>(std::forward<T>(x));
  }
}

/// Gather x towards root (or all ranks if all == true) over communicator c.
/// Lazy result types are forwarded untouched; otherwise, when no MPI runtime
/// is present, x is simply converted to the gather's result type.
template <typename T>
[[gnu::always_inline]] inline decltype(auto) gather(T &&x, mpi::communicator c = {}, int root = 0, bool all = false) {
  using r_t = decltype(mpi_gather(std::forward<T>(x), c, root, all));

  if constexpr (details::is_mpi_lazy<r_t>) {
    return mpi_gather(std::forward<T>(x), c, root, all);
  } else {
    // if it does not have a mpi lazy type, check manually if triqs is run with MPI
    if (has_env)
      return mpi_gather(std::forward<T>(x), c, root, all);
    else
      return details::convert<r_t>(std::forward<T>(x));
  }
}

/// All-reduce: reduce x with op and distribute the result to every rank
/// (delegates to reduce with all = true; root is irrelevant).
template <typename T>
[[gnu::always_inline]] inline decltype(auto) all_reduce(T &&x, communicator c = {}, MPI_Op op = MPI_SUM) {
return reduce(std::forward<T>(x), c, 0, true, op);
}

/// In-place all-reduce of x with op (delegates to reduce_in_place with all = true).
template <typename T>
[[gnu::always_inline]] inline void all_reduce_in_place(T &&x, communicator c = {}, MPI_Op op = MPI_SUM) {
  reduce_in_place(std::forward<T>(x), c, 0, true, op);
}

/// All-gather: gather x and distribute the result to every rank
/// (delegates to gather with all = true; root is irrelevant).
template <typename T>
[[gnu::always_inline]] inline decltype(auto) all_gather(T &&x, communicator c = {}) {
return gather(std::forward<T>(x), c, 0, true);
}

/// Deprecated alias kept for backward compatibility; use mpi::all_reduce instead.
template <typename T>
[[gnu::always_inline]] [[deprecated("mpi_all_reduce is deprecated, please use mpi::all_reduce instead")]] inline decltype(auto)
mpi_all_reduce(T &&x, communicator c = {}, MPI_Op op = MPI_SUM) {
return reduce(std::forward<T>(x), c, 0, true, op);
}

template <typename T>
[[gnu::always_inline]] [[deprecated("mpi_all_gather is deprecated, please use mpi::all_gather instead")]] inline decltype(auto)
mpi_all_gather(T &&x, communicator c = {}) {
Expand Down Expand Up @@ -332,9 +420,13 @@ namespace mpi {

// Gtest entry point for the test suite: when an MPI runtime is detected
// (mpi::has_env), set up the MPI environment for the duration of the run;
// otherwise run the very same tests serially.
// NOTE(review): the diff interleaves old/new bodies; reconstructed so that
// ::testing::InitGoogleTest runs before RUN_ALL_TESTS in BOTH branches —
// confirm against the merged upstream file.
#define MPI_TEST_MAIN                                                                                                                                \
  int main(int argc, char **argv) {                                                                                                                  \
    if (mpi::has_env) {                                                                                                                              \
      mpi::environment env(argc, argv);                                                                                                              \
      std::cout << "MPI environment detected\n";                                                                                                     \
      ::testing::InitGoogleTest(&argc, argv);                                                                                                        \
      return RUN_ALL_TESTS();                                                                                                                        \
    } else {                                                                                                                                         \
      ::testing::InitGoogleTest(&argc, argv);                                                                                                        \
      return RUN_ALL_TESTS();                                                                                                                        \
    }                                                                                                                                                \
  }

} // namespace mpi
34 changes: 32 additions & 2 deletions test/c++/CMakeLists.txt
Original file line number Diff line number Diff line change
Expand Up @@ -7,6 +7,11 @@ endforeach()
# List of all tests
file(GLOB_RECURSE all_tests RELATIVE ${CMAKE_CURRENT_SOURCE_DIR} *.cpp)

# List of tests that must also run WITHOUT an MPI launcher (serial fallback path)
file(GLOB_RECURSE nompi_tests RELATIVE ${CMAKE_CURRENT_SOURCE_DIR} *.cpp)
# Remove tests that explicitly require a real MPI runtime
list(REMOVE_ITEM nompi_tests mpi_custom.cpp mpi_monitor.cpp)

# ========= OpenMP Dependency ==========

find_package(OpenMP REQUIRED COMPONENTS CXX)
Expand All @@ -23,8 +28,7 @@ foreach(test ${all_tests})
target_link_libraries(${test_name} ${PROJECT_NAME}::${PROJECT_NAME}_c openmp ${PROJECT_NAME}_warnings gtest_main)
set_property(TARGET ${test_name} PROPERTY RUNTIME_OUTPUT_DIRECTORY ${CMAKE_CURRENT_BINARY_DIR}/${test_dir})
set(test_bin ${CMAKE_CURRENT_BINARY_DIR}/${test_dir}/${test_name})
add_test(NAME ${test_name}_np1 COMMAND ${MPIEXEC_EXECUTABLE} ${MPIEXEC_NUMPROC_FLAG} 1 ${MPIEXEC_PREFLAGS} ${test_bin} ${MPIEXEC_POSTFLAGS} WORKING_DIRECTORY ${CMAKE_CURRENT_BINARY_DIR}/${test_dir})
add_test(NAME ${test_name}_np2 COMMAND ${MPIEXEC_EXECUTABLE} ${MPIEXEC_NUMPROC_FLAG} 2 ${MPIEXEC_PREFLAGS} ${test_bin} ${MPIEXEC_POSTFLAGS} WORKING_DIRECTORY ${CMAKE_CURRENT_BINARY_DIR}/${test_dir})
add_test(NAME ${test_name}_np2 COMMAND ${MPIEXEC_EXECUTABLE} ${MPIEXEC_NUMPROC_FLAG} 4 ${MPIEXEC_PREFLAGS} ${test_bin} ${MPIEXEC_POSTFLAGS} WORKING_DIRECTORY ${CMAKE_CURRENT_BINARY_DIR}/${test_dir})
add_test(NAME ${test_name}_np4 COMMAND ${MPIEXEC_EXECUTABLE} ${MPIEXEC_NUMPROC_FLAG} 4 ${MPIEXEC_PREFLAGS} ${test_bin} ${MPIEXEC_POSTFLAGS} WORKING_DIRECTORY ${CMAKE_CURRENT_BINARY_DIR}/${test_dir})
# Run clang-tidy if found
if(CLANG_TIDY_EXECUTABLE)
Expand All @@ -45,3 +49,29 @@ foreach(test ${all_tests})
)
endif()
endforeach()

# now the no mpi tests
foreach(test ${nompi_tests})
get_filename_component(test_name ${test} NAME_WE)
get_filename_component(test_dir ${test} DIRECTORY)
set(test_bin ${CMAKE_CURRENT_BINARY_DIR}/${test_dir}/${test_name})
add_test(NAME ${test_name}_nompi COMMAND ${test_bin} WORKING_DIRECTORY ${CMAKE_CURRENT_BINARY_DIR}/${test_dir})
# Run clang-tidy if found
if(CLANG_TIDY_EXECUTABLE)
set_target_properties(${test_name} PROPERTIES CXX_CLANG_TIDY "${CLANG_TIDY_EXECUTABLE}")
endif()
# Run cppcheck if found
if(CPPCHECK_EXECUTABLE)
add_custom_command(
TARGET ${test_name}
COMMAND ${CPPCHECK_EXECUTABLE}
--enable=warning,style,performance,portability
--std=c++17
--template=gcc
--verbose
--force
--quiet
${CMAKE_CURRENT_SOURCE_DIR}/${test}
)
endif()
endforeach()