diff --git a/.github/workflows/main.yml b/.github/workflows/main.yml
index 81e027ac6..52de3dd4a 100644
--- a/.github/workflows/main.yml
+++ b/.github/workflows/main.yml
@@ -183,18 +183,22 @@ jobs:
       - name: Run func tests (threads, num_threads=1)
         run: python3 scripts/run_tests.py --running-type="threads"
         env:
+          PPC_NUM_PROC: 1
           PPC_NUM_THREADS: 1
       - name: Run func tests (threads, num_threads=2)
         run: python3 scripts/run_tests.py --running-type="threads"
         env:
+          PPC_NUM_PROC: 1
           PPC_NUM_THREADS: 2
       - name: Run func tests (threads, num_threads=3)
         run: python3 scripts/run_tests.py --running-type="threads"
         env:
+          PPC_NUM_PROC: 1
           PPC_NUM_THREADS: 3
       - name: Run func tests (threads, num_threads=4)
         run: python3 scripts/run_tests.py --running-type="threads"
         env:
+          PPC_NUM_PROC: 1
           PPC_NUM_THREADS: 4
   ubuntu-gcc-test-extended:
     needs:
@@ -221,18 +225,22 @@ jobs:
       - name: Run func tests (threads, num_threads=5)
         run: python3 scripts/run_tests.py --running-type="threads"
         env:
+          PPC_NUM_PROC: 1
           PPC_NUM_THREADS: 5
       - name: Run func tests (threads, num_threads=7)
        run: python3 scripts/run_tests.py --running-type="threads"
         env:
+          PPC_NUM_PROC: 1
           PPC_NUM_THREADS: 7
       - name: Run func tests (threads, num_threads=11)
         run: python3 scripts/run_tests.py --running-type="threads"
         env:
+          PPC_NUM_PROC: 1
           PPC_NUM_THREADS: 11
       - name: Run func tests (threads, num_threads=13)
         run: python3 scripts/run_tests.py --running-type="threads"
         env:
+          PPC_NUM_PROC: 1
           PPC_NUM_THREADS: 13
   ubuntu-clang-build:
     needs:
@@ -332,18 +340,22 @@ jobs:
       - name: Run tests (threads, num_threads=1)
         run: python3 scripts/run_tests.py --running-type="threads"
         env:
+          PPC_NUM_PROC: 1
           PPC_NUM_THREADS: 1
       - name: Run tests (threads, num_threads=2)
         run: python3 scripts/run_tests.py --running-type="threads"
         env:
+          PPC_NUM_PROC: 1
           PPC_NUM_THREADS: 2
       - name: Run tests (threads, num_threads=3)
         run: python3 scripts/run_tests.py --running-type="threads"
         env:
+          PPC_NUM_PROC: 1
           PPC_NUM_THREADS: 3
       - name: Run tests (threads, num_threads=4)
         run: python3 scripts/run_tests.py --running-type="threads"
         env:
+          PPC_NUM_PROC: 1
           PPC_NUM_THREADS: 4
   ubuntu-clang-test-extended:
     needs:
@@ -372,18 +384,22 @@ jobs:
      - name: Run tests (threads, num_threads=5)
         run: python3 scripts/run_tests.py --running-type="threads"
         env:
+          PPC_NUM_PROC: 1
           PPC_NUM_THREADS: 5
       - name: Run tests (threads, num_threads=7)
         run: python3 scripts/run_tests.py --running-type="threads"
         env:
+          PPC_NUM_PROC: 1
           PPC_NUM_THREADS: 7
       - name: Run tests (threads, num_threads=11)
         run: python3 scripts/run_tests.py --running-type="threads"
         env:
+          PPC_NUM_PROC: 1
           PPC_NUM_THREADS: 11
       - name: Run tests (threads, num_threads=13)
         run: python3 scripts/run_tests.py --running-type="threads"
         env:
+          PPC_NUM_PROC: 1
           PPC_NUM_THREADS: 13
   ubuntu-clang-sanitizer-build:
     needs:
@@ -420,6 +436,7 @@ jobs:
         env:
           CC: clang-20
           CXX: clang++-20
+          PPC_ASAN_RUN: 1
       - name: Build project
         run: |
           cmake --build build --parallel
@@ -473,6 +490,7 @@ jobs:
       - name: Run tests (threads, num_threads=1)
         run: python3 scripts/run_tests.py --running-type="threads"
         env:
+          PPC_NUM_PROC: 1
           PPC_NUM_THREADS: 1
           PPC_ASAN_RUN: 1
           ASAN_OPTIONS: abort_on_error=1
@@ -480,6 +498,7 @@ jobs:
       - name: Run tests (threads, num_threads=2)
         run: python3 scripts/run_tests.py --running-type="threads"
         env:
+          PPC_NUM_PROC: 1
           PPC_NUM_THREADS: 2
           PPC_ASAN_RUN: 1
           ASAN_OPTIONS: abort_on_error=1
@@ -487,6 +506,7 @@ jobs:
       - name: Run tests (threads, num_threads=3)
         run: python3 scripts/run_tests.py --running-type="threads"
         env:
+          PPC_NUM_PROC: 1
           PPC_NUM_THREADS: 3
           PPC_ASAN_RUN: 1
           ASAN_OPTIONS: abort_on_error=1
@@ -494,6 +514,7 @@ jobs:
       - name: Run tests (threads, num_threads=4)
         run: python3 scripts/run_tests.py --running-type="threads"
         env:
+          PPC_NUM_PROC: 1
           PPC_NUM_THREADS: 4
           PPC_ASAN_RUN: 1
           ASAN_OPTIONS: abort_on_error=1
@@ -526,21 +547,25 @@ jobs:
       - name: Run tests (threads, num_threads=5)
         run: python3 scripts/run_tests.py --running-type="threads"
         env:
+          PPC_NUM_PROC: 1
           PPC_NUM_THREADS: 5
           PPC_ASAN_RUN: 1
       - name: Run tests (threads, num_threads=7)
         run: python3 scripts/run_tests.py --running-type="threads"
         env:
+          PPC_NUM_PROC: 1
           PPC_NUM_THREADS: 7
           PPC_ASAN_RUN: 1
       - name: Run tests (threads, num_threads=11)
         run: python3 scripts/run_tests.py --running-type="threads"
         env:
+          PPC_NUM_PROC: 1
           PPC_NUM_THREADS: 11
           PPC_ASAN_RUN: 1
       - name: Run tests (threads, num_threads=13)
         run: python3 scripts/run_tests.py --running-type="threads"
         env:
+          PPC_NUM_PROC: 1
           PPC_NUM_THREADS: 13
           PPC_ASAN_RUN: 1
   macos-clang-build:
@@ -678,18 +703,22 @@ jobs:
       - name: Run tests (threads, num_threads=1)
         run: python3 scripts/run_tests.py --running-type="threads"
         env:
+          PPC_NUM_PROC: 1
           PPC_NUM_THREADS: 1
       - name: Run tests (threads, num_threads=2)
         run: python3 scripts/run_tests.py --running-type="threads"
         env:
+          PPC_NUM_PROC: 1
           PPC_NUM_THREADS: 2
       - name: Run tests (threads, num_threads=3)
         run: python3 scripts/run_tests.py --running-type="threads"
         env:
+          PPC_NUM_PROC: 1
           PPC_NUM_THREADS: 3
       - name: Run tests (threads, num_threads=4)
         run: python3 scripts/run_tests.py --running-type="threads"
         env:
+          PPC_NUM_PROC: 1
           PPC_NUM_THREADS: 4
   macos-clang-test-extended:
     needs:
@@ -716,18 +745,22 @@ jobs:
       - name: Run tests (threads, num_threads=5)
         run: python3 scripts/run_tests.py --running-type="threads"
         env:
+          PPC_NUM_PROC: 1
           PPC_NUM_THREADS: 5
       - name: Run tests (threads, num_threads=7)
         run: python3 scripts/run_tests.py --running-type="threads"
         env:
+          PPC_NUM_PROC: 1
           PPC_NUM_THREADS: 7
       - name: Run tests (threads, num_threads=11)
         run: python3 scripts/run_tests.py --running-type="threads"
         env:
+          PPC_NUM_PROC: 1
           PPC_NUM_THREADS: 11
       - name: Run tests (threads, num_threads=13)
         run: python3 scripts/run_tests.py --running-type="threads"
         env:
+          PPC_NUM_PROC: 1
           PPC_NUM_THREADS: 13
   windows-msvc-build:
     needs:
@@ -874,18 +907,22 @@ jobs:
       - name: Run tests (threads, num_threads=1)
         run: python3 scripts/run_tests.py --running-type="threads"
         env:
+          PPC_NUM_PROC: 1
           PPC_NUM_THREADS: 1
       - name: Run tests (threads, num_threads=2)
         run: python3 scripts/run_tests.py --running-type="threads"
         env:
+          PPC_NUM_PROC: 1
           PPC_NUM_THREADS: 2
       - name: Run tests (threads, num_threads=3)
         run: python3 scripts/run_tests.py --running-type="threads"
         env:
+          PPC_NUM_PROC: 1
           PPC_NUM_THREADS: 3
       - name: Run tests (threads, num_threads=4)
         run: python3 scripts/run_tests.py --running-type="threads"
         env:
+          PPC_NUM_PROC: 1
           PPC_NUM_THREADS: 4
   windows-msvc-test-extended:
     needs:
@@ -914,18 +951,22 @@ jobs:
       - name: Run tests (threads, num_threads=5)
         run: python3 scripts/run_tests.py --running-type="threads"
         env:
+          PPC_NUM_PROC: 1
           PPC_NUM_THREADS: 5
       - name: Run tests (threads, num_threads=7)
         run: python3 scripts/run_tests.py --running-type="threads"
         env:
+          PPC_NUM_PROC: 1
           PPC_NUM_THREADS: 7
       - name: Run tests (threads, num_threads=11)
         run: python3 scripts/run_tests.py --running-type="threads"
         env:
+          PPC_NUM_PROC: 1
           PPC_NUM_THREADS: 11
       - name: Run tests (threads, num_threads=13)
         run: python3 scripts/run_tests.py --running-type="threads"
         env:
+          PPC_NUM_PROC: 1
           PPC_NUM_THREADS: 13
   windows-clang-build:
     needs:
@@ -939,42 +980,52 @@ jobs:
       - uses: actions/checkout@v4
         with:
           submodules: recursive
-      - name: Add msbuild to PATH
-        uses: microsoft/setup-msbuild@v2
+
+      - name: Setup LLVM
+        uses: KyleMayes/install-llvm-action@v2
         with:
-          vs-version: 'latest'
+          version: "20.1.4"
+
       - name: Setup MPI
         uses: mpi4py/setup-mpi@v1
         with:
           mpi: msmpi
+
       - name: Setup ccache
         uses: Chocobo1/setup-ccache-action@v1
         with:
           windows_compile_environment: msvc
+
       - name: Setup ninja
         uses: seanmiddleditch/gha-setup-ninja@v6
-      - name: Setup MSVC for Ninja again
-        uses: ilammy/msvc-dev-cmd@v1
+
       - name: CMake configure
         run: >
-          cmake -S . -B build -G Ninja -D CMAKE_C_COMPILER=clang-cl -DCMAKE_CXX_COMPILER=clang-cl
-          -D CMAKE_C_COMPILER_LAUNCHER=ccache -D CMAKE_CXX_COMPILER_LAUNCHER=ccache
-          -D CMAKE_BUILD_TYPE=RELEASE -DCMAKE_INSTALL_PREFIX=install
+          cmake -S . -B build -G Ninja
+          -D CMAKE_C_COMPILER=clang-cl
+          -D CMAKE_CXX_COMPILER=clang-cl
+          -D CMAKE_C_COMPILER_LAUNCHER=ccache
+          -D CMAKE_CXX_COMPILER_LAUNCHER=ccache
+          -D CMAKE_BUILD_TYPE=Release
+          -D CMAKE_INSTALL_PREFIX=install
+          -D CMAKE_PREFIX_PATH="C:/Program Files/LLVM"
         env:
          CC: clang-cl
           CXX: clang-cl
+
       - name: Build project
-        run: |
-          cmake --build build --config Release --parallel
+        run: cmake --build build --config Release --parallel
         env:
           CC: clang-cl
           CXX: clang-cl
+
       - name: Install project
-        run: |
-          cmake --install build
+        run: cmake --install build
+
       - name: Archive installed package
         run: Compress-Archive -Path install -DestinationPath windows-clang-install.zip
         shell: pwsh
+
       - name: Upload installed package
         uses: actions/upload-artifact@v4
         with:
@@ -1007,18 +1058,22 @@ jobs:
       - name: Run tests (threads, num_threads=1)
         run: python3 scripts/run_tests.py --running-type="threads"
         env:
+          PPC_NUM_PROC: 1
           PPC_NUM_THREADS: 1
       - name: Run tests (threads, num_threads=2)
         run: python3 scripts/run_tests.py --running-type="threads"
         env:
+          PPC_NUM_PROC: 1
           PPC_NUM_THREADS: 2
       - name: Run tests (threads, num_threads=3)
         run: python3 scripts/run_tests.py --running-type="threads"
         env:
+          PPC_NUM_PROC: 1
           PPC_NUM_THREADS: 3
       - name: Run tests (threads, num_threads=4)
         run: python3 scripts/run_tests.py --running-type="threads"
         env:
+          PPC_NUM_PROC: 1
           PPC_NUM_THREADS: 4
   windows-clang-test-extended:
     needs:
@@ -1047,18 +1102,22 @@ jobs:
       - name: Run tests (threads, num_threads=5)
         run: python3 scripts/run_tests.py --running-type="threads"
         env:
+          PPC_NUM_PROC: 1
           PPC_NUM_THREADS: 5
       - name: Run tests (threads, num_threads=7)
         run: python3 scripts/run_tests.py --running-type="threads"
         env:
+          PPC_NUM_PROC: 1
           PPC_NUM_THREADS: 7
       - name: Run tests (threads, num_threads=11)
         run: python3 scripts/run_tests.py --running-type="threads"
         env:
+          PPC_NUM_PROC: 1
           PPC_NUM_THREADS: 11
       - name: Run tests (threads, num_threads=13)
         run: python3 scripts/run_tests.py --running-type="threads"
         env:
+          PPC_NUM_PROC: 1
           PPC_NUM_THREADS: 13
   ubuntu-gcc-build-codecov:
     needs:
@@ -1083,7 +1142,7 @@ jobs:
           max-size: 1G
       - name: CMake configure
         run: >
-          cmake -S . -B build
+          cmake -S . -B build -GNinja
           -D CMAKE_C_COMPILER_LAUNCHER=ccache -D CMAKE_CXX_COMPILER_LAUNCHER=ccache
           -D CMAKE_BUILD_TYPE=RELEASE -D CMAKE_VERBOSE_MAKEFILE=ON -D USE_COVERAGE=ON
@@ -1098,18 +1157,22 @@ jobs:
       - name: Run tests (threads, num_threads=1)
         run: python3 scripts/run_tests.py --running-type="threads"
         env:
+          PPC_NUM_PROC: 1
           PPC_NUM_THREADS: 1
       - name: Run tests (threads, num_threads=2)
         run: python3 scripts/run_tests.py --running-type="threads"
         env:
+          PPC_NUM_PROC: 1
           PPC_NUM_THREADS: 2
       - name: Run tests (threads, num_threads=3)
         run: python3 scripts/run_tests.py --running-type="threads"
         env:
+          PPC_NUM_PROC: 1
           PPC_NUM_THREADS: 3
       - name: Run tests (threads, num_threads=4)
         run: python3 scripts/run_tests.py --running-type="threads"
         env:
+          PPC_NUM_PROC: 1
           PPC_NUM_THREADS: 4
       - name: Generate gcovr Coverage Data
         run: |
@@ -1118,14 +1181,12 @@ jobs:
           gcovr -r ../ \
                 --exclude '.*3rdparty/.*' \
                 --exclude '/usr/.*' \
-                --exclude '.*/perf_tests/.*' \
-                --exclude '.*/func_tests/.*' \
-                --exclude '.*/all/runner.cpp' \
-                --exclude '.*/mpi/runner.cpp' \
-                --exclude '.*/omp/runner.cpp' \
-                --exclude '.*/seq/runner.cpp' \
-                --exclude '.*/stl/runner.cpp' \
-                --exclude '.*/tbb/runner.cpp' \
+                --exclude '.*tasks/.*/tests/.*' \
+                --exclude '.*modules/.*/tests/.*' \
+                --exclude '.*tasks/common/runners/.*' \
+                --exclude '.*modules/core/util/include/perf_test_util.hpp' \
+                --exclude '.*modules/core/util/include/func_test_util.hpp' \
+                --exclude '.*modules/core/util/src/func_test_util.cpp' \
                 --xml --output ../coverage.xml \
                 --html=../cov-report/index.html --html-details
       - name: Upload coverage reports to Codecov
@@ -1161,12 +1222,6 @@ jobs:
         run: |
           mkdir -p install
           tar -xzvf ubuntu-gcc-install-ubuntu-24.04.tar.gz -C install
-      - name: Run perf count checker
-        run: |
-          python3 scripts/run_perf_counter.py --required-tests-number=2
-        env:
-          PPC_NUM_THREADS: 2
-          PPC_NUM_PROC: 2
       - name: Run perf tests
         run: |
           bash -e scripts/generate_perf_results.sh
diff --git a/.github/workflows/static-analysis-pr.yml b/.github/workflows/static-analysis-pr.yml
index e5aa5c86c..379f68402 100644
--- a/.github/workflows/static-analysis-pr.yml
+++ b/.github/workflows/static-analysis-pr.yml
@@ -24,7 +24,7 @@ jobs:
         id: review
         with:
          build_dir: build
-          apt_packages: mpich,libmpich*,mpi*,openmpi-bin,ninja-build,libomp-19-dev,valgrind
+          apt_packages: openmpi-bin,openmpi-common,libopenmpi-dev,ninja-build,libomp-19-dev,valgrind
           cmake_command: >
             cmake -S . -B build
             -D CMAKE_C_COMPILER_LAUNCHER=ccache -D CMAKE_CXX_COMPILER_LAUNCHER=ccache
@@ -57,7 +57,7 @@ jobs:
         id: review
         with:
           build_dir: build
-          apt_packages: mpich,libmpich*,mpi*,openmpi-bin,ninja-build,libomp-19-dev,valgrind
+          apt_packages: openmpi-bin,openmpi-common,libopenmpi-dev,ninja-build,libomp-19-dev,valgrind
           cmake_command: >
             cmake -S . -B build
             -D CMAKE_C_COMPILER_LAUNCHER=ccache -D CMAKE_CXX_COMPILER_LAUNCHER=ccache
diff --git a/.gitmodules b/.gitmodules
index 5f55e2008..6ef00628f 100644
--- a/.gitmodules
+++ b/.gitmodules
@@ -7,3 +7,9 @@
 [submodule "3rdparty/stb"]
 	path = 3rdparty/stb
 	url = https://github.com/nothings/stb
+[submodule "3rdparty/json"]
+	path = 3rdparty/json
+	url = https://github.com/nlohmann/json
+[submodule "3rdparty/libenvpp"]
+	path = 3rdparty/libenvpp
+	url = https://github.com/ph3at/libenvpp
diff --git a/3rdparty/json b/3rdparty/json
new file mode 160000
index 000000000..c633693d3
--- /dev/null
+++ b/3rdparty/json
@@ -0,0 +1 @@
+Subproject commit c633693d3e2ab81fa186c691c452c47ced107845
diff --git a/3rdparty/libenvpp b/3rdparty/libenvpp
new file mode 160000
index 000000000..86db27f6d
--- /dev/null
+++ b/3rdparty/libenvpp
@@ -0,0 +1 @@
+Subproject commit 86db27f6df18a6e6a6ed143833bca0fd20977bf4
diff --git a/3rdparty/stb_image_wrapper.cpp b/3rdparty/stb_image_wrapper.cpp
new file mode 100644
index 000000000..b8d05c3e5
--- /dev/null
+++ b/3rdparty/stb_image_wrapper.cpp
@@ -0,0 +1,4 @@
+#define STB_IMAGE_IMPLEMENTATION
+
+// NOLINTNEXTLINE(misc-include-cleaner)
+#include "stb_library.hpp"
diff --git a/3rdparty/stb_library.hpp b/3rdparty/stb_library.hpp
index 36039a3d8..56492de6f 100644
--- a/3rdparty/stb_library.hpp
+++ b/3rdparty/stb_library.hpp
@@ -5,7 +5,6 @@
 #pragma clang diagnostic ignored "-Wcast-align"
 #endif
 
-#define STB_IMAGE_IMPLEMENTATION
 #include
 
 #if defined(__clang__)
diff --git a/CMakeLists.txt b/CMakeLists.txt
index a19d5840f..82bc88efe 100644
--- a/CMakeLists.txt
+++ b/CMakeLists.txt
@@ -29,6 +29,8 @@ message( STATUS "PPC step: First configures" )
 include(cmake/configure.cmake)
 include(cmake/modes.cmake)
 include(cmake/sanitizers.cmake)
+include(cmake/json.cmake)
+include(cmake/libenvpp.cmake)
 
 ################# Parallel programming technologies #################
 
diff --git a/cmake/configure.cmake b/cmake/configure.cmake
index 836e81a1d..f71cdb98e 100644
--- a/cmake/configure.cmake
+++ b/cmake/configure.cmake
@@ -10,6 +10,10 @@ if (NOT CMAKE_BUILD_TYPE)
     set(CMAKE_BUILD_TYPE "Release")
 endif(NOT CMAKE_BUILD_TYPE)
 
+if (MSVC)
+    add_compile_options("/utf-8")
+endif()
+
 set( CMAKE_ARCHIVE_OUTPUT_DIRECTORY_DEBUG "${CMAKE_BINARY_DIR}/arch" )
 set( CMAKE_LIBRARY_OUTPUT_DIRECTORY_DEBUG "${CMAKE_BINARY_DIR}/lib" )
 set( CMAKE_RUNTIME_OUTPUT_DIRECTORY_DEBUG "${CMAKE_BINARY_DIR}/bin" )
@@ -22,11 +26,9 @@ set( CMAKE_ARCHIVE_OUTPUT_DIRECTORY "${CMAKE_BINARY_DIR}/arch" )
 set( CMAKE_LIBRARY_OUTPUT_DIRECTORY "${CMAKE_BINARY_DIR}/lib" )
 set( CMAKE_RUNTIME_OUTPUT_DIRECTORY "${CMAKE_BINARY_DIR}/bin" )
 
-if ( MSVC AND (CMAKE_C_COMPILER_ID STREQUAL "Clang") AND (CMAKE_CXX_COMPILER_ID STREQUAL "Clang"))
-    set( CMAKE_CXX_STANDARD 17 )
-else ()
-    set( CMAKE_CXX_STANDARD 23 )
-endif ()
+set( CMAKE_CXX_STANDARD 23 )
+
+set(CMAKE_CXX_STANDARD_REQUIRED ON)
 
 set(CMAKE_COMPILE_WARNING_AS_ERROR ON)
diff --git a/cmake/functions.cmake b/cmake/functions.cmake
new file mode 100644
index 000000000..e1cfcf669
--- /dev/null
+++ b/cmake/functions.cmake
@@ -0,0 +1,105 @@
+# ——— Helper function to add & register tests —————————————————————————
+function(ppc_add_test test_name test_src USE_FLAG)
+  if(${USE_FLAG})
+    add_executable(${test_name} "${PROJECT_SOURCE_DIR}/${test_src}")
+    enable_testing()
+    add_test(NAME ${test_name} COMMAND ${test_name})
+    install(TARGETS ${test_name} RUNTIME DESTINATION bin)
+  endif()
+endfunction()
+
+# Function to configure tests
+function(add_tests test_flag exec_target subdir)
+  if(${test_flag})
+    # Gather all source files under tests/
+    file(GLOB_RECURSE src_files
+      "${TEST_DIR}/${subdir}/*.cpp"
+      "${TEST_DIR}/${subdir}/*.cxx"
+      "${TEST_DIR}/${subdir}/*.cc"
+    )
+    target_sources(${exec_target} PRIVATE ${src_files})
+    list(APPEND TEST_EXECUTABLES ${exec_target})
+    set(TEST_EXECUTABLES "${TEST_EXECUTABLES}" PARENT_SCOPE)
+  endif()
+endfunction()
+
+# ============================================================================
+# Function: setup_implementation
+#   - NAME:      implementation sub‐directory name (e.g. “mpi”)
+#   - PROJ_NAME: project base name
+#   - BASE_DIR:  root source directory
+#   - TESTS:     list of test executables to link against
+# ============================================================================
+function(setup_implementation)
+  # parse named args: NAME, PROJ_NAME, BASE_DIR; multi‐value: TESTS
+  cmake_parse_arguments(
+    SETUP
+    ""                         # no plain options
+    "NAME;PROJ_NAME;BASE_DIR"
+    "TESTS"
+    ${ARGN}
+  )
+
+  # skip if impl dir doesn't exist
+  set(IMP_DIR "${SETUP_BASE_DIR}/${SETUP_NAME}")
+  if(NOT EXISTS "${IMP_DIR}")
+    return()
+  endif()
+  message(STATUS "  -- ${SETUP_NAME}")
+
+  # collect sources
+  file(GLOB_RECURSE CPP_SOURCES "${IMP_DIR}/src/*.cpp")
+  file(GLOB_RECURSE ALL_SOURCES
+    "${IMP_DIR}/include/*.h"
+    "${IMP_DIR}/include/*.hpp"
+    "${IMP_DIR}/src/*.cpp"
+  )
+
+  # create library (STATIC if .cpp exist, otherwise INTERFACE)
+  set(LIB_NAME "${SETUP_PROJ_NAME}_${SETUP_NAME}")
+  if(CPP_SOURCES)
+    add_library(${LIB_NAME} STATIC ${ALL_SOURCES})
+  else()
+    add_library(${LIB_NAME} INTERFACE ${ALL_SOURCES})
+  endif()
+
+  # link core module
+  target_link_libraries(${LIB_NAME} PUBLIC core_module_lib)
+
+  # and link into each enabled test executable
+  foreach(test_exec ${SETUP_TESTS})
+    target_link_libraries(${test_exec} PUBLIC ${LIB_NAME})
+  endforeach()
+endfunction()
+
+# Function to configure each subproject
+function(ppc_configure_subproject SUBDIR)
+  # Module-specific compile-time definitions
+  add_compile_definitions(
+    PPC_SETTINGS_${SUBDIR}="${CMAKE_CURRENT_SOURCE_DIR}/${SUBDIR}/settings.json"
+    PPC_ID_${SUBDIR}="${SUBDIR}"
+  )
+
+  # Switch project context to the subproject
+  project(${SUBDIR})
+
+  # Directory with tests and list of test executables (populated by setup_implementation)
+  set(TEST_DIR "${CMAKE_CURRENT_SOURCE_DIR}/${SUBDIR}/tests")
+  set(TEST_EXECUTABLES "")
+
+  # Register functional and performance test runners
+  add_tests(USE_FUNC_TESTS ${FUNC_TEST_EXEC} functional)
+  add_tests(USE_PERF_TESTS ${PERF_TEST_EXEC} performance)
+
+  message(STATUS "${SUBDIR}")
+
+  # List of implementations to configure
+  foreach(IMPL IN LISTS IMPLEMENTATIONS)
+    setup_implementation(
+      NAME      ${IMPL}
+      PROJ_NAME ${SUBDIR}
+      TESTS     "${TEST_EXECUTABLES}"
+      BASE_DIR  "${CMAKE_CURRENT_SOURCE_DIR}/${SUBDIR}"
+    )
+  endforeach()
+endfunction()
diff --git a/cmake/json.cmake b/cmake/json.cmake
new file mode 100644
index 000000000..3fd9b0d42
--- /dev/null
+++ b/cmake/json.cmake
@@ -0,0 +1,12 @@
+include_directories(${CMAKE_SOURCE_DIR}/3rdparty/json/include)
+include(ExternalProject)
+ExternalProject_Add(ppc_json
+        SOURCE_DIR "${CMAKE_SOURCE_DIR}/3rdparty/json"
+        PREFIX "${CMAKE_CURRENT_BINARY_DIR}/ppc_json"
+        BINARY_DIR "${CMAKE_CURRENT_BINARY_DIR}/ppc_json/build"
+        INSTALL_DIR "${CMAKE_CURRENT_BINARY_DIR}/ppc_json/install"
+        CONFIGURE_COMMAND "${CMAKE_COMMAND}" -S "${CMAKE_SOURCE_DIR}/3rdparty/json/" -B "${CMAKE_CURRENT_BINARY_DIR}/ppc_json/build/"
+        -DCMAKE_CXX_COMPILER=${CMAKE_CXX_COMPILER} -G${CMAKE_GENERATOR} -DCMAKE_BUILD_TYPE=${CMAKE_BUILD_TYPE}
+        -D CMAKE_C_COMPILER_LAUNCHER=${CMAKE_C_COMPILER_LAUNCHER} -D CMAKE_CXX_COMPILER_LAUNCHER=${CMAKE_CXX_COMPILER_LAUNCHER}
+        BUILD_COMMAND "${CMAKE_COMMAND}" --build "${CMAKE_CURRENT_BINARY_DIR}/ppc_json/build" --config ${CMAKE_BUILD_TYPE} --parallel
+        INSTALL_COMMAND "${CMAKE_COMMAND}" --install "${CMAKE_CURRENT_BINARY_DIR}/ppc_json/build" --prefix "${CMAKE_CURRENT_BINARY_DIR}/ppc_json/install")
diff --git a/cmake/libenvpp.cmake b/cmake/libenvpp.cmake
new file mode 100644
index 000000000..449cd281e
--- /dev/null
+++ b/cmake/libenvpp.cmake
@@ -0,0 +1,27 @@
+include_directories(${CMAKE_SOURCE_DIR}/3rdparty/libenvpp/include)
+include_directories(SYSTEM ${CMAKE_SOURCE_DIR}/3rdparty/libenvpp/external/fmt/include)
+
+include(ExternalProject)
+ExternalProject_Add(ppc_libenvpp
+        SOURCE_DIR "${CMAKE_SOURCE_DIR}/3rdparty/libenvpp"
+        PREFIX "${CMAKE_CURRENT_BINARY_DIR}/ppc_libenvpp"
+        BINARY_DIR "${CMAKE_CURRENT_BINARY_DIR}/ppc_libenvpp/build"
+        INSTALL_DIR "${CMAKE_CURRENT_BINARY_DIR}/ppc_libenvpp/install"
+        CONFIGURE_COMMAND "${CMAKE_COMMAND}" -S "${CMAKE_SOURCE_DIR}/3rdparty/libenvpp/" -B "${CMAKE_CURRENT_BINARY_DIR}/ppc_libenvpp/build/"
+        -DCMAKE_CXX_COMPILER=${CMAKE_CXX_COMPILER} -G${CMAKE_GENERATOR} -DCMAKE_BUILD_TYPE=${CMAKE_BUILD_TYPE}
+        -D CMAKE_C_COMPILER_LAUNCHER=${CMAKE_C_COMPILER_LAUNCHER} -D CMAKE_CXX_COMPILER_LAUNCHER=${CMAKE_CXX_COMPILER_LAUNCHER}
+        BUILD_COMMAND "${CMAKE_COMMAND}" --build "${CMAKE_CURRENT_BINARY_DIR}/ppc_libenvpp/build" --config ${CMAKE_BUILD_TYPE} --parallel
+        INSTALL_COMMAND "${CMAKE_COMMAND}" --install "${CMAKE_CURRENT_BINARY_DIR}/ppc_libenvpp/build" --prefix "${CMAKE_CURRENT_BINARY_DIR}/ppc_libenvpp/install")
+
+string(TOLOWER "${CMAKE_BUILD_TYPE}" cmake_build_type_lower)
+if(cmake_build_type_lower STREQUAL "debug")
+  set(PPC_FMT_LIB_NAME fmtd)
+else()
+  set(PPC_FMT_LIB_NAME fmt)
+endif()
+
+if(MSVC)
+  set(PPC_ENVPP_LIB_NAME libenvpp)
+else()
+  set(PPC_ENVPP_LIB_NAME envpp)
+endif ()
\ No newline at end of file
diff --git a/codecov.yml b/codecov.yml
index 2e8324752..c0647754f 100644
--- a/codecov.yml
+++ b/codecov.yml
@@ -1,12 +1,10 @@
 ignore:
-  - "**/perf_tests/**"
-  - "**/func_tests/**"
-  - "**/all/runner.cpp"
-  - "**/mpi/runner.cpp"
-  - "**/omp/runner.cpp"
-  - "**/seq/runner.cpp"
-  - "**/stl/runner.cpp"
-  - "**/tbb/runner.cpp"
+  - "tasks/**/tests/**"
+  - "modules/**/tests/**"
+  - "tasks/common/runners/**"
+  - "modules/core/util/include/perf_test_util.hpp"
+  - "modules/core/util/include/func_test_util.hpp"
+  - "modules/core/util/src/func_test_util.cpp"
 coverage:
   status:
     project:
diff --git a/modules/core/CMakeLists.txt b/modules/core/CMakeLists.txt
index 0237aab38..26d597946 100644
--- a/modules/core/CMakeLists.txt
+++ b/modules/core/CMakeLists.txt
@@ -15,7 +15,7 @@ foreach(subd ${subdirs})
     file(GLOB_RECURSE TMP_LIB_SOURCE_FILES ${PATH_PREFIX}/include/* ${PATH_PREFIX}/src/*)
     list(APPEND LIB_SOURCE_FILES ${TMP_LIB_SOURCE_FILES})
 
-    file(GLOB_RECURSE TMP_FUNC_TESTS_SOURCE_FILES ${PATH_PREFIX}/func_tests/*)
+    file(GLOB_RECURSE TMP_FUNC_TESTS_SOURCE_FILES ${PATH_PREFIX}/tests/*)
     list(APPEND FUNC_TESTS_SOURCE_FILES ${TMP_FUNC_TESTS_SOURCE_FILES})
 endforeach()
 
@@ -23,10 +23,46 @@ project(${exec_func_lib})
 add_library(${exec_func_lib} STATIC ${LIB_SOURCE_FILES})
 set_target_properties(${exec_func_lib} PROPERTIES LINKER_LANGUAGE CXX)
 
+add_dependencies(${exec_func_lib} ppc_libenvpp)
+target_link_directories(${exec_func_lib} PUBLIC "${CMAKE_BINARY_DIR}/ppc_libenvpp/install/lib")
+target_link_directories(${exec_func_lib} PUBLIC "${CMAKE_BINARY_DIR}/ppc_libenvpp/build")
+target_link_libraries(${exec_func_lib} PUBLIC ${PPC_ENVPP_LIB_NAME})
+target_link_libraries(${exec_func_lib} PUBLIC ${PPC_FMT_LIB_NAME})
+
+add_dependencies(${exec_func_lib} ppc_json)
+target_link_directories(${exec_func_lib} INTERFACE "${CMAKE_BINARY_DIR}/ppc_json/install/include")
+
+add_dependencies(${exec_func_lib} ppc_googletest)
+target_link_directories(${exec_func_lib} PUBLIC "${CMAKE_BINARY_DIR}/ppc_googletest/install/lib")
+target_link_libraries(${exec_func_lib} PUBLIC gtest gtest_main)
+
+target_link_libraries(${exec_func_lib} PUBLIC Threads::Threads)
+
+find_package(OpenMP REQUIRED)
+target_link_libraries(${exec_func_lib} PUBLIC ${OpenMP_libomp_LIBRARY} OpenMP::OpenMP_CXX)
+
+add_dependencies(${exec_func_lib} ppc_onetbb)
+target_link_directories(${exec_func_lib} PUBLIC ${CMAKE_BINARY_DIR}/ppc_onetbb/install/lib)
+if(NOT MSVC)
+  target_link_libraries(${exec_func_lib} PUBLIC ${PPC_TBB_LIB_NAME})
+endif()
+
+find_package(MPI REQUIRED)
+if( MPI_COMPILE_FLAGS )
+  set_target_properties(${exec_func_lib} PROPERTIES COMPILE_FLAGS "${MPI_COMPILE_FLAGS}")
+endif( MPI_COMPILE_FLAGS )
+
+if( MPI_LINK_FLAGS )
+  set_target_properties(${exec_func_lib} PROPERTIES LINK_FLAGS "${MPI_LINK_FLAGS}")
+endif( MPI_LINK_FLAGS )
+target_link_libraries(${exec_func_lib} PUBLIC ${MPI_LIBRARIES})
+
+
+add_library(stb_image STATIC ${CMAKE_SOURCE_DIR}/3rdparty/stb_image_wrapper.cpp)
+target_include_directories(stb_image PUBLIC ${CMAKE_SOURCE_DIR}/3rdparty/stb)
+target_link_libraries(${exec_func_lib} PUBLIC stb_image)
+
 add_executable(${exec_func_tests} ${FUNC_TESTS_SOURCE_FILES})
-add_dependencies(${exec_func_tests} ppc_googletest)
-target_link_directories(${exec_func_tests} PUBLIC ${CMAKE_BINARY_DIR}/ppc_googletest/install/lib)
-target_link_libraries(${exec_func_tests} PUBLIC gtest gtest_main)
 target_link_libraries(${exec_func_tests} PUBLIC ${exec_func_lib})
diff --git a/modules/core/perf/func_tests/test_task.hpp b/modules/core/perf/func_tests/test_task.hpp
deleted file mode 100644
index 545b837a5..000000000
--- a/modules/core/perf/func_tests/test_task.hpp
+++ /dev/null
@@ -1,50 +0,0 @@
-#pragma once
-
-#include
-#include
-#include
-
-#include "core/task/include/task.hpp"
-
-namespace ppc::test::perf {
-
-template <class T>
-class TestTask : public ppc::core::Task {
- public:
-  explicit TestTask(const std::vector<T>& in) : input_(in) {}
-
-  bool ValidationImpl() override { return !input_.empty(); }
-
-  bool PreProcessingImpl() override {
-    output_ = 0;
-    return true;
-  }
-
-  bool RunImpl() override {
-    for (unsigned i = 0; i < input_.size(); i++) {
-      output_ += input_[i];
-    }
-    return true;
-  }
-
-  bool PostProcessingImpl() override { return true; }
-
-  T Get() { return output_; }
-
- private:
-  std::vector<T> input_{};
-  T output_;
-};
-
-template <class T>
-class FakePerfTask : public TestTask<T> {
- public:
-  explicit FakePerfTask(const std::vector<T>& in) : TestTask<T>(in) {}
-
-  bool RunImpl() override {
-    std::this_thread::sleep_for(std::chrono::seconds(11));
-    return TestTask<T>::RunImpl();
-  }
-};
-
-}  // namespace ppc::test::perf
diff --git a/modules/core/perf/include/perf.hpp b/modules/core/perf/include/perf.hpp
deleted file mode 100644
index 61cc68559..000000000
--- a/modules/core/perf/include/perf.hpp
+++ /dev/null
@@ -1,44 +0,0 @@
-#pragma once
-
-#include
-#include
-#include
-
-#include "core/task/include/task.hpp"
-
-namespace ppc::core {
-
-struct PerfAttr {
-  // count of task's running
-  uint64_t num_running = 10;
-  std::function<double()> current_timer = [&] { return 0.0; };
-};
-
-struct PerfResults {
-  // measurement of task's time (in seconds)
-  double time_sec = 0.0;
-  enum TypeOfRunning : uint8_t { kPipeline, kTaskRun, kNone } type_of_running = kNone;
-  constexpr static double kMaxTime = 10.0;
-};
-
-class Perf {
- public:
-  // Init performance analysis with initialized task and initialized data
-  explicit Perf(const std::shared_ptr<Task>& task_ptr);
-  // Check performance of full task's pipeline: PreProcessing() ->
-  // Validation() -> Run() -> PostProcessing()
-  void PipelineRun(const PerfAttr& perf_attr);
-  // Check performance of task's Run() function
-  void TaskRun(const PerfAttr& perf_attr);
-  // Pint results for automation checkers
-  void PrintPerfStatistic() const;
-  // Get performance result structure of the current task
-  PerfResults GetPerfResults();
-
- private:
-  PerfResults perf_results_;
-  std::shared_ptr<Task> task_;
-  static void CommonRun(const PerfAttr& perf_attr, const std::function<void()>& pipeline, PerfResults& perf_results);
-};
-
-}  // namespace ppc::core
diff --git a/modules/core/perf/src/perf.cpp b/modules/core/perf/src/perf.cpp
deleted file mode 100644
index aac88841c..000000000
--- a/modules/core/perf/src/perf.cpp
+++ /dev/null
@@ -1,97 +0,0 @@
-#include "core/perf/include/perf.hpp"
-
-#include
-
-#include
-#include
-#include
-#include
-#include
-#include
-#include
-#include
-
-#include "core/task/include/task.hpp"
-
-ppc::core::Perf::Perf(const std::shared_ptr<Task>& task_ptr) : task_(task_ptr) {
-  task_ptr->GetStateOfTesting() = Task::StateOfTesting::kPerf;
-}
-
-void ppc::core::Perf::PipelineRun(const PerfAttr& perf_attr) {
-  perf_results_.type_of_running = PerfResults::TypeOfRunning::kPipeline;
-
-  CommonRun(
-      perf_attr,
-      [&]() {
-        task_->Validation();
-        task_->PreProcessing();
-        task_->Run();
-        task_->PostProcessing();
-      },
-      perf_results_);
-}
-
-void ppc::core::Perf::TaskRun(const PerfAttr& perf_attr) {
-  perf_results_.type_of_running = PerfResults::TypeOfRunning::kTaskRun;
-
-  task_->Validation();
-  task_->PreProcessing();
-  CommonRun(perf_attr, [&]() { task_->Run(); }, perf_results_);
-  task_->PostProcessing();
-
-  task_->Validation();
-  task_->PreProcessing();
-  task_->Run();
-  task_->PostProcessing();
-}
-
-void ppc::core::Perf::CommonRun(const PerfAttr& perf_attr, const std::function<void()>& pipeline,
-                                ppc::core::PerfResults& perf_results) {
-  auto begin = perf_attr.current_timer();
-  for (uint64_t i = 0; i < perf_attr.num_running; i++) {
-    pipeline();
-  }
-  auto end = perf_attr.current_timer();
-  perf_results.time_sec = end - begin;
-}
-
-void ppc::core::Perf::PrintPerfStatistic() const {
-  std::string relative_path(::testing::UnitTest::GetInstance()->current_test_info()->file());
-  std::string ppc_regex_template("parallel_programming_course");
-  std::string perf_regex_template("perf_tests");
-  std::string type_test_name;
-
-  auto time_secs = perf_results_.time_sec;
-
-  if (perf_results_.type_of_running == PerfResults::TypeOfRunning::kTaskRun) {
-    type_test_name = "task_run";
-  } else if (perf_results_.type_of_running == PerfResults::TypeOfRunning::kPipeline) {
-    type_test_name = "pipeline";
-  } else {
-    std::stringstream err_msg;
-    err_msg << '\n' << "The type of performance check for the task was not selected.\n";
-    throw std::runtime_error(err_msg.str().c_str());
-  }
-
-  auto first_found_position = relative_path.find(ppc_regex_template) + ppc_regex_template.length() + 1;
-  relative_path.erase(0, first_found_position);
-
-  auto last_found_position = relative_path.find(perf_regex_template) - 1;
-  relative_path.erase(last_found_position, relative_path.length() - 1);
-
-  std::stringstream perf_res_str;
-  if (time_secs < PerfResults::kMaxTime) {
-    perf_res_str << std::fixed << std::setprecision(10) << time_secs;
-    std::cout << relative_path << ":" << type_test_name << ":" << perf_res_str.str() << '\n';
-  } else {
-    std::stringstream err_msg;
-    err_msg << '\n' << "Task execute time need to be: ";
-    err_msg << "time < " << PerfResults::kMaxTime << " secs." << '\n';
-    err_msg << "Original time in secs: " << time_secs << '\n';
-    perf_res_str << std::fixed << std::setprecision(10) << -1.0;
-    std::cout << relative_path << ":" << type_test_name << ":" << perf_res_str.str() << '\n';
-    throw std::runtime_error(err_msg.str().c_str());
-  }
-}
-
-ppc::core::PerfResults ppc::core::Perf::GetPerfResults() { return perf_results_; }
diff --git a/modules/core/performance/include/performance.hpp b/modules/core/performance/include/performance.hpp
new file mode 100644
index 000000000..2373c8296
--- /dev/null
+++ b/modules/core/performance/include/performance.hpp
@@ -0,0 +1,119 @@
+#pragma once
+
+#include
+#include
+#include
+#include
+#include
+#include
+#include
+#include
+
+#include "core/task/include/task.hpp"
+
+namespace ppc::core {
+
+struct PerfAttr {
+  // count of task's running
+  uint64_t num_running = 5;
+  std::function<double()> current_timer = [&] { return -1.0; };
+};
+
+struct PerfResults {
+  // measurement of task's time (in seconds)
+  double time_sec = 0.0;
+  enum TypeOfRunning : uint8_t { kPipeline, kTaskRun, kNone } type_of_running = kNone;
+  constexpr static double kMaxTime = 10.0;
+};
+
+template <typename InType, typename OutType>
+class Perf {
+ public:
+  // Init performance analysis with an initialized task and initialized data
+  explicit Perf(const TaskPtr<InType, OutType>& task_ptr) : task_(task_ptr) {
+    task_ptr->GetStateOfTesting() = StateOfTesting::kPerf;
+  }
+  // Check performance of full task's pipeline: PreProcessing() ->
+  // Validation() -> Run() -> PostProcessing()
+  void PipelineRun(const PerfAttr& perf_attr) {
+    perf_results_.type_of_running = PerfResults::TypeOfRunning::kPipeline;
+
+    CommonRun(
+        perf_attr,
+        [&]() {
+          task_->Validation();
+          task_->PreProcessing();
+          task_->Run();
+          task_->PostProcessing();
+        },
+        perf_results_);
+  }
+  // Check performance of task's Run() function
+  void TaskRun(const PerfAttr& perf_attr) {
+    perf_results_.type_of_running = PerfResults::TypeOfRunning::kTaskRun;
+
+    task_->Validation();
+    task_->PreProcessing();
+    CommonRun(perf_attr, [&]() { task_->Run(); }, perf_results_);
+    task_->PostProcessing();
+
+    task_->Validation();
+    task_->PreProcessing();
+    task_->Run();
+    task_->PostProcessing();
+  }
+  // Print results for automation checkers
+  void PrintPerfStatistic(const std::string& test_id) const {
+    std::string type_test_name;
+    if (perf_results_.type_of_running == PerfResults::TypeOfRunning::kTaskRun) {
+      type_test_name = "task_run";
+    } else if (perf_results_.type_of_running == PerfResults::TypeOfRunning::kPipeline) {
+      type_test_name = "pipeline";
+    } else {
+      std::stringstream err_msg;
+      err_msg << '\n' << "The type of performance check for the task was not selected.\n";
+      throw std::runtime_error(err_msg.str().c_str());
+    }
+
+    auto time_secs = perf_results_.time_sec;
+    std::stringstream perf_res_str;
+    if (time_secs < PerfResults::kMaxTime) {
+      perf_res_str << std::fixed << std::setprecision(10) << time_secs;
+      std::cout << test_id << ":" << type_test_name << ":" << perf_res_str.str() << '\n';
+    } else {
+      std::stringstream err_msg;
+      err_msg << '\n' << "Task execution time needs to be: ";
+      err_msg << "time < " << PerfResults::kMaxTime << " secs." << '\n';
+      err_msg << "Original time in secs: " << time_secs << '\n';
+      perf_res_str << std::fixed << std::setprecision(10) << -1.0;
+      std::cout << test_id << ":" << type_test_name << ":" << perf_res_str.str() << '\n';
+      throw std::runtime_error(err_msg.str().c_str());
+    }
+  }
+  // Get performance result structure of the current task
+  PerfResults GetPerfResults() { return perf_results_; }
+
+ private:
+  PerfResults perf_results_;
+  std::shared_ptr<Task<InType, OutType>> task_;
+  static void CommonRun(const PerfAttr& perf_attr, const std::function<void()>& pipeline, PerfResults& perf_results) {
+    auto begin = perf_attr.current_timer();
+    for (uint64_t i = 0; i < perf_attr.num_running; i++) {
+      pipeline();
+    }
+    auto end = perf_attr.current_timer();
+    perf_results.time_sec = (end - begin) / static_cast<double>(perf_attr.num_running);
+  }
+};
+
+inline std::string GetStringParamName(ppc::core::PerfResults::TypeOfRunning type_of_running) {
+  if (type_of_running == core::PerfResults::kTaskRun) {
+    return "task_run";
+  }
+  if (type_of_running == core::PerfResults::kPipeline) {
+    return "pipeline";
+  }
+  return "none";
+}
+
+}  // namespace ppc::core
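The reworked `Perf` above defaults `current_timer` to a lambda returning -1.0, so `time_sec` is meaningless until the caller installs a real timer, and `CommonRun` now reports the average seconds per iteration rather than the total. A minimal usage sketch, assuming the `TestTask` helper from `core/performance/tests/test_task.hpp` introduced later in this patch; the chrono-based timer and the `example_pipeline` id are illustrative, not part of the patch:

```cpp
// Usage sketch (not part of the patch): wiring a wall-clock timer into PerfAttr.
#include <chrono>
#include <cstdint>
#include <memory>
#include <vector>

#include "core/performance/include/performance.hpp"
#include "core/performance/tests/test_task.hpp"

int main() {
  std::vector<uint32_t> in(2000, 1);
  auto task = std::make_shared<ppc::test::perf::TestTask<std::vector<uint32_t>, uint32_t>>(in);

  ppc::core::PerfAttr attr;
  attr.num_running = 5;  // time_sec is averaged over these iterations
  attr.current_timer = [] {
    auto now = std::chrono::high_resolution_clock::now().time_since_epoch();
    return std::chrono::duration<double>(now).count();  // seconds
  };

  ppc::core::Perf<std::vector<uint32_t>, uint32_t> perf(task);
  perf.PipelineRun(attr);
  perf.PrintPerfStatistic("example_pipeline");  // throws if the average exceeds 10 s
  return 0;
}
```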
diff --git a/modules/core/perf/func_tests/perf_tests.cpp b/modules/core/performance/tests/perf_tests.cpp
similarity index 58%
rename from modules/core/perf/func_tests/perf_tests.cpp
rename to modules/core/performance/tests/perf_tests.cpp
index be774a94c..0d46a741c 100644
--- a/modules/core/perf/func_tests/perf_tests.cpp
+++ b/modules/core/performance/tests/perf_tests.cpp
@@ -5,27 +5,27 @@
 #include
 #include
 
-#include "core/perf/func_tests/test_task.hpp"
-#include "core/perf/include/perf.hpp"
+#include "core/performance/include/performance.hpp"
+#include "core/performance/tests/test_task.hpp"
 
 TEST(perf_tests, check_perf_pipeline) {
   // Create data
   std::vector<uint32_t> in(2000, 1);
 
   // Create Task
-  auto test_task = std::make_shared<ppc::test::perf::TestTask<uint32_t>>(in);
+  auto test_task = std::make_shared<ppc::test::perf::TestTask<std::vector<uint32_t>, uint32_t>>(in);
 
   // Create Perf analyzer
-  ppc::core::Perf perf_analyzer(test_task);
+  ppc::core::Perf<std::vector<uint32_t>, uint32_t> perf_analyzer(test_task);
 
   // Create Perf attributes
   ppc::core::PerfAttr perf_attr;
   perf_analyzer.PipelineRun(perf_attr);
 
   // Get perf statistic
-  perf_analyzer.PrintPerfStatistic();
+  perf_analyzer.PrintPerfStatistic("check_perf_pipeline");
   ASSERT_LE(perf_analyzer.GetPerfResults().time_sec, ppc::core::PerfResults::kMaxTime);
-  EXPECT_EQ(test_task->Get(), in.size());
+  EXPECT_EQ(test_task->GetOutput(), in.size());
 }
 
 TEST(perf_tests, check_perf_pipeline_float) {
@@ -33,19 +33,19 @@ TEST(perf_tests, check_perf_pipeline_float) {
   std::vector<float> in(2000, 1);
 
   // Create Task
-  auto test_task = std::make_shared<ppc::test::perf::TestTask<float>>(in);
+  auto test_task = std::make_shared<ppc::test::perf::TestTask<std::vector<float>, float>>(in);
 
   // Create Perf analyzer
-  ppc::core::Perf perf_analyzer(test_task);
+  ppc::core::Perf<std::vector<float>, float> perf_analyzer(test_task);
 
   // Create Perf attributes
   ppc::core::PerfAttr perf_attr;
   perf_analyzer.PipelineRun(perf_attr);
 
   // Get perf statistic
-  perf_analyzer.PrintPerfStatistic();
+  perf_analyzer.PrintPerfStatistic("check_perf_pipeline_float");
   ASSERT_LE(perf_analyzer.GetPerfResults().time_sec, ppc::core::PerfResults::kMaxTime);
-  EXPECT_EQ(test_task->Get(), in.size());
+  EXPECT_EQ(test_task->GetOutput(), in.size());
 }
 
 TEST(perf_tests, check_perf_pipeline_uint8_t_slow_test) {
@@ -53,10 +53,10 @@ TEST(perf_tests, check_perf_pipeline_uint8_t_slow_test) {
   std::vector<uint8_t> in(128, 1);
 
   // Create Task
-  auto test_task = std::make_shared<ppc::test::perf::FakePerfTask<uint8_t>>(in);
+  auto test_task = std::make_shared<ppc::test::perf::FakePerfTask<std::vector<uint8_t>, uint8_t>>(in);
 
   // Create Perf analyzer
-  ppc::core::Perf perf_analyzer(test_task);
+  ppc::core::Perf<std::vector<uint8_t>, uint8_t> perf_analyzer(test_task);
 
   // Create Perf attributes
   ppc::core::PerfAttr perf_attr;
@@ -71,7 +71,8 @@ TEST(perf_tests, check_perf_pipeline_uint8_t_slow_test) {
   perf_analyzer.PipelineRun(perf_attr);
 
   // Get perf statistic
-  ASSERT_ANY_THROW(perf_analyzer.PrintPerfStatistic());
+  // NOLINTNEXTLINE(cppcoreguidelines-avoid-goto)
+  ASSERT_ANY_THROW(perf_analyzer.PrintPerfStatistic("check_perf_pipeline_uint8_t_slow_test"));
 }
 
 TEST(perf_tests, check_perf_task_exception) {
@@ -79,13 +80,14 @@ TEST(perf_tests, check_perf_task_exception) {
   std::vector<uint32_t> in(2000, 1);
 
   // Create Task
-  auto test_task = std::make_shared<ppc::test::perf::TestTask<uint32_t>>(in);
+  auto test_task = std::make_shared<ppc::test::perf::TestTask<std::vector<uint32_t>, uint32_t>>(in);
 
   // Create Perf analyzer
-  ppc::core::Perf perf_analyzer(test_task);
+  ppc::core::Perf<std::vector<uint32_t>, uint32_t> perf_analyzer(test_task);
 
   // Get perf statistic
-  ASSERT_ANY_THROW(perf_analyzer.PrintPerfStatistic());
+  // NOLINTNEXTLINE(cppcoreguidelines-avoid-goto)
+  ASSERT_ANY_THROW(perf_analyzer.PrintPerfStatistic("check_perf_task_exception"));
 
   // Create Perf attributes
   ppc::core::PerfAttr perf_attr;
@@ -97,17 +99,17 @@ TEST(perf_tests, check_perf_task_float) {
   std::vector<float> in(2000, 1);
 
   // Create Task
-  auto test_task = std::make_shared<ppc::test::perf::TestTask<float>>(in);
+  auto test_task = std::make_shared<ppc::test::perf::TestTask<std::vector<float>, float>>(in);
 
   // Create Perf analyzer
-  ppc::core::Perf perf_analyzer(test_task);
+  ppc::core::Perf<std::vector<float>, float> perf_analyzer(test_task);
 
   // Create Perf attributes
   ppc::core::PerfAttr perf_attr;
   perf_analyzer.TaskRun(perf_attr);
 
   // Get perf statistic
-  perf_analyzer.PrintPerfStatistic();
+  perf_analyzer.PrintPerfStatistic("check_perf_task_float");
   ASSERT_LE(perf_analyzer.GetPerfResults().time_sec, ppc::core::PerfResults::kMaxTime);
-  EXPECT_EQ(test_task->Get(), in.size());
+  EXPECT_EQ(test_task->GetOutput(), in.size());
 }
diff --git a/modules/core/performance/tests/test_task.hpp b/modules/core/performance/tests/test_task.hpp
new file mode 100644
index 000000000..bc984b697
--- /dev/null
+++ b/modules/core/performance/tests/test_task.hpp
@@ -0,0 +1,44 @@
+#pragma once
+
+#include
+#include
+#include
+
+#include "core/task/include/task.hpp"
+
+namespace ppc::test::perf {
+
+template <class InType, class OutType>
+class TestTask : public ppc::core::Task<InType, OutType> {
+ public:
+  explicit TestTask(const InType& in) { this->GetInput() = in; }
+
+  bool ValidationImpl() override { return !this->GetInput().empty(); }
+
+  bool PreProcessingImpl() override {
+    this->GetOutput() = 0;
+    return true;
+  }
+
+  bool RunImpl() override {
+    for (unsigned i = 0; i < this->GetInput().size(); i++) {
+      this->GetOutput() += this->GetInput()[i];
+    }
+    return true;
+  }
+
+  bool PostProcessingImpl() override { return true; }
+};
+
+template <class InType, class OutType>
+class FakePerfTask : public TestTask<InType, OutType> {
+ public:
+  explicit FakePerfTask(const InType& in) : TestTask<InType, OutType>(in) {}
+
+  bool RunImpl() override {
+    std::this_thread::sleep_for(std::chrono::seconds(11));
+    return TestTask<InType, OutType>::RunImpl();
+  }
+};
+
+}  // namespace ppc::test::perf
diff --git a/modules/core/runners/include/runners.hpp b/modules/core/runners/include/runners.hpp
new file mode 100644
index 000000000..bf04ec5f2
--- /dev/null
+++ b/modules/core/runners/include/runners.hpp
@@ -0,0 +1,31 @@
+#pragma once
+
+#include
+
+#include
+#include
+
+namespace ppc::core {
+
+class UnreadMessagesDetector : public ::testing::EmptyTestEventListener {
+ public:
+  UnreadMessagesDetector() = default;
+  void OnTestEnd(const ::testing::TestInfo& /*test_info*/) override;
+
+ private:
+};
+
+class WorkerTestFailurePrinter : public ::testing::EmptyTestEventListener {
+ public:
+  explicit WorkerTestFailurePrinter(std::shared_ptr<::testing::TestEventListener> base) : base_(std::move(base)) {}
+  void OnTestEnd(const ::testing::TestInfo& test_info) override;
+  void OnTestPartResult(const ::testing::TestPartResult& test_part_result) override;
+
+ private:
+  static void PrintProcessRank();
+  std::shared_ptr<::testing::TestEventListener> base_;
+};
+
+int Init(int argc, char** argv);
+
+}  // namespace ppc::core
diff --git a/modules/core/runners/src/runners.cpp b/modules/core/runners/src/runners.cpp
new file mode 100644
index 000000000..92e98eec5
--- /dev/null
+++ b/modules/core/runners/src/runners.cpp
@@ -0,0 +1,86 @@
+#include "core/runners/include/runners.hpp"
+
+#include
+#include
+#include
+
+#include
+#include
+#include
+#include
+#include
+#include
+
+#include "core/util/include/util.hpp"
+#include "oneapi/tbb/global_control.h"
+
+namespace ppc::core {
+
+void UnreadMessagesDetector::OnTestEnd(const ::testing::TestInfo& /*test_info*/) {
+  int rank = -1;
+  MPI_Comm_rank(MPI_COMM_WORLD, &rank);
+
+  MPI_Barrier(MPI_COMM_WORLD);
+
+  int flag = -1;
+  MPI_Status status;
+
+  MPI_Iprobe(MPI_ANY_SOURCE, MPI_ANY_TAG, MPI_COMM_WORLD, &flag, &status);
+
+  if (flag != 0) {
+    std::cerr
+        << std::format(
+               "[  PROCESS {} ] [  FAILED  ] MPI message queue has an unread message from process {} with tag {}",
+               rank, status.MPI_SOURCE, status.MPI_TAG)
+        << '\n';
+    MPI_Abort(MPI_COMM_WORLD, EXIT_FAILURE);
+  }
+
+  MPI_Barrier(MPI_COMM_WORLD);
+}
+
+void WorkerTestFailurePrinter::OnTestEnd(const ::testing::TestInfo& test_info) {
+  if (test_info.result()->Passed()) {
+    return;
+  }
+  PrintProcessRank();
+  base_->OnTestEnd(test_info);
+}
+
+void WorkerTestFailurePrinter::OnTestPartResult(const ::testing::TestPartResult& test_part_result) {
+  if (test_part_result.passed() || test_part_result.skipped()) {
+    return;
+  }
+  PrintProcessRank();
+  base_->OnTestPartResult(test_part_result);
+}
+
+void WorkerTestFailurePrinter::PrintProcessRank() {
+  int rank = -1;
+  MPI_Comm_rank(MPI_COMM_WORLD, &rank);
+  std::cerr << std::format(" [  PROCESS {} ] ", rank);
+}
+
+int Init(int argc, char** argv) {
+  MPI_Init(&argc, &argv);
+
+  // Limit the number of threads in TBB
+  tbb::global_control control(tbb::global_control::max_allowed_parallelism, ppc::util::GetNumThreads());
+
+  ::testing::InitGoogleTest(&argc, argv);
+
+  auto& listeners = ::testing::UnitTest::GetInstance()->listeners();
+  int rank = -1;
+  MPI_Comm_rank(MPI_COMM_WORLD, &rank);
+  if (rank != 0 && (argc < 2 || argv[1] != std::string("--print-workers"))) {
+    auto* listener = listeners.Release(listeners.default_result_printer());
+    listeners.Append(new ppc::core::WorkerTestFailurePrinter(std::shared_ptr<::testing::TestEventListener>(listener)));
+  }
+  listeners.Append(new ppc::core::UnreadMessagesDetector());
+  auto status = RUN_ALL_TESTS();
+
+  MPI_Finalize();
+  return status;
+}
+
+}  // namespace ppc::core
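The new `ppc::core::Init` above bundles the whole runner lifecycle: `MPI_Init`, the TBB parallelism cap, GoogleTest initialization, the rank-aware listeners, `RUN_ALL_TESTS`, and `MPI_Finalize`. A per-module test runner therefore reduces to a one-liner; the file location (e.g. something under `tasks/common/runners/`, which the new coverage excludes hint at) is an assumption:

```cpp
// Hypothetical per-module runner main(); the real entry points live elsewhere
// in the repository and are excluded from coverage by this patch.
#include "core/runners/include/runners.hpp"

int main(int argc, char** argv) {
  // Init(): MPI_Init -> TBB thread cap -> InitGoogleTest -> rank-aware
  // listeners -> RUN_ALL_TESTS -> MPI_Finalize; returns the GTest status.
  return ppc::core::Init(argc, argv);
}
```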
diff --git a/modules/core/task/func_tests/test_task.hpp b/modules/core/task/func_tests/test_task.hpp
deleted file mode 100644
index ec79cefbf..000000000
--- a/modules/core/task/func_tests/test_task.hpp
+++ /dev/null
@@ -1,50 +0,0 @@
-#pragma once
-
-#include
-#include
-#include
-
-#include "core/task/include/task.hpp"
-
-namespace ppc::test::task {
-
-template <class T>
-class TestTask : public ppc::core::Task {
- public:
-  explicit TestTask(const std::vector<T>& in) : input_(in) {}
-
-  bool ValidationImpl() override { return !input_.empty(); }
-
-  bool PreProcessingImpl() override {
-    output_ = 0;
-    return true;
-  }
-
-  bool RunImpl() override {
-    for (unsigned i = 0; i < input_.size(); i++) {
-      output_ += input_[i];
-    }
-    return true;
-  }
-
-  bool PostProcessingImpl() override { return true; }
-
-  T Get() { return output_; }
-
- private:
-  std::vector<T> input_{};
-  T output_;
-};
-
-template <class T>
-class FakeSlowTask : public TestTask<T> {
- public:
-  explicit FakeSlowTask(const std::vector<T>& in) : TestTask<T>(in) {}
-
-  bool RunImpl() override {
-    std::this_thread::sleep_for(std::chrono::seconds(2));
-    return TestTask<T>::RunImpl();
-  }
-};
-
-}  // namespace ppc::test::task
diff --git a/modules/core/task/include/task.hpp b/modules/core/task/include/task.hpp
index 5d16d4753..5d3d48368 100644
--- a/modules/core/task/include/task.hpp
+++ b/modules/core/task/include/task.hpp
@@ -1,41 +1,166 @@
 #pragma once
 
+#include
 #include
+#include
 #include
+#include
+#include
+#include
+#include
+#include
+#include
+#include
+#include
+#include
 #include
-#include
+
+using namespace std::chrono;
 
 namespace ppc::core {
 
-// Memory of inputs and outputs need to be initialized before create object of
+enum TypeOfTask : uint8_t { kALL, kMPI, kOMP, kSEQ, kSTL, kTBB, kUnknown };
+enum StatusOfTask : uint8_t { kEnabled, kDisabled };
+
+inline std::string GetStringTaskStatus(StatusOfTask status_of_task) {
+  if (status_of_task == kDisabled) {
+    return "disabled";
+  }
+  return "enabled";
+}
+
+inline std::string GetStringTaskType(TypeOfTask type_of_task, const std::string &settings_file_path) {
+  std::ifstream file(settings_file_path);
+  if (!file.is_open()) {
+    throw std::runtime_error("Failed to open file settings.json");
+  }
+
+  auto list_settings = ppc::util::InitJSONPtr();
+  file >> *list_settings;
+
+  auto to_type_str = [&](const std::string &type) -> std::string {
+    return type + "_" + std::string((*list_settings)["tasks"][type]);
+  };
+
+  if (type_of_task == TypeOfTask::kALL) {
+    return to_type_str("all");
+  }
+  if (type_of_task == TypeOfTask::kSTL) {
+    return to_type_str("stl");
+  }
+  if (type_of_task == TypeOfTask::kOMP) {
+    return to_type_str("omp");
+  }
+  if (type_of_task == TypeOfTask::kMPI) {
+    return to_type_str("mpi");
+  }
+  if (type_of_task == TypeOfTask::kTBB) {
+    return to_type_str("tbb");
+  }
+  if (type_of_task == TypeOfTask::kSEQ) {
+    return to_type_str("seq");
+  }
+  return "unknown";
+}
+
+enum StateOfTesting : uint8_t { kFunc, kPerf };
+
+// Memory of inputs and outputs needs to be initialized before creating an object of
 // Task class
+template <typename InType, typename OutType>
 class Task {
  public:
-  enum StateOfTesting : uint8_t { kFunc, kPerf };
-
-  explicit Task(StateOfTesting state_of_testing = StateOfTesting::kFunc);
+  explicit Task(StateOfTesting /*state_of_testing*/ = StateOfTesting::kFunc) { functions_order_.clear(); }
 
   // validation of data and validation of task attributes before running
-  virtual bool Validation() final;
+  virtual bool Validation() final {
+    InternalOrderTest(__builtin_FUNCTION());
+    return ValidationImpl();
+  }
 
   // pre-processing of input data
-  virtual bool PreProcessing() final;
+  virtual bool PreProcessing() final {
+    InternalOrderTest(__builtin_FUNCTION());
+    if (state_of_testing_ == StateOfTesting::kFunc) {
+      InternalTimeTest(__builtin_FUNCTION());
+    }
+    return PreProcessingImpl();
+  }
 
   // realization of the current task
-  virtual bool Run() final;
+  virtual bool Run() final {
+    InternalOrderTest(__builtin_FUNCTION());
+    return RunImpl();
+  }
 
   // post-processing of output data
-  virtual bool PostProcessing() final;
+  virtual bool PostProcessing() final {
+    InternalOrderTest(__builtin_FUNCTION());
+    if (state_of_testing_ == StateOfTesting::kFunc) {
+      InternalTimeTest(__builtin_FUNCTION());
+    }
+    return PostProcessingImpl();
+  }
 
   // get state of testing
   StateOfTesting &GetStateOfTesting() { return state_of_testing_; }
 
-  virtual ~Task();
+  // set a type of task
+  void SetTypeOfTask(TypeOfTask type_of_task) { type_of_task_ = type_of_task; }
 
- protected:
-  virtual void InternalOrderTest(const std::string &str) final;
+  // get a dynamic type of task
+  [[nodiscard]] TypeOfTask GetDynamicTypeOfTask() const { return type_of_task_; }
+
+  // get the status of the task
+  [[nodiscard]] StatusOfTask GetStatusOfTask() const { return status_of_task_; }
+
+  // get a static type of task
+  static constexpr TypeOfTask GetStaticTypeOfTask() { return TypeOfTask::kUnknown; }
+
+  InType &GetInput() { return input_; }
 
-  virtual void InternalTimeTest(const std::string &str) final;
+  OutType &GetOutput() { return output_; }
+
+  virtual ~Task() {
+    if (!functions_order_.empty() || !was_worked_) {
+      std::cerr << "ORDER OF FUNCTIONS IS NOT RIGHT! \n Expected - \"Validation\", \"PreProcessing\", \"Run\", "
+                   "\"PostProcessing\" \n";
+      std::terminate();
+    } else {
+      functions_order_.clear();
+    }
+  }
+
+ protected:
+  virtual void InternalOrderTest(const std::string &str) final {
+    functions_order_.push_back(str);
+    if (str == "PostProcessing" && IsFullPipelineStage()) {
+      functions_order_.clear();
+    } else {
+      was_worked_ = true;
+    }
+  }
+
+  virtual void InternalTimeTest(const std::string &str) final {
+    if (str == "PreProcessing") {
+      tmp_time_point_ = std::chrono::high_resolution_clock::now();
+    }
+
+    if (str == "PostProcessing") {
+      auto duration = duration_cast<nanoseconds>(high_resolution_clock::now() - tmp_time_point_).count();
+      auto diff = static_cast<double>(duration) * 1e-9;
+
+      std::stringstream err_msg;
+      if (diff < kMaxTestTime) {
+        err_msg << "Test time:" << std::fixed << std::setprecision(10) << diff << '\n';
+      } else {
+        err_msg << "\nTask execution time needs to be: ";
+        err_msg << "time < " << kMaxTestTime << " secs.\n";
+        err_msg << "Original time in secs: " << diff << '\n';
+        throw std::runtime_error(err_msg.str().c_str());
+      }
+    }
+  }
 
   // implementation of "Validation" function
   virtual bool ValidationImpl() = 0;
@@ -50,14 +175,35 @@ class Task {
   virtual bool PostProcessingImpl() = 0;
 
  private:
-  StateOfTesting state_of_testing_;
+  InType input_;
+  OutType output_;
+  StateOfTesting state_of_testing_ = kFunc;
+  TypeOfTask type_of_task_ = kUnknown;
+  StatusOfTask status_of_task_ = kEnabled;
   std::vector<std::string> functions_order_;
   std::vector<std::string> right_functions_order_ = {"Validation", "PreProcessing", "Run", "PostProcessing"};
   static constexpr double kMaxTestTime = 1.0;
   std::chrono::high_resolution_clock::time_point tmp_time_point_;
   bool was_worked_ = false;
 
-  bool IsFullPipelineStage();
+  bool IsFullPipelineStage() {
+    auto it = std::adjacent_find(functions_order_.begin() + 2,
+                                 functions_order_.begin() + static_cast<long>(functions_order_.size() - 2),
+                                 std::not_equal_to<>());
+
+    return (functions_order_.size() >= 4 && functions_order_[0] == "Validation" &&
+            functions_order_[1] == "PreProcessing" && functions_order_[2] == "Run" &&
+            it == (functions_order_.begin() + static_cast<long>(functions_order_.size() - 2)) &&
+            functions_order_[functions_order_.size() - 1] == "PostProcessing");
+  }
 };
 
+template <typename InType, typename OutType>
+using TaskPtr = std::shared_ptr<Task<InType, OutType>>;
+
+template <typename TaskType, typename InType>
+std::shared_ptr<TaskType> TaskGetter(InType in) {
+  return std::make_shared<TaskType>(in);
+}
+
 }  // namespace ppc::core
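The templated `Task<InType, OutType>` above enforces the stage order at destruction time and a 1-second budget per functional-test pipeline via `InternalTimeTest`. A minimal sketch of a conforming task; `SumTask` is a made-up example, not a type from this patch:

```cpp
// Sketch of the call order the reworked Task enforces; skipping or reordering
// a stage trips the order check in ~Task(), which prints the expected
// sequence and terminates.
#include <vector>

#include "core/task/include/task.hpp"

class SumTask : public ppc::core::Task<std::vector<int>, int> {
 public:
  explicit SumTask(const std::vector<int>& in) { GetInput() = in; }
  bool ValidationImpl() override { return !GetInput().empty(); }
  bool PreProcessingImpl() override {
    GetOutput() = 0;
    return true;
  }
  bool RunImpl() override {
    for (int v : GetInput()) {
      GetOutput() += v;
    }
    return true;
  }
  bool PostProcessingImpl() override { return true; }
};

int main() {
  SumTask task({1, 2, 3});
  task.Validation();      // 1. validate inputs
  task.PreProcessing();   // 2. prepare data (starts the func-test timer)
  task.Run();             // 3. do the work
  task.PostProcessing();  // 4. finalize (checks the 1-second func-test budget)
  return task.GetOutput() == 6 ? 0 : 1;
}
```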
diff --git a/modules/core/task/src/task.cpp b/modules/core/task/src/task.cpp
deleted file mode 100644
index fc09a1dd0..000000000
--- a/modules/core/task/src/task.cpp
+++ /dev/null
@@ -1,103 +0,0 @@
-#define _CRT_SECURE_NO_WARNINGS
-#include "core/task/include/task.hpp"
-
-#include
-#include
-#include
-#include
-#include
-#include
-#include
-#include
-#include
-#include
-#include
-
-using namespace std::chrono;
-
-ppc::core::Task::Task(StateOfTesting state_of_testing) : state_of_testing_(state_of_testing) {
-  auto custom_terminate = []() {
-    std::cerr << "ORDER OF FUNCTIONS IS NOT RIGHT! \n"
-                 "Expected - \"Validation\", \"PreProcessing\", \"Run\", \"PostProcessing\" \n";
-  };
-  std::set_terminate(custom_terminate);
-  functions_order_.clear();
-}
-
-bool ppc::core::Task::Validation() {
-  InternalOrderTest(__builtin_FUNCTION());
-  return ValidationImpl();
-}
-
-bool ppc::core::Task::PreProcessing() {
-  InternalOrderTest(__builtin_FUNCTION());
-  if (state_of_testing_ == StateOfTesting::kFunc) {
-    InternalTimeTest(__builtin_FUNCTION());
-  }
-  return PreProcessingImpl();
-}
-
-bool ppc::core::Task::Run() {
-  InternalOrderTest(__builtin_FUNCTION());
-  return RunImpl();
-}
-
-bool ppc::core::Task::PostProcessing() {
-  InternalOrderTest(__builtin_FUNCTION());
-  if (state_of_testing_ == StateOfTesting::kFunc) {
-    InternalTimeTest(__builtin_FUNCTION());
-  }
-  return PostProcessingImpl();
-}
-
-void ppc::core::Task::InternalOrderTest(const std::string &str) {
-  functions_order_.push_back(str);
-  if (str == "PostProcessing" && IsFullPipelineStage()) {
-    functions_order_.clear();
-  } else {
-    was_worked_ = true;
-  }
-}
-
-void ppc::core::Task::InternalTimeTest(const std::string &str) {
-  if (str == "PreProcessing") {
-    tmp_time_point_ = std::chrono::high_resolution_clock::now();
-  }
-
-  if (str == "PostProcessing") {
-    auto duration = duration_cast<nanoseconds>(high_resolution_clock::now() - tmp_time_point_).count();
-    auto diff = static_cast<double>(duration) * 1e-9;
-
-    std::stringstream err_msg;
-    // NOLINTNEXTLINE(concurrency-mt-unsafe)
-    if (auto *env = std::getenv("PPC_IGNORE_TEST_TIME_LIMIT"); env != nullptr && std::string(env) == "1") {
-      err_msg << "Test time:" << std::fixed << std::setprecision(10) << diff << " (no time limit)" << '\n';
-    } else if (diff < kMaxTestTime) {
-      err_msg << "Test time:" << std::fixed << std::setprecision(10) << diff << '\n';
-    } else {
-      err_msg << "\nTask execute time need to be: ";
-      err_msg << "time < " << kMaxTestTime << " secs.\n";
-      err_msg << "Original time in secs: " << diff << '\n';
-      throw std::runtime_error(err_msg.str().c_str());
-    }
-  }
-}
-
-bool ppc::core::Task::IsFullPipelineStage() {
-  auto it = std::adjacent_find(functions_order_.begin() + 2,
-                               functions_order_.begin() + static_cast<long>(functions_order_.size() - 2),
-                               std::not_equal_to<>());
-
-  return (functions_order_.size() >= 4 && functions_order_[0] == "Validation" &&
-          functions_order_[1] == "PreProcessing" && functions_order_[2] == "Run" &&
-          it == (functions_order_.begin() + static_cast<long>(functions_order_.size() - 2)) &&
-          functions_order_[functions_order_.size() - 1] == "PostProcessing");
-}
-
-ppc::core::Task::~Task() {
-  if (!functions_order_.empty() || !was_worked_) {
-    std::terminate();
-  } else {
-    functions_order_.clear();
-  }
-}
"core/task/func_tests/test_task.hpp" +#include "core/task/tests/test_task.hpp" #include "core/util/include/util.hpp" TEST(task_tests, check_int32_t) { @@ -12,7 +12,7 @@ TEST(task_tests, check_int32_t) { std::vector in(20, 1); // Create and check Task - ppc::test::task::TestTask test_task(in); + ppc::test::task::TestTask, int32_t> test_task(in); bool is_valid = test_task.Validation(); ASSERT_EQ(is_valid, true); @@ -22,7 +22,7 @@ TEST(task_tests, check_int32_t) { test_task.PostProcessing(); // Check Result - ASSERT_EQ(static_cast(test_task.Get()), in.size()); + ASSERT_EQ(static_cast(test_task.GetOutput()), in.size()); } TEST(task_tests, check_int32_t_slow) { @@ -30,7 +30,7 @@ TEST(task_tests, check_int32_t_slow) { std::vector in(20, 1); // Create and check Task - ppc::test::task::FakeSlowTask test_task(in); + ppc::test::task::FakeSlowTask, int32_t> test_task(in); bool is_valid = test_task.Validation(); ASSERT_EQ(is_valid, true); @@ -45,7 +45,7 @@ TEST(task_tests, check_validate_func) { std::vector in; // Create and check Task - ppc::test::task::TestTask test_task(in); + ppc::test::task::TestTask, int32_t> test_task(in); bool is_valid = test_task.Validation(); // Check Result @@ -61,7 +61,7 @@ TEST(task_tests, check_double) { std::vector in(20, 1); // Create and check Task - ppc::test::task::TestTask test_task(in); + ppc::test::task::TestTask, double> test_task(in); bool is_valid = test_task.Validation(); ASSERT_EQ(is_valid, true); @@ -71,7 +71,7 @@ TEST(task_tests, check_double) { test_task.PostProcessing(); // Check Result - EXPECT_NEAR(test_task.Get(), static_cast(in.size()), 1e-6); + EXPECT_NEAR(test_task.GetOutput(), static_cast(in.size()), 1e-6); } TEST(task_tests, check_uint8_t) { @@ -79,7 +79,7 @@ TEST(task_tests, check_uint8_t) { std::vector in(20, 1); // Create Task - ppc::test::task::TestTask test_task(in); + ppc::test::task::TestTask, uint8_t> test_task(in); bool is_valid = test_task.Validation(); ASSERT_EQ(is_valid, true); @@ -89,7 +89,7 @@ TEST(task_tests, check_uint8_t) { test_task.PostProcessing(); // Check Result - ASSERT_EQ(static_cast(test_task.Get()), in.size()); + ASSERT_EQ(static_cast(test_task.GetOutput()), in.size()); } TEST(task_tests, check_int64_t) { @@ -97,7 +97,7 @@ TEST(task_tests, check_int64_t) { std::vector in(20, 1); // Create Task - ppc::test::task::TestTask test_task(in); + ppc::test::task::TestTask, int64_t> test_task(in); bool is_valid = test_task.Validation(); ASSERT_EQ(is_valid, true); @@ -107,7 +107,7 @@ TEST(task_tests, check_int64_t) { test_task.PostProcessing(); // Check Result - ASSERT_EQ(static_cast(test_task.Get()), in.size()); + ASSERT_EQ(static_cast(test_task.GetOutput()), in.size()); } TEST(task_tests, check_float) { @@ -115,7 +115,7 @@ TEST(task_tests, check_float) { std::vector in(20, 1); // Create Task - ppc::test::task::TestTask test_task(in); + ppc::test::task::TestTask, float> test_task(in); bool is_valid = test_task.Validation(); ASSERT_EQ(is_valid, true); @@ -125,16 +125,16 @@ TEST(task_tests, check_float) { test_task.PostProcessing(); // Check Result - EXPECT_NEAR(test_task.Get(), in.size(), 1e-3); + EXPECT_NEAR(test_task.GetOutput(), in.size(), 1e-3); } -DEATH_TEST(task_tests, check_wrong_order) { +DEATH_TEST(task_tests, check_wrong_order_disabled_valgrind) { auto destroy_function = [] { // Create data std::vector in(20, 1); // Create Task - ppc::test::task::TestTask test_task(in); + ppc::test::task::TestTask, float> test_task(in); bool is_valid = test_task.Validation(); ASSERT_EQ(is_valid, true); test_task.PreProcessing(); @@ 
@@ -143,13 +143,13 @@ DEATH_TEST(task_tests, check_wrong_order) {
   EXPECT_DEATH_IF_SUPPORTED(destroy_function(), ".*ORDER OF FUNCTIONS IS NOT RIGHT.*");
 }
 
-DEATH_TEST(task_tests, check_empty_order) {
+DEATH_TEST(task_tests, check_empty_order_disabled_valgrind) {
   auto destroy_function = [] {
     // Create data
     std::vector<float> in(20, 1);
 
     // Create Task
-    ppc::test::task::TestTask<float> test_task(in);
+    ppc::test::task::TestTask<std::vector<float>, float> test_task(in);
   };
   EXPECT_DEATH_IF_SUPPORTED(destroy_function(), ".*ORDER OF FUNCTIONS IS NOT RIGHT.*");
 }
diff --git a/modules/core/task/tests/test_task.hpp b/modules/core/task/tests/test_task.hpp
new file mode 100644
index 000000000..e3d872e2f
--- /dev/null
+++ b/modules/core/task/tests/test_task.hpp
@@ -0,0 +1,44 @@
+#pragma once
+
+#include <chrono>
+#include <thread>
+#include <vector>
+
+#include "core/task/include/task.hpp"
+
+namespace ppc::test::task {
+
+template <typename InType, typename OutType>
+class TestTask : public ppc::core::Task<InType, OutType> {
+ public:
+  explicit TestTask(const InType& in) { this->GetInput() = in; }
+
+  bool ValidationImpl() override { return !this->GetInput().empty(); }
+
+  bool PreProcessingImpl() override {
+    this->GetOutput() = 0;
+    return true;
+  }
+
+  bool RunImpl() override {
+    for (unsigned i = 0; i < this->GetInput().size(); i++) {
+      this->GetOutput() += this->GetInput()[i];
+    }
+    return true;
+  }
+
+  bool PostProcessingImpl() override { return true; }
+};
+
+template <typename InType, typename OutType>
+class FakeSlowTask : public TestTask<InType, OutType> {
+ public:
+  explicit FakeSlowTask(const InType& in) : TestTask<InType, OutType>(in) {}
+
+  bool RunImpl() override {
+    std::this_thread::sleep_for(std::chrono::seconds(2));
+    return TestTask<InType, OutType>::RunImpl();
+  }
+};
+
+}  // namespace ppc::test::task
diff --git a/modules/core/util/func_tests/util_tests.cpp b/modules/core/util/func_tests/util_tests.cpp
deleted file mode 100644
index b55e7c2f2..000000000
--- a/modules/core/util/func_tests/util_tests.cpp
+++ /dev/null
@@ -1,36 +0,0 @@
-#include <gtest/gtest.h>
-
-#include <cstdlib>
-#include <string>
-#include <thread>
-
-#include "core/util/include/util.hpp"
-
-TEST(util_tests, check_unset_env) {
-#ifndef _WIN32
-  int save_var = ppc::util::GetNumThreads();
-
-  unsetenv("PPC_NUM_THREADS");  // NOLINT(concurrency-mt-unsafe)
-
-  EXPECT_EQ(ppc::util::GetNumThreads(), 1);
-
-  setenv("PPC_NUM_THREADS", std::to_string(save_var).c_str(), 1);  // NOLINT(concurrency-mt-unsafe)
-#else
-  GTEST_SKIP();
-#endif
-}
-
-TEST(util_tests, check_set_env) {
-#ifndef _WIN32
-  int save_var = ppc::util::GetNumThreads();
-
-  const int num_threads = static_cast<int>(std::thread::hardware_concurrency());
-  setenv("PPC_NUM_THREADS", std::to_string(num_threads).c_str(), 1);  // NOLINT(concurrency-mt-unsafe)
-
-  EXPECT_EQ(ppc::util::GetNumThreads(), num_threads);
-
-  setenv("PPC_NUM_THREADS", std::to_string(save_var).c_str(), 1);  // NOLINT(concurrency-mt-unsafe)
-#else
-  GTEST_SKIP();
-#endif
-}
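Note: the func_test_util.hpp header added next encodes each functional test case as a three-element tuple (task factory, test name, test parameters), addressed through the GTestParamIndex enum that util.hpp introduces later in this diff. A small sketch of how one entry is built and read back; MyTask and the parameter value 3 are hypothetical:

    // Shape per this diff: FuncTestParam<InType, OutType, TestType>
    //   = std::tuple<task factory, std::string name, TestType params>
    auto entry = std::make_tuple(ppc::core::TaskGetter<MyTask>,  // hypothetical task type
                                 std::string("example_seq"), 3);
    auto factory = std::get<ppc::util::GTestParamIndex::kTaskGetter>(entry);
    auto name = std::get<ppc::util::GTestParamIndex::kNameTest>(entry);      // "example_seq"
    auto params = std::get<ppc::util::GTestParamIndex::kTestParams>(entry);  // 3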
diff --git a/modules/core/util/include/func_test_util.hpp b/modules/core/util/include/func_test_util.hpp
new file mode 100644
index 000000000..5493e80d0
--- /dev/null
+++ b/modules/core/util/include/func_test_util.hpp
@@ -0,0 +1,127 @@
+#pragma once
+
+#include
+#include
+#include
+
+#include
+#include
+#include
+#include
+#include
+#include
+#include
+#include
+#include
+
+#include "core/task/include/task.hpp"
+#include "core/util/include/util.hpp"
+
+namespace ppc::util {
+
+template <typename InType, typename OutType, typename TestType>
+using FuncTestParam = std::tuple<std::function<ppc::core::TaskPtr<InType, OutType>(InType)>, std::string, TestType>;
+
+template <typename InType, typename OutType, typename TestType>
+using GTestFuncParam = ::testing::TestParamInfo<FuncTestParam<InType, OutType, TestType>>;
+
+template <typename T, typename TestType>
+concept HasPrintTestParam = requires(TestType value) {
+  { T::PrintTestParam(value) } -> std::same_as<std::string>;
+};
+
+template <typename InType, typename OutType, typename TestType>
+class BaseRunFuncTests : public ::testing::TestWithParam<FuncTestParam<InType, OutType, TestType>> {
+ public:
+  virtual bool CheckTestOutputData(OutType& output_data) = 0;
+  virtual InType GetTestInputData() = 0;
+
+  template <typename Derived>
+  static void RequireStaticInterface() {
+    static_assert(HasPrintTestParam<Derived, TestType>,
+                  "Derived class must implement: static std::string PrintTestParam(TestType)");
+  }
+
+  template <typename Derived>
+  static std::string PrintFuncTestName(const GTestFuncParam<InType, OutType, TestType>& info) {
+    RequireStaticInterface<Derived>();
+    TestType test_param = std::get<GTestParamIndex::kTestParams>(info.param);
+    return std::get<GTestParamIndex::kNameTest>(info.param) + "_" + Derived::PrintTestParam(test_param);
+  }
+
+ protected:
+  void ExecuteTest(FuncTestParam<InType, OutType, TestType> test_param) {
+    const std::string& test_name = std::get<GTestParamIndex::kNameTest>(test_param);
+
+    ValidateTestName(test_name);
+
+    if (IsTestDisabled(test_name)) {
+      GTEST_SKIP();
+    }
+
+    if (ShouldSkipNonMpiTask(test_name)) {
+      std::cerr << "kALL and kMPI tasks are not under mpirun\n";
+      GTEST_SKIP();
+    }
+
+    InitializeAndRunTask(test_param);
+  }
+
+  void ValidateTestName(const std::string& test_name) { EXPECT_FALSE(test_name.find("unknown") != std::string::npos); }
+
+  bool IsTestDisabled(const std::string& test_name) { return test_name.find("disabled") != std::string::npos; }
+
+  bool ShouldSkipNonMpiTask(const std::string& test_name) {
+    auto contains_substring = [&](const std::string& substring) {
+      return test_name.find(substring) != std::string::npos;
+    };
+
+    return !ppc::util::IsUnderMpirun() && (contains_substring("_all") || contains_substring("_mpi"));
+  }
+
+  void InitializeAndRunTask(const FuncTestParam<InType, OutType, TestType>& test_param) {
+    task_ = std::get<GTestParamIndex::kTaskGetter>(test_param)(GetTestInputData());
+
+    EXPECT_TRUE(task_->Validation());
+    EXPECT_TRUE(task_->PreProcessing());
+    EXPECT_TRUE(task_->Run());
+    EXPECT_TRUE(task_->PostProcessing());
+    EXPECT_TRUE(CheckTestOutputData(task_->GetOutput()));
+  }
+
+ private:
+  ppc::core::TaskPtr<InType, OutType> task_;
+};
+
+template <typename Tuple, std::size_t... Is>
+auto ExpandToValuesImpl(const Tuple& t, std::index_sequence<Is...> /*unused*/) {
+  return ::testing::Values(std::get<Is>(t)...);
+}
+
+template <typename Tuple>
+auto ExpandToValues(const Tuple& t) {
+  constexpr std::size_t kN = std::tuple_size_v<Tuple>;
+  return ExpandToValuesImpl(t, std::make_index_sequence<kN>{});
+}
+
+template <typename Task, typename SizesContainer, std::size_t... Is>
+auto GenTaskTuplesImpl(const SizesContainer& sizes, const std::string& settings_path,
+                       std::index_sequence<Is...> /*unused*/) {
+  return std::make_tuple(std::make_tuple(ppc::core::TaskGetter<Task>,
+                                         std::string(GetNamespace<Task>()) + "_" +
+                                             ppc::core::GetStringTaskType(Task::GetStaticTypeOfTask(), settings_path),
+                                         sizes[Is])...);
+}
+
+template <typename Task, typename SizesContainer>
+auto TaskListGenerator(const SizesContainer& sizes, const std::string& settings_path) {
+  return GenTaskTuplesImpl<Task>(
+      sizes, settings_path, std::make_index_sequence<std::tuple_size_v<std::remove_reference_t<SizesContainer>>>{});
+}
+
+template <typename Task, typename SizesContainer>
+constexpr auto AddFuncTask(const SizesContainer& sizes, const std::string& settings_path) {
+  return TaskListGenerator<Task>(sizes, settings_path);
+}
+
+}  // namespace ppc::util
diff --git a/modules/core/util/include/perf_test_util.hpp b/modules/core/util/include/perf_test_util.hpp
new file mode 100644
index 000000000..460f739ae
--- /dev/null
+++ b/modules/core/util/include/perf_test_util.hpp
@@ -0,0 +1,128 @@
+#pragma once
+
+#include
+#include
+
+#include
+#include
+#include
+#include
+#include
+#include
+#include
+#include
+#include
+#include
+
+#include "core/performance/include/performance.hpp"
+#include "core/task/include/task.hpp"
+#include "core/util/include/util.hpp"
+
+namespace ppc::util {
+
+double GetTimeMPI();
+int GetMPIRank();
+
+template <typename InType, typename OutType>
+using PerfTestParam = std::tuple<std::function<ppc::core::TaskPtr<InType, OutType>(InType)>, std::string,
+                                 ppc::core::PerfResults::TypeOfRunning>;
+
+template <typename InType, typename OutType>
+class BaseRunPerfTests : public ::testing::TestWithParam<PerfTestParam<InType, OutType>> {
+ public:
+  static std::string CustomPerfTestName(const ::testing::TestParamInfo<PerfTestParam<InType, OutType>>& info) {
+    return ppc::core::GetStringParamName(std::get<GTestParamIndex::kTestParams>(info.param)) + "_" +
+           std::get<GTestParamIndex::kNameTest>(info.param);
+  }
+
+ protected:
+  virtual bool CheckTestOutputData(OutType& output_data) = 0;
+  virtual InType GetTestInputData() = 0;
+
+  virtual void SetPerfAttributes(ppc::core::PerfAttr& perf_attrs) {
+    if (task_->GetDynamicTypeOfTask() == ppc::core::TypeOfTask::kMPI ||
+        task_->GetDynamicTypeOfTask() == ppc::core::TypeOfTask::kALL) {
+      const double t0 = GetTimeMPI();
+      perf_attrs.current_timer = [t0] { return GetTimeMPI() - t0; };
+    } else if (task_->GetDynamicTypeOfTask() == ppc::core::TypeOfTask::kOMP) {
+      const double t0 = omp_get_wtime();
+      perf_attrs.current_timer = [t0] { return omp_get_wtime() - t0; };
+    } else if (task_->GetDynamicTypeOfTask() == ppc::core::TypeOfTask::kSEQ ||
+               task_->GetDynamicTypeOfTask() == ppc::core::TypeOfTask::kSTL ||
+               task_->GetDynamicTypeOfTask() == ppc::core::TypeOfTask::kTBB) {
+      const auto t0 = std::chrono::high_resolution_clock::now();
+      perf_attrs.current_timer = [&] {
+        auto now = std::chrono::high_resolution_clock::now();
+        auto ns = std::chrono::duration_cast<std::chrono::nanoseconds>(now - t0).count();
+        return static_cast<double>(ns) * 1e-9;
+      };
+    } else {
+      throw std::runtime_error("The task type is not supported for performance testing.");
+    }
+  }
+
+  void ExecuteTest(const PerfTestParam<InType, OutType>& perf_test_param) {
+    auto task_getter = std::get<GTestParamIndex::kTaskGetter>(perf_test_param);
+    auto test_name = std::get<GTestParamIndex::kNameTest>(perf_test_param);
+    auto mode = std::get<GTestParamIndex::kTestParams>(perf_test_param);
+
+    ASSERT_FALSE(test_name.find("unknown") != std::string::npos);
+    if (test_name.find("disabled") != std::string::npos) {
+      GTEST_SKIP();
+    }
+
+    task_ = task_getter(GetTestInputData());
+    ppc::core::Perf perf(task_);
+    ppc::core::PerfAttr perf_attr;
+    SetPerfAttributes(perf_attr);
+
+    if (mode == ppc::core::PerfResults::TypeOfRunning::kPipeline) {
+      perf.PipelineRun(perf_attr);
+    } else if (mode == ppc::core::PerfResults::TypeOfRunning::kTaskRun) {
+      perf.TaskRun(perf_attr);
+    } else {
+      std::stringstream err_msg;
+      err_msg << '\n' << "The type of performance check for the task was not selected.\n";
+      throw std::runtime_error(err_msg.str().c_str());
+    }
+
+    if (GetMPIRank() == 0) {
+      perf.PrintPerfStatistic(test_name);
+    }
+
+    OutType output_data = task_->GetOutput();
+    ASSERT_TRUE(CheckTestOutputData(output_data));
+  }
+
+ private:
+  ppc::core::TaskPtr<InType, OutType> task_;
+};
+
+template <typename TaskType>
+auto MakePerfTaskTuples(const std::string& settings_path) {
+  const auto name = std::string(GetNamespace<TaskType>()) + "_" +
+                    ppc::core::GetStringTaskType(TaskType::GetStaticTypeOfTask(), settings_path);
+
+  return std::make_tuple(std::make_tuple(ppc::core::TaskGetter<TaskType>, name,
                                         ppc::core::PerfResults::TypeOfRunning::kPipeline),
+                         std::make_tuple(ppc::core::TaskGetter<TaskType>, name,
+                                         ppc::core::PerfResults::TypeOfRunning::kTaskRun));
+}
+
+template <typename Tuple, std::size_t... Is>
+auto TupleToGTestValuesImpl(Tuple&& tup, std::index_sequence<Is...> /*unused*/) {
+  return ::testing::Values(std::get<Is>(std::forward<Tuple>(tup))...);
+}
+
+template <typename Tuple>
+auto TupleToGTestValues(Tuple&& tup) {
+  constexpr size_t kSize = std::tuple_size<std::decay_t<Tuple>>::value;
+  return TupleToGTestValuesImpl(std::forward<Tuple>(tup), std::make_index_sequence<kSize>{});
+}
+
+template <typename... Tasks>
+auto MakeAllPerfTasks(const std::string& settings_path) {
+  return std::tuple_cat(MakePerfTaskTuples<Tasks>(settings_path)...);
+}
+
+}  // namespace ppc::util
diff --git a/modules/core/util/include/util.hpp b/modules/core/util/include/util.hpp
index 14da7994e..b645f2966 100644
--- a/modules/core/util/include/util.hpp
+++ b/modules/core/util/include/util.hpp
@@ -1,9 +1,25 @@
 #pragma once
+
+#include
+#include
+#include
 #include
+#include
+
+#ifdef _MSC_VER
+#pragma warning(push)
+#pragma warning(disable : 4459)
+#endif
+
+#include  // NOLINT(misc-include-cleaner)
+
+#ifdef _MSC_VER
+#pragma warning(pop)
+#endif
 
 /* NOLINTBEGIN */
-#define INSTANTIATE_TEST_SUITE_P_NOLINT(prefix, test_case_name, generator) \
-  INSTANTIATE_TEST_SUITE_P(prefix, test_case_name, generator)
+#define INSTANTIATE_TEST_SUITE_P_NOLINT(prefix, test_case_name, generator, custom_test_name) \
+  INSTANTIATE_TEST_SUITE_P(prefix, test_case_name, generator, custom_test_name)
 /* NOLINTEND */
 
 /* NOLINTBEGIN */
@@ -12,7 +28,72 @@
 
 namespace ppc::util {
 
-std::string GetAbsolutePath(const std::string &relative_path);
+enum GTestParamIndex : uint8_t { kTaskGetter, kNameTest, kTestParams };
+
+std::string GetAbsoluteTaskPath(const std::string& id_path, const std::string& relative_path);
 
 int GetNumThreads();
 
+template <typename T>
+constexpr std::string_view GetNamespace() {
+#if defined(__clang__) || defined(__GNUC__)
+  constexpr std::string_view kFunc = __PRETTY_FUNCTION__;
+  constexpr std::string_view kKey = "T = ";
+
+  auto start = kFunc.find(kKey);
+  if (start == std::string_view::npos) {
+    return {};
+  }
+  start += kKey.size();
+
+  auto end = kFunc.find_first_of(";]> ,", start);
+  if (end == std::string_view::npos) {
+    return {};
+  }
+
+  auto full_type = kFunc.substr(start, end - start);
+
+  auto ns_end = full_type.rfind("::");
+  if (ns_end == std::string_view::npos) {
+    return {};
+  }
+
+  return full_type.substr(0, ns_end);
+
+#elif defined(_MSC_VER)
+  constexpr std::string_view kFunc = __FUNCSIG__;
+  constexpr std::string_view kKey = "GetNamespace<";
+
+  auto start = kFunc.find(kKey);
+  if (start == std::string_view::npos) return {};
+  start += kKey.size();
+
+  constexpr std::string_view prefixes[] = {"class ", "struct ", "enum ", "union "};
+  for (auto prefix : prefixes) {
+    if (kFunc.substr(start, prefix.size()) == prefix) {
+      start += prefix.size();
+      break;
+    }
+  }
+
+  auto end = kFunc.find('>', start);
+  if (end == std::string_view::npos) return {};
+
+  auto full_type = kFunc.substr(start, end - start);
+
+  auto ns_end = full_type.rfind("::");
+  if (ns_end == std::string_view::npos) return {};
+
+  return full_type.substr(0, ns_end);
+
+#else
+  static_assert([] { return false; }(), "Unsupported compiler");
+  return {};
+#endif
+}
+
+// NOLINTNEXTLINE(misc-include-cleaner)
+inline std::shared_ptr InitJSONPtr() { return std::make_shared(); }
+
+bool IsUnderMpirun();
+
 }  // namespace ppc::util
diff --git a/modules/core/util/src/func_test_util.cpp b/modules/core/util/src/func_test_util.cpp
new file mode 100644
index 000000000..6f6b19af7
--- /dev/null
+++ b/modules/core/util/src/func_test_util.cpp
@@ -0,0 +1,11 @@
+#include <mpi.h>
+
+#include "core/util/include/perf_test_util.hpp"
+
+double ppc::util::GetTimeMPI() { return MPI_Wtime(); }
+
+int ppc::util::GetMPIRank() {
+  int rank = -1;
+  MPI_Comm_rank(MPI_COMM_WORLD, &rank);
+  return rank;
+}
diff --git a/modules/core/util/src/util.cpp b/modules/core/util/src/util.cpp
index c5d94be91..e1eb3f3d3 100644
--- a/modules/core/util/src/util.cpp
+++ b/modules/core/util/src/util.cpp
@@ -1,33 +1,39 @@
 #include "core/util/include/util.hpp"
 
-#include
-#ifdef _WIN32
-#include
-#include
-#include
-#include
-#endif
-
+#include
+#include
 #include
+#include
 #include
 
-std::string ppc::util::GetAbsolutePath(const std::string &relative_path) {
+namespace {
+
+std::string GetAbsolutePath(const std::string& relative_path) {
   const std::filesystem::path path = std::string(PPC_PATH_TO_PROJECT) + "/tasks/" + relative_path;
   return path.string();
 }
+
+}  // namespace
+
+std::string ppc::util::GetAbsoluteTaskPath(const std::string& id_path, const std::string& relative_path) {
+  return GetAbsolutePath(id_path + "/data/" + relative_path);
+}
+
 int ppc::util::GetNumThreads() {
-#ifdef _WIN32
-  size_t len;
-  char omp_env[100];
-  errno_t err = getenv_s(&len, omp_env, sizeof(omp_env), "PPC_NUM_THREADS");
-  if (err != 0 || len == 0) {
-    omp_env[0] = '\0';
+  const auto num_threads = env::get<int>("PPC_NUM_THREADS");
+  if (num_threads.has_value()) {
+    return num_threads.value();
   }
-  int num_threads = std::atoi(omp_env);
-#else
-  const char *omp_env = std::getenv("PPC_NUM_THREADS");  // NOLINT(concurrency-mt-unsafe)
-  int num_threads = (omp_env != nullptr) ? std::atoi(omp_env) : 1;
-#endif
-  return num_threads;
+  return 1;
+}
+
+constexpr std::array kMpiEnvVars = {
+    "OMPI_COMM_WORLD_SIZE", "OMPI_UNIVERSE_SIZE", "PMI_SIZE",     "PMI_RANK",   "PMI_FD",
+    "HYDRA_CONTROL_FD",     "PMIX_RANK",          "SLURM_PROCID", "MSMPI_RANK", "MSMPI_LOCALRANK"};
+
+bool ppc::util::IsUnderMpirun() {
+  return std::ranges::any_of(kMpiEnvVars, [&](const auto& env_var) {
+    const auto mpi_env = env::get(env_var);
+    return static_cast<bool>(mpi_env.has_value());
+  });
+}
diff --git a/modules/core/util/tests/util.cpp b/modules/core/util/tests/util.cpp
new file mode 100644
index 000000000..929272926
--- /dev/null
+++ b/modules/core/util/tests/util.cpp
@@ -0,0 +1,27 @@
+#include "core/util/include/util.hpp"
+
+#include
+
+#include
+
+#include "omp.h"
+
+namespace my::nested {
+struct Type {};
+}  // namespace my::nested
+
+TEST(util_tests, extracts_correct_namespace) {
+  constexpr std::string_view kNs = ppc::util::GetNamespace<my::nested::Type>();
+  EXPECT_EQ(kNs, "my::nested");
+}
+
+TEST(util_tests, threads_control_check_openmp_disabled_valgrind) {
+  int ppc_num_threads = ppc::util::GetNumThreads();
+
+  int omp_num_threads = -1;
+#pragma omp parallel default(none) shared(omp_num_threads) num_threads(ppc::util::GetNumThreads())
+  omp_num_threads = omp_get_num_threads();
+
+  // Check Result
+  ASSERT_EQ(ppc_num_threads, omp_num_threads);
+}
diff --git a/scripts/run_perf_counter.py b/scripts/run_perf_counter.py
deleted file mode 100644
index f8840fcbe..000000000
--- a/scripts/run_perf_counter.py
+++ /dev/null
@@ -1,48 +0,0 @@
-import subprocess
-import re
-import sys
-from pathlib import Path
-
-
-def init_cmd_args():
-    import argparse
-    parser = argparse.ArgumentParser()
-    parser.add_argument(
-        "--required-tests-number",
-        required=True,
-        type=int,
-        help="Specify the number of tests to run (must be an integer)."
- ) - args = parser.parse_args() - _args_dict = vars(args) - return _args_dict - - -def get_project_path(): - script_path = Path(__file__).resolve() - script_dir = script_path.parent - return script_dir.parent - - -def run_script(_script_path): - result = subprocess.run( - f"{sys.executable} {_script_path} --running-type=performance-list", shell=True, capture_output=True, text=True) - if result.returncode != 0: - raise Exception(f"Subprocess return {result.returncode}.") - - print(result.stdout) - print(result.stderr) - return result.stdout.splitlines() - - -if __name__ == "__main__": - args_dict = init_cmd_args() - tests_list = run_script(Path(get_project_path()) / "scripts/run_tests.py") - tests_number = int((len(tests_list) / 3) * 2) - - pattern = r".*GetParam().*" - test_matches = [test_name for test_name in tests_list if re.match(pattern, test_name)] - required_tests_number = int((args_dict["required_tests_number"] + 1) * len(test_matches) / 3) - - if tests_number != required_tests_number: - raise Exception(f"Count of all tests {tests_number} != count of required tests {required_tests_number}.") diff --git a/scripts/run_tests.py b/scripts/run_tests.py index f75dbe4e4..6bc6cafad 100644 --- a/scripts/run_tests.py +++ b/scripts/run_tests.py @@ -10,7 +10,7 @@ def init_cmd_args(): parser.add_argument( "--running-type", required=True, - choices=["threads", "processes", "performance", "performance-list"], + choices=["threads", "processes", "performance"], help="Specify the execution mode. Choose 'threads' for multithreading or 'processes' for multiprocessing." ) parser.add_argument( @@ -26,7 +26,11 @@ def init_cmd_args(): class PPCRunner: def __init__(self): + self.__ppc_num_threads = None + self.__ppc_num_proc = None + self.__ppc_env = None self.work_dir = None + self.valgrind_cmd = "valgrind --error-exitcode=1 --leak-check=full --show-leak-kinds=all" if platform.system() == "Windows": @@ -40,75 +44,79 @@ def __get_project_path(): script_dir = script_path.parent # Directory containing the script return script_dir.parent - def setup_env(self): + def setup_env(self, ppc_env): + self.__ppc_env = ppc_env + + self.__ppc_num_threads = self.__ppc_env.get("PPC_NUM_THREADS") + if self.__ppc_num_threads is None: + raise EnvironmentError("Required environment variable 'PPC_NUM_THREADS' is not set.") + self.__ppc_env["OMP_NUM_THREADS"] = self.__ppc_num_threads + + self.__ppc_num_proc = self.__ppc_env.get("PPC_NUM_PROC") + if self.__ppc_num_proc is None: + raise EnvironmentError("Required environment variable 'PPC_NUM_PROC' is not set.") + if (Path(self.__get_project_path()) / "install").exists(): self.work_dir = Path(self.__get_project_path()) / "install" / "bin" else: self.work_dir = Path(self.__get_project_path()) / "build" / "bin" - @staticmethod - def __run_exec(command): - result = subprocess.run(command, shell=True, env=os.environ) + def __run_exec(self, command): + result = subprocess.run(command, shell=True, env=self.__ppc_env) if result.returncode != 0: raise Exception(f"Subprocess return {result.returncode}.") @staticmethod - def __get_gtest_settings(repeats_count): + def __get_gtest_settings(repeats_count, type_task): command = f"--gtest_repeat={repeats_count} " command += "--gtest_recreate_environments_when_repeating " command += "--gtest_color=0 " + command += "--gtest_shuffle " + command += f"--gtest_filter=\"*{type_task}*\" " return command def run_threads(self): - if platform.system() == "Linux" and not os.environ.get("PPC_ASAN_RUN"): - self.__run_exec(f"{self.valgrind_cmd} 
{self.work_dir / 'seq_func_tests'} {self.__get_gtest_settings(1)}") - self.__run_exec(f"{self.valgrind_cmd} {self.work_dir / 'stl_func_tests'} {self.__get_gtest_settings(1)}") + if platform.system() == "Linux" and not self.__ppc_env.get("PPC_ASAN_RUN"): + for task_type in ["seq", "stl"]: + self.__run_exec(f"{self.valgrind_cmd} {self.work_dir / 'ppc_func_tests'} " + f"{self.__get_gtest_settings(1, '_' + task_type + '_')}") - self.__run_exec(f"{self.work_dir / 'seq_func_tests'} {self.__get_gtest_settings(3)}") - self.__run_exec(f"{self.work_dir / 'stl_func_tests'} {self.__get_gtest_settings(3)}") - self.__run_exec(f"{self.work_dir / 'tbb_func_tests'} {self.__get_gtest_settings(3)}") - self.__run_exec(f"{self.work_dir / 'omp_func_tests'} {self.__get_gtest_settings(3)}") + for task_type in ["omp", "seq", "stl", "tbb"]: + self.__run_exec(f"{self.work_dir / 'ppc_func_tests'} {self.__get_gtest_settings(3, '_' + task_type + '_')}") def run_core(self): - if platform.system() == "Linux" and not os.environ.get("PPC_ASAN_RUN"): - self.__run_exec(f"{self.valgrind_cmd} {self.work_dir / 'core_func_tests'} {self.__get_gtest_settings(1)}") + if platform.system() == "Linux" and not self.__ppc_env.get("PPC_ASAN_RUN"): + self.__run_exec(f"{self.valgrind_cmd} {self.work_dir / 'core_func_tests'} " + f"{self.__get_gtest_settings(1, '*')} --gtest_filter=*:-*_disabled_valgrind") - self.__run_exec(f"{self.work_dir / 'core_func_tests'} {self.__get_gtest_settings(1)}") + self.__run_exec(f"{self.work_dir / 'core_func_tests'} {self.__get_gtest_settings(1, '*')}") def run_processes(self, additional_mpi_args): - PPC_NUM_PROC = os.environ.get("PPC_NUM_PROC") - if PPC_NUM_PROC is None: + ppc_num_proc = self.__ppc_env.get("PPC_NUM_PROC") + if ppc_num_proc is None: raise EnvironmentError("Required environment variable 'PPC_NUM_PROC' is not set.") - mpi_running = f"{self.mpi_exec} {additional_mpi_args} -np {PPC_NUM_PROC}" - if not os.environ.get("PPC_ASAN_RUN"): - self.__run_exec(f"{mpi_running} {self.work_dir / 'all_func_tests'} {self.__get_gtest_settings(10)}") - self.__run_exec(f"{mpi_running} {self.work_dir / 'mpi_func_tests'} {self.__get_gtest_settings(10)}") + mpi_running = f"{self.mpi_exec} {additional_mpi_args} -np {ppc_num_proc}" + if not self.__ppc_env.get("PPC_ASAN_RUN"): + for task_type in ["all", "mpi"]: + self.__run_exec(f"{mpi_running} {self.work_dir / 'ppc_func_tests'} " + f"{self.__get_gtest_settings(10, '_' + task_type)}") def run_performance(self): - if not os.environ.get("PPC_ASAN_RUN"): - PPC_NUM_PROC = os.environ.get("PPC_NUM_PROC") - if PPC_NUM_PROC is None: - raise EnvironmentError("Required environment variable 'PPC_NUM_PROC' is not set.") - mpi_running = f"{self.mpi_exec} -np {PPC_NUM_PROC}" - self.__run_exec(f"{mpi_running} {self.work_dir / 'all_perf_tests'} {self.__get_gtest_settings(1)}") - self.__run_exec(f"{mpi_running} {self.work_dir / 'mpi_perf_tests'} {self.__get_gtest_settings(1)}") + if not self.__ppc_env.get("PPC_ASAN_RUN"): + mpi_running = f"{self.mpi_exec} -np {self.__ppc_num_proc}" + for task_type in ["all", "mpi"]: + self.__run_exec(f"{mpi_running} {self.work_dir / 'ppc_perf_tests'} " + f"{self.__get_gtest_settings(1, '_' + task_type)}") - self.__run_exec(f"{self.work_dir / 'omp_perf_tests'} {self.__get_gtest_settings(1)}") - self.__run_exec(f"{self.work_dir / 'seq_perf_tests'} {self.__get_gtest_settings(1)}") - self.__run_exec(f"{self.work_dir / 'stl_perf_tests'} {self.__get_gtest_settings(1)}") - self.__run_exec(f"{self.work_dir / 'tbb_perf_tests'} {self.__get_gtest_settings(1)}") - 
- def run_performance_list(self): - for task_type in ["all", "mpi", "omp", "seq", "stl", "tbb"]: - self.__run_exec(f"{self.work_dir / f'{task_type}_perf_tests'} --gtest_list_tests") + for task_type in ["omp", "seq", "stl", "tbb"]: + self.__run_exec(f"{self.work_dir / 'ppc_perf_tests'} {self.__get_gtest_settings(1, '_' + task_type)}") if __name__ == "__main__": args_dict = init_cmd_args() - ppc_runner = PPCRunner() - ppc_runner.setup_env() + ppc_runner.setup_env(os.environ.copy()) if args_dict["running_type"] in ["threads", "processes"]: ppc_runner.run_core() @@ -119,7 +127,5 @@ def run_performance_list(self): ppc_runner.run_processes(args_dict["additional_mpi_args"]) elif args_dict["running_type"] == "performance": ppc_runner.run_performance() - elif args_dict["running_type"] == "performance-list": - ppc_runner.run_performance_list() else: raise Exception("running-type is wrong!") diff --git a/tasks/CMakeLists.txt b/tasks/CMakeLists.txt index 1a74fcade..9863ef1c1 100644 --- a/tasks/CMakeLists.txt +++ b/tasks/CMakeLists.txt @@ -1,113 +1,41 @@ -message(STATUS "Student's tasks") - -list(APPEND LIST_OF_TASKS "mpi") -list(APPEND LIST_OF_TASKS "omp") -list(APPEND LIST_OF_TASKS "seq") -list(APPEND LIST_OF_TASKS "stl") -list(APPEND LIST_OF_TASKS "tbb") -list(APPEND LIST_OF_TASKS "all") +cmake_minimum_required(VERSION 3.15) +project(parallel_programming_course LANGUAGES C CXX) -add_compile_definitions(PATH_TO_PPC_PROJECT="${CMAKE_SOURCE_DIR}") - -add_library(stb_image INTERFACE) +message(STATUS "Student's tasks") -foreach(TASK_TYPE ${LIST_OF_TASKS}) - set(PATH_TO_TASK "${CMAKE_CURRENT_SOURCE_DIR}/${TASK_TYPE}") - get_filename_component(MODULE_NAME ${PATH_TO_TASK} NAME) - message(STATUS "${MODULE_NAME} tasks") - set(exec_func_tests "${MODULE_NAME}_func_tests") - set(exec_perf_tests "${MODULE_NAME}_perf_tests") - set(exec_func_lib "${MODULE_NAME}_module_lib") - set(project_suffix "_${MODULE_NAME}") +# ——— Testing options ———————————————————————————————————————— +option(USE_FUNC_TESTS "Enable functional tests" OFF) +option(USE_PERF_TESTS "Enable performance tests" OFF) - SUBDIRLIST(subdirs ${PATH_TO_TASK}) - foreach(subd ${subdirs}) - if ("${subd}" MATCHES "_disabled$") - get_filename_component(DIR_NAME ${PATH_TO_TASK} NAME) - list(APPEND LIST_OF_REVERTS "${DIR_NAME}_${subd}") - continue() - endif() - get_filename_component(PROJECT_ID ${subd} NAME) - set(PATH_PREFIX "${PATH_TO_TASK}/${subd}") - message(STATUS "-- ${PROJECT_ID}${project_suffix}") +# Test runner executables +set(FUNC_TEST_EXEC ppc_func_tests) +set(PERF_TEST_EXEC ppc_perf_tests) - file(GLOB_RECURSE TMP_LIB_SOURCE_FILES "${PATH_PREFIX}/include/*" "${PATH_PREFIX}/src/*") - list(APPEND LIB_SOURCE_FILES ${TMP_LIB_SOURCE_FILES}) +# ——— Global compile definitions ————————————————————————————————————— +add_compile_definitions( + PATH_TO_PPC_PROJECT="${PROJECT_SOURCE_DIR}" +) - file(GLOB SRC_RES "${PATH_PREFIX}/src/*") - list(APPEND SRC_RES ${TMP_SRC_RES}) +# ——— Include helper scripts —————————————————————————————————————— +include(${CMAKE_SOURCE_DIR}/cmake/functions.cmake) - file(GLOB_RECURSE TMP_FUNC_TESTS_SOURCE_FILES "${PATH_PREFIX}/func_tests/*") - list(APPEND FUNC_TESTS_SOURCE_FILES ${TMP_FUNC_TESTS_SOURCE_FILES}) +# ——— Initialize test executables ————————————————————————————————————— +ppc_add_test(${FUNC_TEST_EXEC} common/runners/functional.cpp USE_FUNC_TESTS) +ppc_add_test(${PERF_TEST_EXEC} common/runners/performance.cpp USE_PERF_TESTS) - file(GLOB_RECURSE TMP_PERF_TESTS_SOURCE_FILES "${PATH_PREFIX}/perf_tests/*") - list(APPEND 
PERF_TESTS_SOURCE_FILES ${TMP_PERF_TESTS_SOURCE_FILES}) - endforeach() +# ——— List of implementations ———————————————————————————————————————— +set(IMPLEMENTATIONS all mpi omp seq stl tbb) - project(${exec_func_lib}) - list(LENGTH SRC_RES RES_LEN) - if(RES_LEN EQUAL 0) - add_library(${exec_func_lib} INTERFACE ${LIB_SOURCE_FILES}) - else() - add_library(${exec_func_lib} STATIC ${LIB_SOURCE_FILES}) +# ——— Configure each subproject ————————————————————————————————————— +file(GLOB subdirs RELATIVE "${CMAKE_CURRENT_SOURCE_DIR}" "${CMAKE_CURRENT_SOURCE_DIR}/*") +foreach(sub IN LISTS subdirs) + if(IS_DIRECTORY "${CMAKE_CURRENT_SOURCE_DIR}/${sub}" AND NOT sub STREQUAL "common") + ppc_configure_subproject(${sub}) endif() - set_target_properties(${exec_func_lib} PROPERTIES LINKER_LANGUAGE CXX) - - if (USE_FUNC_TESTS) - add_executable(${exec_func_tests} ${FUNC_TESTS_SOURCE_FILES} "${PATH_TO_TASK}/runner.cpp") - list(APPEND LIST_OF_EXEC_TESTS ${exec_func_tests}) - endif (USE_FUNC_TESTS) - if (USE_PERF_TESTS) - add_executable(${exec_perf_tests} ${PERF_TESTS_SOURCE_FILES} "${PATH_TO_TASK}/runner.cpp") - list(APPEND LIST_OF_EXEC_TESTS ${exec_perf_tests}) - endif (USE_PERF_TESTS) - - foreach (EXEC_FUNC ${LIST_OF_EXEC_TESTS}) - target_link_libraries(${EXEC_FUNC} PUBLIC ${exec_func_lib} core_module_lib) - target_link_libraries(${EXEC_FUNC} PUBLIC Threads::Threads) - target_link_libraries(${EXEC_FUNC} PUBLIC ${OpenMP_libomp_LIBRARY}) - if( MPI_COMPILE_FLAGS ) - set_target_properties(${EXEC_FUNC} PROPERTIES COMPILE_FLAGS "${MPI_COMPILE_FLAGS}") - endif( MPI_COMPILE_FLAGS ) - - if( MPI_LINK_FLAGS ) - set_target_properties(${EXEC_FUNC} PROPERTIES LINK_FLAGS "${MPI_LINK_FLAGS}") - endif( MPI_LINK_FLAGS ) - target_link_libraries(${EXEC_FUNC} PUBLIC ${MPI_LIBRARIES}) - - add_dependencies(${EXEC_FUNC} ppc_onetbb) - target_link_directories(${EXEC_FUNC} PUBLIC ${CMAKE_BINARY_DIR}/ppc_onetbb/install/lib) - if(NOT MSVC) - target_link_libraries(${EXEC_FUNC} PUBLIC ${PPC_TBB_LIB_NAME}) - endif() - - target_link_directories(stb_image INTERFACE ${CMAKE_SOURCE_DIR}/3rdparty/stb) - target_link_libraries(${EXEC_FUNC} PUBLIC stb_image) - - add_dependencies(${EXEC_FUNC} ppc_googletest) - target_link_directories(${EXEC_FUNC} PUBLIC "${CMAKE_BINARY_DIR}/ppc_googletest/install/lib") - target_link_libraries(${EXEC_FUNC} PUBLIC gtest gtest_main) - enable_testing() - add_test(NAME ${EXEC_FUNC} COMMAND ${EXEC_FUNC}) - - # Install the executable - install(TARGETS ${EXEC_FUNC} RUNTIME DESTINATION bin) - endforeach () - - # Install the library - install(TARGETS ${exec_func_lib} ARCHIVE DESTINATION lib LIBRARY DESTINATION lib) - - set(LIST_OF_EXEC_TESTS "") - set(LIB_SOURCE_FILES "") - set(SRC_RES "") - set(FUNC_TESTS_SOURCE_FILES "") - set(PERF_TESTS_SOURCE_FILES "") endforeach() -set(OUTPUT_FILE "${CMAKE_BINARY_DIR}/revert-list.txt") -file(WRITE ${OUTPUT_FILE} "${CONTENT}") -message(STATUS "revert list") -foreach (dir_name ${LIST_OF_REVERTS}) - message(STATUS "-- ${dir_name}") - file(APPEND ${OUTPUT_FILE} "${dir_name}\n") -endforeach() +# ——— Install library target —————————————————————————————————————— +install(TARGETS ${name_lib} + ARCHIVE DESTINATION lib + LIBRARY DESTINATION lib +) diff --git a/tasks/all/example/func_tests/main.cpp b/tasks/all/example/func_tests/main.cpp deleted file mode 100644 index cc74607f0..000000000 --- a/tasks/all/example/func_tests/main.cpp +++ /dev/null @@ -1,75 +0,0 @@ -#include - -#include -#include -#include -#include -#include - -#include "all/example/include/ops_all.hpp" -#include 
"core/util/include/util.hpp" - -class NesterovATestTaskAll : public ::testing::TestWithParam { - protected: - void SetUp() override { - width = height = channels = -1; - std::string abs_path = ppc::util::GetAbsolutePath("all/example/data/pic_all.jpg"); - data = stbi_load(abs_path.c_str(), &width, &height, &channels, 0); - ASSERT_TRUE(data != nullptr) << "Failed to load image: " << stbi_failure_reason(); - img = std::vector(data, data + (static_cast(width) * height * channels)); - stbi_image_free(data); - - ASSERT_EQ(width, height); - } - - int width = -1, height = -1, channels = -1; - unsigned char* data = nullptr; - std::vector img; -}; - -TEST_P(NesterovATestTaskAll, MatmulFromPic) { - int divider = GetParam(); - const size_t k_count = (width + height) / divider; - - std::vector in(k_count * k_count, 0); - for (size_t i = 0; i < k_count; i++) { - in[(i * k_count) + i] = 1; - } - - nesterov_a_test_task_all::TestTaskALL test_task_all(in); - ASSERT_TRUE(test_task_all.Validation()); - test_task_all.PreProcessing(); - test_task_all.Run(); - test_task_all.PostProcessing(); - EXPECT_EQ(in, test_task_all.Get()); -} - -TEST_P(NesterovATestTaskAll, MatMulUtilFromPic) { - int divider = GetParam(); - const size_t k_count = (width + height) / divider; - - std::vector in(k_count * k_count, 0); - for (size_t i = 0; i < k_count; i++) { - in[(i * k_count) + i] = 1; - } - std::vector out(k_count * k_count, 0); - nesterov_a_test_task_all::MatMul(in, static_cast(k_count), out); - - EXPECT_EQ(in, out); -} - -TEST_P(NesterovATestTaskAll, MatMulTBBUtilFromPic) { - int divider = GetParam(); - const size_t k_count = (width + height) / divider; - - std::vector in(k_count * k_count, 0); - for (size_t i = 0; i < k_count; i++) { - in[(i * k_count) + i] = 1; - } - std::vector out(k_count * k_count, 0); - nesterov_a_test_task_all::MatMulTBB(in, static_cast(k_count), out); - - EXPECT_EQ(in, out); -} - -INSTANTIATE_TEST_SUITE_P_NOLINT(PicMatrixTests, NesterovATestTaskAll, ::testing::Values(5, 10)); diff --git a/tasks/all/example/include/ops_all.hpp b/tasks/all/example/include/ops_all.hpp deleted file mode 100644 index e5dad3bd0..000000000 --- a/tasks/all/example/include/ops_all.hpp +++ /dev/null @@ -1,26 +0,0 @@ -#pragma once - -#include - -#include "core/task/include/task.hpp" - -namespace nesterov_a_test_task_all { - -void MatMul(const std::vector &in_vec, int rc_size, std::vector &out_vec); -void MatMulTBB(const std::vector &in_vec, int rc_size, std::vector &out_vec); - -class TestTaskALL : public ppc::core::Task { - public: - explicit TestTaskALL(const std::vector &in) : input_(in) {} - bool ValidationImpl() override; - bool PreProcessingImpl() override; - bool RunImpl() override; - bool PostProcessingImpl() override; - std::vector Get(); - - private: - std::vector input_, output_; - int rc_size_{}; -}; - -} // namespace nesterov_a_test_task_all diff --git a/tasks/all/example/perf_tests/main.cpp b/tasks/all/example/perf_tests/main.cpp deleted file mode 100644 index 226b57fb8..000000000 --- a/tasks/all/example/perf_tests/main.cpp +++ /dev/null @@ -1,57 +0,0 @@ -#include -#include - -#include -#include -#include -#include - -#include "all/example/include/ops_all.hpp" -#include "core/perf/include/perf.hpp" -#include "core/util/include/util.hpp" - -class NesterovAllRunTest : public ::testing::TestWithParam { - protected: - static constexpr size_t kCount = 400; - std::vector input_data; - - void SetUp() override { - input_data.assign(kCount * kCount, 0); - for (size_t i = 0; i < kCount; ++i) { - input_data[(i * kCount) + 
i] = 1; - } - } - - void ExecuteTest(ppc::core::PerfResults::TypeOfRunning mode) { - auto task = std::make_shared(input_data); - ppc::core::Perf perf(task); - - ppc::core::PerfAttr perf_attr; - const auto t0 = std::chrono::high_resolution_clock::now(); - perf_attr.current_timer = [&] { - auto now = std::chrono::high_resolution_clock::now(); - auto ns = std::chrono::duration_cast(now - t0).count(); - return static_cast(ns) * 1e-9; - }; - - if (mode == ppc::core::PerfResults::TypeOfRunning::kPipeline) { - perf.PipelineRun(perf_attr); - } else { - perf.TaskRun(perf_attr); - } - - int rank = -1; - MPI_Comm_rank(MPI_COMM_WORLD, &rank); - if (rank == 0) { - perf.PrintPerfStatistic(); - } - - ASSERT_EQ(input_data, task->Get()); - } -}; - -TEST_P(NesterovAllRunTest, RunModes) { ExecuteTest(GetParam()); } - -INSTANTIATE_TEST_SUITE_P_NOLINT(RunModeTests, NesterovAllRunTest, - ::testing::Values(ppc::core::PerfResults::TypeOfRunning::kPipeline, - ppc::core::PerfResults::TypeOfRunning::kTaskRun)); diff --git a/tasks/all/example/src/ops_all.cpp b/tasks/all/example/src/ops_all.cpp deleted file mode 100644 index e59799d5b..000000000 --- a/tasks/all/example/src/ops_all.cpp +++ /dev/null @@ -1,68 +0,0 @@ -#include "all/example/include/ops_all.hpp" - -#include - -#include -#include -#include -#include -#include - -#include "core/util/include/util.hpp" -#include "oneapi/tbb/parallel_for.h" - -void nesterov_a_test_task_all::MatMul(const std::vector &in_vec, int rc_size, std::vector &out_vec) { - for (int i = 0; i < rc_size; ++i) { - for (int j = 0; j < rc_size; ++j) { - out_vec[(i * rc_size) + j] = 0; - for (int k = 0; k < rc_size; ++k) { - out_vec[(i * rc_size) + j] += in_vec[(i * rc_size) + k] * in_vec[(k * rc_size) + j]; - } - } - } -} - -void nesterov_a_test_task_all::MatMulTBB(const std::vector &in_vec, int rc_size, std::vector &out_vec) { - tbb::parallel_for(0, ppc::util::GetNumThreads(), [&](int i) { MatMul(in_vec, rc_size - i, out_vec); }); - MatMul(in_vec, rc_size, out_vec); -} - -bool nesterov_a_test_task_all::TestTaskALL::ValidationImpl() { - auto sqrt_size = static_cast(std::sqrt(input_.size())); - return sqrt_size * sqrt_size == static_cast(input_.size()); -} - -bool nesterov_a_test_task_all::TestTaskALL::PreProcessingImpl() { - // Init value for input and output - rc_size_ = static_cast(std::sqrt(input_.size())); - output_ = std::vector(input_.size(), 0); - return true; -} - -bool nesterov_a_test_task_all::TestTaskALL::RunImpl() { - int rank = -1; - MPI_Comm_rank(MPI_COMM_WORLD, &rank); - if (rank == 0) { -#pragma omp parallel default(none) - { -#pragma omp critical - MatMul(input_, rc_size_, output_); - } - } else { - MatMulTBB(input_, rc_size_, output_); - } - - const int num_threads = ppc::util::GetNumThreads(); - std::vector threads(num_threads); - for (int i = 0; i < num_threads; i++) { - threads[i] = std::thread(MatMul, std::cref(input_), rc_size_, std::ref(output_)); - threads[i].join(); - } - - MPI_Barrier(MPI_COMM_WORLD); - return true; -} - -bool nesterov_a_test_task_all::TestTaskALL::PostProcessingImpl() { return true; } - -std::vector nesterov_a_test_task_all::TestTaskALL::Get() { return output_; } diff --git a/tasks/all/runner.cpp b/tasks/all/runner.cpp deleted file mode 100644 index 745fa84d9..000000000 --- a/tasks/all/runner.cpp +++ /dev/null @@ -1,93 +0,0 @@ -#include -#include - -#include -#include -#include -#include -#include - -#include "core/util/include/util.hpp" -#include "oneapi/tbb/global_control.h" - -class UnreadMessagesDetector : public 
::testing::EmptyTestEventListener { - public: - UnreadMessagesDetector() = default; - - void OnTestEnd(const ::testing::TestInfo& /*test_info*/) override { - int rank = -1; - MPI_Comm_rank(MPI_COMM_WORLD, &rank); - - MPI_Barrier(MPI_COMM_WORLD); - - int flag = -1; - MPI_Status status; - - MPI_Iprobe(MPI_ANY_SOURCE, MPI_ANY_TAG, MPI_COMM_WORLD, &flag, &status); - - if (flag != 0) { - fprintf( - stderr, - "[ PROCESS %d ] [ FAILED ] %s.%s: MPI message queue has an unread message from process %d with tag %d\n", - rank, "test_suite_name", "test_name", status.MPI_SOURCE, status.MPI_TAG); - MPI_Finalize(); - std::abort(); - } - - MPI_Barrier(MPI_COMM_WORLD); - } - - private: -}; - -class WorkerTestFailurePrinter : public ::testing::EmptyTestEventListener { - public: - explicit WorkerTestFailurePrinter(std::shared_ptr<::testing::TestEventListener> base) : base_(std::move(base)) {} - - void OnTestEnd(const ::testing::TestInfo& test_info) override { - if (test_info.result()->Passed()) { - return; - } - PrintProcessRank(); - base_->OnTestEnd(test_info); - } - - void OnTestPartResult(const ::testing::TestPartResult& test_part_result) override { - if (test_part_result.passed() || test_part_result.skipped()) { - return; - } - PrintProcessRank(); - base_->OnTestPartResult(test_part_result); - } - - private: - static void PrintProcessRank() { - int rank = -1; - MPI_Comm_rank(MPI_COMM_WORLD, &rank); - printf(" [ PROCESS %d ] ", rank); - } - - std::shared_ptr<::testing::TestEventListener> base_; -}; - -int main(int argc, char** argv) { - MPI_Init(&argc, &argv); - - // Limit the number of threads in TBB - tbb::global_control control(tbb::global_control::max_allowed_parallelism, ppc::util::GetNumThreads()); - - ::testing::InitGoogleTest(&argc, argv); - - auto& listeners = ::testing::UnitTest::GetInstance()->listeners(); - int rank = -1; - MPI_Comm_rank(MPI_COMM_WORLD, &rank); - if (rank != 0 && (argc < 2 || argv[1] != std::string("--print-workers"))) { - auto* listener = listeners.Release(listeners.default_result_printer()); - listeners.Append(new WorkerTestFailurePrinter(std::shared_ptr<::testing::TestEventListener>(listener))); - } - listeners.Append(new UnreadMessagesDetector()); - auto status = RUN_ALL_TESTS(); - - MPI_Finalize(); - return status; -} diff --git a/tasks/tbb/runner.cpp b/tasks/common/runners/functional.cpp similarity index 73% rename from tasks/tbb/runner.cpp rename to tasks/common/runners/functional.cpp index 7c18b9654..4a8b0e286 100644 --- a/tasks/tbb/runner.cpp +++ b/tasks/common/runners/functional.cpp @@ -1,10 +1,14 @@ #include -#include +#include "core/runners/include/runners.hpp" #include "core/util/include/util.hpp" #include "oneapi/tbb/global_control.h" int main(int argc, char** argv) { + if (ppc::util::IsUnderMpirun()) { + return ppc::core::Init(argc, argv); + } + // Limit the number of threads in TBB tbb::global_control control(tbb::global_control::max_allowed_parallelism, ppc::util::GetNumThreads()); diff --git a/tasks/common/runners/performance.cpp b/tasks/common/runners/performance.cpp new file mode 100644 index 000000000..83b9bd261 --- /dev/null +++ b/tasks/common/runners/performance.cpp @@ -0,0 +1,3 @@ +#include "core/runners/include/runners.hpp" + +int main(int argc, char** argv) { return ppc::core::Init(argc, argv); } diff --git a/tasks/example_processes/common/include/common.hpp b/tasks/example_processes/common/include/common.hpp new file mode 100644 index 000000000..1e953987b --- /dev/null +++ b/tasks/example_processes/common/include/common.hpp @@ -0,0 +1,15 @@ +#pragma 
once + +#include +#include + +#include "core/task/include/task.hpp" + +namespace nesterov_a_test_task_processes { + +using InType = int; +using OutType = int; +using TestType = std::tuple; +using BaseTask = ppc::core::Task; + +} // namespace nesterov_a_test_task_processes diff --git a/tasks/all/example/data/pic_all.jpg b/tasks/example_processes/data/pic.jpg similarity index 100% rename from tasks/all/example/data/pic_all.jpg rename to tasks/example_processes/data/pic.jpg diff --git a/tasks/example_processes/info.json b/tasks/example_processes/info.json new file mode 100644 index 000000000..cfb6b22c4 --- /dev/null +++ b/tasks/example_processes/info.json @@ -0,0 +1,8 @@ +{ + "student": { + "first_name": "", + "last_name": "", + "middle_name": "", + "group_number": "" + } +} diff --git a/tasks/example_processes/mpi/include/ops_mpi.hpp b/tasks/example_processes/mpi/include/ops_mpi.hpp new file mode 100644 index 000000000..7fc57c29f --- /dev/null +++ b/tasks/example_processes/mpi/include/ops_mpi.hpp @@ -0,0 +1,20 @@ +#pragma once + +#include "core/task/include/task.hpp" +#include "example_processes/common/include/common.hpp" + +namespace nesterov_a_test_task_processes { + +class NesterovATestTaskMPI : public BaseTask { + public: + static constexpr ppc::core::TypeOfTask GetStaticTypeOfTask() { return ppc::core::TypeOfTask::kMPI; } + explicit NesterovATestTaskMPI(const InType &in); + + private: + bool ValidationImpl() override; + bool PreProcessingImpl() override; + bool RunImpl() override; + bool PostProcessingImpl() override; +}; + +} // namespace nesterov_a_test_task_processes diff --git a/tasks/example_processes/mpi/src/ops_mpi.cpp b/tasks/example_processes/mpi/src/ops_mpi.cpp new file mode 100644 index 000000000..1a6fb1687 --- /dev/null +++ b/tasks/example_processes/mpi/src/ops_mpi.cpp @@ -0,0 +1,71 @@ +#include "example_processes/mpi/include/ops_mpi.hpp" + +#include + +#include +#include +#include + +#include "core/util/include/util.hpp" +#include "example_processes/common/include/common.hpp" + +namespace nesterov_a_test_task_processes { + +NesterovATestTaskMPI::NesterovATestTaskMPI(const InType &in) { + SetTypeOfTask(GetStaticTypeOfTask()); + GetInput() = in; + GetOutput() = 0; +} + +bool NesterovATestTaskMPI::ValidationImpl() { return (GetInput() > 0) && (GetOutput() == 0); } + +bool NesterovATestTaskMPI::PreProcessingImpl() { + GetOutput() = 2 * GetInput(); + return GetOutput() > 0; +} + +bool NesterovATestTaskMPI::RunImpl() { + auto input = GetInput(); + if (input == 0) { + return false; + } + + for (InType i = 0; i < GetInput(); i++) { + for (InType j = 0; j < GetInput(); j++) { + for (InType k = 0; k < GetInput(); k++) { + std::vector tmp(i + j + k, 1); + GetOutput() += std::accumulate(tmp.begin(), tmp.end(), 0); + GetOutput() -= i + j + k; + } + } + } + + const int num_threads = ppc::util::GetNumThreads(); + GetOutput() *= num_threads; + + int rank = 0; + MPI_Comm_rank(MPI_COMM_WORLD, &rank); + + if (rank == 0) { + GetOutput() /= num_threads; + } else { + int counter = 0; + for (int i = 0; i < num_threads; i++) { + counter++; + } + + if (counter != 0) { + GetOutput() /= counter; + } + } + + MPI_Barrier(MPI_COMM_WORLD); + return GetOutput() > 0; +} + +bool NesterovATestTaskMPI::PostProcessingImpl() { + GetOutput() -= GetInput(); + return GetOutput() > 0; +} + +} // namespace nesterov_a_test_task_processes diff --git a/tasks/example_processes/seq/include/ops_seq.hpp b/tasks/example_processes/seq/include/ops_seq.hpp new file mode 100644 index 000000000..3b8f03baf --- /dev/null +++ 
b/tasks/example_processes/seq/include/ops_seq.hpp @@ -0,0 +1,20 @@ +#pragma once + +#include "core/task/include/task.hpp" +#include "example_processes/common/include/common.hpp" + +namespace nesterov_a_test_task_processes { + +class NesterovATestTaskSEQ : public BaseTask { + public: + static constexpr ppc::core::TypeOfTask GetStaticTypeOfTask() { return ppc::core::TypeOfTask::kSEQ; } + explicit NesterovATestTaskSEQ(const InType& in); + + private: + bool ValidationImpl() override; + bool PreProcessingImpl() override; + bool RunImpl() override; + bool PostProcessingImpl() override; +}; + +} // namespace nesterov_a_test_task_processes diff --git a/tasks/example_processes/seq/src/ops_seq.cpp b/tasks/example_processes/seq/src/ops_seq.cpp new file mode 100644 index 000000000..8528c11f4 --- /dev/null +++ b/tasks/example_processes/seq/src/ops_seq.cpp @@ -0,0 +1,59 @@ +#include "example_processes/seq/include/ops_seq.hpp" + +#include +#include +#include + +#include "core/util/include/util.hpp" +#include "example_processes/common/include/common.hpp" + +namespace nesterov_a_test_task_processes { + +NesterovATestTaskSEQ::NesterovATestTaskSEQ(const InType& in) { + SetTypeOfTask(GetStaticTypeOfTask()); + GetInput() = in; + GetOutput() = 0; +} + +bool NesterovATestTaskSEQ::ValidationImpl() { return (GetInput() > 0) && (GetOutput() == 0); } + +bool NesterovATestTaskSEQ::PreProcessingImpl() { + GetOutput() = 2 * GetInput(); + return GetOutput() > 0; +} + +bool NesterovATestTaskSEQ::RunImpl() { + if (GetInput() == 0) { + return false; + } + + for (InType i = 0; i < GetInput(); i++) { + for (InType j = 0; j < GetInput(); j++) { + for (InType k = 0; k < GetInput(); k++) { + std::vector tmp(i + j + k, 1); + GetOutput() += std::accumulate(tmp.begin(), tmp.end(), 0); + GetOutput() -= i + j + k; + } + } + } + + const int num_threads = ppc::util::GetNumThreads(); + GetOutput() *= num_threads; + + int counter = 0; + for (int i = 0; i < num_threads; i++) { + counter++; + } + + if (counter != 0) { + GetOutput() /= counter; + } + return GetOutput() > 0; +} + +bool NesterovATestTaskSEQ::PostProcessingImpl() { + GetOutput() -= GetInput(); + return GetOutput() > 0; +} + +} // namespace nesterov_a_test_task_processes diff --git a/tasks/example_processes/settings.json b/tasks/example_processes/settings.json new file mode 100644 index 000000000..b1a0d5257 --- /dev/null +++ b/tasks/example_processes/settings.json @@ -0,0 +1,7 @@ +{ + "tasks_type": "processes", + "tasks": { + "mpi": "enabled", + "seq": "enabled" + } +} diff --git a/tasks/example_processes/tests/functional/main.cpp b/tasks/example_processes/tests/functional/main.cpp new file mode 100644 index 000000000..424f6588a --- /dev/null +++ b/tasks/example_processes/tests/functional/main.cpp @@ -0,0 +1,79 @@ +#include + +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include + +#include "core/util/include/func_test_util.hpp" +#include "core/util/include/util.hpp" +#include "example_processes/common/include/common.hpp" +#include "example_processes/mpi/include/ops_mpi.hpp" +#include "example_processes/seq/include/ops_seq.hpp" + +namespace nesterov_a_test_task_processes { + +class NesterovARunFuncTestsProcesses : public ppc::util::BaseRunFuncTests { + public: + static std::string PrintTestParam(const TestType &test_param) { + return std::to_string(std::get<0>(test_param)) + "_" + std::get<1>(test_param); + } + + protected: + void SetUp() override { + int width = -1; + int height = -1; + int channels = -1; + std::vector 
img; + // Read image + { + std::string abs_path = ppc::util::GetAbsoluteTaskPath(PPC_ID_example_processes, "pic.jpg"); + auto *data = stbi_load(abs_path.c_str(), &width, &height, &channels, 0); + if (data == nullptr) { + throw std::runtime_error("Failed to load image: " + std::string(stbi_failure_reason())); + } + img = std::vector(data, data + (static_cast(width * height * channels))); + stbi_image_free(data); + if (std::cmp_not_equal(width, height)) { + throw std::runtime_error("width != height: "); + } + } + + TestType params = std::get(GetParam()); + input_data_ = width - height + std::min(std::accumulate(img.begin(), img.end(), 0), channels); + } + + bool CheckTestOutputData(OutType &output_data) final { return (input_data_ == output_data); } + + InType GetTestInputData() final { return input_data_; } + + private: + InType input_data_ = 0; +}; + +namespace { + +TEST_P(NesterovARunFuncTestsProcesses, MatmulFromPic) { ExecuteTest(GetParam()); } + +const std::array kTestParam = {std::make_tuple(3, "3"), std::make_tuple(5, "5"), std::make_tuple(7, "7")}; + +const auto kTestTasksList = + std::tuple_cat(ppc::util::AddFuncTask(kTestParam, PPC_SETTINGS_example_processes), + ppc::util::AddFuncTask(kTestParam, PPC_SETTINGS_example_processes)); + +const auto kGtestValues = ppc::util::ExpandToValues(kTestTasksList); + +const auto kPerfTestName = NesterovARunFuncTestsProcesses::PrintFuncTestName; + +INSTANTIATE_TEST_SUITE_P_NOLINT(PicMatrixTests, NesterovARunFuncTestsProcesses, kGtestValues, kPerfTestName); + +} // namespace + +} // namespace nesterov_a_test_task_processes diff --git a/tasks/example_processes/tests/performance/main.cpp b/tasks/example_processes/tests/performance/main.cpp new file mode 100644 index 000000000..3c22a742b --- /dev/null +++ b/tasks/example_processes/tests/performance/main.cpp @@ -0,0 +1,33 @@ +#include + +#include "core/util/include/perf_test_util.hpp" +#include "core/util/include/util.hpp" +#include "example_processes/common/include/common.hpp" +#include "example_processes/mpi/include/ops_mpi.hpp" +#include "example_processes/seq/include/ops_seq.hpp" + +namespace nesterov_a_test_task_processes { + +class ExampleRunPerfTestProcesses : public ppc::util::BaseRunPerfTests { + const int kCount_ = 200; + InType input_data_{}; + + void SetUp() override { input_data_ = kCount_; } + + bool CheckTestOutputData(OutType& output_data) final { return input_data_ == output_data; } + + InType GetTestInputData() final { return input_data_; } +}; + +TEST_P(ExampleRunPerfTestProcesses, RunPerfModes) { ExecuteTest(GetParam()); } + +const auto kAllPerfTasks = + ppc::util::MakeAllPerfTasks(PPC_SETTINGS_example_processes); + +const auto kGtestValues = ppc::util::TupleToGTestValues(kAllPerfTasks); + +const auto kPerfTestName = ExampleRunPerfTestProcesses::CustomPerfTestName; + +INSTANTIATE_TEST_SUITE_P_NOLINT(RunModeTests, ExampleRunPerfTestProcesses, kGtestValues, kPerfTestName); + +} // namespace nesterov_a_test_task_processes diff --git a/tasks/example_threads/all/include/ops_all.hpp b/tasks/example_threads/all/include/ops_all.hpp new file mode 100644 index 000000000..9a2deade2 --- /dev/null +++ b/tasks/example_threads/all/include/ops_all.hpp @@ -0,0 +1,20 @@ +#pragma once + +#include "core/task/include/task.hpp" +#include "example_threads/common/include/common.hpp" + +namespace nesterov_a_test_task_threads { + +class NesterovATestTaskALL : public BaseTask { + public: + static constexpr ppc::core::TypeOfTask GetStaticTypeOfTask() { return ppc::core::TypeOfTask::kALL; } + explicit 
NesterovATestTaskALL(const InType &in); + + private: + bool ValidationImpl() override; + bool PreProcessingImpl() override; + bool RunImpl() override; + bool PostProcessingImpl() override; +}; + +} // namespace nesterov_a_test_task_threads diff --git a/tasks/example_threads/all/src/ops_all.cpp b/tasks/example_threads/all/src/ops_all.cpp new file mode 100644 index 000000000..07d899c34 --- /dev/null +++ b/tasks/example_threads/all/src/ops_all.cpp @@ -0,0 +1,84 @@ +#include "example_threads/all/include/ops_all.hpp" + +#include + +#include +#include +#include +#include +#include + +#include "core/util/include/util.hpp" +#include "example_threads/common/include/common.hpp" +#include "oneapi/tbb/parallel_for.h" + +namespace nesterov_a_test_task_threads { + +NesterovATestTaskALL::NesterovATestTaskALL(const InType &in) { + SetTypeOfTask(GetStaticTypeOfTask()); + GetInput() = in; + GetOutput() = 0; +} + +bool NesterovATestTaskALL::ValidationImpl() { return (GetInput() > 0) && (GetOutput() == 0); } + +bool NesterovATestTaskALL::PreProcessingImpl() { + GetOutput() = 2 * GetInput(); + return GetOutput() > 0; +} + +bool NesterovATestTaskALL::RunImpl() { + for (InType i = 0; i < GetInput(); i++) { + for (InType j = 0; j < GetInput(); j++) { + for (InType k = 0; k < GetInput(); k++) { + std::vector tmp(i + j + k, 1); + GetOutput() += std::accumulate(tmp.begin(), tmp.end(), 0); + GetOutput() -= i + j + k; + } + } + } + + const int num_threads = ppc::util::GetNumThreads(); + { + GetOutput() *= num_threads; + + int rank = -1; + MPI_Comm_rank(MPI_COMM_WORLD, &rank); + if (rank == 0) { + std::atomic counter(0); +#pragma omp parallel default(none) shared(counter) num_threads(ppc::util::GetNumThreads()) + counter++; + + GetOutput() /= counter; + } else { + GetOutput() /= num_threads; + } + } + + { + GetOutput() *= num_threads; + std::vector threads(num_threads); + std::atomic counter(0); + for (int i = 0; i < num_threads; i++) { + threads[i] = std::thread([&]() { counter++; }); + threads[i].join(); + } + GetOutput() /= counter; + } + + { + GetOutput() *= num_threads; + std::atomic counter(0); + tbb::parallel_for(0, ppc::util::GetNumThreads(), [&](int /*i*/) { counter++; }); + GetOutput() /= counter; + } + MPI_Barrier(MPI_COMM_WORLD); + return GetOutput() > 0; +} + +bool NesterovATestTaskALL::PostProcessingImpl() { + GetOutput() -= GetInput(); + return GetOutput() > 0; +} + +} // namespace nesterov_a_test_task_threads diff --git a/tasks/example_threads/common/include/common.hpp b/tasks/example_threads/common/include/common.hpp new file mode 100644 index 000000000..21a29015a --- /dev/null +++ b/tasks/example_threads/common/include/common.hpp @@ -0,0 +1,15 @@ +#pragma once + +#include +#include + +#include "core/task/include/task.hpp" + +namespace nesterov_a_test_task_threads { + +using InType = int; +using OutType = int; +using TestType = std::tuple; +using BaseTask = ppc::core::Task; + +} // namespace nesterov_a_test_task_threads diff --git a/tasks/example_threads/data/pic.jpg b/tasks/example_threads/data/pic.jpg new file mode 100644 index 000000000..344580234 Binary files /dev/null and b/tasks/example_threads/data/pic.jpg differ diff --git a/tasks/example_threads/info.json b/tasks/example_threads/info.json new file mode 100644 index 000000000..cfb6b22c4 --- /dev/null +++ b/tasks/example_threads/info.json @@ -0,0 +1,8 @@ +{ + "student": { + "first_name": "", + "last_name": "", + "middle_name": "", + "group_number": "" + } +} diff --git a/tasks/example_threads/omp/include/ops_omp.hpp 
b/tasks/example_threads/omp/include/ops_omp.hpp new file mode 100644 index 000000000..1fc5cd4c8 --- /dev/null +++ b/tasks/example_threads/omp/include/ops_omp.hpp @@ -0,0 +1,20 @@ +#pragma once + +#include "core/task/include/task.hpp" +#include "example_threads/common/include/common.hpp" + +namespace nesterov_a_test_task_threads { + +class NesterovATestTaskOMP : public BaseTask { + public: + static constexpr ppc::core::TypeOfTask GetStaticTypeOfTask() { return ppc::core::TypeOfTask::kOMP; } + explicit NesterovATestTaskOMP(const InType& in); + + private: + bool ValidationImpl() override; + bool PreProcessingImpl() override; + bool RunImpl() override; + bool PostProcessingImpl() override; +}; + +} // namespace nesterov_a_test_task_threads diff --git a/tasks/example_threads/omp/src/ops_omp.cpp b/tasks/example_threads/omp/src/ops_omp.cpp new file mode 100644 index 000000000..b10e5139f --- /dev/null +++ b/tasks/example_threads/omp/src/ops_omp.cpp @@ -0,0 +1,53 @@ +#include "example_threads/omp/include/ops_omp.hpp" + +#include +#include +#include +#include + +#include "core/util/include/util.hpp" +#include "example_threads/common/include/common.hpp" + +namespace nesterov_a_test_task_threads { + +NesterovATestTaskOMP::NesterovATestTaskOMP(const InType& in) { + SetTypeOfTask(GetStaticTypeOfTask()); + GetInput() = in; + GetOutput() = 0; +} + +bool NesterovATestTaskOMP::ValidationImpl() { return (GetInput() > 0) && (GetOutput() == 0); } + +bool NesterovATestTaskOMP::PreProcessingImpl() { + GetOutput() = 2 * GetInput(); + return GetOutput() > 0; +} + +bool NesterovATestTaskOMP::RunImpl() { + for (InType i = 0; i < GetInput(); i++) { + for (InType j = 0; j < GetInput(); j++) { + for (InType k = 0; k < GetInput(); k++) { + std::vector tmp(i + j + k, 1); + GetOutput() += std::accumulate(tmp.begin(), tmp.end(), 0); + GetOutput() -= i + j + k; + } + } + } + + const int num_threads = ppc::util::GetNumThreads(); + GetOutput() *= num_threads; + + std::atomic counter(0); +#pragma omp parallel default(none) shared(counter) num_threads(ppc::util::GetNumThreads()) + counter++; + + GetOutput() /= counter; + return GetOutput() > 0; +} + +bool NesterovATestTaskOMP::PostProcessingImpl() { + GetOutput() -= GetInput(); + return GetOutput() > 0; +} + +} // namespace nesterov_a_test_task_threads diff --git a/tasks/example_threads/seq/include/ops_seq.hpp b/tasks/example_threads/seq/include/ops_seq.hpp new file mode 100644 index 000000000..dccae8530 --- /dev/null +++ b/tasks/example_threads/seq/include/ops_seq.hpp @@ -0,0 +1,20 @@ +#pragma once + +#include "core/task/include/task.hpp" +#include "example_threads/common/include/common.hpp" + +namespace nesterov_a_test_task_threads { + +class NesterovATestTaskSEQ : public BaseTask { + public: + static constexpr ppc::core::TypeOfTask GetStaticTypeOfTask() { return ppc::core::TypeOfTask::kSEQ; } + explicit NesterovATestTaskSEQ(const InType& in); + + private: + bool ValidationImpl() override; + bool PreProcessingImpl() override; + bool RunImpl() override; + bool PostProcessingImpl() override; +}; + +} // namespace nesterov_a_test_task_threads diff --git a/tasks/example_threads/seq/src/ops_seq.cpp b/tasks/example_threads/seq/src/ops_seq.cpp new file mode 100644 index 000000000..073ab1c61 --- /dev/null +++ b/tasks/example_threads/seq/src/ops_seq.cpp @@ -0,0 +1,59 @@ +#include "example_threads/seq/include/ops_seq.hpp" + +#include +#include +#include + +#include "core/util/include/util.hpp" +#include "example_threads/common/include/common.hpp" + +namespace 
+
+NesterovATestTaskSEQ::NesterovATestTaskSEQ(const InType& in) {
+  SetTypeOfTask(GetStaticTypeOfTask());
+  GetInput() = in;
+  GetOutput() = 0;
+}
+
+bool NesterovATestTaskSEQ::ValidationImpl() { return (GetInput() > 0) && (GetOutput() == 0); }
+
+bool NesterovATestTaskSEQ::PreProcessingImpl() {
+  GetOutput() = 2 * GetInput();
+  return GetOutput() > 0;
+}
+
+bool NesterovATestTaskSEQ::RunImpl() {
+  if (GetInput() == 0) {
+    return false;
+  }
+
+  for (InType i = 0; i < GetInput(); i++) {
+    for (InType j = 0; j < GetInput(); j++) {
+      for (InType k = 0; k < GetInput(); k++) {
+        std::vector<int> tmp(i + j + k, 1);
+        GetOutput() += std::accumulate(tmp.begin(), tmp.end(), 0);
+        GetOutput() -= i + j + k;
+      }
+    }
+  }
+
+  const int num_threads = ppc::util::GetNumThreads();
+  GetOutput() *= num_threads;
+
+  int counter = 0;
+  for (int i = 0; i < num_threads; i++) {
+    counter++;
+  }
+
+  if (counter != 0) {
+    GetOutput() /= counter;
+  }
+  return GetOutput() > 0;
+}
+
+bool NesterovATestTaskSEQ::PostProcessingImpl() {
+  GetOutput() -= GetInput();
+  return GetOutput() > 0;
+}
+
+}  // namespace nesterov_a_test_task_threads
diff --git a/tasks/example_threads/settings.json b/tasks/example_threads/settings.json
new file mode 100644
index 000000000..f8c285c85
--- /dev/null
+++ b/tasks/example_threads/settings.json
@@ -0,0 +1,10 @@
+{
+  "tasks_type": "threads",
+  "tasks": {
+    "all": "enabled",
+    "omp": "enabled",
+    "seq": "enabled",
+    "stl": "enabled",
+    "tbb": "enabled"
+  }
+}
diff --git a/tasks/example_threads/stl/include/ops_stl.hpp b/tasks/example_threads/stl/include/ops_stl.hpp
new file mode 100644
index 000000000..554ac1e37
--- /dev/null
+++ b/tasks/example_threads/stl/include/ops_stl.hpp
@@ -0,0 +1,20 @@
+#pragma once
+
+#include "core/task/include/task.hpp"
+#include "example_threads/common/include/common.hpp"
+
+namespace nesterov_a_test_task_threads {
+
+class NesterovATestTaskSTL : public BaseTask {
+ public:
+  static constexpr ppc::core::TypeOfTask GetStaticTypeOfTask() { return ppc::core::TypeOfTask::kSTL; }
+  explicit NesterovATestTaskSTL(const InType& in);
+
+ private:
+  bool ValidationImpl() override;
+  bool PreProcessingImpl() override;
+  bool RunImpl() override;
+  bool PostProcessingImpl() override;
+};
+
+}  // namespace nesterov_a_test_task_threads
diff --git a/tasks/example_threads/stl/src/ops_stl.cpp b/tasks/example_threads/stl/src/ops_stl.cpp
new file mode 100644
index 000000000..ea23bdb99
--- /dev/null
+++ b/tasks/example_threads/stl/src/ops_stl.cpp
@@ -0,0 +1,57 @@
+#include "example_threads/stl/include/ops_stl.hpp"
+
+#include <atomic>
+#include <numeric>
+#include <thread>
+#include <vector>
+
+#include "core/util/include/util.hpp"
+#include "example_threads/common/include/common.hpp"
+
+namespace nesterov_a_test_task_threads {
+
+NesterovATestTaskSTL::NesterovATestTaskSTL(const InType &in) {
+  SetTypeOfTask(GetStaticTypeOfTask());
+  GetInput() = in;
+  GetOutput() = 0;
+}
+
+bool NesterovATestTaskSTL::ValidationImpl() { return (GetInput() > 0) && (GetOutput() == 0); }
+
+bool NesterovATestTaskSTL::PreProcessingImpl() {
+  GetOutput() = 2 * GetInput();
+  return GetOutput() > 0;
+}
+
+bool NesterovATestTaskSTL::RunImpl() {
+  for (InType i = 0; i < GetInput(); i++) {
+    for (InType j = 0; j < GetInput(); j++) {
+      for (InType k = 0; k < GetInput(); k++) {
+        std::vector<int> tmp(i + j + k, 1);
+        GetOutput() += std::accumulate(tmp.begin(), tmp.end(), 0);
+        GetOutput() -= i + j + k;
+      }
+    }
+  }
+
+  const int num_threads = ppc::util::GetNumThreads();
+  std::vector<std::thread> threads(num_threads);
+  GetOutput() *= num_threads;
+
+  std::atomic<int> counter(0);
+  for (int i = 0; i < num_threads; i++) {
+    threads[i] = std::thread([&]() { counter++; });
+    threads[i].join();
+  }
+
+  GetOutput() /= counter;
+  return GetOutput() > 0;
+}
+
+bool NesterovATestTaskSTL::PostProcessingImpl() {
+  GetOutput() -= GetInput();
+  return GetOutput() > 0;
+}
+
+}  // namespace nesterov_a_test_task_threads
diff --git a/tasks/example_threads/tbb/include/ops_tbb.hpp b/tasks/example_threads/tbb/include/ops_tbb.hpp
new file mode 100644
index 000000000..d88ffafb5
--- /dev/null
+++ b/tasks/example_threads/tbb/include/ops_tbb.hpp
@@ -0,0 +1,20 @@
+#pragma once
+
+#include "core/task/include/task.hpp"
+#include "example_threads/common/include/common.hpp"
+
+namespace nesterov_a_test_task_threads {
+
+class NesterovATestTaskTBB : public BaseTask {
+ public:
+  static constexpr ppc::core::TypeOfTask GetStaticTypeOfTask() { return ppc::core::TypeOfTask::kTBB; }
+  explicit NesterovATestTaskTBB(const InType& in);
+
+ private:
+  bool ValidationImpl() override;
+  bool PreProcessingImpl() override;
+  bool RunImpl() override;
+  bool PostProcessingImpl() override;
+};
+
+}  // namespace nesterov_a_test_task_threads
diff --git a/tasks/example_threads/tbb/src/ops_tbb.cpp b/tasks/example_threads/tbb/src/ops_tbb.cpp
new file mode 100644
index 000000000..0f057e676
--- /dev/null
+++ b/tasks/example_threads/tbb/src/ops_tbb.cpp
@@ -0,0 +1,55 @@
+#include "example_threads/tbb/include/ops_tbb.hpp"
+
+#include <atomic>
+#include <numeric>
+#include <vector>
+
+#include "core/util/include/util.hpp"
+#include "example_threads/common/include/common.hpp"
+#include "oneapi/tbb/parallel_for.h"
+
+namespace nesterov_a_test_task_threads {
+
+NesterovATestTaskTBB::NesterovATestTaskTBB(const InType &in) {
+  SetTypeOfTask(GetStaticTypeOfTask());
+  GetInput() = in;
+  GetOutput() = 0;
+}
+
+bool NesterovATestTaskTBB::ValidationImpl() { return (GetInput() > 0) && (GetOutput() == 0); }
+
+bool NesterovATestTaskTBB::PreProcessingImpl() {
+  GetOutput() = 2 * GetInput();
+  return GetOutput() > 0;
+}
+
+bool NesterovATestTaskTBB::RunImpl() {
+  for (InType i = 0; i < GetInput(); i++) {
+    for (InType j = 0; j < GetInput(); j++) {
+      for (InType k = 0; k < GetInput(); k++) {
+        std::vector<int> tmp(i + j + k, 1);
+        GetOutput() += std::accumulate(tmp.begin(), tmp.end(), 0);
+        GetOutput() -= i + j + k;
+      }
+    }
+  }
+
+  const int num_threads = ppc::util::GetNumThreads();
+  GetOutput() *= num_threads;
+
+  std::atomic<int> counter(0);
+  tbb::parallel_for(0, ppc::util::GetNumThreads(), [&](int /*i*/) { counter++; });
+
+  GetOutput() /= counter;
+  return GetOutput() > 0;
+}
+
+bool NesterovATestTaskTBB::PostProcessingImpl() {
+  GetOutput() -= GetInput();
+  return GetOutput() > 0;
+}
+
+}  // namespace nesterov_a_test_task_threads
diff --git a/tasks/example_threads/tests/functional/main.cpp b/tasks/example_threads/tests/functional/main.cpp
new file mode 100644
index 000000000..fb5f56365
--- /dev/null
+++ b/tasks/example_threads/tests/functional/main.cpp
@@ -0,0 +1,85 @@
+#include <gtest/gtest.h>
+
+#include <algorithm>
+#include <array>
+#include <cstddef>
+#include <cstdint>
+#include <numeric>
+#include <stb_image.h>  // stb single-header image loader (header path may differ per setup)
+#include <stdexcept>
+#include <string>
+#include <tuple>
+#include <utility>
+#include <vector>
+
+#include "core/util/include/func_test_util.hpp"
+#include "core/util/include/util.hpp"
+#include "example_threads/all/include/ops_all.hpp"
+#include "example_threads/common/include/common.hpp"
+#include "example_threads/omp/include/ops_omp.hpp"
+#include "example_threads/seq/include/ops_seq.hpp"
+#include "example_threads/stl/include/ops_stl.hpp"
+#include "example_threads/tbb/include/ops_tbb.hpp"
+
+namespace nesterov_a_test_task_threads {
+
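+// Fixture: derives the task input from the bundled pic.jpg so that every task
+// flavor (ALL/OMP/SEQ/STL/TBB) is validated against identical data.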
+class NesterovARunFuncTestsThreads : public ppc::util::BaseRunFuncTests<InType, OutType, TestType> {
+ public:
+  static std::string PrintTestParam(const TestType &test_param) {
+    return std::to_string(std::get<0>(test_param)) + "_" + std::get<1>(test_param);
+  }
+
+ protected:
+  void SetUp() override {
+    int width = -1;
+    int height = -1;
+    int channels = -1;
+    std::vector<uint8_t> img;
+    // Read image
+    {
+      std::string abs_path = ppc::util::GetAbsoluteTaskPath(std::string(PPC_ID_example_threads), "pic.jpg");
+      auto *data = stbi_load(abs_path.c_str(), &width, &height, &channels, 0);
+      if (data == nullptr) {
+        throw std::runtime_error("Failed to load image: " + std::string(stbi_failure_reason()));
+      }
+      img = std::vector<uint8_t>(data, data + (static_cast<std::size_t>(width * height * channels)));
+      stbi_image_free(data);
+      if (std::cmp_not_equal(width, height)) {
+        throw std::runtime_error("width != height");
+      }
+    }
+
+    TestType params = std::get<ppc::util::GTestParamIndex::kTestParams>(GetParam());
+    input_data_ = width - height + std::min(std::accumulate(img.begin(), img.end(), 0), channels);
+  }
+
+  bool CheckTestOutputData(OutType &output_data) final { return (input_data_ == output_data); }
+
+  InType GetTestInputData() final { return input_data_; }
+
+ private:
+  InType input_data_ = 0;
+};
+
+namespace {
+
+TEST_P(NesterovARunFuncTestsThreads, MatmulFromPic) { ExecuteTest(GetParam()); }
+
+const std::array<TestType, 3> kTestParam = {std::make_tuple(3, "3"), std::make_tuple(5, "5"), std::make_tuple(7, "7")};
+
+const auto kTestTasksList =
+    std::tuple_cat(ppc::util::AddFuncTask<NesterovATestTaskALL, InType>(kTestParam, PPC_SETTINGS_example_threads),
+                   ppc::util::AddFuncTask<NesterovATestTaskOMP, InType>(kTestParam, PPC_SETTINGS_example_threads),
+                   ppc::util::AddFuncTask<NesterovATestTaskSEQ, InType>(kTestParam, PPC_SETTINGS_example_threads),
+                   ppc::util::AddFuncTask<NesterovATestTaskSTL, InType>(kTestParam, PPC_SETTINGS_example_threads),
+                   ppc::util::AddFuncTask<NesterovATestTaskTBB, InType>(kTestParam, PPC_SETTINGS_example_threads));
+
+const auto kGtestValues = ppc::util::ExpandToValues(kTestTasksList);
+
+const auto kPerfTestName = NesterovARunFuncTestsThreads::PrintFuncTestName;
+
+INSTANTIATE_TEST_SUITE_P_NOLINT(PicMatrixTests, NesterovARunFuncTestsThreads, kGtestValues, kPerfTestName);
+
+}  // namespace
+
+}  // namespace nesterov_a_test_task_threads
diff --git a/tasks/example_threads/tests/performance/main.cpp b/tasks/example_threads/tests/performance/main.cpp
new file mode 100644
index 000000000..a982cf4ac
--- /dev/null
+++ b/tasks/example_threads/tests/performance/main.cpp
@@ -0,0 +1,37 @@
+#include <gtest/gtest.h>
+
+#include "core/util/include/perf_test_util.hpp"
+#include "core/util/include/util.hpp"
+#include "example_threads/all/include/ops_all.hpp"
+#include "example_threads/common/include/common.hpp"
+#include "example_threads/omp/include/ops_omp.hpp"
+#include "example_threads/seq/include/ops_seq.hpp"
+#include "example_threads/stl/include/ops_stl.hpp"
+#include "example_threads/tbb/include/ops_tbb.hpp"
+
+namespace nesterov_a_test_task_threads {
+
+class ExampleRunPerfTestThreads : public ppc::util::BaseRunPerfTests<InType, OutType> {
+  const int kCount_ = 200;
+  InType input_data_{};
+
+  void SetUp() override { input_data_ = kCount_; }
+
+  bool CheckTestOutputData(OutType& output_data) final { return input_data_ == output_data; }
+
+  InType GetTestInputData() final { return input_data_; }
+};
+
+TEST_P(ExampleRunPerfTestThreads, RunPerfModes) { ExecuteTest(GetParam()); }
+
+const auto kAllPerfTasks =
+    ppc::util::MakeAllPerfTasks<InType, NesterovATestTaskALL, NesterovATestTaskOMP, NesterovATestTaskSEQ,
+                                NesterovATestTaskSTL, NesterovATestTaskTBB>(PPC_SETTINGS_example_threads);
+
+const auto kGtestValues = ppc::util::TupleToGTestValues(kAllPerfTasks);
+
+const auto kPerfTestName = ExampleRunPerfTestThreads::CustomPerfTestName;
+
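+// Instantiates pipeline and task-run performance modes for every task flavor listed above.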
+INSTANTIATE_TEST_SUITE_P_NOLINT(RunModeTests, ExampleRunPerfTestThreads, kGtestValues, kPerfTestName);
+
+}  // namespace nesterov_a_test_task_threads
diff --git a/tasks/mpi/example/data/test.txt b/tasks/mpi/example/data/test.txt
deleted file mode 100644
index 105d7d9ad..000000000
--- a/tasks/mpi/example/data/test.txt
+++ /dev/null
@@ -1 +0,0 @@
-100
\ No newline at end of file
diff --git a/tasks/mpi/example/func_tests/main.cpp b/tasks/mpi/example/func_tests/main.cpp
deleted file mode 100644
index 6d7bfc645..000000000
--- a/tasks/mpi/example/func_tests/main.cpp
+++ /dev/null
@@ -1,72 +0,0 @@
-#include <gtest/gtest.h>
-
-#include <cstddef>
-#include <fstream>
-#include <string>
-#include <vector>
-
-#include "core/util/include/util.hpp"
-#include "mpi/example/include/ops_mpi.hpp"
-
-class NesterovATestTaskMPI : public ::testing::TestWithParam<double> {
- protected:
-  void SetUp() override {
-    std::ifstream test_file(ppc::util::GetAbsolutePath("mpi/example/data/test.txt"));
-    ASSERT_TRUE(test_file.is_open()) << "Failed to open input file";
-    std::string line;
-    std::getline(test_file, line);
-    test_file.close();
-    base_count = std::stoi(line);
-  }
-
-  [[nodiscard]] size_t GetCount() const { return static_cast<size_t>(base_count * GetParam()); }
-
-  int base_count = 0;
-};
-
-TEST_P(NesterovATestTaskMPI, MatmulFromFile) {
-  const size_t count = GetCount();
-
-  std::vector<int> in(count * count, 0);
-  for (size_t i = 0; i < count; ++i) {
-    in[(i * count) + i] = 1;
-  }
-
-  nesterov_a_test_task_mpi::TestTaskMPI test_task_mpi(in);
-  ASSERT_TRUE(test_task_mpi.Validation());
-  test_task_mpi.PreProcessing();
-  test_task_mpi.Run();
-  test_task_mpi.PostProcessing();
-
-  EXPECT_EQ(in, test_task_mpi.Get());
-}
-
-TEST_P(NesterovATestTaskMPI, MultiplyRowMajorUtilTestFromFile) {
-  const size_t count = GetCount();
-
-  std::vector<int> in(count * count, 0);
-  for (size_t i = 0; i < count; ++i) {
-    in[(i * count) + i] = 1;
-  }
-
-  std::vector<int> out(count * count, 0);
-  nesterov_a_test_task_mpi::MultiplyRowMajor(in, out, static_cast<int>(count));
-
-  EXPECT_EQ(in, out);
-}
-
-TEST_P(NesterovATestTaskMPI, MultiplyColumnMajorUtilTestFromFile) {
-  const size_t count = GetCount();
-
-  std::vector<int> in(count * count, 0);
-  for (size_t i = 0; i < count; ++i) {
-    in[(i * count) + i] = 1;
-  }
-
-  std::vector<int> out(count * count, 0);
-  nesterov_a_test_task_mpi::MultiplyColumnMajor(in, out, static_cast<int>(count));
-
-  EXPECT_EQ(in, out);
-}
-
-INSTANTIATE_TEST_SUITE_P_NOLINT(FileMatrixTestsMPI, NesterovATestTaskMPI, ::testing::Values(0.5, 1.0));
diff --git a/tasks/mpi/example/include/ops_mpi.hpp b/tasks/mpi/example/include/ops_mpi.hpp
deleted file mode 100644
index cd1a3f3ca..000000000
--- a/tasks/mpi/example/include/ops_mpi.hpp
+++ /dev/null
@@ -1,28 +0,0 @@
-#pragma once
-
-#include <vector>
-
-#include "core/task/include/task.hpp"
-
-namespace nesterov_a_test_task_mpi {
-
-void MultiplyRowMajor(const std::vector<int> &in, std::vector<int> &out, int rc_size);
-void MultiplyColumnMajor(const std::vector<int> &in, std::vector<int> &out, int rc_size);
-
-class TestTaskMPI : public ppc::core::Task {
- public:
-  explicit TestTaskMPI(const std::vector<int> &in) : input_(in) {}
-  bool ValidationImpl() override;
-  bool PreProcessingImpl() override;
-  bool RunImpl() override;
-  bool PostProcessingImpl() override;
-  std::vector<int> Get();
-
- private:
-  std::vector<int> input_, output_;
-  int rc_size_{};
-
-  void MultiplyMatrixBasedOnRank();
-};
-
-}  // namespace nesterov_a_test_task_mpi
diff --git a/tasks/mpi/example/perf_tests/main.cpp b/tasks/mpi/example/perf_tests/main.cpp
deleted file mode 100644
index 2ed345abe..000000000
--- a/tasks/mpi/example/perf_tests/main.cpp
+++ /dev/null
@@ -1,59 +0,0 @@
-#include <gtest/gtest.h>
-#include <mpi.h>
-
-#include <chrono>
-#include <cstddef>
-#include <memory>
-#include <vector>
-
-#include "core/perf/include/perf.hpp"
-#include "core/util/include/util.hpp"
-#include "mpi/example/include/ops_mpi.hpp"
-
-class NesterovATaskMPITest : public ::testing::TestWithParam<ppc::core::PerfResults::TypeOfRunning> {
- protected:
-  static void RunTest(ppc::core::PerfResults::TypeOfRunning mode) {
-    constexpr size_t kCount = 500;
-
-    // Create data
-    std::vector<int> in(kCount * kCount, 0);
-    for (size_t i = 0; i < kCount; i++) {
-      in[(i * kCount) + i] = 1;
-    }
-
-    // Create Task
-    auto test_task_mpi = std::make_shared<nesterov_a_test_task_mpi::TestTaskMPI>(in);
-
-    // Create Perf analyzer
-    ppc::core::Perf perf_analyzer(test_task_mpi);
-
-    // Create Perf attributes
-    ppc::core::PerfAttr perf_attr;
-    const auto t0 = std::chrono::high_resolution_clock::now();
-    perf_attr.current_timer = [&] {
-      auto current_time_point = std::chrono::high_resolution_clock::now();
-      auto duration = std::chrono::duration_cast<std::chrono::nanoseconds>(current_time_point - t0).count();
-      return static_cast<double>(duration) * 1e-9;
-    };
-
-    if (mode == ppc::core::PerfResults::TypeOfRunning::kPipeline) {
-      perf_analyzer.PipelineRun(perf_attr);
-    } else {
-      perf_analyzer.TaskRun(perf_attr);
-    }
-
-    int rank = -1;
-    MPI_Comm_rank(MPI_COMM_WORLD, &rank);
-    if (rank == 0) {
-      perf_analyzer.PrintPerfStatistic();
-    }
-
-    ASSERT_EQ(in, test_task_mpi->Get());
-  }
-};
-
-TEST_P(NesterovATaskMPITest, RunModes) { RunTest(GetParam()); }
-
-INSTANTIATE_TEST_SUITE_P_NOLINT(NesterovATests, NesterovATaskMPITest,
-                                ::testing::Values(ppc::core::PerfResults::TypeOfRunning::kPipeline,
-                                                  ppc::core::PerfResults::TypeOfRunning::kTaskRun));
diff --git a/tasks/mpi/example/src/ops_mpi.cpp b/tasks/mpi/example/src/ops_mpi.cpp
deleted file mode 100644
index 43821a5d5..000000000
--- a/tasks/mpi/example/src/ops_mpi.cpp
+++ /dev/null
@@ -1,60 +0,0 @@
-#include "mpi/example/include/ops_mpi.hpp"
-
-#include <mpi.h>
-
-#include <cmath>
-#include <cstddef>
-#include <vector>
-
-bool nesterov_a_test_task_mpi::TestTaskMPI::ValidationImpl() {
-  auto sqrt_size = static_cast<int>(std::sqrt(input_.size()));
-  return sqrt_size * sqrt_size == static_cast<int>(input_.size());
-}
-
-bool nesterov_a_test_task_mpi::TestTaskMPI::PreProcessingImpl() {
-  // Init value for input and output
-  rc_size_ = static_cast<int>(std::sqrt(input_.size()));
-  output_ = std::vector<int>(input_.size(), 0);
-  return true;
-}
-
-bool nesterov_a_test_task_mpi::TestTaskMPI::RunImpl() {
-  MultiplyMatrixBasedOnRank();
-  return true;
-}
-
-void nesterov_a_test_task_mpi::TestTaskMPI::MultiplyMatrixBasedOnRank() {
-  int rank = -1;
-  MPI_Comm_rank(MPI_COMM_WORLD, &rank);
-
-  if (rank == 0) {
-    MultiplyRowMajor(input_, output_, rc_size_);
-  } else {
-    MultiplyColumnMajor(input_, output_, rc_size_);
-  }
-  MPI_Barrier(MPI_COMM_WORLD);
-}
-
-void nesterov_a_test_task_mpi::MultiplyRowMajor(const std::vector<int> &in, std::vector<int> &out, int rc_size) {
-  for (int i = 0; i < rc_size; ++i) {
-    for (int j = 0; j < rc_size; ++j) {
-      for (int k = 0; k < rc_size; ++k) {
-        out[(i * rc_size) + j] += in[(i * rc_size) + k] * in[(k * rc_size) + j];
-      }
-    }
-  }
-}
-
-void nesterov_a_test_task_mpi::MultiplyColumnMajor(const std::vector<int> &in, std::vector<int> &out, int rc_size) {
-  for (int j = 0; j < rc_size; ++j) {
-    for (int k = 0; k < rc_size; ++k) {
-      for (int i = 0; i < rc_size; ++i) {
-        out[(i * rc_size) + j] += in[(i * rc_size) + k] * in[(k * rc_size) + j];
-      }
-    }
-  }
-}
-
-bool nesterov_a_test_task_mpi::TestTaskMPI::PostProcessingImpl() { return true; }
-
-std::vector<int> nesterov_a_test_task_mpi::TestTaskMPI::Get() { return output_; }
diff --git a/tasks/mpi/runner.cpp b/tasks/mpi/runner.cpp
deleted file mode 100644
index 04bd520af..000000000
--- a/tasks/mpi/runner.cpp
+++ /dev/null
@@ -1,87 +0,0 @@
-#include <gtest/gtest.h>
-#include <mpi.h>
-
-#include <cstdio>
-#include <cstdlib>
-#include <memory>
-#include <string>
-#include <utility>
-
-class UnreadMessagesDetector : public ::testing::EmptyTestEventListener {
- public:
-  UnreadMessagesDetector() = default;
-
-  void OnTestEnd(const ::testing::TestInfo& /*test_info*/) override {
-    int rank = -1;
-    MPI_Comm_rank(MPI_COMM_WORLD, &rank);
-
-    MPI_Barrier(MPI_COMM_WORLD);
-
-    int flag = -1;
-    MPI_Status status;
-
-    MPI_Iprobe(MPI_ANY_SOURCE, MPI_ANY_TAG, MPI_COMM_WORLD, &flag, &status);
-
-    if (flag != 0) {
-      fprintf(
-          stderr,
-          "[ PROCESS %d ] [ FAILED ] %s.%s: MPI message queue has an unread message from process %d with tag %d\n",
-          rank, "test_suite_name", "test_name", status.MPI_SOURCE, status.MPI_TAG);
-      MPI_Finalize();
-      std::abort();
-    }
-
-    MPI_Barrier(MPI_COMM_WORLD);
-  }
-
- private:
-};
-
-class WorkerTestFailurePrinter : public ::testing::EmptyTestEventListener {
- public:
-  explicit WorkerTestFailurePrinter(std::shared_ptr<::testing::TestEventListener> base) : base_(std::move(base)) {}
-
-  void OnTestEnd(const ::testing::TestInfo& test_info) override {
-    if (test_info.result()->Passed()) {
-      return;
-    }
-    PrintProcessRank();
-    base_->OnTestEnd(test_info);
-  }
-
-  void OnTestPartResult(const ::testing::TestPartResult& test_part_result) override {
-    if (test_part_result.passed() || test_part_result.skipped()) {
-      return;
-    }
-    PrintProcessRank();
-    base_->OnTestPartResult(test_part_result);
-  }
-
- private:
-  static void PrintProcessRank() {
-    int rank = -1;
-    MPI_Comm_rank(MPI_COMM_WORLD, &rank);
-    printf(" [ PROCESS %d ] ", rank);
-  }
-
-  std::shared_ptr<::testing::TestEventListener> base_;
-};
-
-int main(int argc, char** argv) {
-  MPI_Init(&argc, &argv);
-
-  ::testing::InitGoogleTest(&argc, argv);
-
-  auto& listeners = ::testing::UnitTest::GetInstance()->listeners();
-  int rank = -1;
-  MPI_Comm_rank(MPI_COMM_WORLD, &rank);
-  if (rank != 0 && (argc < 2 || argv[1] != std::string("--print-workers"))) {
-    auto* listener = listeners.Release(listeners.default_result_printer());
-    listeners.Append(new WorkerTestFailurePrinter(std::shared_ptr<::testing::TestEventListener>(listener)));
-  }
-  listeners.Append(new UnreadMessagesDetector());
-  auto status = RUN_ALL_TESTS();
-
-  MPI_Finalize();
-  return status;
-}
diff --git a/tasks/omp/example/data/test.txt b/tasks/omp/example/data/test.txt
deleted file mode 100644
index 105d7d9ad..000000000
--- a/tasks/omp/example/data/test.txt
+++ /dev/null
@@ -1 +0,0 @@
-100
\ No newline at end of file
diff --git a/tasks/omp/example/func_tests/main.cpp b/tasks/omp/example/func_tests/main.cpp
deleted file mode 100644
index 25c5a480b..000000000
--- a/tasks/omp/example/func_tests/main.cpp
+++ /dev/null
@@ -1,44 +0,0 @@
-#include <gtest/gtest.h>
-
-#include <cstddef>
-#include <fstream>
-#include <string>
-#include <vector>
-
-#include "core/util/include/util.hpp"
-#include "omp/example/include/ops_omp.hpp"
-
-class NesterovATestTaskOMP : public ::testing::TestWithParam<double> {
- protected:
-  void SetUp() override {
-    std::ifstream test_file(ppc::util::GetAbsolutePath("omp/example/data/test.txt"));
-    ASSERT_TRUE(test_file.is_open()) << "Failed to open input file";
-    std::string line;
-    std::getline(test_file, line);
-    test_file.close();
-    base_count = std::stoi(line);
-  }
-
-  [[nodiscard]] size_t GetCount() const { return static_cast<size_t>(base_count * GetParam()); }
-
-  int base_count = 0;
-};
-
-TEST_P(NesterovATestTaskOMP, MatmulFromFile) {
-  const size_t count = GetCount();
-
-  std::vector<int> in(count * count, 0);
-  for (size_t i = 0; i < count; ++i) {
-    in[(i * count) + i] = 1;
-  }
-
-  nesterov_a_test_task_omp::TestTaskOpenMP test_task_omp(in);
-  ASSERT_TRUE(test_task_omp.Validation());
-  test_task_omp.PreProcessing();
-  test_task_omp.Run();
-  test_task_omp.PostProcessing();
-
-  EXPECT_EQ(in, test_task_omp.Get());
-}
-
-INSTANTIATE_TEST_SUITE_P_NOLINT(FileMatrixTestsOMP, NesterovATestTaskOMP, ::testing::Values(0.5, 1.0));
diff --git a/tasks/omp/example/include/ops_omp.hpp b/tasks/omp/example/include/ops_omp.hpp
deleted file mode 100644
index ea47385f5..000000000
--- a/tasks/omp/example/include/ops_omp.hpp
+++ /dev/null
@@ -1,23 +0,0 @@
-#pragma once
-
-#include <vector>
-
-#include "core/task/include/task.hpp"
-
-namespace nesterov_a_test_task_omp {
-
-class TestTaskOpenMP : public ppc::core::Task {
- public:
-  explicit TestTaskOpenMP(const std::vector<int>& in) : input_(in) {}
-  bool ValidationImpl() override;
-  bool PreProcessingImpl() override;
-  bool RunImpl() override;
-  bool PostProcessingImpl() override;
-  std::vector<int> Get();
-
- private:
-  std::vector<int> input_, output_;
-  int rc_size_{};
-};
-
-}  // namespace nesterov_a_test_task_omp
diff --git a/tasks/omp/example/perf_tests/main.cpp b/tasks/omp/example/perf_tests/main.cpp
deleted file mode 100644
index 72a918152..000000000
--- a/tasks/omp/example/perf_tests/main.cpp
+++ /dev/null
@@ -1,54 +0,0 @@
-#include <gtest/gtest.h>
-
-#include <chrono>
-#include <cstddef>
-#include <memory>
-#include <vector>
-
-#include "core/perf/include/perf.hpp"
-#include "core/util/include/util.hpp"
-#include "omp/example/include/ops_omp.hpp"
-
-class NesterovTaskOMPTest : public ::testing::TestWithParam<ppc::core::PerfResults::TypeOfRunning> {
- protected:
-  static void RunTest(ppc::core::PerfResults::TypeOfRunning mode) {
-    constexpr size_t kCount = 300;
-
-    // Create data
-    std::vector<int> in(kCount * kCount, 0);
-    for (size_t i = 0; i < kCount; i++) {
-      in[(i * kCount) + i] = 1;
-    }
-
-    // Create Task
-    auto test_task_omp = std::make_shared<nesterov_a_test_task_omp::TestTaskOpenMP>(in);
-
-    // Create Perf analyzer
-    ppc::core::Perf perf_analyzer(test_task_omp);
-
-    // Create Perf attributes
-    ppc::core::PerfAttr perf_attr;
-    const auto t0 = std::chrono::high_resolution_clock::now();
-    perf_attr.current_timer = [&] {
-      auto current_time_point = std::chrono::high_resolution_clock::now();
-      auto duration = std::chrono::duration_cast<std::chrono::nanoseconds>(current_time_point - t0).count();
-      return static_cast<double>(duration) * 1e-9;
-    };
-
-    if (mode == ppc::core::PerfResults::TypeOfRunning::kPipeline) {
-      perf_analyzer.PipelineRun(perf_attr);
-    } else {
-      perf_analyzer.TaskRun(perf_attr);
-    }
-
-    perf_analyzer.PrintPerfStatistic();
-
-    ASSERT_EQ(in, test_task_omp->Get());
-  }
-};
-
-TEST_P(NesterovTaskOMPTest, RunModes) { RunTest(GetParam()); }
-
-INSTANTIATE_TEST_SUITE_P_NOLINT(NesterovOMPTests, NesterovTaskOMPTest,
-                                ::testing::Values(ppc::core::PerfResults::TypeOfRunning::kPipeline,
-                                                  ppc::core::PerfResults::TypeOfRunning::kTaskRun));
diff --git a/tasks/omp/example/src/ops_omp.cpp b/tasks/omp/example/src/ops_omp.cpp
deleted file mode 100644
index 380be6aa8..000000000
--- a/tasks/omp/example/src/ops_omp.cpp
+++ /dev/null
@@ -1,39 +0,0 @@
-#include "omp/example/include/ops_omp.hpp"
-
-#include <cmath>
-#include <cstddef>
-#include <vector>
-
-bool nesterov_a_test_task_omp::TestTaskOpenMP::ValidationImpl() {
-  auto sqrt_size = static_cast<int>(std::sqrt(input_.size()));
-  return sqrt_size * sqrt_size == static_cast<int>(input_.size());
-}
-
-bool nesterov_a_test_task_omp::TestTaskOpenMP::PreProcessingImpl() {
-  rc_size_ = static_cast<int>(std::sqrt(input_.size()));
-  output_ = std::vector<int>(input_.size(), 0);
-  return true;
-}
-
-bool nesterov_a_test_task_omp::TestTaskOpenMP::RunImpl() {
-#pragma omp parallel default(none)
-  {
-#pragma omp critical
-    {
-      // Multiply matrices
-      for (int i = 0; i < rc_size_; ++i) {
-        for (int j = 0; j < rc_size_; ++j) {
-          output_[(i * rc_size_) + j] = 0;
-          for (int k = 0; k < rc_size_; ++k) {
-            output_[(i * rc_size_) + j] += input_[(i * rc_size_) + k] * input_[(k * rc_size_) + j];
-          }
-        }
-      }
-    }
-  }
-  return true;
-}
-
-bool nesterov_a_test_task_omp::TestTaskOpenMP::PostProcessingImpl() { return true; }
-
-std::vector<int> nesterov_a_test_task_omp::TestTaskOpenMP::Get() { return output_; }
diff --git a/tasks/omp/runner.cpp b/tasks/omp/runner.cpp
deleted file mode 100644
index 4d820af77..000000000
--- a/tasks/omp/runner.cpp
+++ /dev/null
@@ -1,6 +0,0 @@
-#include <gtest/gtest.h>
-
-int main(int argc, char **argv) {
-  ::testing::InitGoogleTest(&argc, argv);
-  return RUN_ALL_TESTS();
-}
diff --git a/tasks/seq/example/data/test.txt b/tasks/seq/example/data/test.txt
deleted file mode 100644
index 105d7d9ad..000000000
--- a/tasks/seq/example/data/test.txt
+++ /dev/null
@@ -1 +0,0 @@
-100
\ No newline at end of file
diff --git a/tasks/seq/example/func_tests/main.cpp b/tasks/seq/example/func_tests/main.cpp
deleted file mode 100644
index 412aa4f60..000000000
--- a/tasks/seq/example/func_tests/main.cpp
+++ /dev/null
@@ -1,43 +0,0 @@
-#include <gtest/gtest.h>
-
-#include <cstddef>
-#include <fstream>
-#include <string>
-#include <vector>
-
-#include "core/util/include/util.hpp"
-#include "seq/example/include/ops_seq.hpp"
-
-class NesterovATestTaskSeq : public ::testing::TestWithParam<double> {
- protected:
-  void SetUp() override {
-    std::ifstream test_file(ppc::util::GetAbsolutePath("seq/example/data/test.txt"));
-    ASSERT_TRUE(test_file.is_open()) << "Failed to open input file";
-    std::string line;
-    std::getline(test_file, line);
-    test_file.close();
-    base_count = std::stoi(line);
-  }
-
-  [[nodiscard]] size_t GetCount() const { return static_cast<size_t>(base_count * GetParam()); }
-
-  int base_count = 0;
-};
-
-TEST_P(NesterovATestTaskSeq, MatmulFromFile) {
-  const size_t count = GetCount();
-
-  std::vector<int> in(count * count, 0);
-  for (size_t i = 0; i < count; i++) {
-    in[(i * count) + i] = 1;
-  }
-
-  nesterov_a_test_task_seq::TestTaskSequential test_task_sequential(in);
-  ASSERT_TRUE(test_task_sequential.Validation());
-  test_task_sequential.PreProcessing();
-  test_task_sequential.Run();
-  test_task_sequential.PostProcessing();
-  EXPECT_EQ(in, test_task_sequential.Get());
-}
-
-INSTANTIATE_TEST_SUITE_P_NOLINT(FileMatrixTests, NesterovATestTaskSeq, ::testing::Values(0.5, 1.0));
diff --git a/tasks/seq/example/include/ops_seq.hpp b/tasks/seq/example/include/ops_seq.hpp
deleted file mode 100644
index 591b54861..000000000
--- a/tasks/seq/example/include/ops_seq.hpp
+++ /dev/null
@@ -1,23 +0,0 @@
-#pragma once
-
-#include <vector>
-
-#include "core/task/include/task.hpp"
-
-namespace nesterov_a_test_task_seq {
-
-class TestTaskSequential : public ppc::core::Task {
- public:
-  explicit TestTaskSequential(const std::vector<int>& in) : input_(in) {}
-  bool ValidationImpl() override;
-  bool PreProcessingImpl() override;
-  bool RunImpl() override;
-  bool PostProcessingImpl() override;
-  std::vector<int> Get();
-
- private:
-  std::vector<int> input_, output_;
-  int rc_size_{};
-};
-
-}  // namespace nesterov_a_test_task_seq
diff --git a/tasks/seq/example/perf_tests/main.cpp b/tasks/seq/example/perf_tests/main.cpp
deleted file mode 100644
index f65f1bb1c..000000000
--- a/tasks/seq/example/perf_tests/main.cpp
+++ /dev/null
@@ -1,54 +0,0 @@
-#include <gtest/gtest.h>
-
-#include <chrono>
-#include <cstddef>
-#include <memory>
-#include <vector>
-
-#include "core/perf/include/perf.hpp"
"core/perf/include/perf.hpp" -#include "core/util/include/util.hpp" -#include "seq/example/include/ops_seq.hpp" - -class NesterovTaskSeqTest : public ::testing::TestWithParam { - protected: - static void RunTest(ppc::core::PerfResults::TypeOfRunning mode) { - constexpr size_t kCount = 500; - - // Create data - std::vector in(kCount * kCount, 0); - for (size_t i = 0; i < kCount; i++) { - in[(i * kCount) + i] = 1; - } - - // Create Task - auto test_task_sequential = std::make_shared(in); - - // Create Perf analyzer - ppc::core::Perf perf_analyzer(test_task_sequential); - - // Create Perf attributes - ppc::core::PerfAttr perf_attr; - const auto t0 = std::chrono::high_resolution_clock::now(); - perf_attr.current_timer = [&] { - auto current_time_point = std::chrono::high_resolution_clock::now(); - auto duration = std::chrono::duration_cast(current_time_point - t0).count(); - return static_cast(duration) * 1e-9; - }; - - if (mode == ppc::core::PerfResults::TypeOfRunning::kPipeline) { - perf_analyzer.PipelineRun(perf_attr); - } else { - perf_analyzer.TaskRun(perf_attr); - } - - perf_analyzer.PrintPerfStatistic(); - - ASSERT_EQ(in, test_task_sequential->Get()); - } -}; - -TEST_P(NesterovTaskSeqTest, RunModes) { RunTest(GetParam()); } - -INSTANTIATE_TEST_SUITE_P_NOLINT(NesterovSeqTests, NesterovTaskSeqTest, - ::testing::Values(ppc::core::PerfResults::TypeOfRunning::kPipeline, - ppc::core::PerfResults::TypeOfRunning::kTaskRun)); diff --git a/tasks/seq/example/src/ops_seq.cpp b/tasks/seq/example/src/ops_seq.cpp deleted file mode 100644 index 40df2e006..000000000 --- a/tasks/seq/example/src/ops_seq.cpp +++ /dev/null @@ -1,32 +0,0 @@ -#include "seq/example/include/ops_seq.hpp" - -#include -#include -#include - -bool nesterov_a_test_task_seq::TestTaskSequential::ValidationImpl() { - auto sqrt_size = static_cast(std::sqrt(input_.size())); - return sqrt_size * sqrt_size == static_cast(input_.size()); -} - -bool nesterov_a_test_task_seq::TestTaskSequential::PreProcessingImpl() { - rc_size_ = static_cast(std::sqrt(input_.size())); - output_ = std::vector(input_.size(), 0); - return true; -} - -bool nesterov_a_test_task_seq::TestTaskSequential::RunImpl() { - // Multiply matrices - for (int i = 0; i < rc_size_; ++i) { - for (int j = 0; j < rc_size_; ++j) { - for (int k = 0; k < rc_size_; ++k) { - output_[(i * rc_size_) + j] += input_[(i * rc_size_) + k] * input_[(k * rc_size_) + j]; - } - } - } - return true; -} - -bool nesterov_a_test_task_seq::TestTaskSequential::PostProcessingImpl() { return true; } - -std::vector nesterov_a_test_task_seq::TestTaskSequential::Get() { return output_; } diff --git a/tasks/seq/runner.cpp b/tasks/seq/runner.cpp deleted file mode 100644 index 4d820af77..000000000 --- a/tasks/seq/runner.cpp +++ /dev/null @@ -1,6 +0,0 @@ -#include - -int main(int argc, char **argv) { - ::testing::InitGoogleTest(&argc, argv); - return RUN_ALL_TESTS(); -} diff --git a/tasks/stl/example/data/test.txt b/tasks/stl/example/data/test.txt deleted file mode 100644 index 105d7d9ad..000000000 --- a/tasks/stl/example/data/test.txt +++ /dev/null @@ -1 +0,0 @@ -100 \ No newline at end of file diff --git a/tasks/stl/example/func_tests/main.cpp b/tasks/stl/example/func_tests/main.cpp deleted file mode 100644 index 75beec1d6..000000000 --- a/tasks/stl/example/func_tests/main.cpp +++ /dev/null @@ -1,44 +0,0 @@ -#include - -#include -#include -#include -#include - -#include "core/util/include/util.hpp" -#include "stl/example/include/ops_stl.hpp" - -class NesterovATestTaskSTL : public ::testing::TestWithParam { - 
-  void SetUp() override {
-    std::ifstream test_file(ppc::util::GetAbsolutePath("stl/example/data/test.txt"));
-    ASSERT_TRUE(test_file.is_open()) << "Failed to open input file";
-    std::string line;
-    std::getline(test_file, line);
-    test_file.close();
-    base_count = std::stoi(line);
-  }
-
-  [[nodiscard]] size_t GetCount() const { return static_cast<size_t>(base_count * GetParam()); }
-
-  int base_count = 0;
-};
-
-TEST_P(NesterovATestTaskSTL, MatmulFromFile) {
-  const size_t count = GetCount();
-
-  std::vector<int> in(count * count, 0);
-  for (size_t i = 0; i < count; ++i) {
-    in[(i * count) + i] = 1;
-  }
-
-  nesterov_a_test_task_stl::TestTaskSTL test_task_stl(in);
-  ASSERT_TRUE(test_task_stl.Validation());
-  test_task_stl.PreProcessing();
-  test_task_stl.Run();
-  test_task_stl.PostProcessing();
-
-  EXPECT_EQ(in, test_task_stl.Get());
-}
-
-INSTANTIATE_TEST_SUITE_P_NOLINT(FileMatrixTestsSTL, NesterovATestTaskSTL, ::testing::Values(0.5, 1.0));
diff --git a/tasks/stl/example/include/ops_stl.hpp b/tasks/stl/example/include/ops_stl.hpp
deleted file mode 100644
index a99e99812..000000000
--- a/tasks/stl/example/include/ops_stl.hpp
+++ /dev/null
@@ -1,23 +0,0 @@
-#pragma once
-
-#include <vector>
-
-#include "core/task/include/task.hpp"
-
-namespace nesterov_a_test_task_stl {
-
-class TestTaskSTL : public ppc::core::Task {
- public:
-  explicit TestTaskSTL(const std::vector<int>& in) : input_(in) {}
-  bool ValidationImpl() override;
-  bool PreProcessingImpl() override;
-  bool RunImpl() override;
-  bool PostProcessingImpl() override;
-  std::vector<int> Get();
-
- private:
-  std::vector<int> input_, output_;
-  int rc_size_{};
-};
-
-}  // namespace nesterov_a_test_task_stl
diff --git a/tasks/stl/example/perf_tests/main.cpp b/tasks/stl/example/perf_tests/main.cpp
deleted file mode 100644
index fca8a437a..000000000
--- a/tasks/stl/example/perf_tests/main.cpp
+++ /dev/null
@@ -1,54 +0,0 @@
-#include <gtest/gtest.h>
-
-#include <chrono>
-#include <cstddef>
-#include <memory>
-#include <vector>
-
-#include "core/perf/include/perf.hpp"
-#include "core/util/include/util.hpp"
-#include "stl/example/include/ops_stl.hpp"
-
-class NesterovTaskSTLTest : public ::testing::TestWithParam<ppc::core::PerfResults::TypeOfRunning> {
- protected:
-  static void RunTest(ppc::core::PerfResults::TypeOfRunning mode) {
-    constexpr size_t kCount = 450;
-
-    // Create data
-    std::vector<int> in(kCount * kCount, 0);
-    for (size_t i = 0; i < kCount; i++) {
-      in[(i * kCount) + i] = 1;
-    }
-
-    // Create Task
-    auto test_task_stl = std::make_shared<nesterov_a_test_task_stl::TestTaskSTL>(in);
-
-    // Create Perf analyzer
-    ppc::core::Perf perf_analyzer(test_task_stl);
-
-    // Create Perf attributes
-    ppc::core::PerfAttr perf_attr;
-    const auto t0 = std::chrono::high_resolution_clock::now();
-    perf_attr.current_timer = [&] {
-      auto current_time_point = std::chrono::high_resolution_clock::now();
-      auto duration = std::chrono::duration_cast<std::chrono::nanoseconds>(current_time_point - t0).count();
-      return static_cast<double>(duration) * 1e-9;
-    };
-
-    if (mode == ppc::core::PerfResults::TypeOfRunning::kPipeline) {
-      perf_analyzer.PipelineRun(perf_attr);
-    } else {
-      perf_analyzer.TaskRun(perf_attr);
-    }
-
-    perf_analyzer.PrintPerfStatistic();
-
-    ASSERT_EQ(in, test_task_stl->Get());
-  }
-};
-
-TEST_P(NesterovTaskSTLTest, RunModes) { RunTest(GetParam()); }
-
-INSTANTIATE_TEST_SUITE_P_NOLINT(NesterovSTLTests, NesterovTaskSTLTest,
-                                ::testing::Values(ppc::core::PerfResults::TypeOfRunning::kPipeline,
-                                                  ppc::core::PerfResults::TypeOfRunning::kTaskRun));
diff --git a/tasks/stl/example/src/ops_stl.cpp b/tasks/stl/example/src/ops_stl.cpp
deleted file mode 100644
index 145d987e8..000000000
--- a/tasks/stl/example/src/ops_stl.cpp
+++ /dev/null
@@ -1,47 +0,0 @@
-#include "stl/example/include/ops_stl.hpp"
-
-#include <cmath>
-#include <cstddef>
-#include <functional>
-#include <thread>
-#include <vector>
-
-#include "core/util/include/util.hpp"
-
-namespace {
-void MatMul(const std::vector<int> &in_vec, int rc_size, std::vector<int> &out_vec) {
-  for (int i = 0; i < rc_size; ++i) {
-    for (int j = 0; j < rc_size; ++j) {
-      out_vec[(i * rc_size) + j] = 0;
-      for (int k = 0; k < rc_size; ++k) {
-        out_vec[(i * rc_size) + j] += in_vec[(i * rc_size) + k] * in_vec[(k * rc_size) + j];
-      }
-    }
-  }
-}
-}  // namespace
-
-bool nesterov_a_test_task_stl::TestTaskSTL::ValidationImpl() {
-  auto sqrt_size = static_cast<int>(std::sqrt(input_.size()));
-  return sqrt_size * sqrt_size == static_cast<int>(input_.size());
-}
-
-bool nesterov_a_test_task_stl::TestTaskSTL::PreProcessingImpl() {
-  rc_size_ = static_cast<int>(std::sqrt(input_.size()));
-  output_ = std::vector<int>(input_.size(), 0);
-  return true;
-}
-
-bool nesterov_a_test_task_stl::TestTaskSTL::RunImpl() {
-  const int num_threads = ppc::util::GetNumThreads();
-  std::vector<std::thread> threads(num_threads);
-  for (int i = 0; i < num_threads; i++) {
-    threads[i] = std::thread(MatMul, std::cref(input_), rc_size_, std::ref(output_));
-    threads[i].join();
-  }
-  return true;
-}
-
-bool nesterov_a_test_task_stl::TestTaskSTL::PostProcessingImpl() { return true; }
-
-std::vector<int> nesterov_a_test_task_stl::TestTaskSTL::Get() { return output_; }
diff --git a/tasks/stl/runner.cpp b/tasks/stl/runner.cpp
deleted file mode 100644
index 4d820af77..000000000
--- a/tasks/stl/runner.cpp
+++ /dev/null
@@ -1,6 +0,0 @@
-#include <gtest/gtest.h>
-
-int main(int argc, char **argv) {
-  ::testing::InitGoogleTest(&argc, argv);
-  return RUN_ALL_TESTS();
-}
diff --git a/tasks/tbb/example/data/test.txt b/tasks/tbb/example/data/test.txt
deleted file mode 100644
index 105d7d9ad..000000000
--- a/tasks/tbb/example/data/test.txt
+++ /dev/null
@@ -1 +0,0 @@
-100
\ No newline at end of file
diff --git a/tasks/tbb/example/func_tests/main.cpp b/tasks/tbb/example/func_tests/main.cpp
deleted file mode 100644
index 1e2d019be..000000000
--- a/tasks/tbb/example/func_tests/main.cpp
+++ /dev/null
@@ -1,44 +0,0 @@
-#include <gtest/gtest.h>
-
-#include <cstddef>
-#include <fstream>
-#include <string>
-#include <vector>
-
-#include "core/util/include/util.hpp"
-#include "tbb/example/include/ops_tbb.hpp"
-
-class NesterovATestTaskTBB : public ::testing::TestWithParam<double> {
- protected:
-  void SetUp() override {
-    std::ifstream test_file(ppc::util::GetAbsolutePath("tbb/example/data/test.txt"));
-    ASSERT_TRUE(test_file.is_open()) << "Failed to open input file";
-    std::string line;
-    std::getline(test_file, line);
-    test_file.close();
-    base_count = std::stoi(line);
-  }
-
-  [[nodiscard]] size_t GetCount() const { return static_cast<size_t>(base_count * GetParam()); }
-
-  int base_count = 0;
-};
-
-TEST_P(NesterovATestTaskTBB, MatmulFromFile) {
-  const size_t count = GetCount();
-
-  std::vector<int> in(count * count, 0);
-  for (size_t i = 0; i < count; ++i) {
-    in[(i * count) + i] = 1;
-  }
-
-  nesterov_a_test_task_tbb::TestTaskTBB test_task_tbb(in);
-  ASSERT_TRUE(test_task_tbb.Validation());
-  test_task_tbb.PreProcessing();
-  test_task_tbb.Run();
-  test_task_tbb.PostProcessing();
-
-  EXPECT_EQ(in, test_task_tbb.Get());
-}
-
-INSTANTIATE_TEST_SUITE_P_NOLINT(FileMatrixTestsTBB, NesterovATestTaskTBB, ::testing::Values(0.5, 1.0));
diff --git a/tasks/tbb/example/include/ops_tbb.hpp b/tasks/tbb/example/include/ops_tbb.hpp
deleted file mode 100644
index b585d7b52..000000000
--- a/tasks/tbb/example/include/ops_tbb.hpp
+++ /dev/null
@@ -1,23 +0,0 @@
-#pragma once
-
-#include <vector>
-
-#include "core/task/include/task.hpp"
-
-namespace nesterov_a_test_task_tbb {
-
-class TestTaskTBB : public ppc::core::Task {
- public:
-  explicit TestTaskTBB(const std::vector<int>& in) : input_(in) {}
-  bool ValidationImpl() override;
-  bool PreProcessingImpl() override;
-  bool RunImpl() override;
-  bool PostProcessingImpl() override;
-  std::vector<int> Get();
-
- private:
-  std::vector<int> input_, output_;
-  int rc_size_{};
-};
-
-}  // namespace nesterov_a_test_task_tbb
diff --git a/tasks/tbb/example/perf_tests/main.cpp b/tasks/tbb/example/perf_tests/main.cpp
deleted file mode 100644
index e53853276..000000000
--- a/tasks/tbb/example/perf_tests/main.cpp
+++ /dev/null
@@ -1,54 +0,0 @@
-#include <gtest/gtest.h>
-
-#include <chrono>
-#include <cstddef>
-#include <memory>
-#include <vector>
-
-#include "core/perf/include/perf.hpp"
-#include "core/util/include/util.hpp"
-#include "tbb/example/include/ops_tbb.hpp"
-
-class NesterovTaskTBBTest : public ::testing::TestWithParam<ppc::core::PerfResults::TypeOfRunning> {
- protected:
-  static void RunTest(ppc::core::PerfResults::TypeOfRunning mode) {
-    constexpr size_t kCount = 450;
-
-    // Create data
-    std::vector<int> in(kCount * kCount, 0);
-    for (size_t i = 0; i < kCount; i++) {
-      in[(i * kCount) + i] = 1;
-    }
-
-    // Create Task
-    auto test_task_tbb = std::make_shared<nesterov_a_test_task_tbb::TestTaskTBB>(in);
-
-    // Create Perf analyzer
-    ppc::core::Perf perf_analyzer(test_task_tbb);
-
-    // Create Perf attributes
-    ppc::core::PerfAttr perf_attr;
-    const auto t0 = std::chrono::high_resolution_clock::now();
-    perf_attr.current_timer = [&] {
-      auto current_time_point = std::chrono::high_resolution_clock::now();
-      auto duration = std::chrono::duration_cast<std::chrono::nanoseconds>(current_time_point - t0).count();
-      return static_cast<double>(duration) * 1e-9;
-    };
-
-    if (mode == ppc::core::PerfResults::TypeOfRunning::kPipeline) {
-      perf_analyzer.PipelineRun(perf_attr);
-    } else {
-      perf_analyzer.TaskRun(perf_attr);
-    }
-
-    perf_analyzer.PrintPerfStatistic();
-
-    ASSERT_EQ(in, test_task_tbb->Get());
-  }
-};
-
-TEST_P(NesterovTaskTBBTest, RunModes) { RunTest(GetParam()); }
-
-INSTANTIATE_TEST_SUITE_P_NOLINT(NesterovTBBTests, NesterovTaskTBBTest,
-                                ::testing::Values(ppc::core::PerfResults::TypeOfRunning::kPipeline,
-                                                  ppc::core::PerfResults::TypeOfRunning::kTaskRun));
diff --git a/tasks/tbb/example/src/ops_tbb.cpp b/tasks/tbb/example/src/ops_tbb.cpp
deleted file mode 100644
index aaa423ebc..000000000
--- a/tasks/tbb/example/src/ops_tbb.cpp
+++ /dev/null
@@ -1,44 +0,0 @@
-#include "tbb/example/include/ops_tbb.hpp"
-
-#include <tbb/tbb.h>
-
-#include <cmath>
-#include <cstddef>
-#include <vector>
-
-#include "core/util/include/util.hpp"
-#include "oneapi/tbb/parallel_for.h"
-
-namespace {
-void MatMul(const std::vector<int> &in_vec, int rc_size, std::vector<int> &out_vec) {
-  for (int i = 0; i < rc_size; ++i) {
-    for (int j = 0; j < rc_size; ++j) {
-      out_vec[(i * rc_size) + j] = 0;
-      for (int k = 0; k < rc_size; ++k) {
-        out_vec[(i * rc_size) + j] += in_vec[(i * rc_size) + k] * in_vec[(k * rc_size) + j];
-      }
-    }
-  }
-}
-}  // namespace
-
-bool nesterov_a_test_task_tbb::TestTaskTBB::ValidationImpl() {
-  auto sqrt_size = static_cast<int>(std::sqrt(input_.size()));
-  return sqrt_size * sqrt_size == static_cast<int>(input_.size());
-}
-
-bool nesterov_a_test_task_tbb::TestTaskTBB::PreProcessingImpl() {
-  rc_size_ = static_cast<int>(std::sqrt(input_.size()));
-  output_ = std::vector<int>(input_.size(), 0);
-  return true;
-}
-
-bool nesterov_a_test_task_tbb::TestTaskTBB::RunImpl() {
-  tbb::parallel_for(0, ppc::util::GetNumThreads(), [&](int i) { MatMul(input_, rc_size_ - i, output_); });
-  MatMul(input_, rc_size_, output_);
-  return true;
-}
-
-bool nesterov_a_test_task_tbb::TestTaskTBB::PostProcessingImpl() { return true; }
-
-std::vector<int> nesterov_a_test_task_tbb::TestTaskTBB::Get() { return output_; }