diff --git a/.ci/pipeline/codecov-lnx.yml b/.ci/pipeline/codecov-lnx.yml
index 102e7ff147..7ef026e3e8 100644
--- a/.ci/pipeline/codecov-lnx.yml
+++ b/.ci/pipeline/codecov-lnx.yml
@@ -18,13 +18,13 @@ steps:
       . /usr/share/miniconda/etc/profile.d/conda.sh
       conda activate CB
       coverage combine .coverage.sklearnex .coverage.sklearn
-      coverage json -o coverage.json
+      coverage lcov -o coverage.info
     displayName: "Create coverage report"
   - script: |
       curl -Os https://cli.codecov.io/latest/linux/codecov
       chmod +x codecov
       export VARARGS="-n azure-${AGENT_OS}-$(PYTHON_VERSION)-$(SKLEARN_VERSION)"
-      ./codecov -v upload-process -Z -t ${CODECOV_TOKEN} "${VARARGS}" -F azure -f coverage.json
+      ./codecov -v upload-process -Z -t ${CODECOV_TOKEN} "${VARARGS}" -F azure -f coverage.info
     displayName: "Upload to codecov"
     env:
       CODECOV_TOKEN: $(CODECOV_TOKEN)
diff --git a/.ci/pipeline/codecov-win.yml b/.ci/pipeline/codecov-win.yml
index 76bd60494b..e5cf4a62ce 100644
--- a/.ci/pipeline/codecov-win.yml
+++ b/.ci/pipeline/codecov-win.yml
@@ -17,13 +17,13 @@ steps:
   - script: |
       call activate CB
       coverage combine .coverage.sklearnex .coverage.sklearn
-      coverage json -o coverage.json
+      coverage lcov -o coverage.info
     displayName: "Create coverage report"
   - script: |
       set PATH=C:\msys64\usr\bin;%PATH%
       curl -Os https://cli.codecov.io/latest/windows/codecov.exe
       set VARARGS=-n azure-%AGENT_OS%-$(PYTHON_VERSION)-$(SKLEARN_VERSION)
-      .\codecov.exe -v upload-process -Z -t %CODECOV_TOKEN% %VARARGS% -F azure -f coverage.json
+      .\codecov.exe -v upload-process -Z -t %CODECOV_TOKEN% %VARARGS% -F azure -f coverage.info
     displayName: "Upload to codecov"
     env:
       CODECOV_TOKEN: $(CODECOV_TOKEN)
diff --git a/.ci/scripts/run_sklearn_tests.py b/.ci/scripts/run_sklearn_tests.py
index a7f5b04b7f..4dac925b2f 100644
--- a/.ci/scripts/run_sklearn_tests.py
+++ b/.ci/scripts/run_sklearn_tests.py
@@ -58,6 +58,7 @@
     pytest_args += (
         "--cov=onedal",
         "--cov=sklearnex",
+        "--cov-branch",
         f"--cov-config={rc}",
         "--cov-report=",
     )
diff --git a/.github/scripts/generate_coverage_reports.sh b/.github/scripts/generate_coverage_reports.sh
new file mode 100644
index 0000000000..72f2f9eab8
--- /dev/null
+++ b/.github/scripts/generate_coverage_reports.sh
@@ -0,0 +1,46 @@
+#===============================================================================
+# Copyright Contributors to the oneDAL project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#===============================================================================
+
+ci_dir=$(dirname $(dirname $(dirname "${BASH_SOURCE[0]}")))
+cd $ci_dir
+
+# create coverage.py report
+coverage combine .coverage.sklearnex .coverage.sklearn
+coverage lcov -o coverage_py_"${1}".info
+
+# create gcov report (lcov format)
+if [[ -n "${SKLEARNEX_GCOV}" ]]; then
+    # select the gcov tool: icx's llvm-cov by default, plain gcov if a second argument is given
+    if [[ -z "$2" ]]; then
+        GCOV_EXE="$(dirname $(type -P -a icx))/compiler/llvm-cov gcov"
+    else
+        GCOV_EXE="gcov"
+    fi
+    echo $GCOV_EXE
+    FILTER=$(realpath ./onedal).*
+    echo $FILTER
+
+    NUMPY_TEST=$(python -m pip freeze | grep numpy)
+    # install dependencies
+    # proper operation of gcov with sklearnex requires the header files from
+    # the numpy used at build time, which must be set beforehand as NUMPY_BUILD
+    python -m pip install gcovr $NUMPY_BUILD
+
+    gcovr --gcov-executable "${GCOV_EXE}" -r . -v --lcov --filter "${FILTER}" -o coverage_cpp_"${1}".info
+
+    # reinstall previous numpy
+    python -m pip install $NUMPY_TEST
+fi
diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml
index 9450f922ae..248f7cb0fc 100644
--- a/.github/workflows/ci.yml
+++ b/.github/workflows/ci.yml
@@ -95,6 +95,7 @@ jobs:
           echo "DPCFLAG=${DPCFLAG}" >> "$GITHUB_OUTPUT"
           # enable coverage report generation
           echo "COVERAGE_RCFILE=$(readlink -f .coveragerc)" >> "$GITHUB_ENV"
+          if [[ -z $DPCFLAG ]]; then echo "SKLEARNEX_GCOV=1" >> "$GITHUB_ENV"; fi
       - name: apt-get
         run: sudo apt-get update && sudo apt-get install -y clang-format
       - name: dpcpp installation
@@ -111,6 +112,7 @@ jobs:
           source venv/bin/activate
           pip install -r dependencies-dev
           pip list
+          echo "NUMPY_BUILD=$(python -m pip freeze | grep numpy)" >> "$GITHUB_ENV"
       - name: Build daal4py/sklearnex
         run: |
           source venv/bin/activate
@@ -142,13 +144,14 @@ jobs:
       - name: Create coverage report
         run: |
           source venv/bin/activate
-          coverage combine .coverage.sklearnex .coverage.sklearn
-          coverage json -o coverage.lnx${{ matrix.PYTHON_VERSION }}_${{ matrix.SKLEARN_VERSION }}.json
+          source .github/scripts/activate_components.sh ${{ steps.set-env.outputs.DPCFLAG }}
+          bash .github/scripts/generate_coverage_reports.sh lnx${{ matrix.PYTHON_VERSION }}_${{ matrix.SKLEARN_VERSION }} ${{ steps.set-env.outputs.DPCFLAG }}
       - name: Archive coverage report
         uses: actions/upload-artifact@v4
         with:
           name: coverage_lnx_Py${{ matrix.PYTHON_VERSION }}_${{ matrix.SKLEARN_VERSION }}
-          path: coverage.lnx${{ matrix.PYTHON_VERSION }}_${{ matrix.SKLEARN_VERSION }}.json
+          path: |
+            *_lnx${{ matrix.PYTHON_VERSION }}_${{ matrix.SKLEARN_VERSION }}.info
       - name: Sklearn testing [preview]
         run: |
           source venv/bin/activate
@@ -218,7 +221,12 @@ jobs:
           echo C:\msys64\usr\bin;>> %GITHUB_PATH%
           echo NO_DIST=YES>> %GITHUB_ENV%
           set DPCTL_TEMP="${{ env.DPCTL_PY_VERSIONS }}"
-          if not %DPCTL_TEMP:${{ matrix.PYTHON_VERSION }}=%==%DPCTL_TEMP% (echo DPCFLAG=>> %GITHUB_OUTPUT%) else (echo DPCFLAG="0">> %GITHUB_OUTPUT%)
+          if not %DPCTL_TEMP:${{ matrix.PYTHON_VERSION }}=%==%DPCTL_TEMP% (
+            echo DPCFLAG=>> %GITHUB_OUTPUT%
+            echo SKLEARNEX_GCOV=YES>> %GITHUB_ENV%
+          ) else (
+            echo DPCFLAG="0">> %GITHUB_OUTPUT%
+          )
           echo COVERAGE_RCFILE=%cd%\.coveragerc>> %GITHUB_ENV%
       - name: Download Intel OpenCL CPU Runtime artifact
         if: ${{ steps.set-env.outputs.DPCFLAG == '' }}
@@ -242,6 +250,7 @@ jobs:
           pip install --upgrade setuptools
           pip install cpufeature clang-format pyyaml
           pip install -r dependencies-dev
+          for /f "delims=" %%c in ('python -m pip freeze ^| grep numpy') do echo NUMPY_BUILD=%%c>> %GITHUB_ENV%
       - name: System info
         shell: cmd
         run: |
@@ -288,13 +297,14 @@ jobs:
         shell: cmd
         run: |
           call .\venv\Scripts\activate.bat
-          coverage combine .coverage.sklearnex .coverage.sklearn
-          coverage json -o coverage.win${{ matrix.PYTHON_VERSION }}_${{ matrix.SKLEARN_VERSION }}.json
+          call .\.github\scripts\activate_components.bat ${{ steps.set-env.outputs.DPCFLAG }}
+          bash .github/scripts/generate_coverage_reports.sh win${{ matrix.PYTHON_VERSION }}_${{ matrix.SKLEARN_VERSION }}
       - name: Archive coverage report
         uses: actions/upload-artifact@v4
         with:
           name: coverage_win_Py${{ matrix.PYTHON_VERSION }}_${{ matrix.SKLEARN_VERSION }}
-          path: coverage.win${{ matrix.PYTHON_VERSION }}_${{ matrix.SKLEARN_VERSION }}.json
+          path: |
+            *_win${{ matrix.PYTHON_VERSION }}_${{ matrix.SKLEARN_VERSION }}.info
       - name: Sklearn testing [preview]
         shell: cmd
         run: |
diff --git a/conda-recipe/run_test.bat b/conda-recipe/run_test.bat
index 2be86075d2..59519b2801 100644
--- a/conda-recipe/run_test.bat
+++ b/conda-recipe/run_test.bat
@@ -34,7 +34,7 @@ if "%PYTHON%"=="python" (
 
 set "PYTEST_ARGS= "
 
-IF DEFINED COVERAGE_RCFILE (set "PYTEST_ARGS=--cov=onedal --cov=sklearnex --cov-config=%COVERAGE_RCFILE% --cov-append --cov-report= %PYTEST_ARGS%")
+IF DEFINED COVERAGE_RCFILE (set "PYTEST_ARGS=--cov=onedal --cov=sklearnex --cov-config=%COVERAGE_RCFILE% --cov-append --cov-branch --cov-report= %PYTEST_ARGS%")
 
 rem Note: execute with argument --json-report as second argument
 rem in order to produce a JSON report under folder '.pytest_reports'.
diff --git a/conda-recipe/run_test.sh b/conda-recipe/run_test.sh
index 92e8b97672..28620adfb5 100755
--- a/conda-recipe/run_test.sh
+++ b/conda-recipe/run_test.sh
@@ -55,7 +55,7 @@ function generate_pytest_args {
         ARGS+=("--json-report-file=.pytest_reports/$1_report.json")
     fi
     if [ -n "${COVERAGE_RCFILE}" ]; then
-        ARGS+=(--cov=onedal --cov=sklearnex --cov-config="${COVERAGE_RCFILE}" --cov-append --cov-report=)
+        ARGS+=(--cov=onedal --cov=sklearnex --cov-config="${COVERAGE_RCFILE}" --cov-append --cov-branch --cov-report=)
     fi
     printf -- "${ARGS[*]}"
 }
diff --git a/scripts/CMakeLists.txt b/scripts/CMakeLists.txt
index ed6a781b76..236188df6d 100644
--- a/scripts/CMakeLists.txt
+++ b/scripts/CMakeLists.txt
@@ -29,6 +29,9 @@ set(CMAKE_CXX_EXTENSIONS OFF)
 option(ADD_ONEDAL_RPATH "Adds oneDAL's file paths to the RPATH here" OFF)
 message(STATUS "ADD_ONEDAL_RPATH:" ${ADD_ONEDAL_RPATH})
 
+option(SKLEARNEX_GCOV "Compile with gcov" OFF)
+message(STATUS "SKLEARNEX_GCOV:" ${SKLEARNEX_GCOV})
+
 if(WIN32)
   # hint CMake to get python from PYTHON env. variable if defined
   if(DEFINED ENV{PYTHON})
@@ -122,6 +125,23 @@ if(IFACE STREQUAL "host")
     endif()
   endif()
 
+  if(SKLEARNEX_GCOV)
+    if(CMAKE_CXX_COMPILER_ID STREQUAL "IntelLLVM" OR CMAKE_CXX_COMPILER_ID MATCHES ".*Clang")
+      if(WIN32)
+        set(CMAKE_CXX_FLAGS "/clang:--coverage ${CMAKE_CXX_FLAGS}")
+        list(APPEND ONEDAL_LIBRARIES "clang_rt.profile-x86_64.lib")
+      else()
+        set(CMAKE_CXX_FLAGS "--coverage ${CMAKE_CXX_FLAGS}")
+        set(CMAKE_SHARED_LINKER_FLAGS "--coverage ${CMAKE_SHARED_LINKER_FLAGS}")
+      endif()
+    elseif(CMAKE_CXX_COMPILER_ID STREQUAL "GNU")
+      set(CMAKE_CXX_FLAGS "--coverage ${CMAKE_CXX_FLAGS}")
+      set(CMAKE_SHARED_LINKER_FLAGS "--coverage ${CMAKE_SHARED_LINKER_FLAGS}")
+    else()
+      message(WARNING "Code coverage will not be generated for target: host")
+    endif()
+  endif()
+
   list(APPEND COMPILE_DEFINITIONS "NPY_NO_DEPRECATED_API=NPY_1_7_API_VERSION")
 
 elseif(IFACE_IS_DPC OR IFACE_IS_SPMD_DPC)
@@ -137,11 +157,12 @@ elseif(IFACE_IS_DPC OR IFACE_IS_SPMD_DPC)
     set(CMAKE_INTERPROCEDURAL_OPTIMIZATION OFF)
   endif()
 
-  if(CMAKE_CXX_COMPILER MATCHES ".*icpx" OR CMAKE_CXX_COMPILER MATCHES ".*icx")
+  if(CMAKE_CXX_COMPILER_ID STREQUAL "IntelLLVM")
     set(CMAKE_CXX_FLAGS "-fsycl ${CMAKE_CXX_FLAGS}")
+  endif()
 
-  if(CMAKE_C_COMPILER MATCHES ".*icpx" OR CMAKE_C_COMPILER MATCHES ".*icx")
+  if(CMAKE_C_COMPILER_ID STREQUAL "IntelLLVM")
     set(CMAKE_C_FLAGS "-fsycl ${CMAKE_C_FLAGS}")
   endif()
 
@@ -169,6 +190,18 @@ elseif(IFACE_IS_DPC OR IFACE_IS_SPMD_DPC)
     endif()
   endif()
 
+  if(SKLEARNEX_GCOV)
+    if(CMAKE_CXX_COMPILER_ID STREQUAL "IntelLLVM" AND WIN32)
+      set(CMAKE_CXX_FLAGS "/clang:-Xarch_host /clang:--coverage ${CMAKE_CXX_FLAGS}")
+      list(APPEND ONEDAL_LIBRARIES "clang_rt.profile-x86_64.lib")
+    elseif(CMAKE_CXX_COMPILER_ID STREQUAL "IntelLLVM" AND UNIX)
+      set(CMAKE_CXX_FLAGS "-Xarch_host --coverage ${CMAKE_CXX_FLAGS}")
+      set(CMAKE_SHARED_LINKER_FLAGS "-Xarch_host --coverage ${CMAKE_SHARED_LINKER_FLAGS}")
+    else()
+      message(WARNING "Code coverage will not be generated for target: " ${IFACE})
+    endif()
+  endif()
+
   if(IFACE_IS_SPMD_DPC)
     set(MPI_LIBRARY ${MPI_LIBS})
   endif()
diff --git a/scripts/build_backend.py b/scripts/build_backend.py
index f3546f480e..a6949f8145 100755
--- a/scripts/build_backend.py
+++ b/scripts/build_backend.py
@@ -49,6 +49,7 @@ def custom_build_cmake_clib(
     no_dist=True,
     use_parameters_lib=True,
     use_abs_rpath=False,
+    use_gcov=False,
 ):
     import pybind11
 
@@ -130,6 +131,9 @@ def custom_build_cmake_clib(
     if use_abs_rpath:
         cmake_args += ["-DADD_ONEDAL_RPATH=ON"]
 
+    if use_gcov:
+        cmake_args += ["-DSKLEARNEX_GCOV=ON"]
+
     cpu_count = multiprocessing.cpu_count()
     # limit parallel cmake jobs if memory size is insufficient
     # TODO: add on all platforms
diff --git a/setup.py b/setup.py
index 8166f00d07..ed48c226e3 100644
--- a/setup.py
+++ b/setup.py
@@ -92,6 +92,7 @@
 no_dist = True if "NO_DIST" in os.environ and os.environ["NO_DIST"] in trues else False
 no_dpc = True if "NO_DPC" in os.environ and os.environ["NO_DPC"] in trues else False
 no_stream = "NO_STREAM" in os.environ and os.environ["NO_STREAM"] in trues
+use_gcov = "SKLEARNEX_GCOV" in os.environ and os.environ["SKLEARNEX_GCOV"] in trues
 debug_build = os.getenv("DEBUG_BUILD") == "1"
 mpi_root = None if no_dist else os.environ["MPIROOT"]
 dpcpp = (
@@ -426,6 +427,7 @@ def run(self):
                 no_dist=no_dist,
                 use_parameters_lib=use_parameters_lib,
                 use_abs_rpath=USE_ABS_RPATH,
+                use_gcov=use_gcov,
             )
             if dpcpp:
                 if is_onedal_iface:
@@ -435,6 +437,7 @@ def run(self):
                     no_dist=no_dist,
                     use_parameters_lib=use_parameters_lib,
                     use_abs_rpath=USE_ABS_RPATH,
+                    use_gcov=use_gcov,
                 )
             if build_distribute:
                 build_backend.custom_build_cmake_clib(
@@ -443,6 +446,7 @@ def run(self):
                     no_dist=no_dist,
                     use_parameters_lib=use_parameters_lib,
                     use_abs_rpath=USE_ABS_RPATH,
+                    use_gcov=use_gcov,
                 )
 
     def post_build(self):
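
Note (not part of the patch): a rough sketch of how the new pieces fit together for a local coverage run, based only on the changes above. It assumes an already configured icx/oneDAL environment; the build and test steps are placeholders for the repository's usual flow, and "lnx_local" is just an example suffix for the generated report files.

# Sketch only; exact build/test invocations depend on the local setup.
export SKLEARNEX_GCOV=1                                    # read by setup.py and forwarded as -DSKLEARNEX_GCOV=ON
export NUMPY_BUILD="$(python -m pip freeze | grep numpy)"  # numpy used at build time, reinstalled for the gcovr step
export COVERAGE_RCFILE="$(readlink -f .coveragerc)"        # enables the --cov/--cov-append/--cov-branch pytest args

# <build sklearnex and run the sklearn test suites here, producing
#  .coverage.sklearnex and .coverage.sklearn in the repository root>

# The first argument becomes the suffix of coverage_py_*.info / coverage_cpp_*.info;
# a non-empty second argument selects plain gcov instead of icx's llvm-cov.
bash .github/scripts/generate_coverage_reports.sh lnx_local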