
re-write dpnp.hypot #2584

Workflow file for this run

name: Conda package

on:
  push:
    branches:
      - master
  pull_request:
env:
  PACKAGE_NAME: dpnp
  MODULE_NAME: dpnp
  CHANNELS: '-c dppy/label/dev -c intel -c conda-forge --override-channels'
  # TODO: add test_arraymanipulation.py back to the scope once the crash on Windows is gone
  TEST_SCOPE: >-
    test_arraycreation.py
    test_dot.py
    test_dparray.py
    test_copy.py
    test_fft.py
    test_linalg.py
    test_logic.py
    test_manipulation.py
    test_mathematical.py
    test_random_state.py
    test_sort.py
    test_special.py
    test_sycl_queue.py
    test_umath.py
    test_usm_type.py
    third_party/cupy/core_tests/test_ndarray_complex_ops.py
    third_party/cupy/linalg_tests/test_product.py
    third_party/cupy/logic_tests/test_comparison.py
    third_party/cupy/logic_tests/test_truth.py
    third_party/cupy/manipulation_tests/test_basic.py
    third_party/cupy/manipulation_tests/test_join.py
    third_party/cupy/manipulation_tests/test_rearrange.py
    third_party/cupy/manipulation_tests/test_transpose.py
    third_party/cupy/math_tests/test_explog.py
    third_party/cupy/math_tests/test_misc.py
    third_party/cupy/math_tests/test_trigonometric.py
    third_party/cupy/sorting_tests/test_sort.py
  VER_JSON_NAME: 'version.json'
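  # VER_SCRIPT1 + VER_SCRIPT2 form a single Python one-liner: they read the JSON produced by
  # `conda search --json` (version.json) and print "<version>=<build>" for the freshly built dpnp package.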
  VER_SCRIPT1: "import json; f = open('version.json', 'r'); j = json.load(f); f.close(); "
  VER_SCRIPT2: "d = j['dpnp'][0]; print('='.join((d[s] for s in ('version', 'build'))))"

jobs:
  build:
    name: Build ['${{ matrix.os }}', python='${{ matrix.python }}']

    strategy:
      matrix:
        python: ['3.9', '3.10', '3.11']
        os: [ubuntu-20.04, windows-latest]

    runs-on: ${{ matrix.os }}

    defaults:
      run:
        shell: ${{ matrix.os == 'windows-latest' && 'cmd /C CALL {0}' || 'bash -l {0}' }}
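        # The expression above picks the shell per runner: `cmd /C CALL` on Windows, a login bash shell elsewhere.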

    continue-on-error: false

    steps:
      - name: Cancel Previous Runs
        uses: styfle/cancel-workflow-action@0.11.0
        with:
          access_token: ${{ github.token }}

      - name: Checkout DPNP repo
        uses: actions/checkout@v3.5.2
        with:
          fetch-depth: 0

      - name: Setup miniconda
        uses: conda-incubator/setup-miniconda@v2.2.0
        with:
          auto-update-conda: true
          python-version: ${{ matrix.python }}
          miniconda-version: 'latest'
          activate-environment: 'build'

      - if: matrix.os == 'ubuntu-20.04'
        name: Store conda paths as envs on Linux
        run: echo "CONDA_BLD=$CONDA_PREFIX/conda-bld/linux-64/" >> $GITHUB_ENV

      - if: matrix.os == 'windows-latest'
        name: Store conda paths as envs on Win
        run: |
          @echo on
          (echo CONDA_BLD=%CONDA_PREFIX%\conda-bld\win-64\) >> %GITHUB_ENV%

      - name: Install conda-build
        run: conda install conda-build

      - name: Cache conda packages
        uses: actions/cache@v3.3.0
        env:
          CACHE_NUMBER: 1  # Increase to reset cache
        with:
          path: ${{ env.CONDA_PKGS_DIR }}
          key:
            ${{ runner.os }}-conda-${{ env.CACHE_NUMBER }}-python-${{ matrix.python }}-${{ hashFiles('**/meta.yaml') }}
          restore-keys: |
            ${{ runner.os }}-conda-${{ env.CACHE_NUMBER }}-python-${{ matrix.python }}-
            ${{ runner.os }}-conda-${{ env.CACHE_NUMBER }}-
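          # The key hashes meta.yaml (the conda recipe), so the package cache is rebuilt whenever the
          # recipe changes; restore-keys allow falling back to the closest partial match.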

      - name: Build conda package
        run: conda build --no-test --python ${{ matrix.python }} ${{ env.CHANNELS }} conda-recipe

      - name: Upload artifact
        uses: actions/upload-artifact@v3.1.2
        with:
          name: ${{ env.PACKAGE_NAME }} ${{ runner.os }} Python ${{ matrix.python }}
          path: ${{ env.CONDA_BLD }}${{ env.PACKAGE_NAME }}-*.tar.bz2

  test_linux:
    name: Test ['${{ matrix.os }}', python='${{ matrix.python }}']

    needs: build

    runs-on: ${{ matrix.os }}

    defaults:
      run:
        shell: bash -l {0}

    strategy:
      matrix:
        python: ['3.9', '3.10', '3.11']
        os: [ubuntu-20.04, ubuntu-latest]

    continue-on-error: true

    env:
      conda-pkgs: '/home/runner/conda_pkgs_dir/'
      channel-path: '${{ github.workspace }}/channel/'
      pkg-path-in-channel: '${{ github.workspace }}/channel/linux-64/'
      extracted-pkg-path: '${{ github.workspace }}/pkg/'
      tests-path: '${{ github.workspace }}/pkg/info/test/tests/'
      ver-json-path: '${{ github.workspace }}/version.json'
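      # 'tests-path' points at the test suite bundled inside the extracted conda package (info/test/tests/),
      # so the tests shipped with the package are the ones being run.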

    steps:
      - name: Download artifact
        uses: actions/download-artifact@v3.0.2
        with:
          name: ${{ env.PACKAGE_NAME }} ${{ runner.os }} Python ${{ matrix.python }}
          path: ${{ env.pkg-path-in-channel }}

      - name: Extract package archive
        run: |
          mkdir -p ${{ env.extracted-pkg-path }}
          tar -xvf ${{ env.pkg-path-in-channel }}/${{ env.PACKAGE_NAME }}-*.tar.bz2 -C ${{ env.extracted-pkg-path }}

      - name: Setup miniconda
        uses: conda-incubator/setup-miniconda@v2.2.0
        with:
          auto-update-conda: true
          python-version: ${{ matrix.python }}
          miniconda-version: 'latest'
          activate-environment: 'test'

      # Needed to be able to run conda index
      - name: Install conda-build
        run: conda install conda-build

      - name: Create conda channel
        run: conda index ${{ env.channel-path }}

      - name: Test conda channel
        run: |
          conda search ${{ env.PACKAGE_NAME }} -c ${{ env.channel-path }} --override-channels --info --json > ${{ env.ver-json-path }}
          cat ${{ env.ver-json-path }}
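
      # The dry-run install below resolves dpnp's dependencies against the local channel and writes
      # the solution to 'lockfile'; the cache step then hashes that file so cached packages stay in sync.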
      - name: Collect dependencies
        run: |
          export PACKAGE_VERSION=$(python -c "${{ env.VER_SCRIPT1 }} ${{ env.VER_SCRIPT2 }}")
          echo PACKAGE_VERSION=${PACKAGE_VERSION}
          echo "PACKAGE_VERSION=$PACKAGE_VERSION" >> $GITHUB_ENV
          conda install ${{ env.PACKAGE_NAME }}=${PACKAGE_VERSION} python=${{ matrix.python }} ${{ env.TEST_CHANNELS }} --only-deps --dry-run > lockfile
          cat lockfile
        env:
          TEST_CHANNELS: '-c ${{ env.channel-path }} ${{ env.CHANNELS }}'

      - name: Cache conda packages
        uses: actions/cache@v3.3.0
        env:
          CACHE_NUMBER: 1  # Increase to reset cache
        with:
          path: ${{ env.conda-pkgs }}
          key:
            ${{ runner.os }}-conda-${{ env.CACHE_NUMBER }}-python-${{ matrix.python }}-${{ hashFiles('lockfile') }}
          restore-keys: |
            ${{ runner.os }}-conda-${{ env.CACHE_NUMBER }}-python-${{ matrix.python }}-
            ${{ runner.os }}-conda-${{ env.CACHE_NUMBER }}-

      - name: Install dpnp
        run: conda install ${{ env.PACKAGE_NAME }}=${{ env.PACKAGE_VERSION }} pytest python=${{ matrix.python }} ${{ env.TEST_CHANNELS }}
        env:
          TEST_CHANNELS: '-c ${{ env.channel-path }} ${{ env.CHANNELS }}'

      - name: List installed packages
        run: conda list

      - name: Smoke test
        run: |
          python -c "import dpnp, dpctl; dpctl.lsplatform()"
          python -c "import dpnp; print(dpnp.__version__)"

      # TODO: run the whole scope once the issues on CPU are resolved
      - name: Run tests
        run: |
          python -m pytest -q -ra --disable-warnings -vv ${{ env.TEST_SCOPE }}
        working-directory: ${{ env.tests-path }}

  test_windows:
    name: Test ['windows-latest', python='${{ matrix.python }}']

    needs: build

    runs-on: windows-latest

    defaults:
      run:
        shell: cmd /C CALL {0}

    strategy:
      matrix:
        python: ['3.9', '3.10', '3.11']

    continue-on-error: true

    env:
      conda-pkgs: 'C:\Users\runneradmin\conda_pkgs_dir\'
      channel-path: '${{ github.workspace }}\channel\'
      pkg-path-in-channel: '${{ github.workspace }}\channel\win-64\'
      extracted-pkg-path: '${{ github.workspace }}\pkg'
      tests-path: '${{ github.workspace }}\pkg\info\test\tests\'
      ver-json-path: '${{ github.workspace }}\version.json'
      active-env-name: 'test'

    steps:
      - name: Download artifact
        uses: actions/download-artifact@v3.0.2
        with:
          name: ${{ env.PACKAGE_NAME }} ${{ runner.os }} Python ${{ matrix.python }}
          path: ${{ env.pkg-path-in-channel }}
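
      # The batch block below locates the downloaded dpnp-*.tar.bz2 in the channel directory via
      # DIR/FINDSTR and unpacks it with Python's shutil.unpack_archive into the 'pkg' folder.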
      - name: Extract package archive
        run: |
          @echo on
          mkdir -p ${{ env.extracted-pkg-path }}
          set SEARCH_SCRIPT="DIR ${{ env.pkg-path-in-channel }} /s/b | FINDSTR /r "dpnp-.*\.tar\.bz2""
          FOR /F "tokens=* USEBACKQ" %%F IN (`%SEARCH_SCRIPT%`) DO (
            SET FULL_PACKAGE_PATH=%%F
          )
          echo FULL_PACKAGE_PATH: %FULL_PACKAGE_PATH%
          python -c "import shutil; shutil.unpack_archive(r\"%FULL_PACKAGE_PATH%\", extract_dir=r\"${{ env.extracted-pkg-path }}\")"
          dir ${{ env.extracted-pkg-path }}

      - name: Setup miniconda
        uses: conda-incubator/setup-miniconda@v2.2.0
        with:
          auto-update-conda: true
          python-version: ${{ matrix.python }}
          miniconda-version: 'latest'
          activate-environment: ${{ env.active-env-name }}

      - name: Store conda paths as envs
        run: |
          @echo on
          (echo CONDA_LIB_PATH=%CONDA_PREFIX%\Library\lib\) >> %GITHUB_ENV%
          (echo CONDA_LIB_BIN_PATH=%CONDA_PREFIX%\Library\bin\) >> %GITHUB_ENV%

      # Needed to be able to run conda index
      - name: Install conda-build
        run: conda install conda-build

      - name: Create conda channel
        run: conda index ${{ env.channel-path }}

      - name: Test conda channel
        run: |
          @echo on
          conda search ${{ env.PACKAGE_NAME }} -c ${{ env.channel-path }} --override-channels --info --json > ${{ env.ver-json-path }}

      - name: Dump version.json
        run: more ${{ env.ver-json-path }}

      - name: Collect dependencies
        run: |
          @echo on
          set "SCRIPT=${{ env.VER_SCRIPT1 }} ${{ env.VER_SCRIPT2 }}"
          FOR /F "tokens=* USEBACKQ" %%F IN (`python -c "%SCRIPT%"`) DO (
            SET PACKAGE_VERSION=%%F
          )
          echo PACKAGE_VERSION: %PACKAGE_VERSION%
          (echo PACKAGE_VERSION=%PACKAGE_VERSION%) >> %GITHUB_ENV%
          conda install ${{ env.PACKAGE_NAME }}=%PACKAGE_VERSION% python=${{ matrix.python }} ${{ env.TEST_CHANNELS }} --only-deps --dry-run > lockfile
        env:
          TEST_CHANNELS: '-c ${{ env.channel-path }} ${{ env.CHANNELS }}'

      - name: Dump lockfile
        run: more lockfile

      - name: Cache conda packages
        uses: actions/cache@v3.3.0
        env:
          CACHE_NUMBER: 1  # Increase to reset cache
        with:
          path: ${{ env.conda-pkgs }}
          key:
            ${{ runner.os }}-conda-${{ env.CACHE_NUMBER }}-python-${{ matrix.python }}-${{ hashFiles('lockfile') }}
          restore-keys: |
            ${{ runner.os }}-conda-${{ env.CACHE_NUMBER }}-python-${{ matrix.python }}-
            ${{ runner.os }}-conda-${{ env.CACHE_NUMBER }}-

      - name: Install opencl_rt
        run: conda install opencl_rt -c intel --override-channels

      - name: Install dpnp
        run: |
          @echo on
          conda install ${{ env.PACKAGE_NAME }}=${{ env.PACKAGE_VERSION }} pytest python=${{ matrix.python }} ${{ env.TEST_CHANNELS }}
        env:
          TEST_CHANNELS: '-c ${{ env.channel-path }} ${{ env.CHANNELS }}'

      - name: List installed packages
        run: conda list
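
      # The PowerShell script below (installed with opencl_rt) registers the Intel OpenCL CPU runtime
      # in the Windows registry so a CPU device is visible to dpctl/dpnp on the runner.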
      - name: Activate OCL CPU RT
        shell: pwsh
        run: |
          $script_path="$env:CONDA_PREFIX\Scripts\set-intel-ocl-icd-registry.ps1"
          &$script_path
          # Check the variable that helps the OpenCL CPU driver find the TBB DLLs, which are not located where it expects them by default
          $cl_cfg="$env:CONDA_PREFIX\Library\lib\cl.cfg"
          Get-Content -Tail 5 -Path $cl_cfg

      - name: Smoke test
        run: |
          python -c "import dpnp, dpctl; dpctl.lsplatform()"
          python -c "import dpnp; print(dpnp.__version__)"

      # TODO: run the whole scope once the issues on CPU are resolved
      - name: Run tests
        run: |
          python -m pytest -q -ra --disable-warnings -vv ${{ env.TEST_SCOPE }}
        working-directory: ${{ env.tests-path }}

  upload:
    name: Upload ['${{ matrix.os }}', python='${{ matrix.python }}']

    needs: [test_linux, test_windows]

    strategy:
      matrix:
        python: ['3.9', '3.10', '3.11']
        os: [ubuntu-20.04, windows-latest]

    runs-on: ${{ matrix.os }}

    defaults:
      run:
        shell: ${{ matrix.os == 'windows-latest' && 'cmd /C CALL {0}' || 'bash -l {0}' }}

    continue-on-error: true
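
    # Publish only from the IntelPython/dpnp repository itself, and only for master, release branches,
    # or tag pushes; the job is skipped for pull requests and forks.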
    if: |
      (github.repository == 'IntelPython/dpnp') &&
      (github.ref == 'refs/heads/master' || (startsWith(github.ref, 'refs/heads/release') == true) || github.event_name == 'push' && contains(github.ref, 'refs/tags/'))

    steps:
      - name: Download artifact
        uses: actions/download-artifact@v3.0.2
        with:
          name: ${{ env.PACKAGE_NAME }} ${{ runner.os }} Python ${{ matrix.python }}

      - name: Setup miniconda
        uses: conda-incubator/setup-miniconda@v2.2.0
        with:
          auto-update-conda: true
          python-version: ${{ matrix.python }}
          miniconda-version: 'latest'
          activate-environment: 'upload'

      - name: Install anaconda-client
        run: conda install anaconda-client

      - name: Upload
        run: anaconda --token ${{ env.ANACONDA_TOKEN }} upload --user dppy --label dev ${{ env.PACKAGE_NAME }}-*.tar.bz2
        env:
          ANACONDA_TOKEN: ${{ secrets.ANACONDA_TOKEN }}