Adds `device` keyword argument to `astype` (#3710)
Workflow file for this run
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
name: Conda package | |
on: | |
push: | |
branches: | |
- master | |
pull_request: | |
permissions: read-all | |
env: | |
PACKAGE_NAME: dpctl | |
MODULE_NAME: dpctl | |
VER_SCRIPT1: "import json; f = open('ver.json', 'r'); j = json.load(f); f.close(); " | |
VER_SCRIPT2: "d = j['dpctl'][0]; print('='.join((d[s] for s in ('version', 'build'))))" | |
jobs: | |
build_linux: | |
runs-on: ubuntu-20.04 | |
strategy: | |
matrix: | |
python: ['3.9', '3.10', '3.11'] | |
steps: | |
- uses: actions/checkout@v3 | |
with: | |
fetch-depth: 0 | |
- name: Set pkgs_dirs | |
run: | | |
echo "pkgs_dirs: [~/.conda/pkgs]" >> ~/.condarc | |
- name: Cache conda packages | |
uses: actions/cache@v3 | |
env: | |
CACHE_NUMBER: 3 # Increase to reset cache | |
with: | |
path: ~/.conda/pkgs | |
key: | |
${{ runner.os }}-conda-${{ env.CACHE_NUMBER }}-python-${{ matrix.python }}-${{hashFiles('**/meta.yaml') }} | |
restore-keys: | | |
${{ runner.os }}-conda-${{ env.CACHE_NUMBER }}-python-${{ matrix.python }}- | |
${{ runner.os }}-conda-${{ env.CACHE_NUMBER }}- | |
- name: Add conda to system path | |
run: echo $CONDA/bin >> $GITHUB_PATH | |
- name: Install conda-build | |
run: conda install conda-build | |
- name: Store conda paths as envs | |
shell: bash -l {0} | |
run: | | |
echo "WHEELS_OUTPUT_FOLDER=$GITHUB_WORKSPACE${{ runner.os == 'Linux' && '/' || '\\' }}" >> $GITHUB_ENV | |
- name: Build conda package | |
run: | | |
# use bootstrap channel to pull NumPy linked with OpenBLAS | |
CHANNELS="-c dppy/label/bootstrap -c intel -c conda-forge --override-channels" | |
VERSIONS="--python ${{ matrix.python }} --numpy 1.23" | |
TEST="--no-test" | |
conda build \ | |
$TEST \ | |
$VERSIONS \ | |
$CHANNELS \ | |
conda-recipe | |
- name: Upload artifact | |
uses: actions/upload-artifact@v3 | |
with: | |
name: ${{ env.PACKAGE_NAME }} ${{ runner.os }} Python ${{ matrix.python }} | |
path: /usr/share/miniconda/conda-bld/linux-64/${{ env.PACKAGE_NAME }}-*.tar.bz2 | |
- name: Upload wheels artifact | |
uses: actions/upload-artifact@v3 | |
with: | |
name: ${{ env.PACKAGE_NAME }} ${{ runner.os }} Wheels Python ${{ matrix.python }} | |
path: ${{ env.WHEELS_OUTPUT_FOLDER }}${{ env.PACKAGE_NAME }}-*.whl | |
build_windows: | |
runs-on: windows-latest | |
strategy: | |
matrix: | |
python: ['3.9', '3.10', '3.11'] | |
env: | |
conda-bld: C:\Miniconda\conda-bld\win-64\ | |
steps: | |
- uses: actions/checkout@v3 | |
with: | |
fetch-depth: 0 | |
- uses: conda-incubator/setup-miniconda@v2 | |
with: | |
auto-activate-base: true | |
conda-build-version: "*" | |
activate-environment: true | |
python-version: ${{ matrix.python }} | |
- name: Cache conda packages | |
uses: actions/cache@v3 | |
env: | |
CACHE_NUMBER: 3 # Increase to reset cache | |
with: | |
path: /home/runner/conda_pkgs_dir | |
key: | |
${{ runner.os }}-conda-${{ env.CACHE_NUMBER }}-python-${{ matrix.python }}-${{hashFiles('**/meta.yaml') }} | |
restore-keys: | | |
${{ runner.os }}-conda-${{ env.CACHE_NUMBER }}-python-${{ matrix.python }}- | |
${{ runner.os }}-conda-${{ env.CACHE_NUMBER }}- | |
- name: Store conda paths as envs | |
shell: bash -l {0} | |
run: | | |
echo "WHEELS_OUTPUT_FOLDER=$GITHUB_WORKSPACE${{ runner.os == 'Linux' && '/' || '\\' }}" >> $GITHUB_ENV | |
- name: Build conda package | |
env: | |
OVERRIDE_INTEL_IPO: 1 # IPO requires more resources that GH actions VM provides | |
run: conda build --no-test --python ${{ matrix.python }} -c intel -c conda-forge --override-channels conda-recipe | |
- name: Upload artifact | |
uses: actions/upload-artifact@v3 | |
with: | |
name: ${{ env.PACKAGE_NAME }} ${{ runner.os }} Python ${{ matrix.python }} | |
path: ${{ env.conda-bld }}${{ env.PACKAGE_NAME }}-*.tar.bz2 | |
- name: Upload wheels artifact | |
uses: actions/upload-artifact@v3 | |
with: | |
name: ${{ env.PACKAGE_NAME }} ${{ runner.os }} Wheels Python ${{ matrix.python }} | |
path: ${{ env.WHEELS_OUTPUT_FOLDER }}${{ env.PACKAGE_NAME }}-*.whl | |
test_linux: | |
needs: build_linux | |
runs-on: ${{ matrix.runner }} | |
strategy: | |
matrix: | |
python: ['3.9', '3.10', '3.11'] | |
experimental: [false] | |
runner: [ubuntu-20.04] | |
continue-on-error: ${{ matrix.experimental }} | |
env: | |
CHANNELS: -c intel -c conda-forge --override-channels | |
steps: | |
- name: Download artifact | |
uses: actions/download-artifact@v3 | |
with: | |
name: ${{ env.PACKAGE_NAME }} ${{ runner.os }} Python ${{ matrix.python }} | |
- name: Add conda to system path | |
run: echo $CONDA/bin >> $GITHUB_PATH | |
- name: Install conda-build | |
# Needed to be able to run conda index | |
run: conda install conda-build | |
- name: Create conda channel | |
run: | | |
mkdir -p $GITHUB_WORKSPACE/channel/linux-64 | |
conda index $GITHUB_WORKSPACE/channel || exit 1 | |
mv ${PACKAGE_NAME}-*.tar.bz2 $GITHUB_WORKSPACE/channel/linux-64 || exit 1 | |
conda index $GITHUB_WORKSPACE/channel || exit 1 | |
# Test channel | |
conda search $PACKAGE_NAME -c $GITHUB_WORKSPACE/channel --override-channels --info --json > $GITHUB_WORKSPACE/ver.json | |
cat ver.json | |
- name: Collect dependencies | |
run: | | |
CHANNELS="-c $GITHUB_WORKSPACE/channel ${{ env.CHANNELS }}" | |
export PACKAGE_VERSION=$(python -c "${VER_SCRIPT1} ${VER_SCRIPT2}") | |
conda create -n test_dpctl $PACKAGE_NAME=${PACKAGE_VERSION} python=${{ matrix.python }} $CHANNELS --only-deps --dry-run > lockfile | |
cat lockfile | |
- name: Set pkgs_dirs | |
run: | | |
echo "pkgs_dirs: [~/.conda/pkgs]" >> ~/.condarc | |
- name: Cache conda packages | |
uses: actions/cache@v3 | |
env: | |
CACHE_NUMBER: 3 # Increase to reset cache | |
with: | |
path: ~/.conda/pkgs | |
key: | |
${{ runner.os }}-conda-${{ env.CACHE_NUMBER }}-python-${{ matrix.python }}-${{hashFiles('lockfile') }} | |
restore-keys: | | |
${{ runner.os }}-conda-${{ env.CACHE_NUMBER }}-python-${{ matrix.python }}- | |
${{ runner.os }}-conda-${{ env.CACHE_NUMBER }}- | |
- name: Install dpctl | |
run: | | |
export CHANNELS="-c $GITHUB_WORKSPACE/channel ${{ env.CHANNELS }}" | |
export TEST_DEPENDENCIES="pytest pytest-cov cython" | |
export PACKAGE_VERSION=$(python -c "${VER_SCRIPT1} ${VER_SCRIPT2}") | |
conda create -n test_dpctl $PACKAGE_NAME=${PACKAGE_VERSION} ${TEST_DEPENDENCIES} python=${{ matrix.python }} ${CHANNELS} | |
# Test installed packages | |
conda list -n test_dpctl | |
- name: Smoke test | |
run: | | |
. $CONDA/etc/profile.d/conda.sh | |
conda activate test_dpctl | |
python -c "import dpctl; dpctl.lsplatform(verbosity=2)" | |
- name: Install gdb | |
run: | | |
sudo apt-get update --fix-missing | |
sudo apt-get install -y gdb | |
- name: Run test_elementwise under gdb | |
run: | | |
. $CONDA/etc/profile.d/conda.sh | |
conda activate test_dpctl | |
gdb --batch -ex r -ex 'info sharedlibrary' -ex 'set print elements 1000' -ex bt --args ${CONDA_PREFIX}/bin/python -m pytest -q -ra --disable-warnings --pyargs dpctl.tests.elementwise.test_trigonometric::test_trig_order -vv || true | |
- name: Run tests | |
env: | |
SYCL_CACHE_PERSISTENT: 1 | |
run: | | |
. $CONDA/etc/profile.d/conda.sh | |
conda activate test_dpctl | |
python -m pytest -v --pyargs $MODULE_NAME | |
test_windows: | |
needs: build_windows | |
runs-on: ${{ matrix.runner }} | |
defaults: | |
run: | |
shell: cmd /C CALL {0} | |
strategy: | |
matrix: | |
python: ['3.9', '3.10', '3.11'] | |
experimental: [false] | |
runner: [windows-latest] | |
continue-on-error: ${{ matrix.experimental }} | |
env: | |
workdir: '${{ github.workspace }}' | |
CHANNELS: -c intel -c conda-forge --override-channels | |
steps: | |
- name: Download artifact | |
uses: actions/download-artifact@v3 | |
with: | |
name: ${{ env.PACKAGE_NAME }} ${{ runner.os }} Python ${{ matrix.python }} | |
- uses: conda-incubator/setup-miniconda@v2 | |
with: | |
auto-update-conda: true | |
conda-build-version: '*' | |
miniconda-version: 'latest' | |
activate-environment: dpctl_test | |
python-version: ${{ matrix.python }} | |
- name: Create conda channel with the artifact bit | |
shell: cmd /C CALL {0} | |
run: | | |
echo ${{ env.workdir }} | |
mkdir ${{ env.workdir }}\channel\win-64 | |
move ${{ env.PACKAGE_NAME }}-*.tar.bz2 ${{ env.workdir }}\channel\win-64 | |
dir ${{ env.workdir }}\channel\win-64 | |
- name: Index the channel | |
shell: cmd /C CALL {0} | |
run: conda index ${{ env.workdir }}\channel | |
- name: Dump dpctl version info from created channel into ver.json | |
shell: cmd /C CALL {0} | |
run: | | |
conda search ${{ env.PACKAGE_NAME }} -c ${{ env.workdir }}/channel --override-channels --info --json > ${{ env.workdir }}\ver.json | |
- name: Output content of produced ver.json | |
shell: pwsh | |
run: Get-Content -Path ${{ env.workdir }}\ver.json | |
- name: Collect dependencies | |
shell: cmd /C CALL {0} | |
run: | | |
IF NOT EXIST ver.json ( | |
copy /Y ${{ env.workdir }}\ver.json . | |
) | |
SET "SCRIPT=%VER_SCRIPT1% %VER_SCRIPT2%" | |
FOR /F "tokens=* USEBACKQ" %%F IN (`python -c "%SCRIPT%"`) DO ( | |
SET PACKAGE_VERSION=%%F | |
) | |
conda install -n dpctl_test ${{ env.PACKAGE_NAME }}=%PACKAGE_VERSION% python=${{ matrix.python }} -c ${{ env.workdir }}/channel ${{ env.CHANNELS }} --only-deps --dry-run > lockfile | |
- name: Display lockfile content | |
shell: pwsh | |
run: Get-Content -Path .\lockfile | |
- name: Cache conda packages | |
uses: actions/cache@v3 | |
env: | |
CACHE_NUMBER: 3 # Increase to reset cache | |
with: | |
path: /home/runner/conda_pkgs_dir | |
key: | |
${{ runner.os }}-conda-${{ env.CACHE_NUMBER }}-python-${{ matrix.python }}-${{hashFiles('lockfile') }} | |
restore-keys: | | |
${{ runner.os }}-conda-${{ env.CACHE_NUMBER }}-python-${{ matrix.python }}- | |
${{ runner.os }}-conda-${{ env.CACHE_NUMBER }}- | |
- name: Install opencl_rt | |
shell: cmd /C CALL {0} | |
run: conda install -n dpctl_test opencl_rt -c intel --override-channels | |
- name: Install dpctl | |
shell: cmd /C CALL {0} | |
run: | | |
@ECHO ON | |
IF NOT EXIST ver.json ( | |
copy /Y ${{ env.workdir }}\ver.json . | |
) | |
set "SCRIPT=%VER_SCRIPT1% %VER_SCRIPT2%" | |
FOR /F "tokens=* USEBACKQ" %%F IN (`python -c "%SCRIPT%"`) DO ( | |
SET PACKAGE_VERSION=%%F | |
) | |
SET "TEST_DEPENDENCIES=pytest pytest-cov cython" | |
conda install -n dpctl_test ${{ env.PACKAGE_NAME }}=%PACKAGE_VERSION% %TEST_DEPENDENCIES% python=${{ matrix.python }} -c ${{ env.workdir }}/channel ${{ env.CHANNELS }} | |
- name: Report content of test environment | |
shell: cmd /C CALL {0} | |
run: | | |
echo "Value of CONDA enviroment variable was: " %CONDA% | |
echo "Value of CONDA_PREFIX enviroment variable was: " %CONDA_PREFIX% | |
conda info && conda list -n dpctl_test | |
- name: Configure Intel OpenCL CPU RT | |
shell: pwsh | |
run: | | |
$script_path="$env:CONDA_PREFIX\Scripts\set-intel-ocl-icd-registry.ps1" | |
&$script_path | |
# Check the variable assisting OpenCL CPU driver to find TBB DLLs which are not located where it expects them by default | |
$cl_cfg="$env:CONDA_PREFIX\Library\lib\cl.cfg" | |
Get-Content -Tail 5 -Path $cl_cfg | |
- name: Smoke test, step 1 | |
shell: cmd /C CALL {0} | |
run: >- | |
conda activate dpctl_test && python -c "import sys; print(sys.executable)" | |
- name: Smoke test, step 2 | |
shell: cmd /C CALL {0} | |
run: >- | |
conda activate dpctl_test && python -m dpctl -f | |
- name: Run tests | |
shell: cmd /C CALL {0} | |
env: | |
SYCL_CACHE_PERSISTENT: 1 | |
run: >- | |
conda activate dpctl_test && python -m pytest -v -s --pyargs ${{ env.MODULE_NAME }} | |
upload_linux: | |
needs: test_linux | |
if: ${{github.ref == 'refs/heads/master' || (startsWith(github.ref, 'refs/heads/release') == true) || github.event_name == 'push' && contains(github.ref, 'refs/tags/')}} | |
runs-on: ubuntu-20.04 | |
strategy: | |
matrix: | |
python: ['3.9', '3.10', '3.11'] | |
steps: | |
- name: Download conda artifact | |
uses: actions/download-artifact@v3 | |
with: | |
name: ${{ env.PACKAGE_NAME }} ${{ runner.os }} Python ${{ matrix.python }} | |
- name: Download wheel artifact | |
uses: actions/download-artifact@v3 | |
with: | |
name: ${{ env.PACKAGE_NAME }} ${{ runner.os }} Wheels Python ${{ matrix.python }} | |
- name: Install anaconda-client | |
run: conda install anaconda-client | |
- name: Add conda to system path | |
run: echo $CONDA/bin >> $GITHUB_PATH | |
- name: Package version | |
run: echo "PACKAGE_VERSION=$(basename ${{ env.PACKAGE_NAME }}-*.tar.bz2 | sed 's/^${{ env.PACKAGE_NAME }}-\([^-]*\).*/\1/')" >> $GITHUB_ENV | |
- name: Upload | |
env: | |
ANACONDA_TOKEN: ${{ secrets.ANACONDA_TOKEN }} | |
run: | | |
anaconda --token $ANACONDA_TOKEN upload --user dppy --label dev ${PACKAGE_NAME}-*.tar.bz2 | |
- name: Upload Wheels | |
env: | |
ANACONDA_TOKEN: ${{ secrets.ANACONDA_TOKEN }} | |
run: anaconda --token ${{ env.ANACONDA_TOKEN }} upload --user dppy --label dev ${{ env.PACKAGE_NAME }}-*.whl --version ${{ env.PACKAGE_VERSION }} | |
upload_windows: | |
needs: test_windows | |
if: ${{github.ref == 'refs/heads/master' || (startsWith(github.ref, 'refs/heads/release') == true) || github.event_name == 'push' && contains(github.ref, 'refs/tags/')}} | |
runs-on: windows-latest | |
strategy: | |
matrix: | |
python: ['3.9', '3.10', '3.11'] | |
steps: | |
- name: Download artifact | |
uses: actions/download-artifact@v3 | |
with: | |
name: ${{ env.PACKAGE_NAME }} ${{ runner.os }} Python ${{ matrix.python }} | |
- name: Download wheel artifact | |
uses: actions/download-artifact@v3 | |
with: | |
name: ${{ env.PACKAGE_NAME }} ${{ runner.os }} Wheels Python ${{ matrix.python }} | |
- uses: conda-incubator/setup-miniconda@v2 | |
with: | |
auto-activate-base: true | |
activate-environment: "" | |
- name: Install anaconda-client | |
run: conda install anaconda-client | |
- name: Package version | |
shell: bash -el {0} | |
run: echo "PACKAGE_VERSION=$(basename ${{ env.PACKAGE_NAME }}-*.tar.bz2 | sed 's/^${{ env.PACKAGE_NAME }}-\([^-]*\).*/\1/')" >> $GITHUB_ENV | |
- name: Upload | |
env: | |
ANACONDA_TOKEN: ${{ secrets.ANACONDA_TOKEN }} | |
run: | | |
anaconda --token ${{ env.ANACONDA_TOKEN }} upload --user dppy --label dev ${{ env.PACKAGE_NAME }}-*.tar.bz2 | |
- name: Upload Wheels | |
env: | |
ANACONDA_TOKEN: ${{ secrets.ANACONDA_TOKEN }} | |
run: anaconda --token ${{ env.ANACONDA_TOKEN }} upload --user dppy --label dev ${{ env.PACKAGE_NAME }}-*.whl --version ${{ env.PACKAGE_VERSION }} | |
test_examples_linux: | |
needs: build_linux | |
runs-on: ${{ matrix.runner }} | |
strategy: | |
matrix: | |
python: ['3.9'] | |
experimental: [false] | |
runner: [ubuntu-20.04] | |
continue-on-error: ${{ matrix.experimental }} | |
env: | |
CHANNELS: -c intel -c conda-forge --override-channels | |
steps: | |
- name: Install conda-build | |
# Needed to be able to run conda index | |
run: conda install conda-build python=${{ matrix.python }} | |
- name: Checkout dpctl repo | |
uses: actions/checkout@v3 | |
with: | |
fetch-depth: 0 | |
- name: Download artifact | |
uses: actions/download-artifact@v3 | |
with: | |
name: ${{ env.PACKAGE_NAME }} ${{ runner.os }} Python ${{ matrix.python }} | |
- name: Add conda to system path | |
run: echo $CONDA/bin >> $GITHUB_PATH | |
- name: Create conda channel | |
run: | | |
mkdir -p $GITHUB_WORKSPACE/channel/linux-64 | |
conda index $GITHUB_WORKSPACE/channel || exit 1 | |
mv ${PACKAGE_NAME}-*.tar.bz2 $GITHUB_WORKSPACE/channel/linux-64 || exit 1 | |
conda index $GITHUB_WORKSPACE/channel || exit 1 | |
# Test channel | |
conda search $PACKAGE_NAME -c $GITHUB_WORKSPACE/channel --override-channels --info --json > $GITHUB_WORKSPACE/ver.json | |
cat ver.json | |
- name: Collect dependencies | |
run: | | |
CHANNELS="-c $GITHUB_WORKSPACE/channel ${{ env.CHANNELS }}" | |
conda install $PACKAGE_NAME python=${{ matrix.python }} $CHANNELS --only-deps --dry-run > lockfile | |
- name: Set pkgs_dirs | |
run: | | |
echo "pkgs_dirs: [~/.conda/pkgs]" >> ~/.condarc | |
- name: Cache conda packages | |
uses: actions/cache@v3 | |
env: | |
CACHE_NUMBER: 3 # Increase to reset cache | |
with: | |
path: ~/.conda/pkgs | |
key: | |
${{ runner.os }}-conda-${{ env.CACHE_NUMBER }}-python-${{ matrix.python }}-${{hashFiles('lockfile') }} | |
restore-keys: | | |
${{ runner.os }}-conda-${{ env.CACHE_NUMBER }}-python-${{ matrix.python }}- | |
${{ runner.os }}-conda-${{ env.CACHE_NUMBER }}- | |
- name: Install example requirements | |
shell: bash -l {0} | |
env: | |
DPCPP_CMPLR: dpcpp_linux-64">=2024.0.0,<2024.0.1" | |
run: | | |
CHANNELS="${{ env.CHANNELS }}" | |
. $CONDA/etc/profile.d/conda.sh | |
conda create -n examples -y pytest python=${{ matrix.python }} $CHANNELS | |
conda install -n examples -y cmake">=3.22" $CHANNELS || exit 1 | |
conda install -n examples -y ninja $CHANNELS || exit 1 | |
conda install -n examples -y pybind11 cython scikit-build $CHANNELS || exit 1 | |
conda install -n examples -y mkl-dpcpp mkl-devel-dpcpp dpcpp_cpp_rt $CHANNELS || exit 1 | |
conda create -y -n build_env $CHANNELS gcc_linux-64 gxx_linux-64 ${{ env.DPCPP_CMPLR }} sysroot_linux-64">=2.28" | |
- name: Install dpctl | |
shell: bash -l {0} | |
run: | | |
source $CONDA/etc/profile.d/conda.sh | |
CHANNELS="-c $GITHUB_WORKSPACE/channel -c dppy/label/dev -c intel -c conda-forge --override-channels" | |
export PACKAGE_VERSION=$(python -c "${VER_SCRIPT1} ${VER_SCRIPT2}") | |
conda install -n examples -y ${CHANNELS} dpctl=${PACKAGE_VERSION} dpnp">=0.10.1" || exit 1 | |
- name: Build and run examples of pybind11 extensions | |
shell: bash -l {0} | |
run: | | |
source $CONDA/etc/profile.d/conda.sh | |
conda activate examples | |
conda list | |
cd examples/pybind11 | |
for d in $(find . -maxdepth 1 -type d -not -path ".") | |
do | |
pushd $d | |
export MKLROOT=${CONDA_PREFIX} | |
export TBBROOT=${CONDA_PREFIX} | |
conda activate --stack build_env | |
CC=icx CXX=icpx python setup.py build_ext --inplace -G Ninja -- \ | |
-DTBB_LIBRARY_DIR=${TBBROOT}/lib \ | |
-DMKL_LIBRARY_DIR=${MKLROOT}/lib \ | |
-DMKL_INCLUDE_DIR=${MKLROOT}/include \ | |
-DTBB_INCLUDE_DIR=${TBBROOT}/include || exit 1 | |
conda deactivate | |
if [ -e tests ] | |
then | |
LD_LIBRARY_PATH=${CONDA_PREFIX}/lib python -m pytest tests || exit 1 | |
else | |
LD_LIBRARY_PATH=${CONDA_PREFIX}/lib python example.py || exit 1 | |
fi | |
popd | |
done | |
- name: Build and run examples of Cython extensions | |
shell: bash -l {0} | |
run: | | |
source $CONDA/etc/profile.d/conda.sh | |
conda activate examples | |
conda list | |
cd examples/cython | |
for d in $(find . -maxdepth 1 -type d -not -path ".") | |
do | |
pushd $d | |
conda activate --stack build_env | |
python setup.py build_ext --inplace || exit 1 | |
conda deactivate | |
python -m pytest tests || exit 1 | |
popd | |
done | |
- name: Build and run examples of C-extensions | |
shell: bash -l {0} | |
run: | | |
source $CONDA/etc/profile.d/conda.sh | |
conda activate examples | |
conda list | |
cd examples/c | |
for d in $(find . -maxdepth 1 -type d -not -path ".") | |
do | |
pushd $d | |
conda activate --stack build_env | |
python setup.py build_ext --inplace || exit 1 | |
conda deactivate | |
python -m pytest tests || exit 1 | |
popd | |
done | |
- name: Run Python examples | |
shell: bash -l {0} | |
run: | | |
cd examples/python | |
source $CONDA/etc/profile.d/conda.sh | |
conda activate examples | |
for script in $(find . \( -not -name "_*" -and -name "*.py" \)) | |
do | |
echo "Executing ${script}" | |
python ${script} || exit 1 | |
done | |
array-api-conformity: | |
needs: build_linux | |
runs-on: ${{ matrix.runner }} | |
permissions: | |
pull-requests: write | |
strategy: | |
matrix: | |
python: ['3.10'] | |
experimental: [false] | |
runner: [ubuntu-20.04] | |
continue-on-error: ${{ matrix.experimental }} | |
env: | |
CHANNELS: -c intel -c conda-forge --override-channels | |
steps: | |
- name: Checkout dpctl repo | |
uses: actions/checkout@v3 | |
with: | |
fetch-depth: 0 | |
- name: Cache array API tests | |
id: cache-array-api-tests | |
uses: actions/cache@v3 | |
env: | |
ARRAY_CACHE: 3 | |
with: | |
path: | | |
/home/runner/work/array-api-tests/ | |
key: ${{ runner.os }}-array-api-${{ env.cache-name }}-{{ env.ARRAY_CACHE }}-${{ hashFiles('/home/runner/work/array-api-tests/requirements.txt') }} | |
restore-keys: | | |
${{ runner.os }}-build-${{ env.cache-name }}- | |
${{ runner.os }}-build- | |
${{ runner.os }}- | |
- name: Clone array API tests repo | |
if: steps.cache-array-api-tests.outputs.cache-hit != 'true' | |
shell: bash -l {0} | |
run: | | |
cd /home/runner/work | |
git clone --recurse-submodules https://github.com/data-apis/array-api-tests array-api-tests | |
cd array-api-tests | |
- name: Download artifact | |
uses: actions/download-artifact@v3 | |
with: | |
name: ${{ env.PACKAGE_NAME }} ${{ runner.os }} Python ${{ matrix.python }} | |
- name: Add conda to system path | |
run: echo $CONDA/bin >> $GITHUB_PATH | |
- name: Install conda-build | |
# Needed to be able to run conda index | |
run: conda install conda-build | |
- name: Create conda channel | |
run: | | |
mkdir -p $GITHUB_WORKSPACE/channel/linux-64 | |
conda index $GITHUB_WORKSPACE/channel || exit 1 | |
mv ${PACKAGE_NAME}-*.tar.bz2 $GITHUB_WORKSPACE/channel/linux-64 || exit 1 | |
conda index $GITHUB_WORKSPACE/channel || exit 1 | |
# Test channel | |
conda search $PACKAGE_NAME -c $GITHUB_WORKSPACE/channel --override-channels --info --json > $GITHUB_WORKSPACE/ver.json | |
cat ver.json | |
- name: Collect dependencies | |
run: | | |
CHANNELS="-c $GITHUB_WORKSPACE/channel ${{ env.CHANNELS }}" | |
export PACKAGE_VERSION=$(python -c "${VER_SCRIPT1} ${VER_SCRIPT2}") | |
conda create -n test_dpctl $PACKAGE_NAME=${PACKAGE_VERSION} python=${{ matrix.python }} $CHANNELS --only-deps --dry-run > lockfile | |
cat lockfile | |
- name: Set pkgs_dirs | |
run: | | |
echo "pkgs_dirs: [~/.conda/pkgs]" >> ~/.condarc | |
- name: Cache conda packages | |
uses: actions/cache@v3 | |
env: | |
CACHE_NUMBER: 3 # Increase to reset cache | |
with: | |
path: ~/.conda/pkgs | |
key: | |
${{ runner.os }}-conda-${{ env.CACHE_NUMBER }}-python-${{ matrix.python }}-${{hashFiles('lockfile') }} | |
restore-keys: | | |
${{ runner.os }}-conda-${{ env.CACHE_NUMBER }}-python-${{ matrix.python }}- | |
${{ runner.os }}-conda-${{ env.CACHE_NUMBER }}- | |
- name: Install dpctl | |
run: | | |
CHANNELS="-c $GITHUB_WORKSPACE/channel ${{ env.CHANNELS }}" | |
export PACKAGE_VERSION=$(python -c "${VER_SCRIPT1} ${VER_SCRIPT2}") | |
conda create -n test_dpctl $PACKAGE_NAME=${PACKAGE_VERSION} pytest python=${{ matrix.python }} $CHANNELS | |
# Test installed packages | |
conda list | |
- name: Install array API test dependencies | |
shell: bash -l {0} | |
run: | | |
. $CONDA/etc/profile.d/conda.sh | |
conda activate test_dpctl | |
cd /home/runner/work/array-api-tests | |
pip install -r requirements.txt | |
- name: Install jq | |
shell: bash -l {0} | |
run: | | |
sudo apt-get install jq | |
- name: Run array API conformance tests | |
id: run-array-api-tests | |
shell: bash -l {0} | |
env: | |
SYCL_CACHE_PERSISTENT: 1 | |
run: | | |
FILE=/home/runner/work/.report.json | |
. $CONDA/etc/profile.d/conda.sh | |
conda activate test_dpctl | |
# echo "libintelocl.so" | tee /etc/OpenCL/vendors/intel-cpu.icd | |
python -c "import dpctl; dpctl.lsplatform()" | |
export ARRAY_API_TESTS_MODULE=dpctl.tensor | |
cd /home/runner/work/array-api-tests | |
pytest --json-report --json-report-file=$FILE --disable-deadline --skips-file ${GITHUB_WORKSPACE}/.github/workflows/array-api-skips.txt array_api_tests/ || true | |
- name: Set Github environment variables | |
shell: bash -l {0} | |
run: | | |
export PACKAGE_VERSION=$(python -c "${VER_SCRIPT1} ${VER_SCRIPT2}") | |
FILE=/home/runner/work/.report.json | |
if test -f "$FILE"; then | |
PASSED_TESTS=$(jq '.summary | .passed // 0' $FILE) | |
FAILED_TESTS=$(jq '.summary | .failed // 0' $FILE) | |
SKIPPED_TESTS=$(jq '.summary | .skipped // 0' $FILE) | |
MESSAGE="Array API standard conformance tests for dpctl=$PACKAGE_VERSION ran successfully. | |
Passed: $PASSED_TESTS | |
Failed: $FAILED_TESTS | |
Skipped: $SKIPPED_TESTS" | |
echo "MESSAGE<<EOF" >> $GITHUB_ENV | |
echo "$MESSAGE" >> $GITHUB_ENV | |
echo "EOF" >> $GITHUB_ENV | |
else | |
echo "Array API standard conformance tests failed to run for dpctl=$PACKAGE_VERSION." | |
exit 1 | |
fi | |
- name: Output API summary | |
shell: bash -l {0} | |
run: echo "::notice ${{ env.MESSAGE }}" | |
- name: Post result to PR | |
if: ${{ github.event.pull_request && !github.event.pull_request.head.repo.fork }} | |
uses: mshick/add-pr-comment@v1 | |
with: | |
message: | | |
${{ env.MESSAGE }} | |
allow-repeats: true | |
repo-token: ${{ secrets.GITHUB_TOKEN }} | |
repo-token-user-login: 'github-actions[bot]' | |
cleanup_packages: | |
name: Clean up anaconda packages | |
needs: [upload_linux, upload_windows] | |
runs-on: 'ubuntu-latest' | |
defaults: | |
run: | |
shell: bash -el {0} | |
steps: | |
- uses: conda-incubator/setup-miniconda@v2 | |
with: | |
run-post: false | |
channel-priority: "disabled" | |
channels: conda-forge | |
python-version: '3.11' | |
- name: Install anaconda-client | |
run: conda install anaconda-client | |
- name: Checkout repo | |
uses: actions/checkout@v3 | |
with: | |
repository: IntelPython/devops-tools | |
fetch-depth: 0 | |
- name: Cleanup old packages | |
run: | | |
python scripts/cleanup-old-packages.py \ | |
--verbose --force --token ${{ secrets.ANACONDA_TOKEN }} \ | |
--package dppy/${{ env.PACKAGE_NAME }} --label dev |