# Generated from:
# https://github.com/zopefoundation/meta/tree/master/config/c-code
###
# Initially copied from
# https://github.com/actions/starter-workflows/blob/main/ci/python-package.yml
# And later based on the version jamadden updated at
# gevent/gevent, and then at zodb/relstorage and zodb/perfmetrics
#
# Original comment follows.
###
###
# This workflow will install Python dependencies and run tests with a variety of Python versions
# For more information see: https://help.github.com/actions/language-and-framework-guides/using-python-with-github-actions
###
###
# Important notes on GitHub actions:
#
# - We only get 2,000 free minutes a month (private repos)
# - We only get 500MB of artifact storage
# - Cache storage is limited to 7 days and 5GB.
# - macOS minutes are 10x as expensive as Linux minutes
# - Windows minutes are twice as expensive.
#
# So keep those workflows light. Note: Currently, they seem to be free
# and unlimited for open source projects. But for how long...
#
# As of December 2020, GitHub only supports x86_64. If we want to test
# on other architectures, we can use Docker emulation, but there's no
# native support. It works, but it is slow.
#
# Another major downside: you can't re-run the job for just one part
# of the matrix. So if a transient test failure hits, say, Python 3.8,
# every version of Python has to run again to get a clean run. That's bad.
# https://github.community/t/ability-to-rerun-just-a-single-job-in-a-workflow/17234/65
name: tests
# Triggers the workflow on push or pull request events and periodically
on:
  push:
  pull_request:
  schedule:
    - cron: '0 12 * * 0' # run once a week on Sunday
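    # ('0 12 * * 0' = minute 0, hour 12 UTC, any day of month, any month,
    #  day-of-week 0, i.e. Sundays at noon UTC.)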
  # Allow running this workflow manually from the Actions tab
  workflow_dispatch:
env:
  # Weirdly, this has to be a top-level key, not ``defaults.env``
  PYTHONHASHSEED: 8675309
  PYTHONUNBUFFERED: 1
  PYTHONDONTWRITEBYTECODE: 1
  PYTHONDEVMODE: 1
  PYTHONFAULTHANDLER: 1
  ZOPE_INTERFACE_STRICT_IRO: 1
  PIP_UPGRADE_STRATEGY: eager
  # Don't get warnings about Python 2 support being deprecated. We
  # know. The env var works for pip 20.
  PIP_NO_PYTHON_VERSION_WARNING: 1
  PIP_NO_WARN_SCRIPT_LOCATION: 1
  CFLAGS: -O3 -pipe
  CXXFLAGS: -O3 -pipe
  # Uploading built wheels for releases.
  # TWINE_PASSWORD is encrypted and stored directly in the
  # github repo settings.
  TWINE_USERNAME: __token__
###
# caching
# This is where we'd set up ccache, but this compiles so fast it's not worth it.
###
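# A hedged sketch, in case compile times ever grow: ccache could be wired in
# on the Linux runners with a step like the following (hypothetical, not part
# of this workflow), plus an actions/cache entry for ~/.ccache:
#
#   - name: Enable ccache (Linux only)
#     if: startsWith(runner.os, 'Linux')
#     run: |
#       sudo apt-get update && sudo apt-get install -y ccache
#       echo "CC=ccache gcc" >> $GITHUB_ENV
#       echo "CXX=ccache g++" >> $GITHUB_ENV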
jobs:
  # Because sharing code/steps is so hard, and because it can be
  # extremely valuable to be able to get binary wheels without
  # uploading to PyPI, and even if there is some failure (e.g., for
  # other people to test/debug), the strategy is to divide the process
  # into several different jobs. The first builds and saves the binary
  # wheels. It has dependent jobs that download and install the wheel
  # to run tests and build docs. Building the
  # manylinux wheels is an independent set of jobs.
  #
  # This division is time-saving for projects that take a while to
  # build, but somewhat less of a clear-cut win given how quick this
  # is to compile (at least at this writing).
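  #
  # Concretely, the dependency graph defined below is:
  #   build-package -> test -> coveralls_finish
  # with ``manylinux`` running independently to build (and, on tag pushes,
  # publish) the Linux wheels.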
  build-package:
    # Sigh. Note that the matrix must be kept in sync
    # with `test`, and `docs` must use a subset.
    runs-on: ${{ matrix.os }}
    if: github.event_name != 'pull_request' || github.event.pull_request.head.repo.full_name != github.event.pull_request.base.repo.full_name
    strategy:
      fail-fast: false
      matrix:
        python-version:
          - "pypy-3.10"
          - "3.8"
          - "3.9"
          - "3.10"
          - "3.11"
          - "3.12"
          - "3.13"
        os: [ubuntu-latest, macos-latest, windows-latest]
        exclude:
          - os: macos-latest
            python-version: "pypy-3.10"
    steps:
      - name: checkout
        uses: actions/checkout@v4
      - name: Set up Python ${{ matrix.python-version }}
        uses: actions/setup-python@v5
        with:
          python-version: ${{ matrix.python-version }}
          allow-prereleases: true
      ###
      # Caching.
      # This actually *restores* a cache and schedules a cleanup action
      # to save the cache. So it must come before the thing we want to
      # use the cache for.
      ###
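      # The next two steps differ only in shell syntax: the default bash shell
      # appends to ``$GITHUB_OUTPUT``, while the Windows runners use PowerShell,
      # where the same file is reached as ``$Env:GITHUB_OUTPUT``.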
      - name: Get pip cache dir (default)
        id: pip-cache-default
        if: ${{ !startsWith(runner.os, 'Windows') }}
        run: |
          echo "dir=$(pip cache dir)" >>$GITHUB_OUTPUT
      - name: Get pip cache dir (Windows)
        id: pip-cache-windows
        if: ${{ startsWith(runner.os, 'Windows') }}
        run: |
          echo "dir=$(pip cache dir)" >> $Env:GITHUB_OUTPUT
      - name: pip cache (default)
        uses: actions/cache@v4
        if: ${{ !startsWith(runner.os, 'Windows') }}
        with:
          path: ${{ steps.pip-cache-default.outputs.dir }}
          key: ${{ runner.os }}-pip-${{ matrix.python-version }}
          restore-keys: |
            ${{ runner.os }}-pip-
      - name: pip cache (Windows)
        uses: actions/cache@v4
        if: ${{ startsWith(runner.os, 'Windows') }}
        with:
          path: ${{ steps.pip-cache-windows.outputs.dir }}
          key: ${{ runner.os }}-pip-${{ matrix.python-version }}
          restore-keys: |
            ${{ runner.os }}-pip-
      - name: Install Build Dependencies
        run: |
          pip install -U pip
          pip install -U "setuptools <74" wheel twine
      - name: Build Acquisition (macOS x86_64)
        if: >
          startsWith(runner.os, 'Mac')
          && !startsWith(matrix.python-version, 'pypy')
        env:
          MACOSX_DEPLOYMENT_TARGET: 10.9
          _PYTHON_HOST_PLATFORM: macosx-10.9-x86_64
          ARCHFLAGS: -arch x86_64
        run: |
          # Next, build the wheel *in place*. This helps ccache, and also lets us cache the configure
          # output (pip install uses a random temporary directory, making this difficult).
          python setup.py build_ext -i
          python setup.py bdist_wheel
      - name: Build Acquisition (macOS arm64)
        if: >
          startsWith(runner.os, 'Mac')
          && !startsWith(matrix.python-version, 'pypy')
        env:
          MACOSX_DEPLOYMENT_TARGET: 11.0
          _PYTHON_HOST_PLATFORM: macosx-11.0-arm64
          ARCHFLAGS: -arch arm64
        run: |
          # Next, build the wheel *in place*. This helps ccache, and also lets us cache the configure
          # output (pip install uses a random temporary directory, making this difficult).
          python setup.py build_ext -i
          python setup.py bdist_wheel
      - name: Build Acquisition (all other versions)
        if: >
          !startsWith(runner.os, 'Mac')
          || startsWith(matrix.python-version, 'pypy')
        run: |
          # Next, build the wheel *in place*. This helps ccache, and also lets us cache the configure
          # output (pip install uses a random temporary directory, making this difficult).
          python setup.py build_ext -i
          python setup.py bdist_wheel
      - name: Install Acquisition and dependencies
        run: |
          # Install to collect dependencies into the (pip) cache.
          pip install .[test]
      - name: Check Acquisition build
        run: |
          ls -l dist
          twine check dist/*
      - name: Upload Acquisition wheel (macOS x86_64)
        if: >
          startsWith(runner.os, 'Mac')
        uses: actions/upload-artifact@v4
        with:
          # The x86_64 wheel is uploaded with a different name just so it can be
          # manually downloaded when desired. The wheel itself *cannot* be tested
          # on the GHA runner, which uses arm64 architecture.
          name: Acquisition-${{ runner.os }}-${{ matrix.python-version }}-x86_64.whl
          path: dist/*x86_64.whl
      - name: Upload Acquisition wheel (macOS arm64)
        if: >
          startsWith(runner.os, 'Mac')
          && !startsWith(matrix.python-version, 'pypy')
        uses: actions/upload-artifact@v4
        with:
          name: Acquisition-${{ runner.os }}-${{ matrix.python-version }}.whl
          path: dist/*arm64.whl
      - name: Upload Acquisition wheel (all other platforms)
        if: >
          !startsWith(runner.os, 'Mac')
        uses: actions/upload-artifact@v4
        with:
          name: Acquisition-${{ runner.os }}-${{ matrix.python-version }}.whl
          path: dist/*whl
      - name: Publish package to PyPI (Non-Linux)
        # We cannot use pypa/gh-action-pypi-publish because that
        # is a container action, and those don't run on macOS
        # or Windows GHA runners.
        if: >
          github.event_name == 'push'
          && startsWith(github.ref, 'refs/tags')
          && !startsWith(runner.os, 'Linux')
          && !startsWith(matrix.python-version, 'pypy')
        env:
          TWINE_PASSWORD: ${{ secrets.TWINE_PASSWORD }}
        run: |
          twine upload --skip-existing dist/*
  test:
    needs: build-package
    runs-on: ${{ matrix.os }}
    strategy:
      fail-fast: false
      matrix:
        python-version:
          - "pypy-3.10"
          - "3.8"
          - "3.9"
          - "3.10"
          - "3.11"
          - "3.12"
          - "3.13"
        os: [ubuntu-latest, macos-latest, windows-latest]
        exclude:
          - os: macos-latest
            python-version: "pypy-3.10"
    steps:
      - name: checkout
        uses: actions/checkout@v4
      - name: Set up Python ${{ matrix.python-version }}
        uses: actions/setup-python@v5
        with:
          python-version: ${{ matrix.python-version }}
          allow-prereleases: true
      ###
      # Caching.
      # This actually *restores* a cache and schedules a cleanup action
      # to save the cache. So it must come before the thing we want to
      # use the cache for.
      ###
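      # (This caching block is a verbatim copy of the one in ``build-package``;
      # as noted at the top of ``jobs:``, sharing steps between jobs is hard.)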
      - name: Get pip cache dir (default)
        id: pip-cache-default
        if: ${{ !startsWith(runner.os, 'Windows') }}
        run: |
          echo "dir=$(pip cache dir)" >>$GITHUB_OUTPUT
      - name: Get pip cache dir (Windows)
        id: pip-cache-windows
        if: ${{ startsWith(runner.os, 'Windows') }}
        run: |
          echo "dir=$(pip cache dir)" >> $Env:GITHUB_OUTPUT
      - name: pip cache (default)
        uses: actions/cache@v4
        if: ${{ !startsWith(runner.os, 'Windows') }}
        with:
          path: ${{ steps.pip-cache-default.outputs.dir }}
          key: ${{ runner.os }}-pip-${{ matrix.python-version }}
          restore-keys: |
            ${{ runner.os }}-pip-
      - name: pip cache (Windows)
        uses: actions/cache@v4
        if: ${{ startsWith(runner.os, 'Windows') }}
        with:
          path: ${{ steps.pip-cache-windows.outputs.dir }}
          key: ${{ runner.os }}-pip-${{ matrix.python-version }}
          restore-keys: |
            ${{ runner.os }}-pip-
      - name: Download Acquisition wheel
        uses: actions/download-artifact@v4
        with:
          name: Acquisition-${{ runner.os }}-${{ matrix.python-version }}.whl
          path: dist/
      - name: Install Acquisition
        run: |
          pip install -U wheel "setuptools <74"
          pip install -U coverage[toml]
          pip install -U 'cffi; platform_python_implementation == "CPython"'
          # Unzip into src/ so that testrunner can find the .so files
          # when we ask it to load tests from that directory. This
          # might also save some build time?
          unzip -n dist/Acquisition-*whl -d src
          pip install -e .[test]
      - name: Run tests with C extensions
        if: ${{ !startsWith(matrix.python-version, 'pypy') }}
        run: |
          python -m coverage run -p -m zope.testrunner --test-path=src --auto-color --auto-progress
      - name: Run tests without C extensions
        run: |
          # coverage makes PyPy run about 3x slower!
          python -m coverage run -p -m zope.testrunner --test-path=src --auto-color --auto-progress
        env:
          PURE_PYTHON: 1
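      # (PURE_PYTHON=1 presumably makes Acquisition skip its C extensions and
      # use the pure-Python fallbacks, following the usual zopefoundation
      # convention, so both implementations end up in the coverage data.)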
      - name: Report Coverage
        run: |
          coverage combine
          coverage report -i
      - name: Submit to Coveralls
        # This is a container action, which only runs on Linux.
        if: ${{ startsWith(runner.os, 'Linux') }}
        uses: AndreMiras/coveralls-python-action@develop
        with:
          parallel: true
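  # With ``parallel: true`` above, each matrix entry reports a partial
  # coverage build; the job below signals Coveralls that all the parallel
  # uploads are finished.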
  coveralls_finish:
    needs: test
    runs-on: ubuntu-latest
    steps:
      - name: Coveralls Finished
        uses: AndreMiras/coveralls-python-action@develop
        with:
          parallel-finished: true
  manylinux:
    runs-on: ubuntu-latest
    if: github.event_name != 'pull_request' || github.event.pull_request.head.repo.full_name != github.event.pull_request.base.repo.full_name
    # We use a regular Python matrix entry to share as much code as possible.
    strategy:
      matrix:
        python-version: ["3.11"]
        image: [manylinux2014_x86_64, manylinux2014_i686, manylinux2014_aarch64]
    steps:
      - name: checkout
        uses: actions/checkout@v4
      - name: Set up Python ${{ matrix.python-version }}
        uses: actions/setup-python@v5
        with:
          python-version: ${{ matrix.python-version }}
          allow-prereleases: true
      ###
      # Caching.
      # This actually *restores* a cache and schedules a cleanup action
      # to save the cache. So it must come before the thing we want to
      # use the cache for.
      ###
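      # (The Windows variants of the caching steps below can never run in this
      # job, which is pinned to ubuntu-latest; they come along verbatim from
      # the shared template.)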
      - name: Get pip cache dir (default)
        id: pip-cache-default
        if: ${{ !startsWith(runner.os, 'Windows') }}
        run: |
          echo "dir=$(pip cache dir)" >>$GITHUB_OUTPUT
      - name: Get pip cache dir (Windows)
        id: pip-cache-windows
        if: ${{ startsWith(runner.os, 'Windows') }}
        run: |
          echo "dir=$(pip cache dir)" >> $Env:GITHUB_OUTPUT
      - name: pip cache (default)
        uses: actions/cache@v4
        if: ${{ !startsWith(runner.os, 'Windows') }}
        with:
          path: ${{ steps.pip-cache-default.outputs.dir }}
          key: ${{ runner.os }}-pip_manylinux-${{ matrix.image }}-${{ matrix.python-version }}
          restore-keys: |
            ${{ runner.os }}-pip-
      - name: pip cache (Windows)
        uses: actions/cache@v4
        if: ${{ startsWith(runner.os, 'Windows') }}
        with:
          path: ${{ steps.pip-cache-windows.outputs.dir }}
          key: ${{ runner.os }}-pip_manylinux-${{ matrix.image }}-${{ matrix.python-version }}
          restore-keys: |
            ${{ runner.os }}-pip-
      - name: Update pip
        run: pip install -U pip
      - name: Build Acquisition (x86_64)
        if: matrix.image == 'manylinux2014_x86_64'
        # An alternate way to do this is to run the container directly with a uses:
        # and then the script runs inside it. That may work better with caching.
        # See https://github.com/pyca/bcrypt/blob/f6b5ee2eda76d077c531362ac65e16f045cf1f29/.github/workflows/wheel-builder.yml
        env:
          DOCKER_IMAGE: quay.io/pypa/${{ matrix.image }}
        run: |
          bash .manylinux.sh
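      # A hedged sketch of that alternative (not what this workflow does): a
      # step can reference the image directly as a Docker action, e.g.
      #
      #   - name: Build manylinux wheels (sketch)
      #     uses: docker://quay.io/pypa/manylinux2014_x86_64
      #     with:
      #       # Hypothetical entrypoint: the part of .manylinux.sh that
      #       # currently runs inside ``docker run``.
      #       entrypoint: /github/workspace/.manylinux-install.sh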
      - name: Build Acquisition (i686)
        if: matrix.image == 'manylinux2014_i686'
        env:
          DOCKER_IMAGE: quay.io/pypa/${{ matrix.image }}
          PRE_CMD: linux32
        run: |
          bash .manylinux.sh
      - name: Build Acquisition (aarch64)
        if: matrix.image == 'manylinux2014_aarch64'
        env:
          DOCKER_IMAGE: quay.io/pypa/${{ matrix.image }}
        run: |
          # First we must enable emulation
          docker run --rm --privileged hypriot/qemu-register
          bash .manylinux.sh
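      # (A possible alternative to the hypriot image, not used here: the
      # docker/setup-qemu-action action performs the equivalent QEMU/binfmt
      # registration.)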
      - name: Upload Acquisition wheels
        uses: actions/upload-artifact@v4
        with:
          path: wheelhouse/*whl
          name: manylinux_${{ matrix.image }}_wheels.zip
      - name: Restore pip cache permissions
        run: sudo chown -R $(whoami) ${{ steps.pip-cache-default.outputs.dir }}
      - name: Publish package to PyPI
        uses: pypa/gh-action-pypi-publish@release/v1
        if: >
          github.event_name == 'push'
          && startsWith(github.ref, 'refs/tags')
        with:
          user: __token__
          password: ${{ secrets.TWINE_PASSWORD }}
          skip-existing: true
          packages-dir: wheelhouse/