Support building with GHC 9.6 #3175
Workflow file for this run
# Overall configuration notes:
# - Artifact uploads for binaries are from GHC 9.2.8
# - Builds for Ubuntu happen on 22.04. We also include a single configuration
#   for 20.04 to increase our Linux coverage.
# - Docker builds happen nightly, on manual invocation, and on release branch commits
# Please update this comment as those details change.
name: SAWScript
on:
  push:
    tags: ["v?[0-9]+.[0-9]+(.[0-9]+)?"]
    branches: [master, "release-**"]
  pull_request:
  schedule:
    - cron: "0 10 * * *" # 10am UTC -> 2/3am PST
  workflow_dispatch:
env:
  # The CABAL_CACHE_VERSION and SOLVER_CACHE_VERSION environment variables
  # can be updated to force the use of a new cabal or solver cache if the
  # respective current cache contents become corrupted/invalid. This can
  # sometimes happen when (for example) the OS version is changed but
  # older .so files are cached, which can have various effects
  # (e.g. cabal complains it can't find a valid version of the "happy"
  # tool).
  CABAL_CACHE_VERSION: 1
  SOLVER_CACHE_VERSION: 1
  DISABLED_TESTS: "test0000 test_FNV_a1_rev test0010_jss_cnf_exp test0039_rust test_boilerplate test_external_abc"
  # Solver package snapshot date - also update in the following locations:
  # ./saw/Dockerfile
  # ./saw-remote-api/Dockerfile
  # ./s2nTests/scripts/blst-entrypoint.sh
  # ./s2nTests/docker/saw.dockerfile
  SOLVER_PKG_VERSION: "snapshot-20221212"
  OCAML_VERSION: 4.09.x
jobs:
  config:
    runs-on: ubuntu-22.04
    outputs:
      name: ${{ steps.config.outputs.name }}
      version: ${{ steps.config.outputs.version }}
      event-tag: ${{ steps.config.outputs.tag }}
      event-schedule: ${{ steps.config.outputs.schedule }}
      publish: ${{ steps.config.outputs.publish }}
      release: ${{ steps.config.outputs.release }}
      retention-days: ${{ steps.config.outputs.retention-days }}
    steps:
      - uses: actions/checkout@v2
        with:
          fetch-depth: 0
      - name: config
        id: config
        env:
          EVENT_TAG: ${{ startsWith(github.event.ref, 'refs/tags/') }}
          EVENT_SCHEDULE: ${{ github.event_name == 'schedule' }}
          EVENT_DISPATCH: ${{ github.event_name == 'workflow_dispatch' }}
          RELEASE: ${{ startsWith(github.event.ref, 'refs/heads/release-') }}
        run: |
          set -x
          .github/ci.sh output name saw-$(.github/ci.sh ver)
          .github/ci.sh output version $(.github/ci.sh ver)
          .github/ci.sh output tag $EVENT_TAG
          .github/ci.sh output schedule $EVENT_SCHEDULE
          .github/ci.sh output publish $({ $EVENT_TAG || $EVENT_SCHEDULE; } && echo true || echo false)
          .github/ci.sh output release $([[ "refs/heads/release-$(.github/ci.sh ver)" == "${{ github.event.ref }}" ]] && echo true || echo false)
          .github/ci.sh output retention-days $($RELEASE && echo 90 || echo 5)
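  # The build job compiles SAW with each supported GHC version on Linux,
  # macOS, and Windows, and uploads the binaries and test executables that
  # the downstream test jobs consume.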
  build:
    runs-on: ${{ matrix.os }}
    needs: [config]
    strategy:
      fail-fast: false
      matrix:
        os: [ubuntu-22.04, macos-12, windows-2019]
        cabal: ["3.10.1.0"]
        ghc: ["9.2.8", "9.4.8", "9.6.3"]
        run-tests: [true]
        hpc: [false]
        include:
          # We include one job from an older Ubuntu LTS release to increase our
          # coverage of possible Linux configurations. Since we already run the
          # tests with the newest LTS release, we won't bother testing this one.
          - os: ubuntu-20.04
            ghc: "9.2.8"
            cabal: "3.10.1.0"
            run-tests: false
            hpc: false
          # Include one job with HPC enabled
          - os: ubuntu-22.04
            ghc: "9.4.8"
            cabal: "3.10.1.0"
            run-tests: true
            hpc: true
    outputs:
      cabal-test-suites-json: ${{ steps.cabal-test-suites.outputs.targets-json }}
    steps:
      - uses: actions/checkout@v2
      - run: |
          git submodule update --init
      - id: config
        shell: bash
        run: |
          NAME="${{ needs.config.outputs.name }}-${{ matrix.os }}-x86_64"
          .github/ci.sh output name $NAME
          echo "NAME=${{ needs.config.outputs.name }}-${{ matrix.os }}-x86_64" >> $GITHUB_ENV
      - uses: haskell/actions/setup@v2
        id: setup-haskell
        with:
          ghc-version: ${{ matrix.ghc }}
          cabal-version: ${{ matrix.cabal }}
      - name: Post-GHC installation fixups on Windows
        shell: bash
        if: runner.os == 'Windows'
        run: |
          # A workaround for https://github.com/Mistuke/CabalChoco/issues/5
          cabal user-config update -a "extra-include-dirs: \"\""
          cabal user-config update -a "extra-lib-dirs: \"\""
      - shell: bash
        run: .github/ci.sh install_system_deps
        env:
          BUILD_TARGET_OS: ${{ matrix.os }}
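      # The cabal store and dist-newstyle are cached per OS/GHC/freeze-file
      # combination. The exact key also includes the commit SHA, so each run
      # falls back to the most recent matching cache via restore-keys and
      # saves a fresh entry at the end of the job.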
      - uses: actions/cache/restore@v3
        name: Restore cabal store cache
        with:
          path: |
            ${{ steps.setup-haskell.outputs.cabal-store }}
            dist-newstyle
          key: ${{ env.CABAL_CACHE_VERSION }}-cabal-${{ matrix.os }}-${{ matrix.ghc }}-${{ hashFiles(format('cabal.GHC-{0}.config', matrix.ghc)) }}-${{ github.sha }}
          restore-keys: |
            ${{ env.CABAL_CACHE_VERSION }}-cabal-${{ matrix.os }}-${{ matrix.ghc }}-${{ hashFiles(format('cabal.GHC-{0}.config', matrix.ghc)) }}-
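      # For release builds, stub out the embedded git-hash text in
      # SAWScript.Version (and drop the GitRev import) before compiling; the
      # same substitution is applied in the build-push-image job below.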
      - if: needs.config.outputs.release == 'true'
        shell: bash
        run: |
          sed -i.bak \
            -e 's/^hashText = .*$/hashText = ""/' \
            -e '/import GitRev.*$/d' \
            saw/SAWScript/Version.hs
          rm -f saw/SAWScript/Version.hs.bak
      - shell: bash
        run: .github/ci.sh build
        env:
          ENABLE_HPC: ${{ matrix.hpc }}
      - shell: bash
        env:
          RELEASE: ${{ needs.config.outputs.release }}
        run: .github/ci.sh build_cryptol
      - uses: GaloisInc/.github/actions/cabal-collect-bins@v1.1.1
        id: cabal-test-suites
        with:
          targets: |
            integration_tests
            test-sawcore
            cryptol-saw-core-tc-test
            prover_tests
          dest: dist-tests
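      # The collected test executables are uploaded once (from the GHC 9.2.8
      # build) and their names are exported as JSON so the cabal-test job can
      # construct its matrix from them.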
      - uses: actions/upload-artifact@v2
        if: "matrix.ghc == '9.2.8'"
        with:
          path: dist-tests
          name: dist-tests-${{ matrix.os }}
      - shell: bash
        run: .github/ci.sh setup_dist_bins
      - shell: bash
        run: .github/ci.sh bundle_files
      - shell: bash
        run: .github/ci.sh zip_dist $NAME
      - shell: bash
        run: .github/ci.sh zip_dist_with_solvers $NAME-with-solvers
      - if: matrix.ghc == '9.2.8' && github.event.pull_request.head.repo.fork == false
        shell: bash
        env:
          SIGNING_PASSPHRASE: ${{ secrets.SIGNING_PASSPHRASE }}
          SIGNING_KEY: ${{ secrets.SIGNING_KEY }}
        run: .github/ci.sh sign $NAME.tar.gz
      - if: matrix.ghc == '9.2.8' && github.event.pull_request.head.repo.fork == false
        shell: bash
        env:
          SIGNING_PASSPHRASE: ${{ secrets.SIGNING_PASSPHRASE }}
          SIGNING_KEY: ${{ secrets.SIGNING_KEY }}
        run: .github/ci.sh sign $NAME-with-solvers.tar.gz
      ##########################################################################
      # We upload an archive containing SAW, and also an archive containing SAW
      # and the set of possible SMT solvers, but only for our "primary"
      # distribution (currently: GHC 9.2.8). These archives are used by
      # subsequent CI jobs, but are also published for external users, and are
      # therefore signed.
      #
      # In addition, if we built with HPC, we upload a tarball containing the
      # HPC-enabled binaries and HPC-specific files generated during the build
      # process. These are mostly used by subsequent CI jobs that generate a
      # coverage report. The binaries are essentially the same as those
      # collected in the previous step, but they are captured in their original
      # cabal-generated locations, where they are expected to live for
      # subsequent coverage collection.
      # In the next 3 steps we check that `matrix.hpc == false` so that if the
      # distribution version matches the HPC version, the HPC build artifacts do
      # not clobber the non-HPC distribution artifacts.
      - if: matrix.ghc == '9.2.8' && matrix.hpc == false
        uses: actions/upload-artifact@v2
        with:
          name: ${{ steps.config.outputs.name }} (GHC ${{ matrix.ghc }})
          path: "${{ steps.config.outputs.name }}.tar.gz*"
          if-no-files-found: error
          retention-days: ${{ needs.config.outputs.retention-days }}
      - if: matrix.ghc == '9.2.8' && matrix.hpc == false
        uses: actions/upload-artifact@v2
        with:
          name: ${{ steps.config.outputs.name }}-with-solvers (GHC ${{ matrix.ghc }})
          path: "${{ steps.config.outputs.name }}-with-solvers.tar.gz*"
          if-no-files-found: error
          retention-days: ${{ needs.config.outputs.retention-days }}
      - if: matrix.ghc == '9.2.8' && matrix.run-tests && matrix.hpc == false
        uses: actions/upload-artifact@v2
        with:
          path: dist/bin
          name: ${{ runner.os }}-bins
      - if: matrix.hpc == true
        shell: bash
        run: .github/ci.sh collect_hpc_files
      - if: matrix.hpc == true
        uses: actions/upload-artifact@v2
        with:
          path: hpc.tar.gz
          name: ${{ runner.os }}-hpc.tar.gz
      - uses: actions/cache/save@v3
        name: Save cabal store cache
        if: always()
        with:
          path: |
            ${{ steps.setup-haskell.outputs.cabal-store }}
            dist-newstyle
          key: ${{ env.CABAL_CACHE_VERSION }}-cabal-${{ matrix.os }}-${{ matrix.ghc }}-${{ hashFiles(format('cabal.GHC-{0}.config', matrix.ghc)) }}-${{ github.sha }}
  mr-solver-tests:
    needs: [build]
    strategy:
      fail-fast: false
      matrix:
        os: [ubuntu-22.04, macos-12]
    runs-on: ${{ matrix.os }}
    steps:
      - uses: actions/checkout@v2
        with:
          submodules: true
      - shell: bash
        run: .github/ci.sh install_system_deps
        env:
          BUILD_TARGET_OS: ${{ matrix.os }}
      - uses: actions/download-artifact@v2
        with:
          name: "${{ runner.os }}-bins"
          path: dist/bin
      - name: Update PATH to include SAW
        shell: bash
        run: |
          chmod +x dist/bin/*
          echo $GITHUB_WORKSPACE/dist/bin >> $GITHUB_PATH
      - working-directory: examples/mr_solver
        shell: bash
        run: |
          saw monadify.saw
          saw mr_solver_unit_tests.saw
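  # The Heapster tests build the Coq code generated from the heapster-saw
  # examples, which requires an OCaml toolchain, Coq, and the entree-specs
  # library in addition to the SAW binaries built above.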
  heapster-tests:
    needs: [build]
    strategy:
      fail-fast: false
      matrix:
        os: [ubuntu-22.04, macos-12]
    runs-on: ${{ matrix.os }}
    steps:
      - uses: actions/checkout@v2
        with:
          submodules: true
      - shell: bash
        run: .github/ci.sh install_system_deps
        env:
          BUILD_TARGET_OS: ${{ matrix.os }}
      - uses: actions/download-artifact@v2
        with:
          name: "${{ runner.os }}-bins"
          path: dist/bin
      - name: Update PATH to include SAW
        shell: bash
        run: |
          chmod +x dist/bin/*
          echo $GITHUB_WORKSPACE/dist/bin >> $GITHUB_PATH
      - uses: ocaml/setup-ocaml@v2
        with:
          ocaml-compiler: 4.14.x
      - run: opam repo add coq-released https://coq.inria.fr/opam/released
      - run: opam install -y coq=8.15.2 coq-bits=1.1.0
      # If you change the entree-specs commit below, make sure you update the
      # documentation in saw-core-coq/README.md accordingly.
      - run: opam pin -y entree-specs https://github.com/GaloisInc/entree-specs.git#52c4868f1f65c7ce74e90000214de27e23ba98fb
      # FIXME: the following steps generate Coq libraries for the SAW core to
      # Coq translator and build them; if we add other Coq tests, these steps
      # should produce their own build artifact, to avoid re-compiling the Coq
      # libraries
      - working-directory: saw-core-coq/coq
        shell: bash
        run: opam exec -- make -j
      - working-directory: heapster-saw/examples
        shell: bash
        run: opam exec -- make -j
  saw-remote-api-tests:
    runs-on: ${{ matrix.os }}
    needs: [build]
    strategy:
      fail-fast: false
      matrix:
        include:
          - name: Install and test
            test: saw-remote-api/scripts/run_rpc_tests.sh
            os: ubuntu-22.04
          # TODO: saw-remote-api unit tests are disabled pending a fix for #1699
          - name: Install on MacOS
            test: |
              cd saw-remote-api/python/
              poetry install
              poetry run mypy --install-types --non-interactive saw_client/ || true
              poetry run mypy --install-types --non-interactive saw_client/
            os: macos-12
          - name: Check docs
            test: saw-remote-api/scripts/check_docs.sh
            os: ubuntu-22.04
    steps:
      - uses: actions/checkout@v2
        with:
          submodules: true
      - shell: bash
        run: .github/ci.sh install_system_deps
        env:
          BUILD_TARGET_OS: ${{ matrix.os }}
      - uses: actions/download-artifact@v2
        with:
          name: "${{ runner.os }}-bins"
          path: dist/bin
      - uses: actions/setup-python@v2
        with:
          python-version: '3.11'
      - uses: abatilo/actions-poetry@v2.0.0
        with:
          poetry-version: 1.4.2
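      # Sanity-check that the bundled solvers and the saw executables run
      # before invoking the per-configuration test command.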
      - name: ${{ matrix.name }}
        shell: bash
        run: |
          chmod +x dist/bin/*
          export PATH="$PWD/dist/bin:$PATH"
          echo "$PWD/dist/bin" >> "$GITHUB_PATH"
          abc -h || true
          yices --version
          yices-smt2 --version
          saw --version
          saw-remote-api --help
          ${{ matrix.test }}
  cabal-test:
    runs-on: ${{ matrix.os }}
    needs: [build]
    strategy:
      fail-fast: false
      matrix:
        suite: ${{ fromJson(needs.build.outputs.cabal-test-suites-json) }}
        os: [ubuntu-22.04]
        continue-on-error: [false]
        include:
          - suite: integration_tests
            os: macos-12
            continue-on-error: true # https://github.com/GaloisInc/saw-script/issues/1135
          - suite: integration_tests
            os: windows-2019
            timeout-minutes: 60
            continue-on-error: true # https://github.com/GaloisInc/saw-script/issues/1135
        exclude:
          - suite: integration_tests
            os: macos-12
            continue-on-error: false
          - suite: integration_tests
            os: windows-2019
            continue-on-error: false
    steps:
      - uses: actions/checkout@v2
        with:
          submodules: true
      - shell: bash
        run: .github/ci.sh install_system_deps
        env:
          BUILD_TARGET_OS: ${{ matrix.os }}
      - uses: actions/download-artifact@v2
        with:
          name: "${{ runner.os }}-bins"
          path: dist/bin
      - shell: bash
        if: "runner.os != 'Windows'"
        run: chmod +x dist/bin/*
      - shell: bash
        if: runner.os != 'Windows'
        run: chmod +x bin/*
      - uses: actions/download-artifact@v2
        with:
          name: dist-tests-${{ matrix.os }}
          path: dist-tests
      - shell: bash
        if: runner.os != 'Windows'
        run: chmod +x dist-tests/*
      - uses: actions/setup-java@v1
        if: "matrix.suite == 'integration_tests'"
        with:
          java-version: "8"
          java-package: jdk
          architecture: x64
      - uses: actions/cache/restore@v3
        name: Restore SMT solver result cache
        if: "matrix.suite == 'integration_tests'"
        with:
          path: ${{ matrix.suite }}.cache
          key: ${{ env.SOLVER_CACHE_VERSION }}-solver-${{ matrix.suite }}-${{ matrix.os }}-${{ github.sha }}
          restore-keys: |
            ${{ env.SOLVER_CACHE_VERSION }}-solver-${{ matrix.suite }}-${{ matrix.os }}-
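      # Point SAW at the restored solver result cache and drop any entries
      # created by a mismatched solver version before running the test suite.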
      - shell: bash
        name: Enable solver caching
        if: "matrix.suite == 'integration_tests'"
        run: |
          echo "SAW_SOLVER_CACHE_PATH=$(pwd)/${{ matrix.suite }}.cache" >> "$GITHUB_ENV"
          dist/bin/saw --clean-mismatched-versions-solver-cache=$(pwd)/${{ matrix.suite }}.cache
      - name: ${{ matrix.suite }}
        continue-on-error: ${{ matrix.continue-on-error }}
        shell: bash
        run: |
          export PATH="$PWD/bin:$PWD/dist/bin:$PATH"
          dist-tests/${{ matrix.suite }}
      - uses: actions/cache/save@v3
        name: Save SMT solver result cache
        if: "matrix.suite == 'integration_tests'"
        with:
          path: ${{ matrix.suite }}.cache
          key: ${{ env.SOLVER_CACHE_VERSION }}-solver-${{ matrix.suite }}-${{ matrix.os }}-${{ github.sha }}
  # The coverage job is similar to the cabal-test job, but it only runs the
  # HPC-enabled SAW build against the integration test suite. It then collects
  # the test coverage results, generates an HTML summary, and publishes the
  # results to GitHub Pages.
  coverage:
    name: "Run integration tests with coverage reporting"
    needs: build
    runs-on: ${{ matrix.os }}
    if: github.event_name == 'pull_request'
    permissions:
      pages: write
      id-token: write
    strategy:
      matrix:
        suite: ['integration_tests']
        os: [ubuntu-22.04]
    steps:
      # Need a copy of the source to generate coverage HTML
      - uses: actions/checkout@v2
        with:
          submodules: true
      - name: Install system dependencies
        shell: bash
        run: .github/ci.sh install_system_deps
        env:
          BUILD_TARGET_OS: ${{ matrix.os }}
      - uses: actions/download-artifact@v2
        with:
          name: dist-tests-${{ matrix.os }}
          path: dist-tests
      - uses: actions/download-artifact@v2
        with:
          name: "${{ runner.os }}-hpc.tar.gz"
      - name: Setup test environment
        shell: bash
        run: |
          tar xvf hpc.tar.gz
          chmod +x dist/bin/*
          chmod +x bin/*
          chmod +x dist-tests/*
      - uses: actions/setup-java@v1
        with:
          java-version: "8"
          java-package: jdk
          architecture: x64
      # NOTE: This job uses the SMT solver cache to improve performance, but it
      # does not save the updated SMT solver cache. This is because the
      # `cabal-test` job also runs the integration tests and uploads an updated
      # cache. Because the test suite is the same, the resulting cache files
      # would also be identical.
      - uses: actions/cache/restore@v3
        name: Restore SMT solver result cache
        with:
          path: ${{ matrix.suite }}.cache
          key: ${{ env.SOLVER_CACHE_VERSION }}-solver-${{ matrix.suite }}-${{ matrix.os }}-${{ github.sha }}
          restore-keys: |
            ${{ env.SOLVER_CACHE_VERSION }}-solver-${{ matrix.suite }}-${{ matrix.os }}-
      - shell: bash
        name: Enable solver caching
        run: |
          echo "SAW_SOLVER_CACHE_PATH=$(pwd)/${{ matrix.suite }}.cache" >> "$GITHUB_ENV"
          dist/bin/saw --clean-mismatched-versions-solver-cache=$(pwd)/${{ matrix.suite }}.cache
      - name: Run integration tests
        shell: bash
        run: |
          export PATH="$PWD/bin:$PWD/dist/bin:$PATH"
          dist-tests/integration_tests
      - name: Compute coverage
        shell: bash
        run: |
          ./compute-coverage.sh
      - uses: actions/upload-artifact@v2
        with:
          path: hpc-html
          name: coverage-html-${{ github.event.number }}
      - name: Gather HPC coverage files
        shell: bash
        run: .github/ci.sh collect_all_html
        env:
          GH_TOKEN: ${{ github.token }}
      - name: Upload pages artifact
        uses: actions/upload-pages-artifact@v1
        with:
          path: all-html
      - name: Deploy to github pages
        id: deployment
        uses: actions/deploy-pages@v2
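  # Build the saw and saw-remote-api Docker images, test the saw-remote-api
  # image, and push nightly/release tags. Runs on the nightly schedule, on
  # manual dispatch, and on release branches.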
  build-push-image:
    runs-on: ubuntu-22.04
    needs: [config]
    if: github.event_name == 'schedule' || github.event_name == 'workflow_dispatch' || needs.config.outputs.release == 'true'
    strategy:
      fail-fast: false
      matrix:
        include:
          - file: saw/Dockerfile
            image: ghcr.io/galoisinc/saw
            cache: ghcr.io/galoisinc/cache-saw
          - file: saw-remote-api/Dockerfile
            image: ghcr.io/galoisinc/saw-remote-api
            cache: ghcr.io/galoisinc/cache-saw-remote-api
    steps:
      - uses: actions/checkout@v2
        with:
          submodules: true
      - uses: rlespinasse/github-slug-action@v3.x
      - id: common-tag
        run: |
          echo "::set-output name=common-tag::$GITHUB_REF_SLUG"
          echo "COMMON_TAG=$GITHUB_REF_SLUG" >> $GITHUB_ENV
      - uses: docker/setup-buildx-action@v1
      - uses: crazy-max/ghaction-docker-meta@v1
        name: Labels
        id: labels
        with:
          images: ${{ matrix.image }}
      - uses: docker/login-action@v1
        with:
          registry: ghcr.io
          username: ${{ github.actor }}
          password: ${{ secrets.GITHUB_TOKEN }}
      - if: needs.config.outputs.release == 'true'
        shell: bash
        run: |
          sed -i.bak \
            -e 's/^hashText = .*$/hashText = ""/' \
            -e '/import GitRev.*$/d' \
            saw/SAWScript/Version.hs
          rm -f saw/SAWScript/Version.hs.bak
      - uses: docker/build-push-action@v2
        with:
          context: .
          tags: ${{ matrix.image }}:${{ steps.common-tag.outputs.common-tag }}
          labels: ${{ steps.labels.outputs.labels }}
          load: true
          push: false
          file: ${{ matrix.file }}
          build-args: ${{ matrix.build-args }}
          cache-from: |
            type=registry,ref=${{ matrix.cache }}:${{ steps.prefix.outputs.prefix }}master
            type=registry,ref=${{ matrix.cache }}:${{ steps.common-tag.outputs.common-tag }}
      - name: Cache image build
        uses: docker/build-push-action@v2
        continue-on-error: true # Tolerate cache upload failures - this should be handled better
        with:
          context: .
          file: ${{ matrix.file }}
          build-args: ${{ matrix.build-args }}
          cache-to: type=registry,ref=${{ matrix.cache }}:${{ steps.common-tag.outputs.common-tag }},mode=max
      - if: matrix.image == 'ghcr.io/galoisinc/saw-remote-api'
        uses: actions/setup-python@v2
        with:
          python-version: '3.9'
      - if: matrix.image == 'ghcr.io/galoisinc/saw-remote-api'
        uses: abatilo/actions-poetry@v2.0.0
        with:
          poetry-version: 1.4.2
      - if: matrix.image == 'ghcr.io/galoisinc/saw-remote-api'
        name: Test saw-remote-api
        run: ./saw-remote-api/scripts/test_docker.sh ${{ matrix.image }}:$COMMON_TAG
      - if: needs.config.outputs.event-schedule == 'true'
        name: ${{ matrix.image }}:nightly
        run: |
          docker tag ${{ matrix.image }}:$COMMON_TAG ${{ matrix.image }}:nightly
          docker push ${{ matrix.image }}:nightly
      - if: needs.config.outputs.release == 'true'
        name: ${{ matrix.image }}:${{ needs.config.outputs.version }}
        run: |
          docker tag ${{ matrix.image }}:$COMMON_TAG ${{ matrix.image }}:${{ needs.config.outputs.version }}
          docker push ${{ matrix.image }}:${{ needs.config.outputs.version }}
          docker tag ${{ matrix.image }}:$COMMON_TAG ${{ matrix.image }}:latest
          docker push ${{ matrix.image }}:latest
  s2n-tests:
    name: "Test s2n proofs"
    timeout-minutes: 150
    needs: build
    runs-on: ubuntu-22.04
    strategy:
      fail-fast: false
      matrix:
        s2n-target:
          - hmac
          - drbg
          - sike
          - bike
          - tls
          - hmac-failure
          - awslc
          - blst
        ghc: ["9.2.8"]
    steps:
      - uses: actions/checkout@v2
      - run: |
          mkdir -p s2nTests/bin
      - name: Download previously-built SAW
        uses: actions/download-artifact@v2
        with:
          name: "${{ runner.os }}-bins"
          path: ./s2nTests/bin
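      # Pre-pull the docker-compose images and every base image referenced by
      # the dockerfiles (in parallel) before building the s2n test image.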
      - shell: bash
        working-directory: s2nTests
        run: |
          docker-compose pull
          grep -h '^FROM' docker/*.dockerfile | sort -u | awk '{print $2}' | xargs -n1 -P8 docker pull
      - shell: bash
        name: "make s2n"
        working-directory: s2nTests
        run: docker-compose build s2n
      - uses: actions/cache/restore@v3
        name: Restore SMT solver result cache
        with:
          path: s2nTests/cache
          key: ${{ env.SOLVER_CACHE_VERSION }}-solver-s2n-${{ matrix.s2n-target }}-${{ matrix.ghc }}-${{ github.sha }}
          restore-keys: |
            ${{ env.SOLVER_CACHE_VERSION }}-solver-s2n-${{ matrix.s2n-target }}-${{ matrix.ghc }}-
      - shell: bash
        name: "s2n tests: ${{ matrix.s2n-target }}"
        working-directory: s2nTests
        run: |
          chmod +x bin/*
          mkdir -p cache && touch cache/data.mdb cache/lock.mdb
          chmod -R +rw cache
          make ${{ matrix.s2n-target }}
      - uses: actions/cache/save@v3
        name: Save SMT solver result cache
        if: always()
        with:
          path: s2nTests/cache
          key: ${{ env.SOLVER_CACHE_VERSION }}-solver-s2n-${{ matrix.s2n-target }}-${{ matrix.ghc }}-${{ github.sha }}
  exercises:
    name: "Test SAW exercises"
    needs: build
    runs-on: ubuntu-22.04
    strategy:
      matrix:
        ghc: ["9.2.8"]
    steps:
      - uses: actions/checkout@v2
      - run: |
          mkdir -p exercises/bin
      - name: Download previously-built SAW
        uses: actions/download-artifact@v2
        with:
          name: "${{ runner.os }}-bins"
          path: ./exercises/bin
      - shell: bash
        name: "make exercises container"
        working-directory: exercises
        run: docker build -t exercises .
      - shell: bash
        name: "run exercises"
        working-directory: exercises
        run: |
          chmod +x bin/*
          docker run -v $PWD/bin:/saw-bin exercises
  # Indicates sufficient CI success for the purposes of mergify merging the pull
  # request; see .github/mergify.yml. This is done instead of enumerating each
  # instance of each job in the mergify configuration for a number of reasons:
  # - continue-on-error is respected, so it won't block merging
  # - changes to jobs or job instances don't require a mergify config update
  # - dependencies expressed through `needs:` are validated; CI will fail if they are invalid
  mergify:
    runs-on: ubuntu-22.04
    needs:
      - build
      - heapster-tests
      - saw-remote-api-tests
      - cabal-test
      - s2n-tests
      - exercises
    steps:
      - run: "true"