Run ferc_to_sqlite and pudl_etl independent of integration tests #9461
Workflow file for this run

---
name: Code validation
# TODO(rousik): We could name this "Code tests", as that would
# better represent what it does. Maybe even migrate the yml file.
on:
  workflow_dispatch:
  pull_request:
    types:
      - opened
      - synchronize
      - ready_for_review
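# Only one run per ref at a time: a newer push cancels the in-flight run.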
concurrency:
  group: ${{ github.workflow }}-${{ github.ref }}
  cancel-in-progress: true
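# All jobs share a runner-local workspace under /home/runner/pudl-work.
# COVERAGE_OPTIONS puts coverage.py into parallel mode, so the separate
# ferc_to_sqlite, pudl_etl, and pytest invocations below each write their own
# .coverage.* data file, which `coverage combine` later merges.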
env:
  PUDL_OUTPUT: /home/runner/pudl-work/output/
  PUDL_INPUT: /home/runner/pudl-work/data/
  DAGSTER_HOME: /home/runner/pudl-work/dagster_home/
  ETL_CONFIG: src/pudl/package_data/settings/etl_fast.yml
  ETL_COMMANDLINE_OPTIONS: --gcs-cache-path=gs://zenodo-cache.catalyst.coop
  COVERAGE_OPTIONS: --concurrency=multiprocessing --parallel-mode
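# Jobs:
#   ci-static:      linters and docs build, one tox env per matrix entry
#   ci-unit:        unit tests via tox
#   ci-integration: standalone ferc_to_sqlite and pudl_etl runs, then the
#                   integration tests against the live databases
#   ci-coverage:    gathers the coverage artifacts and uploads them to CodeCov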
jobs:
  ci-static:
    name: Static tests
    runs-on: ubuntu-latest
    strategy:
      fail-fast: false
      matrix:
        tox-env:
          - linters
          - docs
    defaults:
      run:
        shell: bash -l {0}
    steps:
      - uses: actions/checkout@v4
        with:
          fetch-depth: 2
      - name: Install Conda environment using mamba
        uses: mamba-org/setup-micromamba@v1
        with:
          environment-file: test/test-environment.yml
          cache-environment: true
          condarc: |
            channels:
              - conda-forge
              - defaults
            channel_priority: strict
      - name: Log environment details
        run: |
          conda info
          conda list
          conda config --show-sources
          conda config --show
          printenv | sort
      - name: Build ${{ matrix.tox-env }} with Tox
        run: |
          tox -e ${{ matrix.tox-env }}
      - name: Upload coverage
        uses: actions/upload-artifact@v3
        if: ${{ matrix.tox-env == 'docs' }}
        with:
          name: coverage-docs
          path: coverage.xml
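  # Unit tests run on the standard runner; the coverage.xml they produce is
  # uploaded as an artifact for the ci-coverage job.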
  ci-unit:
    name: Unit tests
    runs-on: ubuntu-latest
    strategy:
      fail-fast: false
    defaults:
      run:
        shell: bash -l {0}
    steps:
      - uses: actions/checkout@v4
        with:
          fetch-depth: 2
      - name: Install Conda environment using mamba
        uses: mamba-org/setup-micromamba@v1
        with:
          environment-file: test/test-environment.yml
          cache-environment: true
          condarc: |
            channels:
              - conda-forge
              - defaults
            channel_priority: strict
      - name: Log environment details
        run: |
          conda info
          conda list
          conda config --show-sources
          conda config --show
          printenv | sort
      - name: Log SQLite3 version
        run: |
          which sqlite3
          sqlite3 --version
      - name: Run unit tests with Tox
        run: |
          tox -e unit -- --durations 0
      - name: Upload coverage
        uses: actions/upload-artifact@v3
        with:
          name: coverage-unit
          path: coverage.xml
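  # The fast ETL is executed as two standalone, coverage-instrumented commands
  # (ferc_to_sqlite, then pudl_etl) before pytest runs, so the integration
  # tests can reuse the resulting databases via --live-dbs instead of building
  # them inside the test session.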
  ci-integration:
    name: Integration (fast ETL run)
    needs:
      - ci-unit
    runs-on:
      group: large-runner-group
      labels: ubuntu-22.04-4core
    if: github.event.pull_request.draft == false
    permissions:
      contents: read
      id-token: write
    defaults:
      run:
        shell: bash -l {0}
    strategy:
      fail-fast: false
    steps:
      - uses: actions/checkout@v4
        with:
          fetch-depth: 2
      - name: Install Conda environment using mamba
        uses: mamba-org/setup-micromamba@v1
        with:
          environment-file: test/test-environment.yml
          cache-environment: true
          condarc: |
            channels:
              - conda-forge
              - defaults
            channel_priority: strict
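      # PUDL and its test/datasette extras are pip-installed on top of the
      # conda environment set up above.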
      - name: Install PUDL and its dependencies
        run: pip install .[test,datasette]
      - name: Log environment details
        run: |
          conda info
          conda list
          conda config --show-sources
          conda config --show
          printenv | sort
      - name: Log SQLite3 version
        run: |
          which sqlite3
          sqlite3 --version
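      # The cache key for the raw-input datastore is derived from the Zenodo
      # DOIs embedded in datastore.py, so bumping a DOI invalidates the cache.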
      - name: Compile Zenodo datastore DOIs for cache invalidation
        run:
          grep -e '.*10\.\(5281\|5072\)/zenodo\..*' src/pudl/workspace/datastore.py
          | sed -e 's/",*$//g' | sed -e 's/^.*"//g' | sort > datastore-dois.txt
      - name: Restore Zenodo datastore from cache if possible
        uses: actions/cache@v3
        id: cache-zenodo-datastore
        with:
          path: ${{ env.PUDL_INPUT }}
          key: zenodo-datastore-${{ hashFiles('datastore-dois.txt') }}
      - name: Make input, output and dagster dirs
        run: mkdir -p ${{ env.PUDL_OUTPUT }} ${{ env.PUDL_INPUT }} ${{ env.DAGSTER_HOME }}
      - name: List workspace contents
        run: find /home/runner/pudl-work
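      # GCP auth is best-effort (continue-on-error is set), presumably so that
      # runs without access to the workload identity pool can still fetch
      # inputs directly from Zenodo instead of the GCS cache.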
      - name: Set default GCP credentials
        id: gcloud-auth
        continue-on-error: true
        uses: "google-github-actions/auth@v1"
        with:
          workload_identity_provider: "projects/345950277072/locations/global/workloadIdentityPools/gh-actions-pool/providers/gh-actions-provider"
          service_account: "tox-pytest-github-action@catalyst-cooperative-pudl.iam.gserviceaccount.com"
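      # Step 1 of the ETL: convert the raw FERC data into SQLite databases,
      # run under coverage so these lines count toward the combined report.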
      - name: Run ferc_to_sqlite
        run: |
          coverage run ${{ env.COVERAGE_OPTIONS }} \
            src/pudl/ferc_to_sqlite/cli.py --clobber ${{ env.ETL_COMMANDLINE_OPTIONS }} ${{ env.ETL_CONFIG }}
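      # Step 2: apply the alembic schema migrations, then run the fast ETL
      # (etl_fast.yml) to populate the PUDL outputs.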
      - name: Run pudl_etl
        run: |
          alembic upgrade head
          coverage run ${{ env.COVERAGE_OPTIONS }} \
            src/pudl/cli/etl.py ${{ env.ETL_COMMANDLINE_OPTIONS }} ${{ env.ETL_CONFIG }}
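      # Step 3: run the integration tests against the databases produced
      # above; --live-dbs tells the test suite to use the existing outputs
      # rather than running the ETL itself.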
      - name: Run integration tests
        run: |
          coverage run ${{ env.COVERAGE_OPTIONS }} \
            -m pytest --live-dbs test/integration
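      # Merge the per-process .coverage.* data files from the three coverage
      # runs above into a single coverage.xml for upload.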
      - name: Generate coverage
        run: |
          coverage combine
          coverage xml
      - name: Upload coverage
        uses: actions/upload-artifact@v3
        with:
          name: coverage-integration
          path: coverage.xml
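  # Downloads the coverage-docs, coverage-unit and coverage-integration
  # artifacts (download-artifact with no name fetches all of them) and uploads
  # the combined set to CodeCov.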
  ci-coverage:
    name: Upload coverage to CodeCov
    runs-on: ubuntu-latest
    needs:
      - ci-unit
      - ci-integration
      - ci-static
    steps:
      - uses: actions/checkout@v4
      - name: Download coverage
        id: download-unit
        uses: actions/download-artifact@v3
        with:
          path: coverage
      - name: List downloaded files
        run: |
          ls -R
      - name: Upload test coverage report to CodeCov
        uses: codecov/codecov-action@v3
        with:
          directory: coverage
  # TODO(rousik): The following slack notification gives no value and
  # needs to be fixed. Until then, it might be better to do nothing
  # at all.
  # ci-notify:
  #   name: Notify slack
  #   runs-on: ubuntu-latest
  #   if: ${{ always() }}
  #   needs:
  #     - ci-unit
  #     - ci-integration
  #   steps:
  #     - name: Inform the Codemonkeys
  #       uses: 8398a7/action-slack@v3
  #       continue-on-error: true
  #       with:
  #         status: custom
  #         fields: workflow,job,commit,repo,ref,author,took
  #         custom_payload: |
  #           {
  #             username: 'action-slack',
  #             icon_emoji: ':octocat:',
  #             attachments: [{
  #               color: '${{ needs.ci-test.result }}' === 'success' ? 'good' : '${{ needs.ci-test.result }}' === 'failure' ? 'danger' : 'warning',
  #               text: `${process.env.AS_REPO}@${process.env.AS_REF}\n ${process.env.AS_WORKFLOW} (${process.env.AS_COMMIT})\n by ${process.env.AS_AUTHOR}\n Status: ${{ needs.ci-test.result }}`,
  #             }]
  #           }
  #       env:
  #         GITHUB_TOKEN: ${{ github.token }} # required
  #         SLACK_WEBHOOK_URL: ${{ secrets.SLACK_WEBHOOK_URL }} # required
  #         MATRIX_CONTEXT: ${{ toJson(matrix) }} # required