Skip to content

Commit

Permalink
Run integration tests in CI (#266)
Browse files Browse the repository at this point in the history
closes: #198 
closes: #206
  • Loading branch information
pankajkoti authored Oct 21, 2024
1 parent 997614a commit 55a1a2f
Show file tree
Hide file tree
Showing 8 changed files with 357 additions and 9 deletions.
90 changes: 86 additions & 4 deletions .github/workflows/cicd.yaml
Original file line number Diff line number Diff line change
@@ -1,4 +1,4 @@
name: Test and release related jobs
name: CI jobs

on:
push: # Run on pushes to the default branch
Expand Down Expand Up @@ -27,7 +27,7 @@ jobs:
architecture: "x64"

- run: pip3 install hatch
- run: CONFIG_ROOT_DIR=`pwd`"/dags" hatch run tests.py3.12-2.10:static-check
- run: hatch run tests.py3.12-2.10:static-check

Run-Unit-Tests:
runs-on: ubuntu-latest
Expand Down Expand Up @@ -91,11 +91,11 @@ jobs:
run: |
python -m pip install uv
uv pip install --system hatch
CONFIG_ROOT_DIR=`pwd`"/dags" hatch -e tests.py${{ matrix.python-version }}-${{ matrix.airflow-version }} run pip freeze
hatch -e tests.py${{ matrix.python-version }}-${{ matrix.airflow-version }} run pip freeze
- name: Test DAG Factory against Airflow ${{ matrix.airflow-version }} and Python ${{ matrix.python-version }}
run: |
CONFIG_ROOT_DIR=`pwd`"/dags" hatch run tests.py${{ matrix.python-version }}-${{ matrix.airflow-version }}:test-cov
hatch run tests.py${{ matrix.python-version }}-${{ matrix.airflow-version }}:test-cov
- name: Upload coverage to Github
uses: actions/upload-artifact@v4
Expand All @@ -104,10 +104,92 @@ jobs:
path: .coverage
include-hidden-files: true

Run-Integration-Tests:
  runs-on: ubuntu-latest
  strategy:
    fail-fast: false
    matrix:
      python-version: ["3.8", "3.9", "3.10", "3.11", "3.12"]
      airflow-version: ["2.2", "2.3", "2.4", "2.5", "2.6", "2.7", "2.8", "2.9", "2.10"]
      exclude:
        # Apache Airflow versions prior to 2.3.0 have not been tested with Python 3.10
        # See: https://airflow.apache.org/docs/apache-airflow/2.2.0/installation/prerequisites.html
        - python-version: "3.10"
          airflow-version: "2.2"
        # Apache Airflow versions prior to 2.6.2 have not been tested with Python 3.11
        - python-version: "3.11"
          airflow-version: "2.2"
        - python-version: "3.11"
          airflow-version: "2.3"
        - python-version: "3.11"
          airflow-version: "2.4"
        - python-version: "3.11"
          airflow-version: "2.5"
        - python-version: "3.11"
          airflow-version: "2.6"
        # Apache Airflow versions prior to 2.9.0 have not been tested with Python 3.12.
        # Official support for Python 3.12 and the corresponding constraints.txt are available only for Apache Airflow >= 2.9.0.
        # See: https://github.com/apache/airflow/tree/2.9.0?tab=readme-ov-file#requirements
        # See: https://github.com/apache/airflow/tree/2.8.4?tab=readme-ov-file#requirements
        - python-version: "3.12"
          airflow-version: "2.2"
        - python-version: "3.12"
          airflow-version: "2.3"
        - python-version: "3.12"
          airflow-version: "2.4"
        - python-version: "3.12"
          airflow-version: "2.5"
        - python-version: "3.12"
          airflow-version: "2.6"
        - python-version: "3.12"
          airflow-version: "2.7"
        - python-version: "3.12"
          airflow-version: "2.8"
  steps:
    - uses: actions/checkout@v4
      with:
        ref: ${{ github.event.pull_request.head.sha || github.ref }}

    - uses: actions/cache@v4
      with:
        path: |
          ~/.cache/pip
          .local/share/hatch/
        key: integration-${{ runner.os }}-${{ matrix.python-version }}-${{ matrix.airflow-version }}-${{ hashFiles('pyproject.toml') }}-${{ hashFiles('dagfactory/__init__.py') }}

    - name: Set up Python ${{ matrix.python-version }}
      uses: actions/setup-python@v5
      with:
        python-version: ${{ matrix.python-version }}

    - name: Install packages and dependencies
      run: |
        python -m pip install uv
        uv pip install --system hatch
        hatch -e tests.py${{ matrix.python-version }}-${{ matrix.airflow-version }} run pip freeze

    - name: Test DAG Factory against Airflow ${{ matrix.airflow-version }} and Python ${{ matrix.python-version }}
      run: |
        hatch run tests.py${{ matrix.python-version }}-${{ matrix.airflow-version }}:test-integration-setup
        hatch run tests.py${{ matrix.python-version }}-${{ matrix.airflow-version }}:test-integration
      env:
        # Quoted so generic YAML tooling keeps it a string, not a float.
        AIRFLOW__CORE__DAGBAG_IMPORT_TIMEOUT: "90.0"
        AIRFLOW_HOME: ${{ github.workspace }}
        CONFIG_ROOT_DIR: ${{ github.workspace }}/dags
        # NOTE: `env:` values are set verbatim by the runner, NOT shell-expanded,
        # so a trailing `:$PYTHONPATH` would be passed through as the literal
        # string "$PYTHONPATH". List only concrete paths here.
        PYTHONPATH: ${{ github.workspace }}:${{ github.workspace }}/examples

    - name: Upload coverage to Github
      uses: actions/upload-artifact@v4
      with:
        name: coverage-integration-test-${{ matrix.python-version }}-${{ matrix.airflow-version }}
        path: .coverage
        include-hidden-files: true

Code-Coverage:
if: github.event.action != 'labeled'
needs:
- Run-Unit-Tests
- Run-Integration-Tests
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v3
Expand Down
6 changes: 3 additions & 3 deletions examples/example_dag_factory.yml
Original file line number Diff line number Diff line change
Expand Up @@ -5,15 +5,15 @@ default:
end_date: 2018-03-05
retries: 1
retry_delay_sec: 300
on_success_callback_name: print_hello_from_callback
on_success_callback_file: $CONFIG_ROOT_DIR/print_hello.py
concurrency: 1
max_active_runs: 1
dagrun_timeout_sec: 600
default_view: "tree"
orientation: "LR"
schedule_interval: "0 1 * * *"
on_success_callback_name: print_hello
on_success_callback_file: $CONFIG_ROOT_DIR/print_hello.py
on_failure_callback_name: print_hello
on_failure_callback_name: print_hello_from_callback
on_failure_callback_file: $CONFIG_ROOT_DIR/print_hello.py

example_dag:
Expand Down
4 changes: 4 additions & 0 deletions examples/print_hello.py
Original file line number Diff line number Diff line change
@@ -1,2 +1,6 @@
def print_hello():
    """Print a static greeting to stdout."""
    greeting = "hello"
    print(greeting)


def print_hello_from_callback(context):
    """Print a greeting to stdout.

    ``context`` is accepted for callback-style invocation but is not used.
    """
    greeting = "hello from callback"
    print(greeting)
2 changes: 2 additions & 0 deletions pyproject.toml
Original file line number Diff line number Diff line change
Expand Up @@ -77,6 +77,8 @@ freeze = "pip freeze"
static-check = " pre-commit run --files dagfactory/*"
test = 'sh scripts/test/unit.sh'
test-cov = 'sh scripts/test/unit-cov.sh'
test-integration = 'sh scripts/test/integration.sh'
test-integration-setup = 'sh scripts/test/integration-setup.sh'

[project.urls]
Source = "https://github.com/astronomer/dag-factory"
Expand Down
10 changes: 10 additions & 0 deletions scripts/test/integration-setup.sh
Original file line number Diff line number Diff line change
@@ -0,0 +1,10 @@
#!/bin/bash

# Prepare a clean Airflow environment before the integration tests run.

set -v
set -x
set -e

# Remove stale Airflow state from a previous run (e.g. airflow.db,
# airflow.cfg) so the metadata database is created from scratch.
rm -rf airflow.*
# With `set -e`, the grep exiting non-zero aborts the script if no
# airflow package is installed.
pip freeze | grep airflow
# Reset the Airflow metadata database, then initialise it.
# NOTE(review): `db reset` may already re-initialise on recent Airflow
# versions, making `db init` redundant there — kept for older versions.
airflow db reset -y
airflow db init
20 changes: 20 additions & 0 deletions scripts/test/integration.sh
Original file line number Diff line number Diff line change
@@ -0,0 +1,20 @@
#!/bin/bash

# Run the integration test suite against a locally initialised Airflow.

set -x
set -e


# With `set -e`, the grep exiting non-zero aborts the script if no
# airflow package is installed.
pip freeze | grep airflow
# Quote expansions so a path containing spaces does not word-split.
echo "$AIRFLOW_HOME"
ls "$AIRFLOW_HOME"

# Verify the metadata database is reachable before running tests.
airflow db check

# -f/-n make the link idempotent: plain `ln -s` fails with "File exists"
# (and aborts the script via `set -e`) when the script is re-run.
ln -sfn examples dags

pytest -vv \
    --cov=dagfactory \
    --cov-report=term-missing \
    --cov-report=xml \
    --durations=0 \
    -m integration
54 changes: 52 additions & 2 deletions tests/test_example_dags.py
Original file line number Diff line number Diff line change
Expand Up @@ -2,27 +2,77 @@

from pathlib import Path

try:
from functools import cache
except ImportError:
from functools import lru_cache as cache

import airflow
import pytest
from airflow.models.dagbag import DagBag
from airflow.utils.db import create_default_connections
from airflow.utils.session import provide_session
from packaging.version import Version

from . import utils as test_utils

# Directory containing the example DAG definitions exercised by these tests.
EXAMPLE_DAGS_DIR = Path(__file__).parent.parent / "examples"
# .airflowignore file written inside the examples dir to make DagBag skip
# files unsupported by the installed Airflow version.
AIRFLOW_IGNORE_FILE = EXAMPLE_DAGS_DIR / ".airflowignore"
# Installed Airflow version, parsed for ordered comparisons.
AIRFLOW_VERSION = Version(airflow.__version__)

# Example DAG files to always exclude from the DagBag, regardless of version.
IGNORED_DAG_FILES = []

# Maps a minimum Airflow version to the example DAG files that require at
# least that version; older installations add these files to .airflowignore.
# The annotation is quoted: evaluating `dict[str, list[str]]` at runtime
# raises TypeError on Python 3.8 (part of the CI matrix) unless the module
# has `from __future__ import annotations` (not visible here — TODO confirm).
MIN_VER_DAG_FILE_VER: "dict[str, list[str]]" = {
    "2.3": ["example_dynamic_task_mapping.py"],
}


def test_no_import_errors():
@provide_session
def get_session(session=None):
    """Return an Airflow DB session with the default connections created.

    ``@provide_session`` injects ``session`` when the caller does not
    supply one; ``create_default_connections`` populates the standard
    Airflow connection entries in the metadata database.
    """
    create_default_connections(session)
    return session


@pytest.fixture()
def session():
    """Pytest fixture exposing ``get_session`` (DB session with default connections)."""
    return get_session()


@cache
def get_dag_bag() -> DagBag:
    """Create a DagBag after writing unsupported example files to .airflowignore.

    Example files that need a newer Airflow than the one installed (per
    MIN_VER_DAG_FILE_VER), plus everything in IGNORED_DAG_FILES, are written
    to AIRFLOW_IGNORE_FILE so DagBag skips them. Cached: the bag is built once
    per test session.

    Returns:
        DagBag: the loaded bag; asserts it is non-empty and free of import errors.
    """
    # "w" (not "w+"): the file is only written here, and truncated each run.
    # The handle is named `ignore_file` to avoid shadowing it with the
    # comprehension variable below.
    with open(AIRFLOW_IGNORE_FILE, "w") as ignore_file:
        for min_version, files in MIN_VER_DAG_FILE_VER.items():
            if AIRFLOW_VERSION < Version(min_version):
                print(f"Adding {files} to .airflowignore")
                ignore_file.writelines(f"{filename}\n" for filename in files)

        for dagfile in IGNORED_DAG_FILES:
            print(f"Adding {dagfile} to .airflowignore")
            ignore_file.write(f"{dagfile}\n")

    print(".airflowignore contents: ")
    print(AIRFLOW_IGNORE_FILE.read_text())

    db = DagBag(EXAMPLE_DAGS_DIR, include_examples=False)
    assert db.dags, "DagBag loaded no DAGs from the examples directory"
    # Surface the actual errors in the failure message for easier debugging.
    assert not db.import_errors, f"DagBag import errors: {db.import_errors}"
    return db


def get_dag_ids() -> "list[str]":
    """Return the IDs of every DAG loaded from the examples directory.

    The return annotation is quoted: evaluating `list[str]` at function
    definition time raises TypeError on Python 3.8 (part of the CI matrix)
    unless the module has `from __future__ import annotations` (not visible
    here — TODO confirm).
    """
    dag_bag = get_dag_bag()
    return dag_bag.dag_ids


@pytest.mark.integration
@pytest.mark.parametrize("dag_id", get_dag_ids())
def test_example_dag(session, dag_id: str):
    """Run one example DAG end-to-end using the live Airflow metadata DB."""
    dag = get_dag_bag().get_dag(dag_id)

    # ``DAG.test()`` only exists from Airflow 2.5 onwards:
    # https://airflow.apache.org/docs/apache-airflow/stable/release_notes.html#airflow-2-5-0-2022-12-02
    if AIRFLOW_VERSION < Version("2.5"):
        test_utils.run_dag(dag)
    else:
        dag.test()
Loading

0 comments on commit 55a1a2f

Please sign in to comment.