diff --git a/.circleci/config.yml b/.circleci/config.yml index 6ed1bd286..905543c5a 100644 --- a/.circleci/config.yml +++ b/.circleci/config.yml @@ -42,6 +42,10 @@ workflows: <<: *main_and_release_branches - build-docs: <<: *all_branches_and_version_tag + - test-python3-11: + <<: *all_branches_and_version_tag + requires: + - markdown-link-check - test: name: test-python<< matrix.python_version >> matrix: @@ -164,6 +168,42 @@ jobs: - ~/.pyenv/versions/ key: docs-{{ .Branch }}-{{ checksum "setup.cfg" }}-{{ checksum ".readthedocs.yaml" }} + # This runs unit tests for Python 3.11 only and excludes the Apache Hive tests because + # the Airflow Hive provider is currently excluded from Python 3.11 support in Airflow. + # See https://github.com/cloudera/python-sasl/issues/30 for details. + # PR to bring back the Airflow Apache Hive provider: https://github.com/apache/airflow/pull/32607 + test-python3-11: + description: "Test Python-3.11" + executor: + name: docker-executor + python_version: "3.11" + parallelism: 4 + steps: + - checkout + - restore_cache: + keys: + - deps-{{ .Branch }}-{{ checksum "setup.cfg" }}-{{ checksum "/home/circleci/.pyenv/version" }} + - deps-main-{{ checksum "setup.cfg" }}-{{ checksum "/home/circleci/.pyenv/version" }} + - run: + name: Install Dependencies + command: pip install -U -e .[test_python_3_11,tests] + - run: + name: Run tests + command: | + set -e + TEST_FILES=$(circleci tests \ + glob "tests/**/test_*.py" | \ + sed '/tests\/apache\/hive/d' | \ + circleci tests split --split-by=timings \ + ) + pytest --junit-xml=test-report/report.xml $TEST_FILES + - store_test_results: + path: test-report + - save_cache: + paths: + - ~/.cache/pip + - ~/.pyenv/versions/ + key: deps-{{ .Branch }}-{{ checksum "setup.cfg" }}-{{ checksum "/home/circleci/.pyenv/version" }} test: parameters: diff --git a/.circleci/scripts/pre_commit_readme_extra.py b/.circleci/scripts/pre_commit_readme_extra.py index 94618fed9..f62307970 100755 --- 
a/.circleci/scripts/pre_commit_readme_extra.py +++ b/.circleci/scripts/pre_commit_readme_extra.py @@ -10,7 +10,7 @@ config.read(repo_dir / "setup.cfg") all_extra = [] -extra_to_exclude = {"tests", "mypy", "docs"} +extra_to_exclude = {"tests", "mypy", "docs", "test_python_3_11"} all_extras = set(config["options.extras_require"].keys()) - extra_to_exclude readme_path = repo_dir / "README.rst" diff --git a/.circleci/scripts/pre_commit_setup_cfg_python_3_11_all_extra.py b/.circleci/scripts/pre_commit_setup_cfg_python_3_11_all_extra.py new file mode 100644 index 000000000..95957172b --- /dev/null +++ b/.circleci/scripts/pre_commit_setup_cfg_python_3_11_all_extra.py @@ -0,0 +1,34 @@ +#!/usr/bin/env python3 +""" +Pre-commit hook to sync a "test_python_3_11" extra in setup.cfg. +It will contain all the dependencies apart from the extras excluded below. +""" +import configparser +from pathlib import Path + +repo_dir = Path(__file__).parent.parent.parent + +config = configparser.ConfigParser(strict=False) +config.read(repo_dir / "setup.cfg") + +extra_to_exclude = {"tests", "mypy", "docs", "all", "test_python_3_11", "apache.hive"} +expected_test_python_3_11_extra = { + req + for key, extra_value in config["options.extras_require"].items() + for req in extra_value.split() + if key not in extra_to_exclude +} +found_test_python_3_11_extra = set(config["options.extras_require"].get("test_python_3_11", "").split()) +if not found_test_python_3_11_extra: + raise SystemExit("Missing 'test_python_3_11' extra in setup.cfg") + +""" +Use the XOR operator ^ so the comparison is symmetric: set A - set B would only show +entries of A absent from B, but we want to see the difference in both directions. +""" +diff_extras = expected_test_python_3_11_extra ^ found_test_python_3_11_extra +if diff_extras: + diff_extras_str = "\n \t" + "\n \t".join(sorted(diff_extras)) + raise SystemExit( + f"'test_python_3_11' extra in setup.cfg is out of sync with the expected dependencies:\n {diff_extras_str}" + ) diff --git a/setup.cfg 
b/setup.cfg index 5d9f8f19b..c8a478234 100644 --- a/setup.cfg +++ b/setup.cfg @@ -138,6 +138,27 @@ all = paramiko snowflake-sqlalchemy>=1.4.4 # Temporary solution for https://github.com/astronomer/astronomer-providers/issues/958, we should pin apache-airflow-providers-snowflake version after it pins this package to great than or equal to 1.4.4. +test_python_3_11 = + aiobotocore>=2.1.1 + apache-airflow-providers-amazon>=3.0.0 + apache-airflow-providers-apache-livy + apache-airflow-providers-cncf-kubernetes>=4 + apache-airflow-providers-databricks>=2.2.0 + apache-airflow-providers-google>=8.1.0 + apache-airflow-providers-http + apache-airflow-providers-snowflake + apache-airflow-providers-sftp + apache-airflow-providers-microsoft-azure + asyncssh>=2.12.0 + databricks-sql-connector>=2.0.4;python_version>='3.10' + apache-airflow-providers-dbt-cloud>=2.1.0 + gcloud-aio-bigquery + gcloud-aio-storage + kubernetes_asyncio + openlineage-airflow>=0.12.0 + paramiko + snowflake-sqlalchemy>=1.4.4 # Temporary solution for https://github.com/astronomer/astronomer-providers/issues/958, we should pin apache-airflow-providers-snowflake version after it pins this package to great than or equal to 1.4.4. 
+ [options.packages.find] include = astronomer.* diff --git a/tests/core/triggers/test_external_task.py b/tests/core/triggers/test_external_task.py index f2171bf99..416c90dc4 100644 --- a/tests/core/triggers/test_external_task.py +++ b/tests/core/triggers/test_external_task.py @@ -2,7 +2,6 @@ from unittest import mock from unittest.mock import AsyncMock -import asynctest import pytest from airflow import AirflowException from airflow.operators.empty import EmptyOperator @@ -222,7 +221,7 @@ async def test_deployment_task_exception(self, mock_run): assert TriggerEvent({"state": "error", "message": "Test exception"}) == actual @pytest.mark.asyncio - @asynctest.patch("astronomer.providers.http.hooks.http.HttpHookAsync.run") + @mock.patch("astronomer.providers.http.hooks.http.HttpHookAsync.run") async def test_deployment_complete(self, mock_run): """Assert ExternalDeploymentTaskTrigger runs and complete the run in success state""" mock.AsyncMock(HttpHookAsync)