diff --git a/.dockerignore b/.dockerignore index 31ef8bb9ac26..bdbf5fb0883a 100644 --- a/.dockerignore +++ b/.dockerignore @@ -34,6 +34,7 @@ !chart !docs !licenses +!providers/ # Add those folders to the context so that they are available in the CI container !scripts diff --git a/.github/boring-cyborg.yml b/.github/boring-cyborg.yml index be62d541f0de..019ea900f8e1 100644 --- a/.github/boring-cyborg.yml +++ b/.github/boring-cyborg.yml @@ -19,532 +19,532 @@ labelPRBasedOnFilePath: provider:airbyte: - - airflow/providers/airbyte/**/* + - providers/src/airflow/providers/airbyte/**/* - docs/apache-airflow-providers-airbyte/**/* - - tests/providers/airbyte/**/* - - tests/system/providers/airbyte/**/* + - providers/tests/airbyte/**/* + - providers/tests/system/airbyte/**/* provider:alibaba: - - airflow/providers/alibaba/**/* + - providers/src/airflow/providers/alibaba/**/* - docs/apache-airflow-providers-alibaba/**/* - - tests/providers/alibaba/**/* - - tests/system/providers/alibaba/**/* + - providers/tests/alibaba/**/* + - providers/tests/system/alibaba/**/* provider:amazon-aws: - - airflow/providers/amazon/aws/**/* - - tests/providers/amazon/aws/**/* + - providers/src/airflow/providers/amazon/aws/**/* + - providers/tests/amazon/aws/**/* - docs/apache-airflow-providers-amazon/**/* - - tests/system/providers/amazon/aws/**/* + - providers/tests/system/amazon/aws/**/* provider:apache-beam: - - airflow/providers/apache/beam/**/* + - providers/src/airflow/providers/apache/beam/**/* - docs/apache-airflow-providers-apache-beam/**/* - - tests/providers/apache/beam/**/* - - tests/system/providers/apache/beam/**/* + - providers/tests/apache/beam/**/* + - providers/tests/system/apache/beam/**/* provider:apache-cassandra: - - airflow/providers/apache/cassandra/**/* + - providers/src/airflow/providers/apache/cassandra/**/* - docs/apache-airflow-providers-apache-cassandra/**/* - - tests/providers/apache/cassandra/**/* - - tests/system/providers/apache/cassandra/**/* + - providers/tests/apache/cassandra/**/* + - providers/tests/system/apache/cassandra/**/* provider:apache-drill: - - airflow/providers/apache/drill/**/* + - providers/src/airflow/providers/apache/drill/**/* - docs/apache-airflow-providers-apache-drill/**/* - - tests/providers/apache/drill/**/* - - tests/system/providers/apache/drill/**/* + - providers/tests/apache/drill/**/* + - providers/tests/system/apache/drill/**/* provider:apache-druid: - - airflow/providers/apache/druid/**/* + - providers/src/airflow/providers/apache/druid/**/* - docs/apache-airflow-providers-apache-druid/**/* - - tests/providers/apache/druid/**/* - - tests/system/providers/apache/druid/**/* + - providers/tests/apache/druid/**/* + - providers/tests/system/apache/druid/**/* provider:apache-flink: - - airflow/providers/apache/flink/**/* + - providers/src/airflow/providers/apache/flink/**/* - docs/apache-airflow-providers-apache-flink/**/* - - tests/providers/apache/flink/**/* + - providers/tests/apache/flink/**/* provider:apache-hdfs: - - airflow/providers/apache/hdfs/**/* + - providers/src/airflow/providers/apache/hdfs/**/* - docs/apache-airflow-providers-apache-hdfs/**/* - - tests/providers/apache/hdfs/**/* + - providers/tests/apache/hdfs/**/* provider:apache-hive: - - airflow/providers/apache/hive/**/* + - providers/src/airflow/providers/apache/hive/**/* - docs/apache-airflow-providers-apache-hive/**/* - - tests/providers/apache/hive/**/* - - tests/system/providers/apache/hive/**/* + - providers/tests/apache/hive/**/* + - providers/tests/system/apache/hive/**/* 
provider:apache-iceberg: - - airflow/providers/apache/iceberg/**/* + - providers/src/airflow/providers/apache/iceberg/**/* - docs/apache-airflow-providers-apache-iceberg/**/* - - tests/providers/apache/iceberg/**/* - - tests/system/providers/apache/iceberg/**/* + - providers/tests/apache/iceberg/**/* + - providers/tests/system/apache/iceberg/**/* provider:apache-impala: - - airflow/providers/apache/impala/**/* + - providers/src/airflow/providers/apache/impala/**/* - docs/apache-airflow-providers-apache-impala/**/* - - tests/providers/apache/impala/**/* + - providers/tests/apache/impala/**/* provider:apache-kafka: - - airflow/providers/apache/kafka/**/* + - providers/src/airflow/providers/apache/kafka/**/* - docs/apache-airflow-providers-apache-kafka/**/* - - tests/providers/apache/kafka/**/* - - tests/system/providers/apache/kafka/**/* + - providers/tests/apache/kafka/**/* + - providers/tests/system/apache/kafka/**/* provider:apache-kylin: - - airflow/providers/apache/kylin/**/* + - providers/src/airflow/providers/apache/kylin/**/* - docs/apache-airflow-providers-apache-kylin/**/* - - tests/providers/apache/kylin/**/* - - tests/system/providers/apache/kylin/**/* + - providers/tests/apache/kylin/**/* + - providers/tests/system/apache/kylin/**/* provider:apache-livy: - - airflow/providers/apache/livy/**/* + - providers/src/airflow/providers/apache/livy/**/* - docs/apache-airflow-providers-apache-livy/**/* - - tests/providers/apache/livy/**/* - - tests/system/providers/apache/livy/**/* + - providers/tests/apache/livy/**/* + - providers/tests/system/apache/livy/**/* provider:apache-pig: - - airflow/providers/apache/pig/**/* + - providers/src/airflow/providers/apache/pig/**/* - docs/apache-airflow-providers-apache-pig/**/* - - tests/providers/apache/pig/**/* - - tests/system/providers/apache/pig/**/* + - providers/tests/apache/pig/**/* + - providers/tests/system/apache/pig/**/* provider:apache-pinot: - - airflow/providers/apache/pinot/**/* + - providers/src/airflow/providers/apache/pinot/**/* - docs/apache-airflow-providers-apache-pinot/**/* - - tests/providers/apache/pinot/**/* - - tests/system/providers/apache/pinot/**/* + - providers/tests/apache/pinot/**/* + - providers/tests/system/apache/pinot/**/* provider:apache-spark: - - airflow/providers/apache/spark/**/* + - providers/src/airflow/providers/apache/spark/**/* - docs/apache-airflow-providers-apache-spark/**/* - - tests/providers/apache/spark/**/* - - tests/system/providers/apache/spark/**/* + - providers/tests/apache/spark/**/* + - providers/tests/system/apache/spark/**/* provider:apprise: - - airflow/providers/apprise/**/* + - providers/src/airflow/providers/apprise/**/* - docs/apache-airflow-providers-apprise/**/* - - tests/providers/apprise/**/* + - providers/tests/apprise/**/* provider:arangodb: - - airflow/providers/arangodb/**/* + - providers/src/airflow/providers/arangodb/**/* - docs/apache-airflow-providers-arangodb/**/* - - tests/providers/arangodb/**/* + - providers/tests/arangodb/**/* provider:asana: - - airflow/providers/asana/**/* + - providers/src/airflow/providers/asana/**/* - docs/apache-airflow-providers-asana/**/* - - tests/providers/asana/**/* - - tests/system/providers/asana/**/* + - providers/tests/asana/**/* + - providers/tests/system/asana/**/* provider:atlassian-jira: - - airflow/providers/atlassian/jira/**/* + - providers/src/airflow/providers/atlassian/jira/**/* - docs/apache-airflow-providers-atlassian-jira/**/* - - tests/providers/atlassian/jira/**/* + - providers/tests/atlassian/jira/**/* provider:celery: - 
- airflow/providers/celery/**/* + - providers/src/airflow/providers/celery/**/* - docs/apache-airflow-providers-celery/**/* - - tests/providers/celery/**/* + - providers/tests/celery/**/* provider:cloudant: - - airflow/providers/cloudant/**/* + - providers/src/airflow/providers/cloudant/**/* - docs/apache-airflow-providers-cloudant/**/* - - tests/providers/cloudant/**/* + - providers/tests/cloudant/**/* provider:cncf-kubernetes: - airflow/**/kubernetes_*.py - airflow/example_dags/example_kubernetes_executor.py - - airflow/providers/cncf/kubernetes/**/* - - airflow/providers/celery/executors/celery_kubernetes_executor.py + - providers/src/airflow/providers/cncf/kubernetes/**/* + - providers/src/airflow/providers/celery/executors/celery_kubernetes_executor.py - docs/apache-airflow-providers-cncf-kubernetes/**/* - kubernetes_tests/**/* - - tests/providers/cncf/kubernetes/**/* - - tests/system/providers/cncf/kubernetes/**/* + - providers/tests/cncf/kubernetes/**/* + - providers/tests/system/cncf/kubernetes/**/* provider:cohere: - - airflow/providers/cohere/**/* + - providers/src/airflow/providers/cohere/**/* - docs/apache-airflow-providers-cohere/**/* - - tests/providers/cohere/**/* - - tests/system/providers/cohere/**/* + - providers/tests/cohere/**/* + - providers/tests/system/cohere/**/* provider:common-compat: - - airflow/providers/common/compat/**/* + - providers/src/airflow/providers/common/compat/**/* - docs/apache-airflow-providers-common-compat/**/* - - tests/providers/common/compat/**/* + - providers/tests/common/compat/**/* provider:common-io: - - airflow/providers/common/io/**/* + - providers/src/airflow/providers/common/io/**/* - docs/apache-airflow-providers-common-io/**/* - - tests/system/providers/common/io/**/* + - providers/tests/system/common/io/**/* provider:common-sql: - - airflow/providers/common/sql/**/* + - providers/src/airflow/providers/common/sql/**/* - docs/apache-airflow-providers-common-sql/**/* - - tests/providers/common/sql/**/* - - tests/system/providers/common/sql/**/* + - providers/tests/common/sql/**/* + - providers/tests/system/common/sql/**/* provider:standard: - - airflow/providers/standard/**/* + - providers/src/airflow/providers/standard/**/* - docs/apache-airflow-providers-standard/**/* - - tests/providers/standard/**/* + - providers/tests/standard/**/* provider:databricks: - - airflow/providers/databricks/**/* + - providers/src/airflow/providers/databricks/**/* - docs/apache-airflow-providers-databricks/**/* - - tests/providers/databricks/**/* - - tests/system/providers/databricks/**/* + - providers/tests/databricks/**/* + - providers/tests/system/databricks/**/* provider:datadog: - - airflow/providers/datadog/**/* + - providers/src/airflow/providers/datadog/**/* - docs/apache-airflow-providers-datadog/**/* - - tests/providers/datadog/**/* + - providers/tests/datadog/**/* provider:dbt-cloud: - - airflow/providers/dbt/cloud/**/* + - providers/src/airflow/providers/dbt/cloud/**/* - docs/apache-airflow-providers-dbt-cloud/**/* - - tests/providers/dbt/cloud/**/* - - tests/system/providers/dbt/cloud/**/* + - providers/tests/dbt/cloud/**/* + - providers/tests/system/dbt/cloud/**/* provider:dingding: - - airflow/providers/dingding/**/* + - providers/src/airflow/providers/dingding/**/* - docs/apache-airflow-providers-dingding/**/* - - tests/providers/dingding/**/* - - tests/system/providers/dingding/**/* + - providers/tests/dingding/**/* + - providers/tests/system/dingding/**/* provider:discord: - - airflow/providers/discord/**/* + - 
providers/src/airflow/providers/discord/**/* - docs/apache-airflow-providers-discord/**/* - - tests/providers/discord/**/* + - providers/tests/discord/**/* provider:docker: - - airflow/providers/docker/**/* + - providers/src/airflow/providers/docker/**/* - docs/apache-airflow-providers-docker/**/* - - tests/providers/docker/**/* - - tests/system/providers/docker/**/* + - providers/tests/docker/**/* + - providers/tests/system/docker/**/* provider:elasticsearch: - - airflow/providers/elasticsearch/**/* + - providers/src/airflow/providers/elasticsearch/**/* - docs/apache-airflow-providers-elasticsearch/**/* - - tests/providers/elasticsearch/**/* - - tests/system/providers/elasticsearch/**/* + - providers/tests/elasticsearch/**/* + - providers/tests/system/elasticsearch/**/* provider:exasol: - - airflow/providers/exasol/**/* + - providers/src/airflow/providers/exasol/**/* - docs/apache-airflow-providers-exasol/**/* - - tests/providers/exasol/**/* + - providers/tests/exasol/**/* provider:fab: - - airflow/providers/fab/**/* + - providers/src/airflow/providers/fab/**/* - docs/apache-airflow-providers-fab/**/* - - tests/providers/fab/**/* + - providers/tests/fab/**/* provider:facebook: - - airflow/providers/facebook/**/* + - providers/src/airflow/providers/facebook/**/* - docs/apache-airflow-providers-facebook/**/* - - tests/providers/facebook/**/* + - providers/tests/facebook/**/* provider:ftp: - - airflow/providers/ftp/**/* + - providers/src/airflow/providers/ftp/**/* - docs/apache-airflow-providers-ftp/**/* - - tests/providers/ftp/**/* - - tests/system/providers/ftp/**/* + - providers/tests/ftp/**/* + - providers/tests/system/ftp/**/* provider:github: - - airflow/providers/github/**/* + - providers/src/airflow/providers/github/**/* - docs/apache-airflow-providers-github/**/* - - tests/providers/github/**/* - - tests/system/providers/github/**/* + - providers/tests/github/**/* + - providers/tests/system/github/**/* provider:google: - - airflow/providers/google/**/* + - providers/src/airflow/providers/google/**/* - docs/apache-airflow-providers-google/**/* - - tests/providers/google/**/* - - tests/system/providers/google/**/* + - providers/tests/google/**/* + - providers/tests/system/google/**/* provider:grpc: - - airflow/providers/grpc/**/* + - providers/src/airflow/providers/grpc/**/* - docs/apache-airflow-providers-grpc/**/* - - tests/providers/grpc/**/* + - providers/tests/grpc/**/* provider:hashicorp: - - airflow/providers/hashicorp/**/* + - providers/src/airflow/providers/hashicorp/**/* - docs/apache-airflow-providers-hashicorp/**/* - - tests/providers/hashicorp/**/* + - providers/tests/hashicorp/**/* provider:http: - - airflow/providers/http/**/* + - providers/src/airflow/providers/http/**/* - docs/apache-airflow-providers-http/**/* - - tests/providers/http/**/* - - tests/system/providers/http/**/* + - providers/tests/http/**/* + - providers/tests/system/http/**/* provider:imap: - - airflow/providers/imap/**/* + - providers/src/airflow/providers/imap/**/* - docs/apache-airflow-providers-imap/**/* - - tests/providers/imap/**/* + - providers/tests/imap/**/* provider:influxdb: - - airflow/providers/influxdb/**/* + - providers/src/airflow/providers/influxdb/**/* - docs/apache-airflow-providers-influxdb/**/* - - tests/providers/influxdb/**/* - - tests/system/providers/influxdb/**/* + - providers/tests/influxdb/**/* + - providers/tests/system/influxdb/**/* provider:jdbc: - - airflow/providers/jdbc/**/* + - providers/src/airflow/providers/jdbc/**/* - docs/apache-airflow-providers-jdbc/**/* - - 
tests/providers/jdbc/**/* - - tests/system/providers/jdbc/**/* + - providers/tests/jdbc/**/* + - providers/tests/system/jdbc/**/* provider:jenkins: - - airflow/providers/jenkins/**/* + - providers/src/airflow/providers/jenkins/**/* - docs/apache-airflow-providers-jenkins/**/* - - tests/providers/jenkins/**/* - - tests/system/providers/jenkins/**/* + - providers/tests/jenkins/**/* + - providers/tests/system/jenkins/**/* provider:microsoft-azure: - - airflow/providers/microsoft/azure/**/* - - tests/providers/microsoft/azure/**/* + - providers/src/airflow/providers/microsoft/azure/**/* + - providers/tests/microsoft/azure/**/* - docs/apache-airflow-providers-microsoft-azure/**/* - - tests/system/providers/microsoft/azure/**/* + - providers/tests/system/microsoft/azure/**/* provider:microsoft-mssql: - - airflow/providers/microsoft/mssql/**/* + - providers/src/airflow/providers/microsoft/mssql/**/* - docs/apache-airflow-providers-microsoft-mssql/**/* - - tests/providers/microsoft/mssql/**/* - - tests/system/providers/microsoft/mssql/**/* + - providers/tests/microsoft/mssql/**/* + - providers/tests/system/microsoft/mssql/**/* provider:microsoft-psrp: - - airflow/providers/microsoft/psrp/**/* + - providers/src/airflow/providers/microsoft/psrp/**/* - docs/apache-airflow-providers-microsoft-psrp/**/* - - tests/providers/microsoft/psrp/**/* + - providers/tests/microsoft/psrp/**/* provider:microsoft-winrm: - - airflow/providers/microsoft/winrm/**/* + - providers/src/airflow/providers/microsoft/winrm/**/* - docs/apache-airflow-providers-microsoft-winrm/**/* - - tests/providers/microsoft/winrm/**/* - - tests/system/providers/microsoft/winrm/**/* + - providers/tests/microsoft/winrm/**/* + - providers/tests/system/microsoft/winrm/**/* provider:mongo: - - airflow/providers/mongo/**/* + - providers/src/airflow/providers/mongo/**/* - docs/apache-airflow-providers-mongo/**/* - - tests/providers/mongo/**/* + - providers/tests/mongo/**/* provider:mysql: - - airflow/providers/mysql/**/* + - providers/src/airflow/providers/mysql/**/* - docs/apache-airflow-providers-mysql/**/* - - tests/providers/mysql/**/* - - tests/system/providers/mysql/**/* + - providers/tests/mysql/**/* + - providers/tests/system/mysql/**/* provider:neo4j: - - airflow/providers/neo4j/**/* + - providers/src/airflow/providers/neo4j/**/* - docs/apache-airflow-providers-neo4j/**/* - - tests/providers/neo4j/**/* - - tests/system/providers/neo4j/**/* + - providers/tests/neo4j/**/* + - providers/tests/system/neo4j/**/* provider:odbc: - - airflow/providers/odbc/**/* + - providers/src/airflow/providers/odbc/**/* - docs/apache-airflow-providers-odbc/**/* - - tests/providers/odbc/**/* + - providers/tests/odbc/**/* provider:openai: - - airflow/providers/openai/**/* + - providers/src/airflow/providers/openai/**/* - docs/apache-airflow-providers-openai/**/* - - tests/providers/openai/**/* - - tests/system/providers/openai/**/* + - providers/tests/openai/**/* + - providers/tests/system/openai/**/* provider:openfaas: - - airflow/providers/openfaas/**/* + - providers/src/airflow/providers/openfaas/**/* - docs/apache-airflow-providers-openfaas/**/* - - tests/providers/openfaas/**/* + - providers/tests/openfaas/**/* provider:openlineage: - - airflow/providers/openlineage/**/* + - providers/src/airflow/providers/openlineage/**/* - docs/apache-airflow-providers-openlineage/**/* - - tests/providers/openlineage/**/* + - providers/tests/openlineage/**/* provider:opensearch: - - airflow/providers/opensearch/**/* + - providers/src/airflow/providers/opensearch/**/* - 
docs/apache-airflow-providers-opensearch/**/* - - tests/providers/opensearch/**/* - - tests/system/providers/opensearch/**/* + - providers/tests/opensearch/**/* + - providers/tests/system/opensearch/**/* provider:opsgenie: - - airflow/providers/opsgenie/**/* + - providers/src/airflow/providers/opsgenie/**/* - docs/apache-airflow-providers-opsgenie/**/* - - tests/providers/opsgenie/**/* - - tests/system/providers/opsgenie/**/* + - providers/tests/opsgenie/**/* + - providers/tests/system/opsgenie/**/* provider:Oracle: - - airflow/providers/oracle/**/* + - providers/src/airflow/providers/oracle/**/* - docs/apache-airflow-providers-oracle/**/* - - tests/providers/oracle/**/* + - providers/tests/oracle/**/* provider:pagerduty: - - airflow/providers/pagerduty/**/* + - providers/src/airflow/providers/pagerduty/**/* - docs/apache-airflow-providers-pagerduty/**/* - - tests/providers/pagerduty/**/* + - providers/tests/pagerduty/**/* provider:papermill: - - airflow/providers/papermill/**/* + - providers/src/airflow/providers/papermill/**/* - docs/apache-airflow-providers-papermill/**/* - - tests/providers/papermill/**/* - - tests/system/providers/papermill/**/* + - providers/tests/papermill/**/* + - providers/tests/system/papermill/**/* provider:pgvector: - - airflow/providers/pgvector/**/* + - providers/src/airflow/providers/pgvector/**/* - docs/apache-airflow-providers-pgvector/**/* - - tests/providers/pgvector/**/* - - tests/system/providers/pgvector/**/* + - providers/tests/pgvector/**/* + - providers/tests/system/pgvector/**/* provider:pinecone: - - airflow/providers/pinecone/**/* + - providers/src/airflow/providers/pinecone/**/* - docs/apache-airflow-providers-pinecone/**/* - - tests/providers/pinecone/**/* - - tests/system/providers/pinecone/**/* + - providers/tests/pinecone/**/* + - providers/tests/system/pinecone/**/* provider:postgres: - - airflow/providers/postgres/**/* + - providers/src/airflow/providers/postgres/**/* - docs/apache-airflow-providers-postgres/**/* - - tests/providers/postgres/**/* - - tests/system/providers/postgres/**/* + - providers/tests/postgres/**/* + - providers/tests/system/postgres/**/* provider:presto: - - airflow/providers/presto/**/* + - providers/src/airflow/providers/presto/**/* - docs/apache-airflow-providers-presto/**/* - - tests/providers/presto/**/* - - tests/system/providers/presto/**/* + - providers/tests/presto/**/* + - providers/tests/system/presto/**/* provider:qdrant: - - airflow/providers/qdrant/**/* + - providers/src/airflow/providers/qdrant/**/* - docs/apache-airflow-providers-qdrant/**/* - - tests/providers/qdrant/**/* - - tests/system/providers/qdrant/**/* + - providers/tests/qdrant/**/* + - providers/tests/system/qdrant/**/* provider:redis: - - airflow/providers/redis/**/* + - providers/src/airflow/providers/redis/**/* - docs/apache-airflow-providers-redis/**/* - - tests/providers/redis/**/* + - providers/tests/redis/**/* provider:salesforce: - - airflow/providers/salesforce/**/* + - providers/src/airflow/providers/salesforce/**/* - docs/apache-airflow-providers-salesforce/**/* - - tests/providers/salesforce/**/* - - tests/system/providers/salesforce/**/* + - providers/tests/salesforce/**/* + - providers/tests/system/salesforce/**/* provider:samba: - - airflow/providers/samba/**/* + - providers/src/airflow/providers/samba/**/* - docs/apache-airflow-providers-samba/**/* - - tests/providers/samba/**/* + - providers/tests/samba/**/* provider:segment: - - airflow/providers/segment/**/* + - providers/src/airflow/providers/segment/**/* - 
docs/apache-airflow-providers-segment/**/* - - tests/providers/segment/**/* + - providers/tests/segment/**/* provider:sendgrid: - - airflow/providers/segment/**/* + - providers/src/airflow/providers/sendgrid/**/* - - docs/apache-airflow-providers-segment/**/* + - docs/apache-airflow-providers-sendgrid/**/* - - tests/providers/segment/**/* + - providers/tests/sendgrid/**/* provider:sftp: - - airflow/providers/sftp/**/* + - providers/src/airflow/providers/sftp/**/* - docs/apache-airflow-providers-sftp/**/* - - tests/providers/sftp/**/* + - providers/tests/sftp/**/* provider:singularity: - - airflow/providers/singularity/**/* + - providers/src/airflow/providers/singularity/**/* - docs/apache-airflow-providers-singularity/**/* - - tests/providers/singularity/**/* - - tests/system/providers/singularity/**/* + - providers/tests/singularity/**/* + - providers/tests/system/singularity/**/* provider:slack: - - airflow/providers/slack/**/* + - providers/src/airflow/providers/slack/**/* - docs/apache-airflow-providers-slack/**/* - - tests/providers/slack/**/* - - tests/system/providers/slack/**/* + - providers/tests/slack/**/* + - providers/tests/system/slack/**/* provider:smtp: - - airflow/providers/smtp/**/* + - providers/src/airflow/providers/smtp/**/* - docs/apache-airflow-providers-smtp/**/* - - tests/providers/smtp/**/* + - providers/tests/smtp/**/* provider:snowflake: - - airflow/providers/snowflake/**/* + - providers/src/airflow/providers/snowflake/**/* - docs/apache-airflow-providers-snowflake/**/* - - tests/providers/snowflake/**/* - - tests/system/providers/snowflake/**/* + - providers/tests/snowflake/**/* + - providers/tests/system/snowflake/**/* provider:sqlite: - - airflow/providers/sqlite/**/* + - providers/src/airflow/providers/sqlite/**/* - docs/apache-airflow-providers-sqlite/**/* - - tests/providers/sqlite/**/* - - tests/system/providers/sqlite/**/* + - providers/tests/sqlite/**/* + - providers/tests/system/sqlite/**/* provider:ssh: - - airflow/providers/ssh/**/* + - providers/src/airflow/providers/ssh/**/* - docs/apache-airflow-providers-ssh/**/* - - tests/providers/ssh/**/* + - providers/tests/ssh/**/* provider:tableau: - - airflow/providers/tableau/**/* + - providers/src/airflow/providers/tableau/**/* - docs/apache-airflow-providers-tableau/**/* - - tests/providers/tableau/**/* - - tests/system/providers/tableau/**/* + - providers/tests/tableau/**/* + - providers/tests/system/tableau/**/* provider:telegram: - - airflow/providers/telegram/**/* + - providers/src/airflow/providers/telegram/**/* - docs/apache-airflow-providers-telegram/**/* - - tests/providers/telegram/**/* - - tests/system/providers/telegram/**/* + - providers/tests/telegram/**/* + - providers/tests/system/telegram/**/* provider:teradata: - - airflow/providers/teradata/**/* + - providers/src/airflow/providers/teradata/**/* - docs/apache-airflow-providers-teradata/**/* - - tests/providers/teradata/**/* - - tests/system/providers/teradata/**/* + - providers/tests/teradata/**/* + - providers/tests/system/teradata/**/* provider:trino: - - airflow/providers/trino/**/* + - providers/src/airflow/providers/trino/**/* - docs/apache-airflow-providers-trino/**/* - - tests/providers/trino/**/* - - tests/system/providers/trino/**/* + - providers/tests/trino/**/* + - providers/tests/system/trino/**/* provider:vertica: - - airflow/providers/vertica/**/* + - providers/src/airflow/providers/vertica/**/* - docs/apache-airflow-providers-vertica/**/* - - tests/providers/vertica/**/* + - providers/tests/vertica/**/* provider:weaviate: - - airflow/providers/weaviate/**/* + - 
providers/src/airflow/providers/weaviate/**/* - docs/apache-airflow-providers-weaviate/**/* - - tests/providers/weaviate/**/* - - tests/system/providers/weaviate/**/* + - providers/tests/weaviate/**/* + - providers/tests/system/weaviate/**/* provider:yandex: - - airflow/providers/yandex/**/* + - providers/src/airflow/providers/yandex/**/* - docs/apache-airflow-providers-yandex/**/* - - tests/providers/yandex/**/* - - tests/system/providers/yandex/**/* + - providers/tests/yandex/**/* + - providers/tests/system/yandex/**/* provider:ydb: - - airflow/providers/ydb/**/* + - providers/src/airflow/providers/ydb/**/* - docs/apache-airflow-providers-ydb/**/* - - tests/providers/ydb/**/* - - tests/system/providers/ydb/**/* + - providers/tests/ydb/**/* + - providers/tests/system/ydb/**/* provider:zendesk: - - airflow/providers/zendesk/**/* + - providers/src/airflow/providers/zendesk/**/* - docs/apache-airflow-providers-zendesk/**/* - - tests/providers/zendesk/**/* - - tests/system/providers/zendesk/**/* + - providers/tests/zendesk/**/* + - providers/tests/system/zendesk/**/* area:providers: - - airflow/providers/**/* + - providers/src/airflow/providers/**/* - docs/apache-airflow-providers-*/**/* - - tests/providers/**/* - - tests/system/providers/**/* + - providers/tests/**/* + - providers/tests/system/**/* area:API: - airflow/api/**/* @@ -611,10 +611,10 @@ labelPRBasedOnFilePath: - docs/apache-airflow/administration-and-deployment/lineage.rst area:Logging: - - airflow/providers/**/log/* + - providers/src/airflow/providers/**/log/* - airflow/utils/log/**/* - docs/apache-airflow/administration-and-deployment/logging-monitoring/logging-*.rst - - tests/providers/**/log/* + - providers/tests/**/log/* - tests/utils/log/**/* area:Plugins: @@ -638,9 +638,9 @@ labelPRBasedOnFilePath: area:Secrets: - airflow/secrets/**/* - - airflow/providers/**/secrets/* + - providers/src/airflow/providers/**/secrets/* - tests/secrets/**/* - - tests/providers/**/secrets/* + - providers/tests/**/secrets/* - docs/apache-airflow/security/secrets/**/* area:Triggerer: diff --git a/.github/workflows/basic-tests.yml b/.github/workflows/basic-tests.yml index 49d6a7245bc8..509d7066f6d3 100644 --- a/.github/workflows/basic-tests.yml +++ b/.github/workflows/basic-tests.yml @@ -195,9 +195,10 @@ jobs: env: HATCH_ENV: "test" working-directory: ./clients/python - - name: "Prepare FAB+standard provider packages: wheel" + - name: "Prepare FAB+standard provider packages: wheel" run: > - breeze release-management prepare-provider-packages fab --package-format wheel --skip-tag-check + breeze release-management prepare-provider-packages fab standard + --package-format wheel --skip-tag-check - name: "Install Airflow with fab for webserver tests" run: pip install . 
dist/apache_airflow_providers_fab-*.whl - name: "Prepare Standard provider packages: wheel" diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index ce557dba431b..748a85e860bb 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -172,7 +172,7 @@ repos: name: Check and update common.sql API stubs entry: ./scripts/ci/pre_commit/update_common_sql_api_stubs.py language: python - files: ^scripts/ci/pre_commit/update_common_sql_api\.py|^airflow/providers/common/sql/.*\.pyi?$ + files: ^scripts/ci/pre_commit/update_common_sql_api_stubs\.py|^providers/src/airflow/providers/common/sql/.*\.pyi?$ additional_dependencies: ['rich>=12.4.4', 'mypy==1.9.0', 'black==23.10.0', 'jinja2'] pass_filenames: false require_serial: true @@ -225,7 +225,7 @@ repos: entry: ./scripts/ci/pre_commit/check_deferrable_default.py pass_filenames: false additional_dependencies: ["libcst>=1.1.0"] - files: ^airflow/.*/sensors/.*\.py$|^airflow/.*/operators/.*\.py$ + files: ^(providers/src/)?airflow/.*/(sensors|operators)/.*\.py$ - repo: https://github.com/asottile/blacken-docs rev: 1.18.0 hooks: @@ -326,7 +326,7 @@ repos: exclude: ^.*/.*_vendor/|^airflow/www/static/css/material-icons\.css$|^images/.*$|^RELEASE_NOTES\.txt$|^.*package-lock\.json$|^.*/kinglear\.txt$|^.*pnpm-lock\.yaml$ args: - --ignore-words=docs/spelling_wordlist.txt - - --skip=airflow/providers/*/*.rst,airflow/www/*.log,docs/*/commits.rst,docs/apache-airflow/tutorial/pipeline_example.csv,*.min.js,*.lock,INTHEWILD.md + - --skip=providers/src/airflow/providers/*/*.rst,airflow/www/*.log,docs/*/commits.rst,docs/apache-airflow/tutorial/pipeline_example.csv,*.min.js,*.lock,INTHEWILD.md - --exclude-file=.codespellignorelines - repo: local # Note that this is the 2nd "local" repo group in the .pre-commit-config.yaml file. 
This is because @@ -343,7 +343,7 @@ repos: language: python entry: ./scripts/ci/pre_commit/validate_operators_init.py pass_filenames: true - files: ^airflow/providers/.*/(operators|transfers|sensors)/.*\.py$ + files: ^providers/src/airflow/providers/.*/(operators|transfers|sensors)/.*\.py$ additional_dependencies: [ 'rich>=12.4.4' ] - id: ruff name: Run 'ruff' for extremely fast Python linting @@ -418,7 +418,7 @@ repos: language: python files: ^airflow/.*\.py$ require_serial: true - exclude: ^airflow/kubernetes/|^airflow/providers/ + exclude: ^airflow/kubernetes/|^providers/src/airflow/providers/ entry: ./scripts/ci/pre_commit/check_cncf_k8s_used_for_k8s_executor_only.py additional_dependencies: ['rich>=12.4.4'] - id: check-airflow-provider-compatibility @@ -426,7 +426,7 @@ repos: entry: ./scripts/ci/pre_commit/check_provider_airflow_compatibility.py language: python pass_filenames: true - files: ^airflow/providers/.*\.py$ + files: ^providers/src/airflow/providers/.*\.py$ additional_dependencies: ['rich>=12.4.4'] - id: check-google-re2-as-dependency name: Check google-re2 declared as dep @@ -435,7 +435,7 @@ repos: language: python pass_filenames: true require_serial: true - files: ^airflow/providers/.*\.py$ + files: ^providers/src/airflow/providers/.*\.py$ additional_dependencies: ['rich>=12.4.4'] - id: update-local-yml-file name: Update mounts in the local yml file @@ -449,13 +449,18 @@ repos: description: Check dependency of SQL Providers with common data structure entry: ./scripts/ci/pre_commit/check_common_sql_dependency.py language: python - files: ^airflow/providers/.*/hooks/.*\.py$ + files: ^providers/src/airflow/providers/.*/hooks/.*\.py$ additional_dependencies: ['rich>=12.4.4', 'pyyaml', 'packaging'] - id: update-providers-dependencies name: Update dependencies for provider packages entry: ./scripts/ci/pre_commit/update_providers_dependencies.py language: python - files: ^airflow/providers/.*\.py$|^airflow/providers/.*/provider\.yaml$|^tests/providers/.*\.py$|^tests/system/providers/.*\.py$|^scripts/ci/pre_commit/update_providers_dependencies\.py$ + files: | + (?x) + ^providers/src/airflow/providers/.*\.py$ | + ^providers/src/airflow/providers/.*/provider\.yaml$ | + ^providers/tests/.*\.py$ | + ^scripts/ci/pre_commit/update_providers_dependencies\.py$ pass_filenames: false additional_dependencies: ['setuptools', 'rich>=12.4.4', 'pyyaml', 'tomli'] - id: check-extra-packages-references @@ -477,7 +482,7 @@ repos: name: Update extras in documentation entry: ./scripts/ci/pre_commit/insert_extras.py language: python - files: ^contributing-docs/12_airflow_dependencies_and_extras.rst$|^INSTALL$|^airflow/providers/.*/provider\.yaml$|^Dockerfile.* + files: ^contributing-docs/12_airflow_dependencies_and_extras.rst$|^INSTALL$|^providers/src/airflow/providers/.*/provider\.yaml$|^Dockerfile.* pass_filenames: false additional_dependencies: ['rich>=12.4.4', 'hatchling==1.25.0'] - id: check-extras-order @@ -510,9 +515,9 @@ repos: (?x) ^scripts/ci/pre_commit/version_heads_map\.py$| ^airflow/migrations/versions/.*$|^airflow/migrations/versions| - ^airflow/providers/fab/migrations/versions/.*$|^airflow/providers/fab/migrations/versions| + ^providers/src/airflow/providers/fab/migrations/versions/.*$|^providers/src/airflow/providers/fab/migrations/versions| ^airflow/utils/db.py$| - ^airflow/providers/fab/auth_manager/models/db.py$ + ^providers/src/airflow/providers/fab/auth_manager/models/db.py$ additional_dependencies: ['packaging','google-re2'] - id: update-version name: Update versions in docs @@ 
-533,11 +538,11 @@ repos: language: pygrep entry: > (?i) - .*https://github.*[0-9]/tests/system/providers| - .*https://github.*/main/tests/system/providers| - .*https://github.*/master/tests/system/providers| - .*https://github.*/main/airflow/providers/.*/example_dags/| - .*https://github.*/master/airflow/providers/.*/example_dags/ + .*https://github.*[0-9]/providers/tests/system/| + .*https://github.*/main/providers/tests/system/| + .*https://github.*/master/providers/tests/system/| + .*https://github.*/main/providers/src/airflow/providers/.*/example_dags/| + .*https://github.*/master/providers/src/airflow/providers/.*/example_dags/ pass_filenames: true files: ^docs/apache-airflow-providers-.*\.rst - id: check-safe-filter-usage-in-html @@ -560,7 +565,7 @@ repos: description: Use AirflowProviderDeprecationWarning in providers entry: "^\\s*DeprecationWarning*" pass_filenames: true - files: ^airflow/providers/.*\.py$ + files: ^providers/src/airflow/providers/.*\.py$ - id: check-urlparse-usage-in-code language: pygrep name: Don't use urlparse in code @@ -601,28 +606,28 @@ repos: ^airflow/config_templates/| ^airflow/models/baseoperator.py$| ^airflow/operators/__init__.py$| - ^airflow/providers/amazon/aws/hooks/emr.py$| - ^airflow/providers/amazon/aws/operators/emr.py$| - ^airflow/providers/apache/cassandra/hooks/cassandra.py$| - ^airflow/providers/apache/hive/operators/hive_stats.py$| - ^airflow/providers/apache/hive/transfers/vertica_to_hive.py$| - ^airflow/providers/apache/spark/decorators/| - ^airflow/providers/apache/spark/hooks/| - ^airflow/providers/apache/spark/operators/| - ^airflow/providers/exasol/hooks/exasol.py$| - ^airflow/providers/fab/auth_manager/security_manager/| - ^airflow/providers/google/cloud/hooks/bigquery.py$| - ^airflow/providers/google/cloud/operators/cloud_build.py$| - ^airflow/providers/google/cloud/operators/dataproc.py$| - ^airflow/providers/google/cloud/operators/mlengine.py$| - ^airflow/providers/microsoft/azure/hooks/cosmos.py$| - ^airflow/providers/microsoft/winrm/hooks/winrm.py$| + ^providers/src/airflow/providers/amazon/aws/hooks/emr.py$| + ^providers/src/airflow/providers/amazon/aws/operators/emr.py$| + ^providers/src/airflow/providers/apache/cassandra/hooks/cassandra.py$| + ^providers/src/airflow/providers/apache/hive/operators/hive_stats.py$| + ^providers/src/airflow/providers/apache/hive/transfers/vertica_to_hive.py$| + ^providers/src/airflow/providers/apache/spark/decorators/| + ^providers/src/airflow/providers/apache/spark/hooks/| + ^providers/src/airflow/providers/apache/spark/operators/| + ^providers/src/airflow/providers/exasol/hooks/exasol.py$| + ^providers/src/airflow/providers/fab/auth_manager/security_manager/| + ^providers/src/airflow/providers/google/cloud/hooks/bigquery.py$| + ^providers/src/airflow/providers/google/cloud/operators/cloud_build.py$| + ^providers/src/airflow/providers/google/cloud/operators/dataproc.py$| + ^providers/src/airflow/providers/google/cloud/operators/mlengine.py$| + ^providers/src/airflow/providers/microsoft/azure/hooks/cosmos.py$| + ^providers/src/airflow/providers/microsoft/winrm/hooks/winrm.py$| ^airflow/www/fab_security/manager.py$| ^docs/.*commits.rst$| ^docs/apache-airflow-providers-apache-cassandra/connections/cassandra.rst$| - ^airflow/providers/microsoft/winrm/operators/winrm.py$| - ^airflow/providers/opsgenie/hooks/opsgenie.py$| - ^airflow/providers/redis/provider.yaml$| + ^providers/src/airflow/providers/microsoft/winrm/operators/winrm.py$| + ^providers/src/airflow/providers/opsgenie/hooks/opsgenie.py$| + 
^providers/src/airflow/providers/redis/provider.yaml$| ^airflow/serialization/serialized_objects.py$| ^airflow/ui/pnpm-lock.yaml$| ^airflow/utils/db.py$| @@ -647,6 +652,7 @@ repos: ^newsfragments/41761.significant.rst$| ^scripts/ci/pre_commit/vendor_k8s_json_schema.py$| ^tests/| + ^providers/tests/| ^.pre-commit-config\.yaml$| ^.*CHANGELOG\.(rst|txt)$| ^.*RELEASE_NOTES\.rst$| @@ -682,7 +688,7 @@ repos: ^airflow/decorators/.*$| ^airflow/hooks/.*$| ^airflow/operators/.*$| - ^airflow/providers/.*$| + ^providers/src/airflow/providers/.*$| ^airflow/sensors/.*$| ^dev/provider_packages/.*$ - id: check-base-operator-usage @@ -697,7 +703,7 @@ repos: ^airflow/decorators/.*$| ^airflow/hooks/.*$| ^airflow/operators/.*$| - ^airflow/providers/.*$| + ^providers/src/airflow/providers/.*$| ^airflow/sensors/.*$| ^dev/provider_packages/.*$ - id: check-base-operator-usage @@ -708,16 +714,16 @@ repos: pass_filenames: true files: > (?x) - ^airflow/providers/.*\.py$ - exclude: ^.*/.*_vendor/|airflow/providers/standard/operators/bash.py + ^providers/src/airflow/providers/.*\.py$ + exclude: ^.*/.*_vendor/|providers/src/airflow/providers/standard/operators/bash.py - id: check-get-lineage-collector-providers language: python name: Check providers import hook lineage code from compat description: Make sure you import from airflow.providers.common.compat.lineage.hook instead of airflow.lineage.hook. entry: ./scripts/ci/pre_commit/check_get_lineage_collector_providers.py - files: ^airflow/providers/.*\.py$ - exclude: ^airflow/providers/common/compat/.*\.py$ + files: ^providers/src/airflow/providers/.*\.py$ + exclude: ^providers/src/airflow/providers/common/compat/.*\.py$ additional_dependencies: [ 'rich>=12.4.4' ] - id: check-decorated-operator-implements-custom-name name: Check @task decorator implements custom_operator_name @@ -730,7 +736,7 @@ repos: name: Verify usage of Airflow deprecation classes in core entry: category=DeprecationWarning|category=PendingDeprecationWarning files: \.py$ - exclude: ^airflow/configuration\.py$|^airflow/providers|^scripts/in_container/verify_providers\.py$|^tests/.*$ + exclude: ^airflow/configuration\.py$|^providers/src/airflow/providers/|^scripts/in_container/verify_providers\.py$|^(providers/)?tests/.*$|^dev/tests_common/ pass_filenames: true - id: check-provide-create-sessions-imports language: pygrep @@ -772,7 +778,7 @@ repos: name: Check if aiobotocore is an optional dependency only entry: ./scripts/ci/pre_commit/check_aiobotocore_optional.py language: python - files: ^airflow/providers/.*/provider\.yaml$ + files: ^providers/src/airflow/providers/.*/provider\.yaml$ pass_filenames: true additional_dependencies: ['click', 'rich>=12.4.4', 'pyyaml'] require_serial: true @@ -877,12 +883,6 @@ repos: entry: ./scripts/ci/pre_commit/compile_www_assets_dev.py pass_filenames: false additional_dependencies: ['yarn@1.22.21'] - - id: check-providers-init-file-missing - name: Provider init file is missing - pass_filenames: false - always_run: true - entry: ./scripts/ci/pre_commit/check_providers_init.py - language: python - id: check-providers-subpackages-init-file-exist name: Provider subpackage init files are there pass_filenames: false @@ -944,8 +944,8 @@ repos: name: Check if system tests have required segments of code entry: ./scripts/ci/pre_commit/check_system_tests.py language: python - files: ^tests/system/.*/example_[^/]*\.py$ - exclude: ^tests/system/providers/google/cloud/bigquery/example_bigquery_queries\.py$ + files: ^(providers/)?tests/system/.*/example_[^/]*\.py$ + exclude: 
^providers/tests/system/google/cloud/bigquery/example_bigquery_queries\.py$ pass_filenames: true additional_dependencies: ['rich>=12.4.4'] - id: generate-pypi-readme @@ -1177,12 +1177,21 @@ repos: entry: "^\\s*from re\\s|^\\s*import re\\s" pass_filenames: true files: \.py$ - exclude: ^airflow/providers|^dev/.*\.py$|^scripts/.*\.py$|^tests/|^\w+_tests/|^docs/.*\.py$|^airflow/utils/helpers.py$|^hatch_build.py$ + exclude: | + (?x) + ^airflow/utils/helpers.py$ | + ^providers/src/airflow/providers/ | + ^(providers/)?tests/ | + ^dev/.*\.py$ | + ^scripts/.*\.py$ | + ^\w+_tests/ | + ^docs/.*\.py$ | + ^hatch_build.py$ - id: check-provider-docs-valid name: Validate provider doc files entry: ./scripts/ci/pre_commit/check_provider_docs.py language: python - files: ^airflow/providers/.*/provider\.yaml|^docs/.* + files: ^providers/src/airflow/providers/.*/provider\.yaml|^docs/.* additional_dependencies: ['rich>=12.4.4', 'pyyaml', 'jinja2'] require_serial: true - id: bandit @@ -1273,9 +1282,20 @@ repos: - id: mypy-airflow name: Run mypy for airflow language: python - entry: ./scripts/ci/pre_commit/mypy.py --namespace-packages + entry: ./scripts/ci/pre_commit/mypy.py files: \.py$ - exclude: ^.*/.*_vendor/|^airflow/migrations|^airflow/providers|^dev|^scripts|^docs|^provider_packages|^tests/providers|^tests/system/providers|^tests/dags/test_imports.py|^clients/python/test_.*\.py|^performance + exclude: | + (?x) + ^.*/.*_vendor/ | + ^airflow/migrations | + ^providers/ | + ^dev | + ^scripts | + ^docs | + ^provider_packages | + ^performance/ | + ^tests/dags/test_imports.py | + ^clients/python/test_.*\.py require_serial: true additional_dependencies: ['rich>=12.4.4'] - id: mypy-airflow @@ -1291,7 +1311,7 @@ name: Run mypy for providers language: python entry: ./scripts/ci/pre_commit/mypy.py --namespace-packages - files: ^airflow/providers/.*\.py$|^tests/providers/.*\.py$|^tests/system/providers/.*\.py$ + files: ^providers/src/airflow/providers/.*\.py$|^providers/tests/.*\.py$ exclude: ^.*/.*_vendor/ require_serial: true additional_dependencies: ['rich>=12.4.4'] - id: mypy-airflow @@ -1299,7 +1319,7 @@ stages: ['manual'] name: Run mypy for providers (manual) language: python - entry: ./scripts/ci/pre_commit/mypy_folder.py airflow/providers + entry: ./scripts/ci/pre_commit/mypy_folder.py providers/src/airflow/providers pass_filenames: false files: ^.*\.py$ require_serial: true @@ -1325,14 +1345,14 @@ name: Validate provider.yaml files entry: ./scripts/ci/pre_commit/check_provider_yaml_files.py language: python - files: ^airflow/providers/.*/provider\.yaml$ + files: ^providers/src/airflow/providers/.*/provider\.yaml$ additional_dependencies: ['rich>=12.4.4'] require_serial: true - id: check-template-fields-valid name: Check templated fields mapped in operators/sensors language: python entry: ./scripts/ci/pre_commit/check_template_fields.py - files: ^airflow/.*/sensors/.*\.py$|^airflow/.*/operators/.*\.py$ + files: ^(providers/src/)?airflow/.*/(sensors|operators)/.*\.py$ additional_dependencies: [ 'rich>=12.4.4' ] require_serial: true - id: update-migration-references diff --git a/Dockerfile b/Dockerfile index 4525a717728a..ce229c75face 100644 --- a/Dockerfile +++ b/Dockerfile @@ -877,8 +877,13 @@ function install_airflow() { # Determine the installation_command_flags based on AIRFLOW_INSTALLATION_METHOD method local installation_command_flags if [[ ${AIRFLOW_INSTALLATION_METHOD} == "." 
]]; then + # We need a file in there, otherwise the editable install doesn't include anything in the .pth file + mkdir -p ./providers/src/airflow/providers/ + touch ./providers/src/airflow/providers/__init__.py + trap 'rm -f ./providers/src/airflow/providers/__init__.py 2>/dev/null' EXIT + # When installing from sources - we always use `--editable` mode - installation_command_flags="--editable .[${AIRFLOW_EXTRAS}]${AIRFLOW_VERSION_SPECIFICATION}" + installation_command_flags="--editable .[${AIRFLOW_EXTRAS}]${AIRFLOW_VERSION_SPECIFICATION} --editable ./providers" elif [[ ${AIRFLOW_INSTALLATION_METHOD} == "apache-airflow" ]]; then installation_command_flags="apache-airflow[${AIRFLOW_EXTRAS}]${AIRFLOW_VERSION_SPECIFICATION}" elif [[ ${AIRFLOW_INSTALLATION_METHOD} == apache-airflow\ @\ * ]]; then diff --git a/Dockerfile.ci b/Dockerfile.ci index 304d2a4a2d46..a3e982614de6 100644 --- a/Dockerfile.ci +++ b/Dockerfile.ci @@ -676,8 +676,13 @@ function install_airflow() { # Determine the installation_command_flags based on AIRFLOW_INSTALLATION_METHOD method local installation_command_flags if [[ ${AIRFLOW_INSTALLATION_METHOD} == "." ]]; then + # We need a file in there, otherwise the editable install doesn't include anything in the .pth file + mkdir -p ./providers/src/airflow/providers/ + touch ./providers/src/airflow/providers/__init__.py + trap 'rm -f ./providers/src/airflow/providers/__init__.py 2>/dev/null' EXIT + # When installing from sources - we always use `--editable` mode - installation_command_flags="--editable .[${AIRFLOW_EXTRAS}]${AIRFLOW_VERSION_SPECIFICATION}" + installation_command_flags="--editable .[${AIRFLOW_EXTRAS}]${AIRFLOW_VERSION_SPECIFICATION} --editable ./providers" elif [[ ${AIRFLOW_INSTALLATION_METHOD} == "apache-airflow" ]]; then installation_command_flags="apache-airflow[${AIRFLOW_EXTRAS}]${AIRFLOW_VERSION_SPECIFICATION}" elif [[ ${AIRFLOW_INSTALLATION_METHOD} == apache-airflow\ @\ * ]]; then @@ -1350,6 +1355,7 @@ RUN bash /scripts/docker/install_pipx_tools.sh # We copy pyproject.toml and other files needed to perform setup of dependencies # So in case pyproject.toml changes we can install latest dependencies required. 
COPY pyproject.toml ${AIRFLOW_SOURCES}/pyproject.toml +COPY providers/pyproject.toml ${AIRFLOW_SOURCES}/providers/pyproject.toml COPY airflow/__init__.py ${AIRFLOW_SOURCES}/airflow/ COPY generated/* ${AIRFLOW_SOURCES}/generated/ COPY constraints/* ${AIRFLOW_SOURCES}/constraints/ diff --git a/airflow/models/dag.py b/airflow/models/dag.py index 246e0e95ec61..2dc425daa054 100644 --- a/airflow/models/dag.py +++ b/airflow/models/dag.py @@ -113,7 +113,6 @@ clear_task_instances, ) from airflow.models.tasklog import LogTemplate -from airflow.providers.fab import __version__ as FAB_VERSION from airflow.secrets.local_filesystem import LocalFilesystemBackend from airflow.security import permissions from airflow.settings import json @@ -796,6 +795,9 @@ def _upgrade_outdated_dag_access_control(access_control=None): """Look for outdated dag level actions in DAG access_controls and replace them with updated actions.""" if access_control is None: return None + + from airflow.providers.fab import __version__ as FAB_VERSION + updated_access_control = {} for role, perms in access_control.items(): if packaging_version.parse(FAB_VERSION) >= packaging_version.parse("1.3.0"): diff --git a/airflow/providers/.gitignore b/airflow/providers/.gitignore deleted file mode 100644 index 9b4a1a9d8f3e..000000000000 --- a/airflow/providers/.gitignore +++ /dev/null @@ -1 +0,0 @@ -get_provider_info.py diff --git a/airflow/providers_manager.py b/airflow/providers_manager.py index 2c673063cb23..1b1ca469f21d 100644 --- a/airflow/providers_manager.py +++ b/airflow/providers_manager.py @@ -31,7 +31,7 @@ from dataclasses import dataclass from functools import wraps from time import perf_counter -from typing import TYPE_CHECKING, Any, Callable, MutableMapping, NamedTuple, NoReturn, TypeVar +from typing import TYPE_CHECKING, Any, Callable, MutableMapping, NamedTuple, TypeVar from packaging.utils import canonicalize_name @@ -362,7 +362,7 @@ def _correctness_check(provider_package: str, class_name: str, provider_info: Pr # We want to have better control over initialization of parameters and be able to debug and test it # So we add our own decorator -def provider_info_cache(cache_name: str) -> Callable[[Callable[PS, NoReturn]], Callable[PS, None]]: +def provider_info_cache(cache_name: str) -> Callable[[Callable[PS, None]], Callable[PS, None]]: """ Decorate and cache provider info. 
@@ -370,7 +370,7 @@ def provider_info_cache(cache_name: str) -> Callable[[Callable[PS, NoReturn]], C :param cache_name: Name of the cache """ - def provider_info_cache_decorator(func: Callable[PS, NoReturn]) -> Callable[PS, None]: + def provider_info_cache_decorator(func: Callable[PS, None]) -> Callable[PS, None]: @wraps(func) def wrapped_function(*args: PS.args, **kwargs: PS.kwargs) -> None: providers_manager_instance = args[0] diff --git a/airflow/settings.py b/airflow/settings.py index 7a805f64a29c..a6adbbcf9ff7 100644 --- a/airflow/settings.py +++ b/airflow/settings.py @@ -377,7 +377,8 @@ def is_called_from_test_code(self) -> tuple[bool, traceback.FrameSummary | None] and not tb.filename == AIRFLOW_UTILS_SESSION_PATH ] if any( - filename.endswith("conftest.py") or filename.endswith("tests/test_utils/db.py") + filename.endswith("conftest.py") + or filename.endswith("dev/tests_common/test_utils/db.py") for filename, _, _, _ in airflow_frames ): # This is a fixture call or testing utilities diff --git a/contributing-docs/08_static_code_checks.rst b/contributing-docs/08_static_code_checks.rst index 422a9f027e1e..f064a13a773b 100644 --- a/contributing-docs/08_static_code_checks.rst +++ b/contributing-docs/08_static_code_checks.rst @@ -212,8 +212,6 @@ require Breeze Docker image to be built locally. +-----------------------------------------------------------+--------------------------------------------------------+---------+ | check-provider-yaml-valid | Validate provider.yaml files | * | +-----------------------------------------------------------+--------------------------------------------------------+---------+ -| check-providers-init-file-missing | Provider init file is missing | | -+-----------------------------------------------------------+--------------------------------------------------------+---------+ | check-providers-subpackages-init-file-exist | Provider subpackage init files are there | | +-----------------------------------------------------------+--------------------------------------------------------+---------+ | check-pydevd-left-in-code | Check for pydevd debug statements accidentally left | | diff --git a/contributing-docs/testing/system_tests.rst b/contributing-docs/testing/system_tests.rst index 867b89582f16..cc64d25e90cb 100644 --- a/contributing-docs/testing/system_tests.rst +++ b/contributing-docs/testing/system_tests.rst @@ -35,7 +35,7 @@ Airflow system tests are pretty special because they serve three purposes: Old System Tests ---------------- -The system tests derive from the ``tests.test_utils.system_test_class.SystemTests`` class. +The system tests derive from the ``dev.tests_common.test_utils.system_test_class.SystemTests`` class. Old versions of System tests should also be marked with ``@pytest.mark.system(SYSTEM)`` where ``system`` designates the system to be tested (for example, ``google.cloud``). These tests are skipped by default. diff --git a/contributing-docs/testing/unit_tests.rst b/contributing-docs/testing/unit_tests.rst index dff3835a02fb..468f10b68d4c 100644 --- a/contributing-docs/testing/unit_tests.rst +++ b/contributing-docs/testing/unit_tests.rst @@ -952,7 +952,7 @@ will ask you to rebuild the image if it is needed and some new dependencies shou .. 
code-block:: bash - breeze testing tests tests/providers/http/hooks/test_http.py tests/core/test_core.py --db-reset --log-cli-level=DEBUG + breeze testing tests providers/tests/http/hooks/test_http.py tests/core/test_core.py --db-reset --log-cli-level=DEBUG You can run the whole test suite without adding the test target: @@ -1146,7 +1146,7 @@ directly to the container. .. code-block:: bash - pytest tests/providers//test.py + pytest providers/tests//test.py 4. Iterate with the tests and providers. Both providers and tests are mounted from local sources, so changes you make locally in both tests and provider sources are immediately reflected inside the @@ -1171,7 +1171,7 @@ are not part of the public API. We deal with it in one of the following ways: 1) If the whole provider is supposed to only work for later airflow versions, we remove the whole provider by excluding it from compatibility test configuration (see below) -2) Some compatibility shims are defined in ``tests/test_utils/compat.py`` - and they can be used to make the +2) Some compatibility shims are defined in ``dev.tests_common.test_utils.compat`` - and they can be used to make the tests compatible - for example importing ``ParseImportError`` after the exception has been renamed from ``ImportError`` and it would fail in Airflow 2.9, but we have a fallback import in ``compat.py`` that falls back to the old import automatically, so all tests testing / expecting ``ParseImportError`` should import @@ -1184,7 +1184,7 @@ are not part of the public API. We deal with it in one of the following ways: .. code-block:: python - from tests.test_utils.compat import AIRFLOW_V_2_8_PLUS + from dev.tests_common.test_utils.compat import AIRFLOW_V_2_8_PLUS @pytest.mark.skipif(not AIRFLOW_V_2_8_PLUS, reason="The tests should be skipped for Airflow < 2.8") @@ -1197,6 +1197,9 @@ are not part of the public API. We deal with it in one of the following ways: .. code-block:: python + from dev.tests_common import RUNNING_TESTS_AGAINST_AIRFLOW_PACKAGES + + + @pytest.mark.skipif( RUNNING_TESTS_AGAINST_AIRFLOW_PACKAGES, reason="Plugin initialization is done early in case of packages" ) @@ -1280,7 +1283,7 @@ In case you want to reproduce canary run, you need to add ``--clean-airflow-inst .. code-block:: bash - pytest tests/providers//test.py + pytest providers/tests//test.py 7. Iterate with the tests diff --git a/dev/breeze/doc/images/output_build-docs.svg b/dev/breeze/doc/images/output_build-docs.svg index 0ddded9468a5..d52aa78d7ec1 100644 [regenerated Breeze help screenshot for `breeze build-docs`: the rendered flag text is unchanged in this hunk, only SVG styling attributes differ]
diff --git a/dev/breeze/doc/images/output_build-docs.svg b/dev/breeze/doc/images/output_build-docs.svg
index 0ddded9468a5..d52aa78d7ec1 100644
--- a/dev/breeze/doc/images/output_build-docs.svg
+++ b/dev/breeze/doc/images/output_build-docs.svg
[regenerated help screenshot "Build documents." - the -/+ option text is identical; only SVG styling/markup changed]
diff --git a/dev/breeze/doc/images/output_compile-ui-assets.svg b/dev/breeze/doc/images/output_compile-ui-assets.svg
index 4721274d9cf5..3b6304558479 100644
--- a/dev/breeze/doc/images/output_compile-ui-assets.svg
+++ b/dev/breeze/doc/images/output_compile-ui-assets.svg
[regenerated help screenshot "Compiles ui assets." - visible text unchanged]
diff --git a/dev/breeze/doc/images/output_release-management_add-back-references.svg b/dev/breeze/doc/images/output_release-management_add-back-references.svg
index b2b90c9e2872..37feab7831e7 100644
--- a/dev/breeze/doc/images/output_release-management_add-back-references.svg
+++ b/dev/breeze/doc/images/output_release-management_add-back-references.svg
[regenerated help screenshot "Command to add back references for documentation to make it backward compatible." - visible text unchanged]
diff --git a/dev/breeze/doc/images/output_release-management_generate-issue-content-providers.svg b/dev/breeze/doc/images/output_release-management_generate-issue-content-providers.svg
index f07aa12760d9..62ed9b25dda6 100644
--- a/dev/breeze/doc/images/output_release-management_generate-issue-content-providers.svg
+++ b/dev/breeze/doc/images/output_release-management_generate-issue-content-providers.svg
[regenerated help screenshot "Generates content for issue to test the release." - visible text unchanged]
diff --git a/dev/breeze/doc/images/output_release-management_prepare-provider-documentation.svg b/dev/breeze/doc/images/output_release-management_prepare-provider-documentation.svg
index 19a549cc5a95..6bf07f9bc7f3 100644
--- a/dev/breeze/doc/images/output_release-management_prepare-provider-documentation.svg
+++ b/dev/breeze/doc/images/output_release-management_prepare-provider-documentation.svg
[regenerated help screenshot "Prepare CHANGELOG, README and COMMITS information for providers." - visible text unchanged]
diff --git a/dev/breeze/doc/images/output_release-management_prepare-provider-packages.svg b/dev/breeze/doc/images/output_release-management_prepare-provider-packages.svg
index 16e211ec4202..3c4e468aba0c 100644
--- a/dev/breeze/doc/images/output_release-management_prepare-provider-packages.svg
+++ b/dev/breeze/doc/images/output_release-management_prepare-provider-packages.svg
[regenerated help screenshot "Prepare sdist/whl packages of Airflow Providers." - visible text unchanged]
diff --git a/dev/breeze/doc/images/output_release-management_publish-docs.svg b/dev/breeze/doc/images/output_release-management_publish-docs.svg
index db974991886e..7063e3e6fbff 100644
--- a/dev/breeze/doc/images/output_release-management_publish-docs.svg
+++ b/dev/breeze/doc/images/output_release-management_publish-docs.svg
[regenerated help screenshot "Command to publish generated documentation to airflow-site" - visible text unchanged]
diff --git a/dev/breeze/doc/images/output_sbom_generate-providers-requirements.svg b/dev/breeze/doc/images/output_sbom_generate-providers-requirements.svg
index 4f29325c201c..4b29f11dd3fd 100644
--- a/dev/breeze/doc/images/output_sbom_generate-providers-requirements.svg
+++ b/dev/breeze/doc/images/output_sbom_generate-providers-requirements.svg
[regenerated help screenshot "Generate requirements for selected provider." - visible text, including the enumeration of all provider ids, unchanged]
diff --git a/dev/breeze/doc/images/output_setup_check-all-params-in-groups.svg b/dev/breeze/doc/images/output_setup_check-all-params-in-groups.svg
index 47e73326a4af..71feee5875aa 100644
--- a/dev/breeze/doc/images/output_setup_check-all-params-in-groups.svg
+++ b/dev/breeze/doc/images/output_setup_check-all-params-in-groups.svg
[regenerated help screenshot "Check that all parameters are put in groups." - visible text unchanged]
diff --git a/dev/breeze/doc/images/output_setup_regenerate-command-images.svg b/dev/breeze/doc/images/output_setup_regenerate-command-images.svg
index bd11568d5a17..83018c3d8d39 100644
--- a/dev/breeze/doc/images/output_setup_regenerate-command-images.svg
+++ b/dev/breeze/doc/images/output_setup_regenerate-command-images.svg
[regenerated help screenshot "Regenerate breeze command images." - visible text unchanged]
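The source changes that follow all encode one mapping: a dotted provider id now resolves to ``providers/src/airflow/providers/...`` for sources and ``providers/tests/...`` for tests. A minimal sketch of that mapping (the helper name ``provider_paths`` is hypothetical; the constants mirror the ones introduced in the ``path_utils.py`` hunk further down):

.. code-block:: python

    from __future__ import annotations

    from pathlib import Path

    # Placeholder for the repository checkout root (breeze resolves this
    # dynamically in path_utils.py; "." is used here only for illustration).
    AIRFLOW_SOURCES_ROOT = Path(".")

    AIRFLOW_PROVIDERS_SRC = AIRFLOW_SOURCES_ROOT / "providers" / "src"
    AIRFLOW_PROVIDERS_NS_PACKAGE = AIRFLOW_PROVIDERS_SRC / "airflow" / "providers"
    TESTS_PROVIDERS_ROOT = AIRFLOW_SOURCES_ROOT / "providers" / "tests"
    SYSTEM_TESTS_PROVIDERS_ROOT = TESTS_PROVIDERS_ROOT / "system"


    def provider_paths(provider_id: str) -> tuple[Path, Path, Path]:
        """Map a dotted provider id to its new source, test and system-test dirs."""
        parts = provider_id.split(".")
        return (
            AIRFLOW_PROVIDERS_NS_PACKAGE.joinpath(*parts),
            TESTS_PROVIDERS_ROOT.joinpath(*parts),
            SYSTEM_TESTS_PROVIDERS_ROOT.joinpath(*parts),
        )


    # provider_paths("apache.hdfs") yields:
    #   providers/src/airflow/providers/apache/hdfs   (sources)
    #   providers/tests/apache/hdfs                   (unit tests)
    #   providers/tests/system/apache/hdfs            (system tests)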
diff --git a/dev/breeze/doc/images/output_static-checks.svg b/dev/breeze/doc/images/output_static-checks.svg
index ade76225d105..e3c89b304289 100644
--- a/dev/breeze/doc/images/output_static-checks.svg
+++ b/dev/breeze/doc/images/output_static-checks.svg
@@ -351,9 +351,9 @@
 check-persist-credentials-disabled-in-github-workflows |
 check-pre-commit-information-consistent | check-provide-create-sessions-imports |
 check-provider-docs-valid | check-provider-yaml-valid |
-check-providers-init-file-missing | check-providers-subpackages-init-file-exist |
-check-pydevd-left-in-code | check-revision-heads-map |
-check-safe-filter-usage-in-html | check-sql-dependency-common-data-structure |
+check-providers-subpackages-init-file-exist | check-pydevd-left-in-code |
+check-revision-heads-map | check-safe-filter-usage-in-html |
+check-sql-dependency-common-data-structure |
 check-start-date-not-used-in-defaults | check-system-tests-present |
 check-system-tests-tocs | check-taskinstance-tis-attrs |
 check-template-context-variable-in-sync | check-template-fields-valid |
diff --git a/dev/breeze/doc/images/output_static-checks.txt b/dev/breeze/doc/images/output_static-checks.txt
index 0b4fae743bd1..e917996931d6 100644
--- a/dev/breeze/doc/images/output_static-checks.txt
+++ b/dev/breeze/doc/images/output_static-checks.txt
@@ -1 +1 @@
-769905ba0e5eea7f79c37b2d047699e0
+08a7e37cd651e4d1eb702cb347d9b061
diff --git a/dev/breeze/src/airflow_breeze/commands/kubernetes_commands.py b/dev/breeze/src/airflow_breeze/commands/kubernetes_commands.py
index c6cc343ae4ae..f97b6b573a46 100644
--- a/dev/breeze/src/airflow_breeze/commands/kubernetes_commands.py
+++ b/dev/breeze/src/airflow_breeze/commands/kubernetes_commands.py
@@ -607,7 +607,7 @@ def _rebuild_k8s_image(
 COPY --chown=airflow:0 airflow/example_dags/ /opt/airflow/dags/
-COPY --chown=airflow:0 airflow/providers/cncf/kubernetes/kubernetes_executor_templates/ /opt/airflow/pod_templates/
+COPY --chown=airflow:0 providers/src/airflow/providers/cncf/kubernetes/kubernetes_executor_templates/ /opt/airflow/pod_templates/
 ENV GUNICORN_CMD_ARGS='--preload' AIRFLOW__WEBSERVER__WORKER_REFRESH_INTERVAL=0
 """
diff --git a/dev/breeze/src/airflow_breeze/commands/testing_commands.py b/dev/breeze/src/airflow_breeze/commands/testing_commands.py
index
46ab710dbfa7..61b260bea09a 100644 --- a/dev/breeze/src/airflow_breeze/commands/testing_commands.py +++ b/dev/breeze/src/airflow_breeze/commands/testing_commands.py @@ -791,7 +791,7 @@ def _run_test_command( perform_environment_checks() if skip_providers: ignored_path_list = [ - f"--ignore=tests/providers/{provider_id.replace('.','/')}" + f"--ignore=providers/tests/{provider_id.replace('.','/')}" for provider_id in skip_providers.split(" ") ] extra_pytest_args = (*extra_pytest_args, *ignored_path_list) diff --git a/dev/breeze/src/airflow_breeze/pre_commit_ids.py b/dev/breeze/src/airflow_breeze/pre_commit_ids.py index 03873ed8ac7b..e684acd24ad2 100644 --- a/dev/breeze/src/airflow_breeze/pre_commit_ids.py +++ b/dev/breeze/src/airflow_breeze/pre_commit_ids.py @@ -71,7 +71,6 @@ "check-provide-create-sessions-imports", "check-provider-docs-valid", "check-provider-yaml-valid", - "check-providers-init-file-missing", "check-providers-subpackages-init-file-exist", "check-pydevd-left-in-code", "check-revision-heads-map", diff --git a/dev/breeze/src/airflow_breeze/prepare_providers/provider_packages.py b/dev/breeze/src/airflow_breeze/prepare_providers/provider_packages.py index 88ad3e8c8cf3..ebe894029496 100644 --- a/dev/breeze/src/airflow_breeze/prepare_providers/provider_packages.py +++ b/dev/breeze/src/airflow_breeze/prepare_providers/provider_packages.py @@ -37,7 +37,7 @@ render_template, tag_exists_for_provider, ) -from airflow_breeze.utils.path_utils import AIRFLOW_SOURCES_ROOT +from airflow_breeze.utils.path_utils import AIRFLOW_PROVIDERS_SRC, AIRFLOW_SOURCES_ROOT from airflow_breeze.utils.run_utils import run_command LICENCE_RST = """ @@ -83,7 +83,7 @@ def copy_provider_sources_to_target(provider_id: str) -> Path: rmtree(target_provider_root_path, ignore_errors=True) target_provider_root_path.mkdir(parents=True) source_provider_sources_path = get_source_package_path(provider_id) - relative_provider_path = source_provider_sources_path.relative_to(AIRFLOW_SOURCES_ROOT) + relative_provider_path = source_provider_sources_path.relative_to(AIRFLOW_PROVIDERS_SRC) target_providers_sub_folder = target_provider_root_path / relative_provider_path get_console().print( f"[info]Copying provider sources: {source_provider_sources_path} -> {target_providers_sub_folder}" diff --git a/dev/breeze/src/airflow_breeze/utils/docker_command_utils.py b/dev/breeze/src/airflow_breeze/utils/docker_command_utils.py index ef7779afd974..618d064e0e41 100644 --- a/dev/breeze/src/airflow_breeze/utils/docker_command_utils.py +++ b/dev/breeze/src/airflow_breeze/utils/docker_command_utils.py @@ -23,6 +23,7 @@ import os import re import sys +from functools import lru_cache from subprocess import DEVNULL, CalledProcessError, CompletedProcess from typing import TYPE_CHECKING @@ -91,6 +92,7 @@ ("generated", "/opt/airflow/generated"), ("hooks", "/opt/airflow/hooks"), ("logs", "/root/airflow/logs"), + ("providers", "/opt/airflow/providers"), ("pyproject.toml", "/opt/airflow/pyproject.toml"), ("scripts", "/opt/airflow/scripts"), ("scripts/docker/entrypoint_ci.sh", "/entrypoint"), @@ -502,6 +504,7 @@ def check_executable_entrypoint_permissions(quiet: bool = False): get_console().print("[success]Executable permissions on entrypoints are OK[/]") +@lru_cache def perform_environment_checks(quiet: bool = False): check_docker_is_running() check_docker_version(quiet) diff --git a/dev/breeze/src/airflow_breeze/utils/packages.py b/dev/breeze/src/airflow_breeze/utils/packages.py index 5c22c41e0ba6..5ee2c2a3edfc 100644 --- 
a/dev/breeze/src/airflow_breeze/utils/packages.py +++ b/dev/breeze/src/airflow_breeze/utils/packages.py @@ -36,7 +36,7 @@ ) from airflow_breeze.utils.console import get_console from airflow_breeze.utils.path_utils import ( - AIRFLOW_PROVIDERS_ROOT, + AIRFLOW_PROVIDERS_NS_PACKAGE, BREEZE_SOURCES_ROOT, DOCS_ROOT, GENERATED_PROVIDER_PACKAGES_DIR, @@ -382,7 +382,7 @@ def find_matching_long_package_names( def get_source_package_path(provider_id: str) -> Path: - return AIRFLOW_PROVIDERS_ROOT.joinpath(*provider_id.split(".")) + return AIRFLOW_PROVIDERS_NS_PACKAGE.joinpath(*provider_id.split(".")) def get_documentation_package_path(provider_id: str) -> Path: diff --git a/dev/breeze/src/airflow_breeze/utils/path_utils.py b/dev/breeze/src/airflow_breeze/utils/path_utils.py index 064a61771fb5..4e510cb76800 100644 --- a/dev/breeze/src/airflow_breeze/utils/path_utils.py +++ b/dev/breeze/src/airflow_breeze/utils/path_utils.py @@ -281,9 +281,11 @@ def find_airflow_sources_root_to_operate_on() -> Path: AIRFLOW_SOURCES_ROOT = find_airflow_sources_root_to_operate_on().resolve() AIRFLOW_WWW_DIR = AIRFLOW_SOURCES_ROOT / "airflow" / "www" AIRFLOW_UI_DIR = AIRFLOW_SOURCES_ROOT / "airflow" / "ui" -TESTS_PROVIDERS_ROOT = AIRFLOW_SOURCES_ROOT / "tests" / "providers" -SYSTEM_TESTS_PROVIDERS_ROOT = AIRFLOW_SOURCES_ROOT / "tests" / "system" / "providers" -AIRFLOW_PROVIDERS_ROOT = AIRFLOW_SOURCES_ROOT / "airflow" / "providers" +AIRFLOW_PROVIDERS_PROJECT = AIRFLOW_SOURCES_ROOT / "providers" +AIRFLOW_PROVIDERS_SRC = AIRFLOW_PROVIDERS_PROJECT / "src" +AIRFLOW_PROVIDERS_NS_PACKAGE = AIRFLOW_PROVIDERS_SRC / "airflow" / "providers" +TESTS_PROVIDERS_ROOT = AIRFLOW_PROVIDERS_PROJECT / "tests" +SYSTEM_TESTS_PROVIDERS_ROOT = AIRFLOW_PROVIDERS_PROJECT / "tests" / "system" DOCS_ROOT = AIRFLOW_SOURCES_ROOT / "docs" BUILD_CACHE_DIR = AIRFLOW_SOURCES_ROOT / ".build" GENERATED_DIR = AIRFLOW_SOURCES_ROOT / "generated" diff --git a/dev/breeze/src/airflow_breeze/utils/provider_dependencies.py b/dev/breeze/src/airflow_breeze/utils/provider_dependencies.py index cad78f1e6d2d..72d07601601a 100644 --- a/dev/breeze/src/airflow_breeze/utils/provider_dependencies.py +++ b/dev/breeze/src/airflow_breeze/utils/provider_dependencies.py @@ -23,7 +23,7 @@ from airflow_breeze.utils.console import get_console from airflow_breeze.utils.github import get_tag_date -from airflow_breeze.utils.path_utils import AIRFLOW_PROVIDERS_ROOT, PROVIDER_DEPENDENCIES_JSON_FILE_PATH +from airflow_breeze.utils.path_utils import AIRFLOW_PROVIDERS_NS_PACKAGE, PROVIDER_DEPENDENCIES_JSON_FILE_PATH DEPENDENCIES = json.loads(PROVIDER_DEPENDENCIES_JSON_FILE_PATH.read_text()) @@ -67,7 +67,7 @@ def generate_providers_metadata_for_package( ) -> dict[str, dict[str, str]]: get_console().print(f"[info]Generating metadata for {provider_id}") provider_yaml_dict = yaml.safe_load( - (AIRFLOW_PROVIDERS_ROOT.joinpath(*provider_id.split(".")) / "provider.yaml").read_text() + (AIRFLOW_PROVIDERS_NS_PACKAGE.joinpath(*provider_id.split(".")) / "provider.yaml").read_text() ) provider_metadata: dict[str, dict[str, str]] = {} last_airflow_version = START_AIRFLOW_VERSION_FROM diff --git a/dev/breeze/src/airflow_breeze/utils/publish_docs_helpers.py b/dev/breeze/src/airflow_breeze/utils/publish_docs_helpers.py index 8c5d63748cb7..bb5ccb1e8290 100644 --- a/dev/breeze/src/airflow_breeze/utils/publish_docs_helpers.py +++ b/dev/breeze/src/airflow_breeze/utils/publish_docs_helpers.py @@ -19,14 +19,19 @@ import json import os -from glob import glob from pathlib import Path from typing import Any +from 
airflow_breeze.utils.path_utils import ( + AIRFLOW_PROVIDERS_NS_PACKAGE, + AIRFLOW_SOURCES_ROOT, + SYSTEM_TESTS_PROVIDERS_ROOT, +) + CONSOLE_WIDTH = 180 -ROOT_DIR = Path(__file__).parents[5].resolve() -PROVIDER_DATA_SCHEMA_PATH = ROOT_DIR / "airflow" / "provider.yaml.schema.json" + +PROVIDER_DATA_SCHEMA_PATH = AIRFLOW_SOURCES_ROOT / "airflow" / "provider.yaml.schema.json" def _load_schema() -> dict[str, Any]: @@ -36,22 +41,17 @@ def _load_schema() -> dict[str, Any]: def _filepath_to_module(filepath: str): - return str(Path(filepath).relative_to(ROOT_DIR)).replace("/", ".") + # TODO: handle relative to providers project + return str(Path(filepath).relative_to(AIRFLOW_SOURCES_ROOT)).replace("/", ".") def _filepath_to_system_tests(filepath: str): - return str( - ROOT_DIR - / "tests" - / "system" - / "providers" - / Path(filepath).relative_to(ROOT_DIR / "airflow" / "providers") - ) + return str(SYSTEM_TESTS_PROVIDERS_ROOT / Path(filepath).relative_to(AIRFLOW_PROVIDERS_NS_PACKAGE)) def get_provider_yaml_paths(): """Returns list of provider.yaml files""" - return sorted(glob(f"{ROOT_DIR}/airflow/providers/**/provider.yaml", recursive=True)) + return sorted(AIRFLOW_PROVIDERS_NS_PACKAGE.glob("**/provider.yaml")) def pretty_format_path(path: str, start: str) -> str: diff --git a/dev/breeze/src/airflow_breeze/utils/run_tests.py b/dev/breeze/src/airflow_breeze/utils/run_tests.py index 840bf2fcad6d..23fe5b6e1965 100644 --- a/dev/breeze/src/airflow_breeze/utils/run_tests.py +++ b/dev/breeze/src/airflow_breeze/utils/run_tests.py @@ -25,7 +25,7 @@ from airflow_breeze.global_constants import PIP_VERSION from airflow_breeze.utils.console import Output, get_console from airflow_breeze.utils.packages import get_excluded_provider_folders, get_suspended_provider_folders -from airflow_breeze.utils.path_utils import AIRFLOW_SOURCES_ROOT +from airflow_breeze.utils.path_utils import AIRFLOW_SOURCES_ROOT, TESTS_PROVIDERS_ROOT from airflow_breeze.utils.run_utils import run_command from airflow_breeze.utils.virtualenv_utils import create_temp_venv @@ -113,9 +113,9 @@ def get_ignore_switches_for_provider(provider_folders: list[str]) -> list[str]: for providers in provider_folders: args.extend( [ - f"--ignore=tests/providers/{providers}", - f"--ignore=tests/system/providers/{providers}", - f"--ignore=tests/integration/providers/{providers}", + f"--ignore=providers/tests/{providers}", + f"--ignore=providers/tests/system/{providers}", + f"--ignore=providers/tests/integration/{providers}", ] ) return args @@ -161,7 +161,7 @@ def get_excluded_provider_args(python_version: str) -> list[str]: "tests/operators/test_python.py::TestPythonVirtualenvOperator::test_airflow_context", "--assert=plain", ], - "Providers": ["tests/providers"], + "Providers": ["providers/tests"], "PythonVenv": [ "tests/operators/test_python.py::TestPythonVirtualenvOperator", ], @@ -182,11 +182,10 @@ def get_excluded_provider_args(python_version: str) -> list[str]: NO_RECURSE_DIRS = [ "tests/_internals", "tests/dags_with_system_exit", - "tests/test_utils", "tests/dags_corrupted", "tests/dags", - "tests/system/providers/google/cloud/dataproc/resources", - "tests/system/providers/google/cloud/gcs/resources", + "providers/tests/system/google/cloud/dataproc/resources", + "providers/tests/system/google/cloud/gcs/resources", ] @@ -255,16 +254,16 @@ def convert_test_type_to_pytest_args( providers_with_exclusions = TEST_TYPE_MAP_TO_PYTEST_ARGS["Providers"].copy() for excluded_provider in excluded_provider_list: providers_with_exclusions.append( - 
"--ignore=tests/providers/" + excluded_provider.replace(".", "/") + "--ignore=providers/tests/" + excluded_provider.replace(".", "/") ) return providers_with_exclusions if test_type.startswith(PROVIDERS_LIST_PREFIX): provider_list = test_type[len(PROVIDERS_LIST_PREFIX) : -1].split(",") providers_to_test = [] for provider in provider_list: - provider_path = "tests/providers/" + provider.replace(".", "/") - if (AIRFLOW_SOURCES_ROOT / provider_path).is_dir(): - providers_to_test.append(provider_path) + provider_path = TESTS_PROVIDERS_ROOT.joinpath(provider.replace(".", "/")) + if provider_path.is_dir(): + providers_to_test.append(provider_path.relative_to(AIRFLOW_SOURCES_ROOT).as_posix()) else: get_console().print( f"[error]Provider directory {provider_path} does not exist for {provider}. " @@ -346,9 +345,9 @@ def generate_args_for_pytest( if run_db_tests_only: args.append("--run-db-tests-only") if test_type != "System": - args.append(f"--ignore={SYSTEM_TESTS}") + args.append(f"--ignore-glob=*/{SYSTEM_TESTS}") if test_type != "Integration": - args.append(f"--ignore={INTEGRATION_TESTS}") + args.append(f"--ignore-glob=*/{INTEGRATION_TESTS}") if test_type != "Helm": # do not produce warnings output for helm tests args.append(f"--warning-output-path={warnings_file}") @@ -402,12 +401,12 @@ def convert_parallel_types_to_folders( ) ) # leave only folders, strip --pytest-args that exclude some folders with `-' prefix - folders = [arg for arg in args if arg.startswith("test")] - # remove specific provider sub-folders if "tests/providers" is already in the list + folders = [arg for arg in args if arg.startswith("test") or arg.startswith("providers/tests")] + # remove specific provider sub-folders if "providers/tests" is already in the list # This workarounds pytest issues where it will only run tests from specific subfolders # if both parent and child folders are in the list # The issue in Pytest (changed behaviour in Pytest 8.2 is tracked here # https://github.com/pytest-dev/pytest/issues/12605 - if "tests/providers" in folders: - folders = [folder for folder in folders if not folder.startswith("tests/providers/")] + if "providers/tests" in folders: + folders = [folder for folder in folders if not folder.startswith("providers/tests/")] return folders diff --git a/dev/breeze/src/airflow_breeze/utils/selective_checks.py b/dev/breeze/src/airflow_breeze/utils/selective_checks.py index 385cd6f14411..dfc24993f274 100644 --- a/dev/breeze/src/airflow_breeze/utils/selective_checks.py +++ b/dev/breeze/src/airflow_breeze/utils/selective_checks.py @@ -59,7 +59,7 @@ from airflow_breeze.utils.kubernetes_utils import get_kubernetes_python_combos from airflow_breeze.utils.packages import get_available_packages from airflow_breeze.utils.path_utils import ( - AIRFLOW_PROVIDERS_ROOT, + AIRFLOW_PROVIDERS_NS_PACKAGE, AIRFLOW_SOURCES_ROOT, DOCS_DIR, SYSTEM_TESTS_PROVIDERS_ROOT, @@ -177,7 +177,7 @@ def __hash__(self): r"^\.github/SECURITY\.rst$", r"^airflow/.*\.py$", r"^chart", - r"^providers", + r"^providers/src/", r"^tests/system", r"^CHANGELOG\.txt", r"^airflow/config_templates/config\.yml", @@ -200,9 +200,9 @@ def __hash__(self): FileGroupForCi.KUBERNETES_FILES: [ r"^chart", r"^kubernetes_tests", - r"^airflow/providers/cncf/kubernetes/", - r"^tests/providers/cncf/kubernetes/", - r"^tests/system/providers/cncf/kubernetes/", + r"^providers/src/airflow/providers/cncf/kubernetes/", + r"^providers/tests/cncf/kubernetes/", + r"^providers/tests/system/cncf/kubernetes/", ], FileGroupForCi.ALL_PYTHON_FILES: [ r".*\.py$", @@ 
-211,9 +211,9 @@ def __hash__(self): r".*\.py$", ], FileGroupForCi.ALL_PROVIDERS_PYTHON_FILES: [ - r"^airflow/providers/.*\.py$", - r"^tests/providers/.*\.py$", - r"^tests/system/providers/.*\.py$", + r"^providers/src/airflow/providers/.*\.py$", + r"^providers/tests/.*\.py$", + r"^providers/tests/system/.*\.py$", ], FileGroupForCi.ALL_DOCS_PYTHON_FILES: [ r"^docs/.*\.py$", @@ -225,6 +225,8 @@ def __hash__(self): r"^.pre-commit-config.yaml$", r"^airflow", r"^chart", + r"^providers/src/", + r"^providers/tests/", r"^tests", r"^kubernetes_tests", ], @@ -239,7 +241,7 @@ def __hash__(self): ], FileGroupForCi.TESTS_UTILS_FILES: [ r"^tests/utils/", - r"^tests/test_utils/", + r"^dev/tests_common/.*\.py$", ], } ) @@ -249,12 +251,12 @@ def __hash__(self): FileGroupForCi.ALL_AIRFLOW_PYTHON_FILES: [ r"^.*/.*_vendor/.*", r"^airflow/migrations/.*", - r"^airflow/providers/.*", + r"^providers/src/airflow/providers/.*", r"^dev/.*", r"^docs/.*", r"^provider_packages/.*", - r"^tests/providers/.*", - r"^tests/system/providers/.*", + r"^providers/tests/.*", + r"^providers/tests/system/.*", r"^tests/dags/test_imports.py", ] } @@ -286,9 +288,9 @@ def __hash__(self): r"^tests/operators/", ], SelectiveUnitTestTypes.PROVIDERS: [ - r"^airflow/providers/", - r"^tests/system/providers/", - r"^tests/providers/", + r"^providers/src/airflow/providers/", + r"^providers/tests/system/", + r"^providers/tests/", ], SelectiveUnitTestTypes.SERIALIZATION: [ r"^airflow/serialization/", @@ -308,7 +310,7 @@ def __hash__(self): def find_provider_affected(changed_file: str, include_docs: bool) -> str | None: file_path = AIRFLOW_SOURCES_ROOT / changed_file # is_relative_to is only available in Python 3.9 - we should simplify this check when we are Python 3.9+ - for provider_root in (TESTS_PROVIDERS_ROOT, SYSTEM_TESTS_PROVIDERS_ROOT, AIRFLOW_PROVIDERS_ROOT): + for provider_root in (TESTS_PROVIDERS_ROOT, SYSTEM_TESTS_PROVIDERS_ROOT, AIRFLOW_PROVIDERS_NS_PACKAGE): try: file_path.relative_to(provider_root) relative_base_path = provider_root @@ -329,7 +331,7 @@ def find_provider_affected(changed_file: str, include_docs: bool) -> str | None: if parent_dir_path == relative_base_path: break relative_path = parent_dir_path.relative_to(relative_base_path) - if (AIRFLOW_PROVIDERS_ROOT / relative_path / "provider.yaml").exists(): + if (AIRFLOW_PROVIDERS_NS_PACKAGE / relative_path / "provider.yaml").exists(): return str(parent_dir_path.relative_to(relative_base_path)).replace(os.sep, ".") # If we got here it means that some "common" files were modified. 
so we need to test all Providers return "Providers" diff --git a/dev/breeze/tests/test_packages.py b/dev/breeze/tests/test_packages.py index a3126b3f5549..e828f24cf1bb 100644 --- a/dev/breeze/tests/test_packages.py +++ b/dev/breeze/tests/test_packages.py @@ -47,7 +47,7 @@ get_suspended_provider_ids, validate_provider_info_with_runtime_schema, ) -from airflow_breeze.utils.path_utils import AIRFLOW_PROVIDERS_ROOT, AIRFLOW_SOURCES_ROOT, DOCS_ROOT +from airflow_breeze.utils.path_utils import AIRFLOW_SOURCES_ROOT, DOCS_ROOT def test_get_available_packages(): @@ -151,7 +151,9 @@ def test_find_matching_long_package_name_bad_filter(): def test_get_source_package_path(): - assert get_source_package_path("apache.hdfs") == AIRFLOW_PROVIDERS_ROOT / "apache" / "hdfs" + assert get_source_package_path("apache.hdfs") == AIRFLOW_SOURCES_ROOT.joinpath( + "providers", "src", "airflow", "providers", "apache", "hdfs" + ) def test_get_documentation_package_path(): @@ -318,9 +320,12 @@ def test_get_provider_details(): assert provider_details.provider_id == "asana" assert provider_details.full_package_name == "airflow.providers.asana" assert provider_details.pypi_package_name == "apache-airflow-providers-asana" - assert ( - provider_details.source_provider_package_path - == AIRFLOW_SOURCES_ROOT / "airflow" / "providers" / "asana" + assert provider_details.source_provider_package_path == AIRFLOW_SOURCES_ROOT.joinpath( + "providers", + "src", + "airflow", + "providers", + "asana", ) assert ( provider_details.documentation_provider_package_path == DOCS_ROOT / "apache-airflow-providers-asana" @@ -496,7 +501,7 @@ def test_provider_jinja_context(): "RELEASE_NO_LEADING_ZEROS": version, "VERSION_SUFFIX": ".rc1", "PROVIDER_DESCRIPTION": "Amazon integration (including `Amazon Web Services (AWS) `__).\n", - "CHANGELOG_RELATIVE_PATH": "../../airflow/providers/amazon", + "CHANGELOG_RELATIVE_PATH": "../../providers/src/airflow/providers/amazon", "SUPPORTED_PYTHON_VERSIONS": ["3.9", "3.10", "3.11", "3.12"], "PLUGINS": [], "MIN_AIRFLOW_VERSION": "2.8.0", diff --git a/dev/breeze/tests/test_provider_documentation.py b/dev/breeze/tests/test_provider_documentation.py index e2de9fee9fbf..db770b7856a2 100644 --- a/dev/breeze/tests/test_provider_documentation.py +++ b/dev/breeze/tests/test_provider_documentation.py @@ -217,7 +217,7 @@ def test_convert_git_changes_to_table(input: str, output: str, markdown: bool, c def test_verify_changelog_exists(): assert ( _verify_changelog_exists("asana") - == AIRFLOW_SOURCES_ROOT / "airflow" / "providers" / "asana" / "CHANGELOG.rst" + == AIRFLOW_SOURCES_ROOT / "providers" / "src" / "airflow" / "providers" / "asana" / "CHANGELOG.rst" ) diff --git a/dev/breeze/tests/test_pytest_args_for_test_types.py b/dev/breeze/tests/test_pytest_args_for_test_types.py index 7ecbbf4b5bf3..de636a9893d2 100644 --- a/dev/breeze/tests/test_pytest_args_for_test_types.py +++ b/dev/breeze/tests/test_pytest_args_for_test_types.py @@ -74,7 +74,7 @@ ), ( "Providers", - ["tests/providers"], + ["providers/tests"], False, ), ( @@ -84,26 +84,26 @@ ), ( "Providers[amazon]", - ["tests/providers/amazon"], + ["providers/tests/amazon"], False, ), ( "Providers[common.io]", - ["tests/providers/common/io"], + ["providers/tests/common/io"], False, ), ( "Providers[amazon,google,apache.hive]", - ["tests/providers/amazon", "tests/providers/google", "tests/providers/apache/hive"], + ["providers/tests/amazon", "providers/tests/google", "providers/tests/apache/hive"], False, ), ( "Providers[-amazon,google,microsoft.azure]", [ - 
"tests/providers", - "--ignore=tests/providers/amazon", - "--ignore=tests/providers/google", - "--ignore=tests/providers/microsoft/azure", + "providers/tests", + "--ignore=providers/tests/amazon", + "--ignore=providers/tests/google", + "--ignore=providers/tests/microsoft/azure", ], False, ), @@ -269,7 +269,7 @@ def test_pytest_args_for_helm_test_types(helm_test_package: str, pytest_args: li "tests/models", "tests/ti_deps", "tests/utils", - "tests/providers", + "providers/tests", ], False, ), @@ -282,7 +282,7 @@ def test_pytest_args_for_helm_test_types(helm_test_package: str, pytest_args: li "tests/models", "tests/ti_deps", "tests/utils", - "tests/providers/amazon", + "providers/tests/amazon", ], False, ), @@ -295,8 +295,8 @@ def test_pytest_args_for_helm_test_types(helm_test_package: str, pytest_args: li "tests/models", "tests/ti_deps", "tests/utils", - "tests/providers/amazon", - "tests/providers/google", + "providers/tests/amazon", + "providers/tests/google", ], False, ), @@ -309,7 +309,7 @@ def test_pytest_args_for_helm_test_types(helm_test_package: str, pytest_args: li "tests/models", "tests/ti_deps", "tests/utils", - "tests/providers", + "providers/tests", ], False, ), @@ -334,7 +334,7 @@ def test_pytest_args_for_helm_test_types(helm_test_package: str, pytest_args: li "tests/models", "tests/ti_deps", "tests/utils", - "tests/providers", + "providers/tests", ], False, ), diff --git a/dev/breeze/tests/test_run_test_args.py b/dev/breeze/tests/test_run_test_args.py index 66d57c8d7822..58e341e4dfa1 100644 --- a/dev/breeze/tests/test_run_test_args.py +++ b/dev/breeze/tests/test_run_test_args.py @@ -75,7 +75,7 @@ def test_irregular_provider_with_extra_ignore_should_be_valid_cmd(mock_run_comma _run_test( shell_params=ShellParams(test_type="Providers"), - extra_pytest_args=(f"--ignore=tests/providers/{fake_provider_name}",), + extra_pytest_args=(f"--ignore=providers/tests/{fake_provider_name}",), python_version="3.9", output=None, test_timeout=60, @@ -88,12 +88,12 @@ def test_irregular_provider_with_extra_ignore_should_be_valid_cmd(mock_run_comma arg_str = " ".join(run_cmd_call.args[0]) # The command pattern we look for is " \ - # <*other args we don't care about*> --ignore tests/providers/ \ - # --ignore tests/system/providers/ --ignore tests/integration/providers/" + # <*other args we don't care about*> --ignore providers/tests/ \ + # --ignore providers/tests/system/ --ignore providers/tests/integration/" # (the container id is simply to anchor the pattern so we know where we are starting; _run_tests should # be refactored to make arg testing easier but until then we have to regex-test the entire command string match_pattern = re.compile( - f" airflow tests/providers .+ --ignore=tests/providers/{fake_provider_name} --ignore=tests/system/providers/{fake_provider_name} --ignore=tests/integration/providers/{fake_provider_name}" + f" airflow providers/tests .+ --ignore=providers/tests/{fake_provider_name} --ignore=providers/tests/system/{fake_provider_name} --ignore=providers/tests/integration/{fake_provider_name}" ) assert match_pattern.search(arg_str) @@ -104,7 +104,7 @@ def test_primary_test_arg_is_excluded_by_extra_pytest_arg(mock_run_command): test_provider_not_skipped = "ftp" _run_test( shell_params=ShellParams(test_type=f"Providers[{test_provider},{test_provider_not_skipped}]"), - extra_pytest_args=(f"--ignore=tests/providers/{test_provider}",), + extra_pytest_args=(f"--ignore=providers/tests/{test_provider}",), python_version="3.9", output=None, test_timeout=60, @@ -116,13 +116,13 @@ def 
test_primary_test_arg_is_excluded_by_extra_pytest_arg(mock_run_command): arg_str = " ".join(run_cmd_call.args[0]) # The command pattern we look for is " --verbosity=0 \ - # <*other args we don't care about*> --ignore=tests/providers/" - # The tests/providers/http argument has been eliminated by the code that preps the args; this is a bug, + # <*other args we don't care about*> --ignore=providers/tests/" + # The providers/tests/http argument has been eliminated by the code that preps the args; this is a bug, # bc without a directory or module arg, pytest tests everything (which we don't want!) # We check "--verbosity=0" to ensure nothing is between the airflow container id and the verbosity arg, # IOW that the primary test arg is removed match_pattern = re.compile( - f"airflow tests/providers/{test_provider_not_skipped} --verbosity=0 .+ --ignore=tests/providers/{test_provider}" + f"airflow providers/tests/{test_provider_not_skipped} --verbosity=0 .+ --ignore=providers/tests/{test_provider}" ) assert match_pattern.search(arg_str) @@ -135,7 +135,7 @@ def test_test_is_skipped_if_all_are_ignored(mock_run_command): ] # "Providers[]" scans the source tree so we need to use a real provider id _run_test( shell_params=ShellParams(test_type=f"Providers[{','.join(test_providers)}]"), - extra_pytest_args=[f"--ignore=tests/providers/{provider}" for provider in test_providers], + extra_pytest_args=[f"--ignore=providers/tests/{provider}" for provider in test_providers], python_version="3.9", output=None, test_timeout=60, diff --git a/dev/breeze/tests/test_selective_checks.py b/dev/breeze/tests/test_selective_checks.py index 3b58a45ae1b9..be3922d46196 100644 --- a/dev/breeze/tests/test_selective_checks.py +++ b/dev/breeze/tests/test_selective_checks.py @@ -87,7 +87,7 @@ def assert_outputs_are_printed(expected_outputs: dict[str, str], stderr: str): print_in_color("\nOutput received:") print_in_color(received_output_as_dict) print_in_color() - assert received_value == expected_value + assert received_value == expected_value, f"Correct value for {expected_key!r}" else: print( f"\n[red]ERROR: The key '{expected_key}' missing but " @@ -320,7 +320,7 @@ def assert_outputs_are_printed(expected_outputs: dict[str, str], stderr: str): pytest.param( ( "airflow/api/file.py", - "tests/providers/postgres/file.py", + "providers/tests/postgres/file.py", ), { "affected-providers-list-as-string": "amazon common.compat common.sql fab google openlineage " @@ -353,7 +353,7 @@ def assert_outputs_are_printed(expected_outputs: dict[str, str], stderr: str): ), ( pytest.param( - ("tests/providers/apache/beam/file.py",), + ("providers/tests/apache/beam/file.py",), { "affected-providers-list-as-string": "apache.beam google", "all-python-versions": "['3.9']", @@ -410,7 +410,7 @@ def assert_outputs_are_printed(expected_outputs: dict[str, str], stderr: str): pytest.param( ( "chart/aaaa.txt", - "tests/providers/postgres/file.py", + "providers/tests/postgres/file.py", ), { "affected-providers-list-as-string": "amazon common.sql google openlineage " @@ -445,7 +445,7 @@ def assert_outputs_are_printed(expected_outputs: dict[str, str], stderr: str): ( "INTHEWILD.md", "chart/aaaa.txt", - "tests/providers/http/file.py", + "providers/tests/http/file.py", ), { "affected-providers-list-as-string": "amazon apache.livy " @@ -482,7 +482,7 @@ def assert_outputs_are_printed(expected_outputs: dict[str, str], stderr: str): ( "INTHEWILD.md", "chart/aaaa.txt", - "tests/providers/airbyte/file.py", + "providers/tests/airbyte/file.py", ), { 
"affected-providers-list-as-string": "airbyte", @@ -514,7 +514,7 @@ def assert_outputs_are_printed(expected_outputs: dict[str, str], stderr: str): ( "INTHEWILD.md", "chart/aaaa.txt", - "tests/system/utils/file.py", + "foo/other.py", ), { "affected-providers-list-as-string": None, @@ -595,7 +595,7 @@ def assert_outputs_are_printed(expected_outputs: dict[str, str], stderr: str): ) ), pytest.param( - ("airflow/providers/amazon/__init__.py",), + ("providers/src/airflow/providers/amazon/__init__.py",), { "affected-providers-list-as-string": "amazon apache.hive cncf.kubernetes " "common.compat common.sql exasol ftp google http imap microsoft.azure " @@ -623,7 +623,7 @@ def assert_outputs_are_printed(expected_outputs: dict[str, str], stderr: str): id="Providers tests run including amazon tests if amazon provider files changed", ), pytest.param( - ("tests/providers/airbyte/__init__.py",), + ("providers/tests/airbyte/__init__.py",), { "affected-providers-list-as-string": "airbyte", "all-python-versions": "['3.9']", @@ -647,7 +647,7 @@ def assert_outputs_are_printed(expected_outputs: dict[str, str], stderr: str): id="Providers tests run without amazon tests if no amazon file changed", ), pytest.param( - ("airflow/providers/amazon/file.py",), + ("providers/src/airflow/providers/amazon/file.py",), { "affected-providers-list-as-string": "amazon apache.hive cncf.kubernetes " "common.compat common.sql exasol ftp google http imap microsoft.azure " @@ -677,8 +677,8 @@ def assert_outputs_are_printed(expected_outputs: dict[str, str], stderr: str): pytest.param( ( "tests/always/test_project_structure.py", - "tests/providers/common/io/operators/__init__.py", - "tests/providers/common/io/operators/test_file_transfer.py", + "providers/tests/common/io/operators/__init__.py", + "providers/tests/common/io/operators/test_file_transfer.py", ), { "affected-providers-list-as-string": "common.compat common.io openlineage", @@ -703,9 +703,9 @@ def assert_outputs_are_printed(expected_outputs: dict[str, str], stderr: str): id="Only Always and common providers tests should run when only common.io and tests/always changed", ), pytest.param( - ("airflow/providers/standard/operators/bash.py",), + ("providers/src/airflow/providers/standard/operators/bash.py",), { - "affected-providers-list-as-string": "celery edge standard", + "affected-providers-list-as-string": "standard", "all-python-versions": "['3.9']", "all-python-versions-list-as-string": "3.9", "python-versions": "['3.9']", @@ -720,14 +720,14 @@ def assert_outputs_are_printed(expected_outputs: dict[str, str], stderr: str): "skip-pre-commits": "identity,lint-helm-chart,mypy-airflow,mypy-dev,mypy-docs,mypy-providers," "ts-compile-format-lint-ui,ts-compile-format-lint-www", "upgrade-to-newer-dependencies": "false", - "parallel-test-types-list-as-string": "Always Core Providers[celery,edge,standard] Serialization", + "parallel-test-types-list-as-string": "Always Core Providers[standard] Serialization", "needs-mypy": "true", "mypy-folders": "['providers']", }, id="Providers standard tests and Serialization tests to run when airflow bash.py changed", ), pytest.param( - ("tests/operators/bash.py",), + ("providers/tests/standard/operators/bash.py",), { "affected-providers-list-as-string": None, "all-python-versions": "['3.9']", @@ -741,12 +741,12 @@ def assert_outputs_are_printed(expected_outputs: dict[str, str], stderr: str): "run-amazon-tests": "false", "docs-build": "false", "run-kubernetes-tests": "false", - "skip-pre-commits": 
"check-provider-yaml-valid,identity,lint-helm-chart,mypy-airflow,mypy-dev,mypy-docs,mypy-providers," + "skip-pre-commits": "identity,lint-helm-chart,mypy-airflow,mypy-dev,mypy-docs,mypy-providers," "ts-compile-format-lint-ui,ts-compile-format-lint-www", "upgrade-to-newer-dependencies": "false", - "parallel-test-types-list-as-string": "Always Core Operators Serialization", + "parallel-test-types-list-as-string": "Always Core Providers[standard] Serialization", "needs-mypy": "true", - "mypy-folders": "['airflow']", + "mypy-folders": "['providers']", }, id="Force Core and Serialization tests to run when tests bash changed", ), @@ -776,6 +776,32 @@ def assert_outputs_are_printed(expected_outputs: dict[str, str], stderr: str): id="All tests should be run when tests/utils/ change", ) ), + ( + pytest.param( + ("dev/tests_common/__init__.py",), + { + "affected-providers-list-as-string": ALL_PROVIDERS_AFFECTED, + "all-python-versions": "['3.9']", + "all-python-versions-list-as-string": "3.9", + "python-versions": "['3.9']", + "python-versions-list-as-string": "3.9", + "ci-image-build": "true", + "prod-image-build": "true", + "needs-helm-tests": "true", + "run-tests": "true", + "run-amazon-tests": "true", + "docs-build": "true", + "full-tests-needed": "true", + "skip-pre-commits": "identity,mypy-airflow,mypy-dev,mypy-docs,mypy-providers", + "upgrade-to-newer-dependencies": "false", + "parallel-test-types-list-as-string": ALL_CI_SELECTIVE_TEST_TYPES, + "providers-test-types-list-as-string": ALL_PROVIDERS_SELECTIVE_TEST_TYPES, + "needs-mypy": "true", + "mypy-folders": "['airflow', 'providers', 'docs', 'dev']", + }, + id="All tests should be run when dev/tests_common/ change", + ) + ), ], ) def test_expected_output_pull_request_main( @@ -1124,7 +1150,7 @@ def test_full_test_needed_when_scripts_changes(files: tuple[str, ...], expected_ ), ( pytest.param( - ("INTHEWILD.md", "tests/providers/asana.py"), + ("INTHEWILD.md", "providers/tests/asana.py"), ("full tests needed",), "v2-7-stable", { @@ -1200,7 +1226,7 @@ def test_expected_output_full_tests_needed( pytest.param( ( "chart/aaaa.txt", - "tests/providers/google/file.py", + "providers/tests/google/file.py", ), { "affected-providers-list-as-string": "amazon apache.beam apache.cassandra cncf.kubernetes " @@ -1229,7 +1255,7 @@ def test_expected_output_full_tests_needed( ( "airflow/cli/test.py", "chart/aaaa.txt", - "tests/providers/google/file.py", + "providers/tests/google/file.py", ), { "affected-providers-list-as-string": "amazon apache.beam apache.cassandra " @@ -1257,7 +1283,7 @@ def test_expected_output_full_tests_needed( pytest.param( ( "airflow/file.py", - "tests/providers/google/file.py", + "providers/tests/google/file.py", ), { "affected-providers-list-as-string": ALL_PROVIDERS_AFFECTED, @@ -1347,7 +1373,7 @@ def test_expected_output_pull_request_v2_7( ( "airflow/cli/test.py", "chart/aaaa.txt", - "tests/providers/google/file.py", + "providers/tests/google/file.py", ), { "affected-providers-list-as-string": "amazon apache.beam apache.cassandra " @@ -1619,7 +1645,7 @@ def test_no_commit_provided_trigger_full_build_for_any_event_type(github_event): id="pyproject.toml changed but no dependency change", ), pytest.param( - ("airflow/providers/microsoft/azure/provider.yaml",), + ("providers/src/airflow/providers/microsoft/azure/provider.yaml",), { "upgrade-to-newer-dependencies": "false", }, @@ -1677,9 +1703,9 @@ def test_upgrade_to_newer_dependencies( id="Google provider docs changed", ), pytest.param( - 
("airflow/providers/common/sql/common_sql_python.py",), + ("providers/src/airflow/providers/common/sql/common_sql_python.py",), { - "docs-list-as-string": "apache-airflow amazon apache.drill apache.druid apache.hive " + "docs-list-as-string": "amazon apache.drill apache.druid apache.hive " "apache.impala apache.pinot common.sql databricks elasticsearch " "exasol google jdbc microsoft.mssql mysql odbc openlineage " "oracle pgvector postgres presto slack snowflake sqlite teradata trino vertica ydb", @@ -1719,8 +1745,8 @@ def test_upgrade_to_newer_dependencies( id="Only Airflow docs changed", ), pytest.param( - ("airflow/providers/celery/file.py",), - {"docs-list-as-string": "apache-airflow celery cncf.kubernetes standard"}, + ("providers/src/airflow/providers/celery/file.py",), + {"docs-list-as-string": "celery cncf.kubernetes"}, id="Celery python files changed", ), pytest.param( @@ -2195,7 +2221,7 @@ def test_provider_compatibility_checks(labels: tuple[str, ...], expected_outputs id="Airflow mypy checks on airflow files with model changes.", ), pytest.param( - ("airflow/providers/a_file.py",), + ("providers/src/airflow/providers/a_file.py",), { "needs-mypy": "true", "mypy-folders": "['providers']", diff --git a/dev/example_dags/update_example_dags_paths.py b/dev/example_dags/update_example_dags_paths.py index 2104721d1bb2..ac8cf4a0feec 100755 --- a/dev/example_dags/update_example_dags_paths.py +++ b/dev/example_dags/update_example_dags_paths.py @@ -38,10 +38,10 @@ EXAMPLE_DAGS_URL_MATCHER = re.compile( - r"^(.*)(https://github.com/apache/airflow/tree/(.*)/airflow/providers/(.*)/example_dags)(/?\".*)$" + r"^(.*)(https://github.com/apache/airflow/tree/(.*)/providers/src/airflow/providers/(.*)/example_dags)(/?\".*)$" ) SYSTEM_TESTS_URL_MATCHER = re.compile( - r"^(.*)(https://github.com/apache/airflow/tree/(.*)/tests/system/providers/(.*))(/?\".*)$" + r"^(.*)(https://github.com/apache/airflow/tree/(.*)/providers/tests/system/(.*))(/?\".*)$" ) @@ -67,11 +67,11 @@ def replace_match(file: str, line: str, provider: str, version: str) -> str | No continue system_tests_url = ( f"https://github.com/apache/airflow/tree/providers-{provider}/{version}" - f"/tests/system/providers/{url_path_to_dir}" + f"/providers/tests/system/{url_path_to_dir}" ) example_dags_url = ( f"https://github.com/apache/airflow/tree/providers-{provider}/{version}" - f"/airflow/providers/{url_path_to_dir}/example_dags" + f"/providers/src/airflow/providers/{url_path_to_dir}/example_dags" ) if check_if_url_exists(system_tests_url) and index == 1: new_line = re.sub(matcher, r"\1" + system_tests_url + r"\5", line) diff --git a/dev/perf/scheduler_dag_execution_timing.py b/dev/perf/scheduler_dag_execution_timing.py index b11c73073df9..fc5c21bda7aa 100755 --- a/dev/perf/scheduler_dag_execution_timing.py +++ b/dev/perf/scheduler_dag_execution_timing.py @@ -107,7 +107,7 @@ def get_executor_under_test(dotted_path): from airflow.executors.executor_loader import ExecutorLoader if dotted_path == "MockExecutor": - from tests.test_utils.mock_executor import MockExecutor as executor + from dev.tests_common.test_utils.mock_executor import MockExecutor as executor else: executor = ExecutorLoader.load_executor(dotted_path) diff --git a/scripts/ci/pre_commit/check_providers_init.py b/dev/tests_common/__init__.py old mode 100755 new mode 100644 similarity index 62% rename from scripts/ci/pre_commit/check_providers_init.py rename to dev/tests_common/__init__.py index 33def71253f3..bf2b7d4f3e2a --- a/scripts/ci/pre_commit/check_providers_init.py +++ 
b/dev/tests_common/__init__.py @@ -1,4 +1,3 @@ -#!/usr/bin/env python # # Licensed to the Apache Software Foundation (ASF) under one # or more contributor license agreements. See the NOTICE file @@ -18,14 +17,14 @@ # under the License. from __future__ import annotations -import sys +import os from pathlib import Path -AIRFLOW_SOURCES = Path(__file__).parents[3] -PROVIDERS_INIT_FILE = AIRFLOW_SOURCES / "airflow" / "providers" / "__init__.py" - -print(f"Checking if {PROVIDERS_INIT_FILE} exists.") -if PROVIDERS_INIT_FILE.exists(): - print(f"\033[0;31mERROR: {PROVIDERS_INIT_FILE} file should not exist. Deleting it.\033[0m\n") - PROVIDERS_INIT_FILE.unlink() - sys.exit(1) +# This constant is set to True if tests are run with Airflow installed from packages rather than running +# the tests within Airflow sources. While most tests in CI are run using Airflow sources, there are +# also compatibility tests that only use the `tests` package and run against installed packages of Airflow +# for supported Airflow versions. +RUNNING_TESTS_AGAINST_AIRFLOW_PACKAGES = ( + "USE_AIRFLOW_VERSION" in os.environ + or not (Path(__file__).parents[2] / "airflow" / "__init__.py").exists() +) diff --git a/airflow/providers/airbyte/sensors/__init__.py b/dev/tests_common/_internals/__init__.py similarity index 100% rename from airflow/providers/airbyte/sensors/__init__.py rename to dev/tests_common/_internals/__init__.py diff --git a/tests/_internals/capture_warnings.py b/dev/tests_common/_internals/capture_warnings.py similarity index 100% rename from tests/_internals/capture_warnings.py rename to dev/tests_common/_internals/capture_warnings.py diff --git a/tests/_internals/forbidden_warnings.py b/dev/tests_common/_internals/forbidden_warnings.py similarity index 90% rename from tests/_internals/forbidden_warnings.py rename to dev/tests_common/_internals/forbidden_warnings.py index 324d2ff6f982..ce2487851066 100644 --- a/tests/_internals/forbidden_warnings.py +++ b/dev/tests_common/_internals/forbidden_warnings.py @@ -17,32 +17,43 @@ from __future__ import annotations +import os +from collections.abc import Sequence from pathlib import Path import pytest import yaml -TESTS_DIR = Path(__file__).parents[1].resolve() - class ForbiddenWarningsPlugin: """Internal plugin for restricting warnings during the tests run.""" node_key: str = "forbidden_warnings_node" - deprecations_ignore: Path = (TESTS_DIR / "deprecations_ignore.yml").resolve(strict=True) + deprecations_ignore: Sequence[str | os.PathLike] def __init__(self, config: pytest.Config, forbidden_warnings: tuple[str, ...]): + # Set by a pytest_configure hook in conftest + deprecations_ignore = config.inicfg["airflow_deprecations_ignore"] + if isinstance(deprecations_ignore, (str, os.PathLike)): + self.deprecations_ignore = [deprecations_ignore] + else: + self.deprecations_ignore = deprecations_ignore + excluded_cases = { # Skip: Integration and System Tests "tests/integration/", "tests/system/", + "providers/tests/integration/", + "providers/tests/system/", # Skip: DAGs for tests "tests/dags/", "tests/dags_corrupted/", "tests/dags_with_system_exit/", } - with self.deprecations_ignore.open() as fp: - excluded_cases.update(yaml.safe_load(fp)) + for path in self.deprecations_ignore: + path = Path(path).resolve() + with path.open() as fp: + excluded_cases.update(yaml.safe_load(fp)) self.config = config self.forbidden_warnings = forbidden_warnings diff --git a/dev/tests_common/pyproject.toml b/dev/tests_common/pyproject.toml new file mode 100644 index 
000000000000..092ad7c7c5f5 --- /dev/null +++ b/dev/tests_common/pyproject.toml @@ -0,0 +1,38 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. + +[build-system] +requires = ["hatchling", "hatch-vcs"] +build-backend = "hatchling.build" + +[project] +name = "airflow-dev-tests-common" +version = "0.1.0" +description = "" +classifiers = [ + "Private :: Do Not Upload", +] + +[tool.hatch.publish.index] +# Let's make doubly sure this never goes to PyPI +disable = true + +[tool.hatch.build.targets.wheel] +include = ["**/*.py"] + +[tool.hatch.build.targets.wheel.sources] +"" = "dev/tests_common" diff --git a/dev/tests_common/pytest_plugin.py b/dev/tests_common/pytest_plugin.py new file mode 100644 index 000000000000..5694fe1b3403 --- /dev/null +++ b/dev/tests_common/pytest_plugin.py @@ -0,0 +1,1436 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. 
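Aside: the `dev/tests_common/__init__.py` added earlier in this diff decides between "running from sources" and "running against installed packages" purely from the environment and the filesystem layout. A minimal, runnable sketch of that detection follows; it is an illustration only, and the `module_file` argument is a hypothetical stand-in for the real `__file__` of `dev/tests_common/__init__.py`:

from __future__ import annotations

import os
from pathlib import Path


def running_against_packages(module_file: str) -> bool:
    # Mirrors the two signals used by RUNNING_TESTS_AGAINST_AIRFLOW_PACKAGES:
    # an explicit USE_AIRFLOW_VERSION override, or the absence of
    # airflow/__init__.py two directories above dev/tests_common/__init__.py
    # (i.e. at the root of a source checkout).
    repo_root = Path(module_file).resolve().parents[2]
    return "USE_AIRFLOW_VERSION" in os.environ or not (repo_root / "airflow" / "__init__.py").exists()


# Usage sketch: in the real module this is evaluated once at import time.
print(running_against_packages("dev/tests_common/__init__.py"))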
+ +from __future__ import annotations + +import json +import os +import platform +import re +import subprocess +import sys +from contextlib import ExitStack, suppress +from datetime import datetime, timedelta, timezone +from pathlib import Path +from typing import TYPE_CHECKING + +import pytest +import time_machine + +if TYPE_CHECKING: + from itsdangerous import URLSafeSerializer + + from dev.tests_common._internals.capture_warnings import CaptureWarningsPlugin # noqa: F401 + from dev.tests_common._internals.forbidden_warnings import ForbiddenWarningsPlugin # noqa: F401 + +# https://docs.pytest.org/en/stable/reference/reference.html#stash +capture_warnings_key = pytest.StashKey["CaptureWarningsPlugin"]() +forbidden_warnings_key = pytest.StashKey["ForbiddenWarningsPlugin"]() + +keep_env_variables = "--keep-env-variables" in sys.argv + +if not keep_env_variables: + # Clear all Environment Variables that might have side effect, + # For example, defined in /files/airflow-breeze-config/variables.env + _AIRFLOW_CONFIG_PATTERN = re.compile(r"^AIRFLOW__(.+)__(.+)$") + _KEEP_CONFIGS_SETTINGS: dict[str, dict[str, set[str]]] = { + # Keep always these configurations + "always": { + "database": {"sql_alchemy_conn"}, + "core": {"sql_alchemy_conn"}, + "celery": {"result_backend", "broker_url"}, + }, + # Keep per enabled integrations + "celery": {"celery": {"*"}, "celery_broker_transport_options": {"*"}}, + "kerberos": {"kerberos": {"*"}}, + } + if os.environ.get("RUN_TESTS_WITH_DATABASE_ISOLATION", "false").lower() == "true": + _KEEP_CONFIGS_SETTINGS["always"].update( + { + "core": { + "internal_api_url", + "fernet_key", + "database_access_isolation", + "internal_api_secret_key", + "internal_api_clock_grace", + }, + } + ) + _ENABLED_INTEGRATIONS = {e.split("_", 1)[-1].lower() for e in os.environ if e.startswith("INTEGRATION_")} + _KEEP_CONFIGS: dict[str, set[str]] = {} + for keep_settings_key in ("always", *_ENABLED_INTEGRATIONS): + if keep_settings := _KEEP_CONFIGS_SETTINGS.get(keep_settings_key): + for section, options in keep_settings.items(): + if section not in _KEEP_CONFIGS: + _KEEP_CONFIGS[section] = options + else: + _KEEP_CONFIGS[section].update(options) + for env_key in os.environ.copy(): + if m := _AIRFLOW_CONFIG_PATTERN.match(env_key): + section, option = m.group(1).lower(), m.group(2).lower() + if not (ko := _KEEP_CONFIGS.get(section)) or not ("*" in ko or option in ko): + del os.environ[env_key] + +SUPPORTED_DB_BACKENDS = ("sqlite", "postgres", "mysql") + +# A bit of a Hack - but we need to check args before they are parsed by pytest in order to +# configure the DB before Airflow gets initialized (which happens at airflow import time). 
+# Using env variables also handles the case, when python-xdist is used - python-xdist spawns separate +# processes and does not pass all args to them (it's done via env variables) so we are doing the +# same here and detect whether `--skip-db-tests` or `--run-db-tests-only` is passed to pytest +# and set env variables so the processes spawned by python-xdist can read the status from there +skip_db_tests = "--skip-db-tests" in sys.argv or os.environ.get("_AIRFLOW_SKIP_DB_TESTS") == "true" +run_db_tests_only = ( + "--run-db-tests-only" in sys.argv or os.environ.get("_AIRFLOW_RUN_DB_TESTS_ONLY") == "true" +) + +if skip_db_tests: + if run_db_tests_only: + raise Exception("You cannot specify both --skip-db-tests and --run-db-tests-only together") + # Make sure sqlalchemy will not be usable for pure unit tests even if initialized + os.environ["AIRFLOW__CORE__SQL_ALCHEMY_CONN"] = "bad_schema:///" + os.environ["AIRFLOW__DATABASE__SQL_ALCHEMY_CONN"] = "bad_schema:///" + os.environ["_IN_UNIT_TESTS"] = "true" + # Set it here to pass the flag to python-xdist spawned processes + os.environ["_AIRFLOW_SKIP_DB_TESTS"] = "true" + +if run_db_tests_only: + # Set it here to pass the flag to python-xdist spawned processes + os.environ["_AIRFLOW_RUN_DB_TESTS_ONLY"] = "true" + +_airflow_sources = os.getenv("AIRFLOW_SOURCES", None) +AIRFLOW_SOURCES_ROOT_DIR = ( + Path(_airflow_sources) if _airflow_sources else Path(__file__).parents[2] +).resolve() +AIRFLOW_TESTS_DIR = AIRFLOW_SOURCES_ROOT_DIR / "tests" + +os.environ["AIRFLOW__CORE__PLUGINS_FOLDER"] = os.fspath(AIRFLOW_TESTS_DIR / "plugins") +os.environ["AIRFLOW__CORE__DAGS_FOLDER"] = os.fspath(AIRFLOW_TESTS_DIR / "dags") +os.environ["AIRFLOW__CORE__UNIT_TEST_MODE"] = "True" +os.environ["AWS_DEFAULT_REGION"] = os.environ.get("AWS_DEFAULT_REGION") or "us-east-1" +os.environ["CREDENTIALS_DIR"] = os.environ.get("CREDENTIALS_DIR") or "/files/airflow-breeze-config/keys" +os.environ["AIRFLOW_ENABLE_AIP_44"] = os.environ.get("AIRFLOW_ENABLE_AIP_44") or "true" + +if platform.system() == "Darwin": + # mocks from unittest.mock work correctly in subprocesses only if they are created by "fork" method + # but macOS uses "spawn" by default + os.environ["AIRFLOW__CORE__MP_START_METHOD"] = "fork" + + +@pytest.fixture +def reset_db(): + """Resets Airflow db.""" + + from airflow.utils import db + + db.resetdb() + + +ALLOWED_TRACE_SQL_COLUMNS = ["num", "time", "trace", "sql", "parameters", "count"] + + +@pytest.fixture(autouse=True) +def trace_sql(request): + from dev.tests_common.test_utils.perf.perf_kit.sqlalchemy import ( # isort: skip + count_queries, + trace_queries, + ) + + """Displays queries from the tests to console.""" + trace_sql_option = request.config.option.trace_sql + if not trace_sql_option: + yield + return + + terminal_reporter = request.config.pluginmanager.getplugin("terminalreporter") + # if no terminal reporter plugin is present, nothing we can do here; + # this can happen when this function executes in a worker node + # when using pytest-xdist, for example + if terminal_reporter is None: + yield + return + + columns = [col.strip() for col in trace_sql_option.split(",")] + + def pytest_print(text): + return terminal_reporter.write_line(text) + + with ExitStack() as exit_stack: + if columns == ["num"]: + # It is very unlikely that the user wants to display only numbers, but probably + # the user just wants to count the queries. 
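+ # (Illustration: invoking pytest with "--trace-sql=num" lands on this branch and prints only a per-test query count.)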
+ exit_stack.enter_context(count_queries(print_fn=pytest_print)) + elif any(c in columns for c in ["time", "trace", "sql", "parameters"]): + exit_stack.enter_context( + trace_queries( + display_num="num" in columns, + display_time="time" in columns, + display_trace="trace" in columns, + display_sql="sql" in columns, + display_parameters="parameters" in columns, + print_fn=pytest_print, + ) + ) + + yield + + +@pytest.fixture(autouse=True, scope="session") +def set_db_isolation_mode(): + if os.environ.get("RUN_TESTS_WITH_DATABASE_ISOLATION", "false").lower() == "true": + from airflow.api_internal.internal_api_call import InternalApiConfig + + InternalApiConfig.set_use_internal_api("tests", allow_tests_to_use_db=True) + + +def skip_if_database_isolation_mode(item): + if os.environ.get("RUN_TESTS_WITH_DATABASE_ISOLATION", "false").lower() == "true": + for _ in item.iter_markers(name="skip_if_database_isolation_mode"): + pytest.skip("This test is skipped because it is not allowed in database isolation mode.") + + +def pytest_addoption(parser: pytest.Parser): + """Add options parser for custom plugins.""" + group = parser.getgroup("airflow") + group.addoption( + "--with-db-init", + action="store_true", + dest="db_init", + help="Forces database initialization before tests", + ) + group.addoption( + "--integration", + action="append", + dest="integration", + metavar="INTEGRATIONS", + help="only run tests matching integration specified: " + "[cassandra,kerberos,mongo,celery,statsd,trino]. ", + ) + group.addoption( + "--keep-env-variables", + action="store_true", + dest="keep_env_variables", + help="do not clear environment variables that might have side effect while running tests", + ) + group.addoption( + "--skip-db-tests", + action="store_true", + dest="skip_db_tests", + help="skip tests that require database", + ) + group.addoption( + "--run-db-tests-only", + action="store_true", + dest="run_db_tests_only", + help="only run tests requiring database", + ) + group.addoption( + "--backend", + action="store", + dest="backend", + metavar="BACKEND", + help="only run tests matching the backend: [sqlite,postgres,mysql].", + ) + group.addoption( + "--system", + action="append", + dest="system", + metavar="SYSTEMS", + help="only run tests matching the system specified [google.cloud, google.marketing_platform]", + ) + group.addoption( + "--include-long-running", + action="store_true", + dest="include_long_running", + help="Includes long running tests (marked with long_running marker). They are skipped by default.", + ) + group.addoption( + "--include-quarantined", + action="store_true", + dest="include_quarantined", + help="Includes quarantined tests (marked with quarantined marker). They are skipped by default.", + ) + group.addoption( + "--exclude-virtualenv-operator", + action="store_true", + dest="exclude_virtualenv_operator", + help="Excludes virtualenv operators tests (marked with virtualenv_test marker).", + ) + group.addoption( + "--exclude-external-python-operator", + action="store_true", + dest="exclude_external_python_operator", + help="Excludes external python operator tests (marked with external_python_test marker).", + ) + allowed_trace_sql_columns_list = ",".join(ALLOWED_TRACE_SQL_COLUMNS) + group.addoption( + "--trace-sql", + action="store", + dest="trace_sql", + help=( + "Trace SQL statements. As an argument, you must specify the columns to be " + f"displayed as a comma-separated list. 
Supported values: [{allowed_trace_sql_columns_list}]" + ), + metavar="COLUMNS", + ) + group.addoption( + "--no-db-cleanup", + action="store_false", + dest="db_cleanup", + help="Disable DB clear before each test module.", + ) + group.addoption( + "--disable-forbidden-warnings", + action="store_true", + dest="disable_forbidden_warnings", + help="Disable raising an error if forbidden warnings detected.", + ) + group.addoption( + "--disable-capture-warnings", + action="store_true", + dest="disable_capture_warnings", + help="Disable internal capture warnings.", + ) + group.addoption( + "--warning-output-path", + action="store", + dest="warning_output_path", + metavar="PATH", + help=( + "Path for resulting captured warnings. Absolute or relative to the `tests` directory. " + "If not provided or environment variable `CAPTURE_WARNINGS_OUTPUT` not set " + "then 'warnings.txt' will be used." + ), + ) + parser.addini( + name="forbidden_warnings", + type="linelist", + help="List of internal Airflow warnings which are prohibited during tests execution.", + ) + + +@pytest.fixture(autouse=True, scope="session") +def initialize_airflow_tests(request): + """Helper that sets up the Airflow testing environment.""" + print(" AIRFLOW ".center(60, "=")) + + from dev.tests_common.test_utils.db import initial_db_init + + # Set up test environment for breeze + home = os.path.expanduser("~") + airflow_home = os.environ.get("AIRFLOW_HOME") or os.path.join(home, "airflow") + + print(f"Home of the user: {home}\nAirflow home {airflow_home}") + + # Initialize Airflow db if required + lock_file = os.path.join(airflow_home, ".airflow_db_initialised") + if not skip_db_tests: + if request.config.option.db_init: + print("Initializing the DB - forced with --with-db-init switch.") + initial_db_init() + elif not os.path.exists(lock_file): + print( + "Initializing the DB - first time after entering the container.\n" + "You can force re-initialization of the database by adding --with-db-init switch to run-tests." + ) + initial_db_init() + # Create the lock file marking the DB as initialized + with open(lock_file, "w+"): + pass + else: + print( + "Skipping initialization of the DB as it was initialized already.\n" + "You can re-initialize the database by adding --with-db-init flag when running tests." + ) + integration_kerberos = os.environ.get("INTEGRATION_KERBEROS") + if integration_kerberos == "true": + # Initialize kerberos + kerberos = os.environ.get("KRB5_KTNAME") + if kerberos: + subprocess.check_call(["kinit", "-kt", kerberos, "bob@EXAMPLE.COM"]) + else: + print("Kerberos enabled! Please set the KRB5_KTNAME environment variable") + sys.exit(1) + + +def pytest_configure(config: pytest.Config) -> None: + # Ensure that the airflow sources dir is at the end of the sys path if it's not already there. 
Needed to + # run imports from `providers/tests/` + desired = AIRFLOW_SOURCES_ROOT_DIR.as_posix() + for path in sys.path: + if path == desired: + break + else: + sys.path.append(desired) + + if (backend := config.getoption("backend", default=None)) and backend not in SUPPORTED_DB_BACKENDS: + msg = ( + f"Provided DB backend {backend!r} not supported, " + f"expected one of: {', '.join(map(repr, SUPPORTED_DB_BACKENDS))}" + ) + pytest.exit(msg, returncode=6) + + config.addinivalue_line("markers", "integration(name): mark test to run with named integration") + config.addinivalue_line("markers", "backend(name): mark test to run with named backend") + config.addinivalue_line("markers", "system(name): mark test to run with named system") + config.addinivalue_line("markers", "platform(name): mark test to run with specific platform/environment") + config.addinivalue_line("markers", "long_running: mark tests that run for a long time (many minutes)") + config.addinivalue_line( "markers", "quarantined: mark tests that are in quarantine (i.e. flaky, need to be isolated and fixed)" ) + config.addinivalue_line( "markers", "credential_file(name): mark tests that require credential file in CREDENTIALS_DIR" ) + config.addinivalue_line( "markers", "need_serialized_dag: mark tests that require dags in serialized form to be present" ) + config.addinivalue_line( "markers", "db_test: mark tests that require database to be present", ) + config.addinivalue_line( "markers", "non_db_test_override: you can mark individual tests with this marker to override the db_test marker", ) + config.addinivalue_line( "markers", "virtualenv_operator: virtualenv operator tests are 'long', we should run them separately", ) + config.addinivalue_line( "markers", "external_python_operator: external python operator tests are 'long', we should run them separately", ) + config.addinivalue_line("markers", "enable_redact: do not mock redact secret masker") + config.addinivalue_line("markers", "skip_if_database_isolation_mode: skip if DB isolation is enabled") + + os.environ["_AIRFLOW__SKIP_DATABASE_EXECUTOR_COMPATIBILITY_CHECK"] = "1" + + # Set up internal warnings plugins + if "ignore" in sys.warnoptions: + config.option.disable_forbidden_warnings = True + config.option.disable_capture_warnings = True + if not config.pluginmanager.get_plugin("warnings"): + # Internal forbidden warnings plugin depends on builtin pytest warnings plugin + config.option.disable_forbidden_warnings = True + + forbidden_warnings: list[str] | None = config.getini("forbidden_warnings") + if not config.option.disable_forbidden_warnings and forbidden_warnings: + from dev.tests_common._internals.forbidden_warnings import ForbiddenWarningsPlugin + + forbidden_warnings_plugin = ForbiddenWarningsPlugin( config=config, forbidden_warnings=tuple(map(str.strip, forbidden_warnings)), ) + config.pluginmanager.register(forbidden_warnings_plugin) + config.stash[forbidden_warnings_key] = forbidden_warnings_plugin + + if not config.option.disable_capture_warnings: + from dev.tests_common._internals.capture_warnings import CaptureWarningsPlugin + + capture_warnings_plugin = CaptureWarningsPlugin( config=config, output_path=config.getoption("warning_output_path", default=None) ) + config.pluginmanager.register(capture_warnings_plugin) + config.stash[capture_warnings_key] = capture_warnings_plugin + + +def pytest_unconfigure(config: pytest.Config) -> None: + os.environ.pop("_AIRFLOW__SKIP_DATABASE_EXECUTOR_COMPATIBILITY_CHECK", None) + if 
forbidden_warnings_plugin := config.stash.get(forbidden_warnings_key, None): + del config.stash[forbidden_warnings_key] + config.pluginmanager.unregister(forbidden_warnings_plugin) + if capture_warnings_plugin := config.stash.get(capture_warnings_key, None): + del config.stash[capture_warnings_key] + config.pluginmanager.unregister(capture_warnings_plugin) + + +def skip_if_not_marked_with_integration(selected_integrations, item): + for marker in item.iter_markers(name="integration"): + integration_name = marker.args[0] + if integration_name in selected_integrations or "all" in selected_integrations: + return + pytest.skip( + f"The test is skipped because it does not have the right integration marker. " + f"Only tests marked with pytest.mark.integration(INTEGRATION) are run with INTEGRATION " + f"being one of {selected_integrations}. {item}" + ) + + +def skip_if_not_marked_with_backend(selected_backend, item): + for marker in item.iter_markers(name="backend"): + backend_names = marker.args + if selected_backend in backend_names: + return + pytest.skip( + f"The test is skipped because it does not have the right backend marker. " + f"Only tests marked with pytest.mark.backend('{selected_backend}') are run: {item}" + ) + + +def skip_if_platform_doesnt_match(marker): + allowed_platforms = ("linux", "breeze") + if not (args := marker.args): + pytest.fail(f"No platform specified, expected one of: {', '.join(map(repr, allowed_platforms))}") + elif not all(a in allowed_platforms for a in args): + pytest.fail( + f"Allowed platforms {', '.join(map(repr, allowed_platforms))}; " + f"but got: {', '.join(map(repr, args))}" + ) + if "linux" in args: + if not sys.platform.startswith("linux"): + pytest.skip("Test expected to run on Linux platform.") + if "breeze" in args: + if not os.path.isfile("/.dockerenv") or os.environ.get("BREEZE", "").lower() != "true": + pytest.skip( + "Test expected to run inside the Airflow Breeze container. " + "Maybe because it is too dangerous to run it outside." + ) + + +def skip_if_not_marked_with_system(selected_systems, item): + for marker in item.iter_markers(name="system"): + systems_name = marker.args[0] + if systems_name in selected_systems or "all" in selected_systems: + return + pytest.skip( + f"The test is skipped because it does not have the right system marker. " + f"Only tests marked with pytest.mark.system(SYSTEM) are run with SYSTEM " + f"being one of {selected_systems}. {item}" + ) + + +def skip_system_test(item): + for marker in item.iter_markers(name="system"): + pytest.skip( + f"The test is skipped because it has the system marker. System tests are only run when " + f"the --system flag with the right system ({marker.args[0]}) is passed to pytest. {item}" + ) + + +def skip_long_running_test(item): + for _ in item.iter_markers(name="long_running"): + pytest.skip( + f"The test is skipped because it has the long_running marker " + f"and the --include-long-running flag is not passed to pytest. {item}" + ) + + +def skip_quarantined_test(item): + for _ in item.iter_markers(name="quarantined"): + pytest.skip( + f"The test is skipped because it has the quarantined marker " + f"and the --include-quarantined flag is not passed to pytest. {item}" + ) + + +def skip_virtualenv_operator_test(item): + for _ in item.iter_markers(name="virtualenv_operator"): + pytest.skip( + f"The test is skipped because it has the virtualenv_operator marker " + f"and the --exclude-virtualenv-operator flag is passed to pytest. {item}" + ) + + +def skip_external_python_operator_test(item): + for _ in item.iter_markers(name="external_python_operator"): + pytest.skip( + f"The test is skipped because it has the external_python_operator marker " + f"and the --exclude-external-python-operator flag is passed to pytest. {item}" + ) + + +def skip_db_test(item): + if next(item.iter_markers(name="db_test"), None): + if next(item.iter_markers(name="non_db_test_override"), None): + # non_db_test can override the db_test set for example on module or class level + return + else: + pytest.skip( + f"The test is skipped as it is a DB test " + f"and the --skip-db-tests flag is passed to pytest. {item}" + ) + if next(item.iter_markers(name="backend"), None): + # also automatically skip tests marked with `backend` marker as they are implicitly + # db tests + pytest.skip( + f"The test is skipped as it is a DB test " + f"and the --skip-db-tests flag is passed to pytest. {item}" + ) + + +def only_run_db_test(item): + if next(item.iter_markers(name="db_test"), None) and not next( + item.iter_markers(name="non_db_test_override"), None + ): + # non_db_test at individual level can override the db_test set for example on module or class level + return + else: + if next(item.iter_markers(name="backend"), None): + # Also do not skip the tests marked with `backend` marker - as it is implicitly a db test + return + pytest.skip( + f"The test is skipped as it is not a DB test " + f"and the --run-db-tests-only flag is passed to pytest. {item}" + ) + + +def skip_if_integration_disabled(marker, item): + integration_name = marker.args[0] + environment_variable_name = "INTEGRATION_" + integration_name.upper() + environment_variable_value = os.environ.get(environment_variable_name) + if not environment_variable_value or environment_variable_value != "true": + pytest.skip( + f"The test requires {integration_name} integration started and " + f"{environment_variable_name} environment variable to be set to true (it is '{environment_variable_value}')." + f" It can be set by specifying '--integration {integration_name}' at breeze startup" + f": {item}" + ) + + +def skip_if_wrong_backend(marker: pytest.Mark, item: pytest.Item) -> None: + if not (backend_names := marker.args): + reason = ( + "`pytest.mark.backend` expects to get at least one of the following backends: " + f"{', '.join(map(repr, SUPPORTED_DB_BACKENDS))}." + ) + pytest.fail(reason) + elif unsupported_backends := list(filter(lambda b: b not in SUPPORTED_DB_BACKENDS, backend_names)): + reason = ( + "Airflow tests support only the following backends in `pytest.mark.backend` marker: " + f"{', '.join(map(repr, SUPPORTED_DB_BACKENDS))}, " + f"but got {', '.join(map(repr, unsupported_backends))}." + ) + pytest.fail(reason) + + env_name = "BACKEND" + if not (backend := os.environ.get(env_name)) or backend not in backend_names: + reason = ( + f"The test {item.nodeid!r} requires one of {', '.join(map(repr, backend_names))} backend started " + f"and {env_name!r} environment variable to be set (currently it is set to {backend!r}). " + f"It can be set by specifying the backend at breeze startup." 
+ ) + pytest.skip(reason) + + +def skip_if_credential_file_missing(item): + for marker in item.iter_markers(name="credential_file"): + credential_file = marker.args[0] + credential_path = os.path.join(os.environ.get("CREDENTIALS_DIR"), credential_file) + if not os.path.exists(credential_path): + pytest.skip(f"The test requires credential file {credential_path}: {item}") + + +def pytest_runtest_setup(item): + selected_integrations_list = item.config.option.integration + selected_systems_list = item.config.option.system + + include_long_running = item.config.option.include_long_running + include_quarantined = item.config.option.include_quarantined + exclude_virtualenv_operator = item.config.option.exclude_virtualenv_operator + exclude_external_python_operator = item.config.option.exclude_external_python_operator + + for marker in item.iter_markers(name="integration"): + skip_if_integration_disabled(marker, item) + if selected_integrations_list: + skip_if_not_marked_with_integration(selected_integrations_list, item) + if selected_systems_list: + skip_if_not_marked_with_system(selected_systems_list, item) + else: + skip_system_test(item) + for marker in item.iter_markers(name="platform"): + skip_if_platform_doesnt_match(marker) + for marker in item.iter_markers(name="backend"): + skip_if_wrong_backend(marker, item) + skip_if_database_isolation_mode(item) + selected_backend = item.config.option.backend + if selected_backend: + skip_if_not_marked_with_backend(selected_backend, item) + if not include_long_running: + skip_long_running_test(item) + if not include_quarantined: + skip_quarantined_test(item) + if exclude_virtualenv_operator: + skip_virtualenv_operator_test(item) + if exclude_external_python_operator: + skip_external_python_operator_test(item) + if skip_db_tests: + skip_db_test(item) + if run_db_tests_only: + only_run_db_test(item) + skip_if_credential_file_missing(item) + + +@pytest.fixture +def frozen_sleep(monkeypatch): + """Use time-machine to "stub" sleep. + + This means the ``sleep()`` takes no time, but ``datetime.now()`` appears to move forwards. + + If your module under test does ``import time`` and then ``time.sleep``: + + .. code-block:: python + + def test_something(frozen_sleep): + my_mod.fn_under_test() + + If your module under test does ``from time import sleep`` then you will + have to mock that sleep function directly: + + .. code-block:: python + + def test_something(frozen_sleep, monkeypatch): + monkeypatch.setattr("my_mod.sleep", frozen_sleep) + my_mod.fn_under_test() + """ + traveller = None + + def fake_sleep(seconds): + nonlocal traveller + utcnow = datetime.now(tz=timezone.utc) + if traveller is not None: + traveller.stop() + traveller = time_machine.travel(utcnow + timedelta(seconds=seconds)) + traveller.start() + + monkeypatch.setattr("time.sleep", fake_sleep) + yield fake_sleep + + if traveller is not None: + traveller.stop() + + +@pytest.fixture +def dag_maker(request): + """Fixture to help create DAG, DagModel, and SerializedDAG automatically. 
+ + You have to use dag_maker as a context manager, and it takes + the same arguments as DAG:: + + with dag_maker(dag_id="mydag") as dag: + task1 = EmptyOperator(task_id="mytask") + task2 = EmptyOperator(task_id="mytask2") + + If the DagModel you want to use needs different parameters than the one + automatically created by the dag_maker, you have to update the DagModel as below:: + + dag_maker.dag_model.is_active = False + session.merge(dag_maker.dag_model) + session.commit() + + For any test that uses the dag_maker, make sure to create a DagRun:: + + dag_maker.create_dagrun() + + The dag_maker.create_dagrun takes the same arguments as dag.create_dagrun. + + If you want to operate on serialized DAGs, then either pass + ``serialized=True`` to the ``dag_maker()`` call, or you can mark your + test/class/file with ``@pytest.mark.need_serialized_dag(True)``. In both of + these cases the ``dag`` returned by the context manager will be a + lazily-evaluated proxy object to the SerializedDAG. + """ + import lazy_object_proxy + + # IMPORTANT: Delay _all_ imports from `airflow.*` to _inside a method_. + # This fixture is "called" early on in the pytest collection process, and + # if we import airflow.* here the wrong (non-test) config will be loaded + # and "baked" into various constants + + want_serialized = False + + # Allow changing default serialized behaviour with `@pytest.mark.need_serialized_dag` or + # `@pytest.mark.need_serialized_dag(False)` + serialized_marker = request.node.get_closest_marker("need_serialized_dag") + if serialized_marker: + (want_serialized,) = serialized_marker.args or (True,) + + from airflow.utils.log.logging_mixin import LoggingMixin + + class DagFactory(LoggingMixin): + _own_session = False + + def __init__(self): + from airflow.models import DagBag + + # Keep all the serialized dags we've created in this test + self.dagbag = DagBag(os.devnull, include_examples=False, read_dags_from_db=False) + + def __enter__(self): + self.dag.__enter__() + if self.want_serialized: + return lazy_object_proxy.Proxy(self._serialized_dag) + return self.dag + + def _serialized_dag(self): + return self.serialized_model.dag + + def get_serialized_data(self): + try: + data = self.serialized_model.data + except AttributeError: + raise RuntimeError("DAG serialization not requested") + if isinstance(data, str): + return json.loads(data) + return data + + def _bag_dag_compat(self, dag): + # This is a compatibility shim for the old bag_dag method in Airflow <3.0 + # TODO: Remove this when we drop support for Airflow <3.0 in Providers + if hasattr(dag, "parent_dag"): + return self.dagbag.bag_dag(dag, root_dag=dag) + return self.dagbag.bag_dag(dag) + + def __exit__(self, type, value, traceback): + from airflow.models import DagModel + from airflow.models.serialized_dag import SerializedDagModel + + dag = self.dag + dag.__exit__(type, value, traceback) + if type is not None: + return + + dag.clear(session=self.session) + dag.sync_to_db(processor_subdir=self.processor_subdir, session=self.session) + self.dag_model = self.session.get(DagModel, dag.dag_id) + + if self.want_serialized: + self.serialized_model = SerializedDagModel( dag, processor_subdir=self.dag_model.processor_subdir ) + self.session.merge(self.serialized_model) + serialized_dag = self._serialized_dag() + self._bag_dag_compat(serialized_dag) + self.session.flush() + else: + self._bag_dag_compat(self.dag) + + def create_dagrun(self, **kwargs): + from airflow.utils import timezone + from airflow.utils.state import State + from 
airflow.utils.types import DagRunType + + from dev.tests_common.test_utils.compat import AIRFLOW_V_3_0_PLUS + + if AIRFLOW_V_3_0_PLUS: + from airflow.utils.types import DagRunTriggeredByType + + dag = self.dag + kwargs = { + "state": State.RUNNING, + "start_date": self.start_date, + "session": self.session, + **kwargs, + } + # Need to provide run_id if the user does not either provide one + # explicitly, or pass run_type for inference in dag.create_dagrun(). + if "run_id" not in kwargs and "run_type" not in kwargs: + kwargs["run_id"] = "test" + + if "run_type" not in kwargs: + kwargs["run_type"] = DagRunType.from_run_id(kwargs["run_id"]) + if kwargs.get("execution_date") is None: + if kwargs["run_type"] == DagRunType.MANUAL: + kwargs["execution_date"] = self.start_date + else: + kwargs["execution_date"] = dag.next_dagrun_info(None).logical_date + if "data_interval" not in kwargs: + logical_date = timezone.coerce_datetime(kwargs["execution_date"]) + if kwargs["run_type"] == DagRunType.MANUAL: + data_interval = dag.timetable.infer_manual_data_interval(run_after=logical_date) + else: + data_interval = dag.infer_automated_data_interval(logical_date) + kwargs["data_interval"] = data_interval + if AIRFLOW_V_3_0_PLUS and "triggered_by" not in kwargs: + kwargs["triggered_by"] = DagRunTriggeredByType.TEST + + self.dag_run = dag.create_dagrun(**kwargs) + for ti in self.dag_run.task_instances: + ti.refresh_from_task(dag.get_task(ti.task_id)) + if self.want_serialized: + self.session.commit() + return self.dag_run + + def create_dagrun_after(self, dagrun, **kwargs): + next_info = self.dag.next_dagrun_info(self.dag.get_run_data_interval(dagrun)) + if next_info is None: + raise ValueError(f"cannot create run after {dagrun}") + return self.create_dagrun( + execution_date=next_info.logical_date, + data_interval=next_info.data_interval, + **kwargs, + ) + + def __call__( + self, + dag_id="test_dag", + schedule=timedelta(days=1), + serialized=want_serialized, + fileloc=None, + processor_subdir=None, + session=None, + **kwargs, + ): + from airflow import settings + from airflow.models.dag import DAG + from airflow.utils import timezone + + if session is None: + self._own_session = True + session = settings.Session() + + self.kwargs = kwargs + self.session = session + self.start_date = self.kwargs.get("start_date", None) + default_args = kwargs.get("default_args", None) + if default_args and not self.start_date: + if "start_date" in default_args: + self.start_date = default_args.get("start_date") + if not self.start_date: + if hasattr(request.module, "DEFAULT_DATE"): + self.start_date = getattr(request.module, "DEFAULT_DATE") + else: + DEFAULT_DATE = timezone.datetime(2016, 1, 1) + self.start_date = DEFAULT_DATE + self.kwargs["start_date"] = self.start_date + # Set schedule argument to explicitly set value, or a default if no + # other scheduling arguments are set. 
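+ # (Illustration: with no explicit schedule argument, dag_maker creates DAGs on the default timedelta(days=1) schedule.)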
+ self.dag = DAG(dag_id, schedule=schedule, **self.kwargs) + self.dag.fileloc = fileloc or request.module.__file__ + self.want_serialized = serialized + self.processor_subdir = processor_subdir + + return self + + def cleanup(self): + from airflow.models import DagModel, DagRun, TaskInstance, XCom + from airflow.models.serialized_dag import SerializedDagModel + from airflow.models.taskmap import TaskMap + from airflow.utils.retries import run_with_db_retries + + from dev.tests_common.test_utils.compat import AssetEvent + + for attempt in run_with_db_retries(logger=self.log): + with attempt: + dag_ids = list(self.dagbag.dag_ids) + if not dag_ids: + return + # Roll back first, to isolate problems here from problems elsewhere on the session object + self.session.rollback() + + self.session.query(SerializedDagModel).filter( SerializedDagModel.dag_id.in_(dag_ids) ).delete(synchronize_session=False) + self.session.query(DagRun).filter(DagRun.dag_id.in_(dag_ids)).delete( synchronize_session=False, ) + self.session.query(TaskInstance).filter(TaskInstance.dag_id.in_(dag_ids)).delete( synchronize_session=False, ) + self.session.query(XCom).filter(XCom.dag_id.in_(dag_ids)).delete( synchronize_session=False, ) + self.session.query(DagModel).filter(DagModel.dag_id.in_(dag_ids)).delete( synchronize_session=False, ) + self.session.query(TaskMap).filter(TaskMap.dag_id.in_(dag_ids)).delete( synchronize_session=False, ) + self.session.query(AssetEvent).filter(AssetEvent.source_dag_id.in_(dag_ids)).delete( synchronize_session=False, ) + self.session.commit() + if self._own_session: + self.session.expunge_all() + + factory = DagFactory() + + try: + yield factory + finally: + factory.cleanup() + with suppress(AttributeError): + del factory.session + + +@pytest.fixture +def create_dummy_dag(dag_maker): + """Create a `DAG` with a single `EmptyOperator` task. + + A DagRun and a DagModel are also created. + + Apart from the already existing arguments, any other argument in kwargs + is passed to the DAG and not to the EmptyOperator task. + + If you have an argument that you want to pass to the EmptyOperator that + is not here, please use `default_args` so that the DAG will pass it to the + Task:: + + dag, task = create_dummy_dag(default_args={"start_date": timezone.datetime(2016, 1, 1)}) + + You cannot alter the created DagRun or DagModel; use the `dag_maker` fixture instead. 
+ """ + from airflow.operators.empty import EmptyOperator + from airflow.utils.types import DagRunType + + def create_dag( + dag_id="dag", + task_id="op1", + task_display_name=None, + max_active_tis_per_dag=16, + max_active_tis_per_dagrun=None, + pool="default_pool", + executor_config=None, + trigger_rule="all_done", + on_success_callback=None, + on_execute_callback=None, + on_failure_callback=None, + on_retry_callback=None, + email=None, + with_dagrun_type=DagRunType.SCHEDULED, + **kwargs, + ): + op_kwargs = {} + from dev.tests_common.test_utils.compat import AIRFLOW_V_2_9_PLUS + + if AIRFLOW_V_2_9_PLUS: + op_kwargs["task_display_name"] = task_display_name + with dag_maker(dag_id, **kwargs) as dag: + op = EmptyOperator( + task_id=task_id, + max_active_tis_per_dag=max_active_tis_per_dag, + max_active_tis_per_dagrun=max_active_tis_per_dagrun, + executor_config=executor_config or {}, + on_success_callback=on_success_callback, + on_execute_callback=on_execute_callback, + on_failure_callback=on_failure_callback, + on_retry_callback=on_retry_callback, + email=email, + pool=pool, + trigger_rule=trigger_rule, + **op_kwargs, + ) + if with_dagrun_type is not None: + dag_maker.create_dagrun(run_type=with_dagrun_type) + return dag, op + + return create_dag + + +if TYPE_CHECKING: + from airflow.models.taskinstance import TaskInstance + + +@pytest.fixture +def create_task_instance(dag_maker, create_dummy_dag): + """Create a TaskInstance, and associated DB rows (DagRun, DagModel, etc). + + Uses ``create_dummy_dag`` to create the dag structure. + """ + from airflow.operators.empty import EmptyOperator + + def maker( + execution_date=None, + dagrun_state=None, + state=None, + run_id=None, + run_type=None, + data_interval=None, + external_executor_id=None, + dag_id="dag", + task_id="op1", + task_display_name=None, + max_active_tis_per_dag=16, + max_active_tis_per_dagrun=None, + pool="default_pool", + executor_config=None, + trigger_rule="all_done", + on_success_callback=None, + on_execute_callback=None, + on_failure_callback=None, + on_retry_callback=None, + email=None, + map_index=-1, + **kwargs, + ) -> TaskInstance: + from dev.tests_common.test_utils.compat import AIRFLOW_V_3_0_PLUS + + if AIRFLOW_V_3_0_PLUS: + from airflow.utils.types import DagRunTriggeredByType + + if execution_date is None: + from airflow.utils import timezone + + execution_date = timezone.utcnow() + with dag_maker(dag_id, **kwargs): + op_kwargs = {} + from dev.tests_common.test_utils.compat import AIRFLOW_V_2_9_PLUS + + if AIRFLOW_V_2_9_PLUS: + op_kwargs["task_display_name"] = task_display_name + task = EmptyOperator( + task_id=task_id, + max_active_tis_per_dag=max_active_tis_per_dag, + max_active_tis_per_dagrun=max_active_tis_per_dagrun, + executor_config=executor_config or {}, + on_success_callback=on_success_callback, + on_execute_callback=on_execute_callback, + on_failure_callback=on_failure_callback, + on_retry_callback=on_retry_callback, + email=email, + pool=pool, + trigger_rule=trigger_rule, + **op_kwargs, + ) + + dagrun_kwargs = { + "execution_date": execution_date, + "state": dagrun_state, + } + dagrun_kwargs.update({"triggered_by": DagRunTriggeredByType.TEST} if AIRFLOW_V_3_0_PLUS else {}) + if run_id is not None: + dagrun_kwargs["run_id"] = run_id + if run_type is not None: + dagrun_kwargs["run_type"] = run_type + if data_interval is not None: + dagrun_kwargs["data_interval"] = data_interval + dagrun = dag_maker.create_dagrun(**dagrun_kwargs) + (ti,) = dagrun.task_instances + ti.task = task + ti.state = state + 
ti.external_executor_id = external_executor_id + ti.map_index = map_index + + dag_maker.session.flush() + return ti + + return maker + + +@pytest.fixture +def create_serialized_task_instance_of_operator(dag_maker): + def _create_task_instance( + operator_class, + *, + dag_id, + execution_date=None, + session=None, + **operator_kwargs, + ) -> TaskInstance: + with dag_maker(dag_id=dag_id, serialized=True, session=session): + operator_class(**operator_kwargs) + if execution_date is None: + dagrun_kwargs = {} + else: + dagrun_kwargs = {"execution_date": execution_date} + (ti,) = dag_maker.create_dagrun(**dagrun_kwargs).task_instances + return ti + + return _create_task_instance + + +@pytest.fixture +def create_task_instance_of_operator(dag_maker): + def _create_task_instance( + operator_class, + *, + dag_id, + execution_date=None, + session=None, + **operator_kwargs, + ) -> TaskInstance: + with dag_maker(dag_id=dag_id, session=session, serialized=True): + operator_class(**operator_kwargs) + if execution_date is None: + dagrun_kwargs = {} + else: + dagrun_kwargs = {"execution_date": execution_date} + (ti,) = dag_maker.create_dagrun(**dagrun_kwargs).task_instances + return ti + + return _create_task_instance + + +@pytest.fixture +def create_task_of_operator(dag_maker): + def _create_task_of_operator(operator_class, *, dag_id, session=None, **operator_kwargs): + with dag_maker(dag_id=dag_id, session=session): + task = operator_class(**operator_kwargs) + return task + + return _create_task_of_operator + + +@pytest.fixture +def session(): + from airflow.utils.session import create_session + + with create_session() as session: + yield session + session.rollback() + + +@pytest.fixture +def get_test_dag(): + def _get(dag_id: str): + from airflow.models.dagbag import DagBag + from airflow.models.serialized_dag import SerializedDagModel + + dag_file = AIRFLOW_TESTS_DIR / "dags" / f"{dag_id}.py" + dagbag = DagBag(dag_folder=dag_file, include_examples=False) + + dag = dagbag.get_dag(dag_id) + dag.sync_to_db() + SerializedDagModel.write_dag(dag) + + return dag + + return _get + + +@pytest.fixture +def create_log_template(request): + from airflow import settings + from airflow.models.tasklog import LogTemplate + + session = settings.Session() + + def _create_log_template(filename_template, elasticsearch_id=""): + log_template = LogTemplate(filename=filename_template, elasticsearch_id=elasticsearch_id) + session.add(log_template) + session.commit() + + def _delete_log_template(): + from airflow.models import DagRun, TaskInstance + + session.query(TaskInstance).delete() + session.query(DagRun).delete() + session.delete(log_template) + session.commit() + + request.addfinalizer(_delete_log_template) + + return _create_log_template + + +@pytest.fixture +def reset_logging_config(): + import logging.config + + from airflow import settings + from airflow.utils.module_loading import import_string + + logging_config = import_string(settings.LOGGING_CLASS_PATH) + logging.config.dictConfig(logging_config) + + +@pytest.fixture(scope="session", autouse=True) +def suppress_info_logs_for_dag_and_fab(): + import logging + + from dev.tests_common.test_utils.compat import AIRFLOW_V_2_9_PLUS + + dag_logger = logging.getLogger("airflow.models.dag") + dag_logger.setLevel(logging.WARNING) + + if AIRFLOW_V_2_9_PLUS: + fab_logger = logging.getLogger("airflow.providers.fab.auth_manager.security_manager.override") + fab_logger.setLevel(logging.WARNING) + else: + fab_logger = logging.getLogger("airflow.www.fab_security") + 
fab_logger.setLevel(logging.WARNING) + + +@pytest.fixture(scope="module", autouse=True) +def _clear_db(request): + """Clear DB before each test module run.""" + from dev.tests_common.test_utils.db import clear_all, initial_db_init + + if not request.config.option.db_cleanup: + return + if skip_db_tests: + return + from airflow.configuration import conf + + sql_alchemy_conn = conf.get("database", "sql_alchemy_conn") + if sql_alchemy_conn.startswith("sqlite"): + sql_alchemy_file = sql_alchemy_conn.replace("sqlite:///", "") + if not os.path.exists(sql_alchemy_file): + print(f"The sqlite file `{sql_alchemy_file}` does not exist. Attempting to initialize it.") + initial_db_init() + + dist_option = getattr(request.config.option, "dist", "no") + if dist_option != "no" or hasattr(request.config, "workerinput"): + # Skip if pytest-xdist detected (controller or worker) + return + try: + clear_all() + except Exception as ex: + exc_name_parts = [type(ex).__name__] + exc_module = type(ex).__module__ + if exc_module != "builtins": + exc_name_parts.insert(0, exc_module) + extra_msg = "" if request.config.option.db_init else ", try running with the --with-db-init flag" + pytest.exit(f"Unable to clear test DB{extra_msg}, got error {'.'.join(exc_name_parts)}: {ex}") + + +@pytest.fixture(autouse=True) +def clear_lru_cache(): + from airflow.executors.executor_loader import ExecutorLoader + from airflow.utils.entry_points import _get_grouped_entry_points + + ExecutorLoader.validate_database_executor_compatibility.cache_clear() + try: + _get_grouped_entry_points.cache_clear() + try: + yield + finally: + _get_grouped_entry_points.cache_clear() + finally: + ExecutorLoader.validate_database_executor_compatibility.cache_clear() + + +@pytest.fixture(autouse=True) +def refuse_to_run_test_from_wrongly_named_files(request: pytest.FixtureRequest): + filepath = request.node.path + is_system_test: bool = "tests/system/" in os.fspath(filepath) + test_name = request.node.name + if request.node.cls: + test_name = f"{request.node.cls.__name__}.{test_name}" + if is_system_test and not filepath.name.startswith(("example_", "test_")): + pytest.fail( + f"All test method files in tests/system must start with 'example_' or 'test_'. " + f"Seems that {os.fspath(filepath)!r} contains {test_name!r} that looks like a test case. " + f"Please rename the file to follow the example_* or test_* pattern if you want to run the tests " + f"in it." + ) + elif not is_system_test and not filepath.name.startswith("test_"): + pytest.fail( + f"All test method files in tests/ must start with 'test_'. Seems that {os.fspath(filepath)!r} " + f"contains {test_name!r} that looks like a test case. Please rename the file to " + f"follow the test_* pattern if you want to run the tests in it." 
+ ) + + +@pytest.fixture(autouse=True) +def initialize_providers_manager(): + from airflow.providers_manager import ProvidersManager + + ProvidersManager().initialize_providers_configuration() + + +@pytest.fixture(autouse=True) +def close_all_sqlalchemy_sessions(): + from sqlalchemy.orm import close_all_sessions + + with suppress(Exception): + close_all_sessions() + yield + with suppress(Exception): + close_all_sessions() + + +@pytest.fixture +def cleanup_providers_manager(): + from airflow.providers_manager import ProvidersManager + + ProvidersManager()._cleanup() + ProvidersManager().initialize_providers_configuration() + try: + yield + finally: + ProvidersManager()._cleanup() + + +@pytest.fixture(autouse=True) +def _disable_redact(request: pytest.FixtureRequest, mocker): + """Disable secrets masking in tests, except those marked with ``enable_redact``.""" + from airflow import settings + + if next(request.node.iter_markers("enable_redact"), None): + with pytest.MonkeyPatch.context() as mp_ctx: + mp_ctx.setattr(settings, "MASK_SECRETS_IN_LOGS", True) + yield + return + + mocked_redact = mocker.patch("airflow.utils.log.secrets_masker.SecretsMasker.redact") + mocked_redact.side_effect = lambda item, name=None, max_depth=None: item + with pytest.MonkeyPatch.context() as mp_ctx: + mp_ctx.setattr(settings, "MASK_SECRETS_IN_LOGS", False) + yield + return + + +@pytest.fixture +def providers_src_folder() -> Path: + import airflow.providers + + return Path(airflow.providers.__path__[0]).parents[1] + + +@pytest.fixture +def hook_lineage_collector(): + from airflow.lineage import hook + + hook._hook_lineage_collector = None + hook._hook_lineage_collector = hook.HookLineageCollector() + yield hook.get_hook_lineage_collector() + hook._hook_lineage_collector = None + + +@pytest.fixture +def clean_dags_and_dagruns(): + """Fixture that cleans the database before and after every test.""" + from dev.tests_common.test_utils.db import clear_db_dags, clear_db_runs + + clear_db_runs() + clear_db_dags() + yield  # Test runs here + clear_db_dags() + clear_db_runs() + + +@pytest.fixture(scope="session") +def app(): + from dev.tests_common.test_utils.config import conf_vars + + with conf_vars({("fab", "auth_rate_limited"): "False"}): + from airflow.www import app + + yield app.create_app(testing=True) + + +@pytest.fixture +def secret_key() -> str: + """Return the configured secret key.""" + from airflow.configuration import conf + + the_key = conf.get("webserver", "SECRET_KEY") + if the_key is None: + raise RuntimeError( + "The secret key SHOULD be configured as `[webserver] secret_key` in the " + "configuration/environment at this stage! " + ) + return the_key + + +@pytest.fixture +def url_safe_serializer(secret_key) -> URLSafeSerializer: + from itsdangerous import URLSafeSerializer + + return URLSafeSerializer(secret_key) diff --git a/tests/test_utils/README.md b/dev/tests_common/test_utils/README.md similarity index 67% rename from tests/test_utils/README.md rename to dev/tests_common/test_utils/README.md index cdde5b83a59b..2f5e694be5e3 100644 --- a/tests/test_utils/README.md +++ b/dev/tests_common/test_utils/README.md @@ -15,6 +15,14 @@ KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. ---> + --> + + + +**Table of Contents** *generated with [DocToc](https://github.com/thlorenz/doctoc)* + +- [Utilities for use in tests.](#utilities-for-use-in-tests) + + # Utilities for use in tests.
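The conftest fixtures above keep their behavior after the move to ``dev/tests_common``; only their import home changes. As a minimal sketch (not part of the patch), here is how a provider test might consume the ``create_task_instance_of_operator`` fixture; the ``dag_id`` and ``task_id`` values are made-up illustrative names:

```python
# Hypothetical usage of the relocated conftest fixtures; dag_id/task_id
# below are invented for illustration and do not appear in the patch.
from __future__ import annotations

from airflow.operators.empty import EmptyOperator
from airflow.utils.state import State


def test_empty_operator_succeeds(create_task_instance_of_operator):
    # The fixture builds a serialized one-task DAG, creates a DagRun,
    # and returns the matching TaskInstance.
    ti = create_task_instance_of_operator(
        EmptyOperator,
        dag_id="example_fixture_usage",
        task_id="noop",
    )
    ti.run(ignore_ti_state=True)  # execute the task in-process
    assert ti.state == State.SUCCESS
```

When DB cleanup is enabled, the module-scoped ``_clear_db`` fixture above resets the database between test modules, so DagRuns created this way should not leak across modules.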
diff --git a/tests/system/providers/google/marketing_platform/__init__.py b/dev/tests_common/test_utils/__init__.py similarity index 87% rename from tests/system/providers/google/marketing_platform/__init__.py rename to dev/tests_common/test_utils/__init__.py index 217e5db96078..e440178fae6c 100644 --- a/tests/system/providers/google/marketing_platform/__init__.py +++ b/dev/tests_common/test_utils/__init__.py @@ -15,3 +15,8 @@ # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. +from __future__ import annotations + +from pathlib import Path + +AIRFLOW_MAIN_FOLDER = Path(__file__).parents[3] diff --git a/tests/test_utils/api_connexion_utils.py b/dev/tests_common/test_utils/api_connexion_utils.py similarity index 93% rename from tests/test_utils/api_connexion_utils.py rename to dev/tests_common/test_utils/api_connexion_utils.py index 48869ee48078..2d273af2e687 100644 --- a/tests/test_utils/api_connexion_utils.py +++ b/dev/tests_common/test_utils/api_connexion_utils.py @@ -20,7 +20,8 @@ from typing import TYPE_CHECKING from airflow.api_connexion.exceptions import EXCEPTIONS_LINK_MAP -from tests.test_utils.compat import ignore_provider_compatibility_error + +from dev.tests_common.test_utils.compat import ignore_provider_compatibility_error with ignore_provider_compatibility_error("2.9.0+", __file__): from airflow.providers.fab.auth_manager.security_manager.override import EXISTING_ROLES @@ -48,11 +49,7 @@ def create_user_scope(app, username, **kwargs): It will create a user and provide it for the fixture via YIELD (generator) then will tidy up once test is complete """ - from tests.providers.fab.auth_manager.api_endpoints.api_connexion_utils import ( - create_user as create_user_fab, - ) - - test_user = create_user_fab(app, username, **kwargs) + test_user = create_user(app, username, **kwargs) try: yield test_user diff --git a/tests/test_utils/asserts.py b/dev/tests_common/test_utils/asserts.py similarity index 100% rename from tests/test_utils/asserts.py rename to dev/tests_common/test_utils/asserts.py diff --git a/tests/test_utils/azure_system_helpers.py b/dev/tests_common/test_utils/azure_system_helpers.py similarity index 97% rename from tests/test_utils/azure_system_helpers.py rename to dev/tests_common/test_utils/azure_system_helpers.py index 033399235a29..b1a46617c082 100644 --- a/tests/test_utils/azure_system_helpers.py +++ b/dev/tests_common/test_utils/azure_system_helpers.py @@ -28,8 +28,9 @@ from airflow.models import Connection from airflow.providers.microsoft.azure.hooks.fileshare import AzureFileShareHook from airflow.utils.process_utils import patch_environ -from tests.test_utils import AIRFLOW_MAIN_FOLDER -from tests.test_utils.system_tests_class import SystemTest + +from dev.tests_common.test_utils import AIRFLOW_MAIN_FOLDER +from dev.tests_common.test_utils.system_tests_class import SystemTest AZURE_DAG_FOLDER = os.path.join( AIRFLOW_MAIN_FOLDER, "airflow", "providers", "microsoft", "azure", "example_dags" diff --git a/tests/test_utils/compat.py b/dev/tests_common/test_utils/compat.py similarity index 100% rename from tests/test_utils/compat.py rename to dev/tests_common/test_utils/compat.py diff --git a/tests/test_utils/config.py b/dev/tests_common/test_utils/config.py similarity index 100% rename from tests/test_utils/config.py rename to dev/tests_common/test_utils/config.py diff --git a/tests/test_utils/db.py b/dev/tests_common/test_utils/db.py similarity index 89% rename from 
tests/test_utils/db.py rename to dev/tests_common/test_utils/db.py index a5dd94e2d009..4b2dede05ead 100644 --- a/tests/test_utils/db.py +++ b/dev/tests_common/test_utils/db.py @@ -17,6 +17,8 @@ # under the License. from __future__ import annotations +import os + from airflow.jobs.job import Job from airflow.models import ( Connection, @@ -42,7 +44,8 @@ from airflow.security.permissions import RESOURCE_DAG_PREFIX from airflow.utils.db import add_default_pool_if_not_exists, create_default_connections, reflect_tables from airflow.utils.session import create_session -from tests.test_utils.compat import ( + +from dev.tests_common.test_utils.compat import ( AIRFLOW_V_2_10_PLUS, AssetDagRunQueue, AssetEvent, @@ -53,6 +56,26 @@ ) +def initial_db_init(): + from flask import Flask + + from airflow.configuration import conf + from airflow.utils import db + from airflow.www.extensions.init_appbuilder import init_appbuilder + from airflow.www.extensions.init_auth_manager import get_auth_manager + + from dev.tests_common.test_utils.compat import AIRFLOW_V_2_8_PLUS + + db.resetdb() + db.bootstrap_dagbag() + # minimal app to add roles + flask_app = Flask(__name__) + flask_app.config["SQLALCHEMY_DATABASE_URI"] = conf.get("database", "SQL_ALCHEMY_CONN") + init_appbuilder(flask_app) + if AIRFLOW_V_2_8_PLUS: + get_auth_manager().init() + + def clear_db_runs(): with create_session() as session: session.query(Job).delete() @@ -83,7 +106,7 @@ def clear_db_assets(): session.query(DagScheduleAssetReference).delete() session.query(TaskOutletAssetReference).delete() if AIRFLOW_V_2_10_PLUS: - from tests.test_utils.compat import AssetAliasModel + from dev.tests_common.test_utils.compat import AssetAliasModel session.query(AssetAliasModel).delete() @@ -250,3 +273,7 @@ def clear_all(): clear_db_pools() clear_db_connections(add_default_connections_back=True) clear_dag_specific_permissions() + + +def is_db_isolation_mode(): + return os.environ.get("RUN_TESTS_WITH_DATABASE_ISOLATION", "false").lower() == "true" diff --git a/tests/test_utils/decorators.py b/dev/tests_common/test_utils/decorators.py similarity index 100% rename from tests/test_utils/decorators.py rename to dev/tests_common/test_utils/decorators.py diff --git a/tests/test_utils/fake_datetime.py b/dev/tests_common/test_utils/fake_datetime.py similarity index 100% rename from tests/test_utils/fake_datetime.py rename to dev/tests_common/test_utils/fake_datetime.py diff --git a/tests/test_utils/gcp_system_helpers.py b/dev/tests_common/test_utils/gcp_system_helpers.py similarity index 90% rename from tests/test_utils/gcp_system_helpers.py rename to dev/tests_common/test_utils/gcp_system_helpers.py index 0e681d64c49b..e17679bd8eb5 100644 --- a/tests/test_utils/gcp_system_helpers.py +++ b/dev/tests_common/test_utils/gcp_system_helpers.py @@ -20,6 +20,7 @@ import os import tempfile from contextlib import contextmanager +from pathlib import Path from tempfile import TemporaryDirectory from typing import Sequence from unittest import mock @@ -27,27 +28,20 @@ import pytest from google.auth.environment_vars import CLOUD_SDK_CONFIG_DIR, CREDENTIALS +import airflow.providers.google from airflow.providers.google.cloud.utils.credentials_provider import provide_gcp_conn_and_credentials -from tests.providers.google.cloud.utils.gcp_authenticator import GCP_GCS_KEY, GCP_SECRET_MANAGER_KEY -from tests.test_utils import AIRFLOW_MAIN_FOLDER -from tests.test_utils.logging_command_executor import CommandExecutor -from tests.test_utils.system_tests_class import SystemTest 
-CLOUD_DAG_FOLDER = os.path.join( - AIRFLOW_MAIN_FOLDER, "airflow", "providers", "google", "cloud", "example_dags" -) -MARKETING_DAG_FOLDER = os.path.join( - AIRFLOW_MAIN_FOLDER, "airflow", "providers", "google", "marketing_platform", "example_dags" -) -GSUITE_DAG_FOLDER = os.path.join( - AIRFLOW_MAIN_FOLDER, "airflow", "providers", "google", "suite", "example_dags" -) -FIREBASE_DAG_FOLDER = os.path.join( - AIRFLOW_MAIN_FOLDER, "airflow", "providers", "google", "firebase", "example_dags" -) -LEVELDB_DAG_FOLDER = os.path.join( - AIRFLOW_MAIN_FOLDER, "airflow", "providers", "google", "leveldb", "example_dags" -) +from dev.tests_common.test_utils import AIRFLOW_MAIN_FOLDER +from dev.tests_common.test_utils.logging_command_executor import CommandExecutor +from dev.tests_common.test_utils.system_tests_class import SystemTest +from providers.tests.google.cloud.utils.gcp_authenticator import GCP_GCS_KEY, GCP_SECRET_MANAGER_KEY + +GCP_DIR = Path(airflow.providers.google.__file__).parent +CLOUD_DAG_FOLDER = GCP_DIR.joinpath("cloud", "example_dags") +MARKETING_DAG_FOLDER = GCP_DIR.joinpath("marketing_platform", "example_dags") +GSUITE_DAG_FOLDER = GCP_DIR.joinpath("suite", "example_dags") +FIREBASE_DAG_FOLDER = GCP_DIR.joinpath("firebase", "example_dags") +LEVELDB_DAG_FOLDER = GCP_DIR.joinpath("leveldb", "example_dags") POSTGRES_LOCAL_EXECUTOR = os.path.join( AIRFLOW_MAIN_FOLDER, "tests", "test_utils", "postgres_local_executor.cfg" ) diff --git a/tests/test_utils/get_all_tests.py b/dev/tests_common/test_utils/get_all_tests.py similarity index 100% rename from tests/test_utils/get_all_tests.py rename to dev/tests_common/test_utils/get_all_tests.py diff --git a/tests/test_utils/hdfs_utils.py b/dev/tests_common/test_utils/hdfs_utils.py similarity index 100% rename from tests/test_utils/hdfs_utils.py rename to dev/tests_common/test_utils/hdfs_utils.py diff --git a/tests/test_utils/logging_command_executor.py b/dev/tests_common/test_utils/logging_command_executor.py similarity index 100% rename from tests/test_utils/logging_command_executor.py rename to dev/tests_common/test_utils/logging_command_executor.py diff --git a/tests/test_utils/mapping.py b/dev/tests_common/test_utils/mapping.py similarity index 100% rename from tests/test_utils/mapping.py rename to dev/tests_common/test_utils/mapping.py diff --git a/tests/test_utils/mock_executor.py b/dev/tests_common/test_utils/mock_executor.py similarity index 100% rename from tests/test_utils/mock_executor.py rename to dev/tests_common/test_utils/mock_executor.py diff --git a/tests/test_utils/mock_operators.py b/dev/tests_common/test_utils/mock_operators.py similarity index 98% rename from tests/test_utils/mock_operators.py rename to dev/tests_common/test_utils/mock_operators.py index cd816707a59f..0df0afec824c 100644 --- a/tests/test_utils/mock_operators.py +++ b/dev/tests_common/test_utils/mock_operators.py @@ -23,7 +23,8 @@ from airflow.models.baseoperator import BaseOperator from airflow.models.xcom import XCom -from tests.test_utils.compat import BaseOperatorLink + +from dev.tests_common.test_utils.compat import BaseOperatorLink if TYPE_CHECKING: import jinja2 diff --git a/tests/test_utils/mock_plugins.py b/dev/tests_common/test_utils/mock_plugins.py similarity index 100% rename from tests/test_utils/mock_plugins.py rename to dev/tests_common/test_utils/mock_plugins.py diff --git a/tests/test_utils/mock_security_manager.py b/dev/tests_common/test_utils/mock_security_manager.py similarity index 92% rename from tests/test_utils/mock_security_manager.py 
rename to dev/tests_common/test_utils/mock_security_manager.py index d95d077a9882..6b9f45e3d841 100644 --- a/tests/test_utils/mock_security_manager.py +++ b/dev/tests_common/test_utils/mock_security_manager.py @@ -16,7 +16,7 @@ # under the License. from __future__ import annotations -from tests.test_utils.compat import ignore_provider_compatibility_error +from dev.tests_common.test_utils.compat import ignore_provider_compatibility_error with ignore_provider_compatibility_error("2.9.0", __file__): from airflow.providers.fab.auth_manager.security_manager.override import FabAirflowSecurityManagerOverride diff --git a/airflow/providers/airbyte/triggers/__init__.py b/dev/tests_common/test_utils/operators/__init__.py similarity index 100% rename from airflow/providers/airbyte/triggers/__init__.py rename to dev/tests_common/test_utils/operators/__init__.py diff --git a/tests/test_utils/operators/postgres_local_executor.cfg b/dev/tests_common/test_utils/operators/postgres_local_executor.cfg similarity index 100% rename from tests/test_utils/operators/postgres_local_executor.cfg rename to dev/tests_common/test_utils/operators/postgres_local_executor.cfg diff --git a/airflow/providers/alibaba/cloud/log/__init__.py b/dev/tests_common/test_utils/perf/__init__.py similarity index 100% rename from airflow/providers/alibaba/cloud/log/__init__.py rename to dev/tests_common/test_utils/perf/__init__.py diff --git a/tests/test_utils/perf/perf_kit/__init__.py b/dev/tests_common/test_utils/perf/perf_kit/__init__.py similarity index 100% rename from tests/test_utils/perf/perf_kit/__init__.py rename to dev/tests_common/test_utils/perf/perf_kit/__init__.py diff --git a/tests/test_utils/perf/perf_kit/memory.py b/dev/tests_common/test_utils/perf/perf_kit/memory.py similarity index 100% rename from tests/test_utils/perf/perf_kit/memory.py rename to dev/tests_common/test_utils/perf/perf_kit/memory.py diff --git a/tests/test_utils/perf/perf_kit/python.py b/dev/tests_common/test_utils/perf/perf_kit/python.py similarity index 100% rename from tests/test_utils/perf/perf_kit/python.py rename to dev/tests_common/test_utils/perf/perf_kit/python.py diff --git a/tests/test_utils/perf/perf_kit/repeat_and_time.py b/dev/tests_common/test_utils/perf/perf_kit/repeat_and_time.py similarity index 100% rename from tests/test_utils/perf/perf_kit/repeat_and_time.py rename to dev/tests_common/test_utils/perf/perf_kit/repeat_and_time.py diff --git a/tests/test_utils/perf/perf_kit/sqlalchemy.py b/dev/tests_common/test_utils/perf/perf_kit/sqlalchemy.py similarity index 100% rename from tests/test_utils/perf/perf_kit/sqlalchemy.py rename to dev/tests_common/test_utils/perf/perf_kit/sqlalchemy.py diff --git a/tests/test_utils/permissions.py b/dev/tests_common/test_utils/permissions.py similarity index 100% rename from tests/test_utils/permissions.py rename to dev/tests_common/test_utils/permissions.py diff --git a/tests/test_utils/providers.py b/dev/tests_common/test_utils/providers.py similarity index 100% rename from tests/test_utils/providers.py rename to dev/tests_common/test_utils/providers.py diff --git a/tests/test_utils/remote_user_api_auth_backend.py b/dev/tests_common/test_utils/remote_user_api_auth_backend.py similarity index 100% rename from tests/test_utils/remote_user_api_auth_backend.py rename to dev/tests_common/test_utils/remote_user_api_auth_backend.py diff --git a/tests/test_utils/reset_warning_registry.py b/dev/tests_common/test_utils/reset_warning_registry.py similarity index 100% rename from 
tests/test_utils/reset_warning_registry.py rename to dev/tests_common/test_utils/reset_warning_registry.py diff --git a/tests/test_utils/salesforce_system_helpers.py b/dev/tests_common/test_utils/salesforce_system_helpers.py similarity index 100% rename from tests/test_utils/salesforce_system_helpers.py rename to dev/tests_common/test_utils/salesforce_system_helpers.py diff --git a/tests/test_utils/sftp_system_helpers.py b/dev/tests_common/test_utils/sftp_system_helpers.py similarity index 100% rename from tests/test_utils/sftp_system_helpers.py rename to dev/tests_common/test_utils/sftp_system_helpers.py diff --git a/tests/system/utils/__init__.py b/dev/tests_common/test_utils/system_tests.py similarity index 100% rename from tests/system/utils/__init__.py rename to dev/tests_common/test_utils/system_tests.py diff --git a/tests/test_utils/system_tests_class.py b/dev/tests_common/test_utils/system_tests_class.py similarity index 97% rename from tests/test_utils/system_tests_class.py rename to dev/tests_common/test_utils/system_tests_class.py index cfd72174e177..836782b8584c 100644 --- a/tests/test_utils/system_tests_class.py +++ b/dev/tests_common/test_utils/system_tests_class.py @@ -29,8 +29,9 @@ from airflow.configuration import AIRFLOW_HOME, AirflowConfigParser, get_airflow_config from airflow.exceptions import AirflowException from airflow.models.dagbag import DagBag -from tests.test_utils import AIRFLOW_MAIN_FOLDER -from tests.test_utils.logging_command_executor import get_executor + +from dev.tests_common.test_utils import AIRFLOW_MAIN_FOLDER +from dev.tests_common.test_utils.logging_command_executor import get_executor DEFAULT_DAG_FOLDER = os.path.join(AIRFLOW_MAIN_FOLDER, "airflow", "example_dags") diff --git a/tests/test_utils/terraform.py b/dev/tests_common/test_utils/terraform.py similarity index 95% rename from tests/test_utils/terraform.py rename to dev/tests_common/test_utils/terraform.py index bbb68b60c1a8..b600ef6643b1 100644 --- a/tests/test_utils/terraform.py +++ b/dev/tests_common/test_utils/terraform.py @@ -16,7 +16,7 @@ # under the License. from __future__ import annotations -from tests.test_utils.system_tests_class import SystemTest +from dev.tests_common.test_utils.system_tests_class import SystemTest class Terraform(SystemTest): diff --git a/tests/test_utils/timetables.py b/dev/tests_common/test_utils/timetables.py similarity index 100% rename from tests/test_utils/timetables.py rename to dev/tests_common/test_utils/timetables.py diff --git a/tests/system/utils/watcher.py b/dev/tests_common/test_utils/watcher.py similarity index 100% rename from tests/system/utils/watcher.py rename to dev/tests_common/test_utils/watcher.py diff --git a/tests/test_utils/www.py b/dev/tests_common/test_utils/www.py similarity index 100% rename from tests/test_utils/www.py rename to dev/tests_common/test_utils/www.py diff --git a/docs/apache-airflow-providers-airbyte/changelog.rst b/docs/apache-airflow-providers-airbyte/changelog.rst index 2e7bb99a7f6b..6ac2033101e6 100644 --- a/docs/apache-airflow-providers-airbyte/changelog.rst +++ b/docs/apache-airflow-providers-airbyte/changelog.rst @@ -22,4 +22,4 @@ .. IF YOU WANT TO MODIFY THIS FILE, YOU SHOULD MODIFY THE TEMPLATE `PROVIDER_CHANGELOG_TEMPLATE.rst.jinja2` IN the `dev/breeze/src/airflow_breeze/templates` DIRECTORY -.. include:: ../../airflow/providers/airbyte/CHANGELOG.rst +.. 
include:: ../../providers/src/airflow/providers/airbyte/CHANGELOG.rst diff --git a/docs/apache-airflow-providers-airbyte/index.rst b/docs/apache-airflow-providers-airbyte/index.rst index a967e36ff4fa..c4aff1b11543 100644 --- a/docs/apache-airflow-providers-airbyte/index.rst +++ b/docs/apache-airflow-providers-airbyte/index.rst @@ -48,14 +48,14 @@ :maxdepth: 1 :caption: System tests - System Tests <_api/tests/system/providers/airbyte/index> + System Tests <_api/tests/system/airbyte/index> .. toctree:: :hidden: :maxdepth: 1 :caption: Resources - Example DAGs + Example DAGs PyPI Repository Installing from sources diff --git a/docs/apache-airflow-providers-airbyte/operators/airbyte.rst b/docs/apache-airflow-providers-airbyte/operators/airbyte.rst index d07301dc42b8..45632be1e3d9 100644 --- a/docs/apache-airflow-providers-airbyte/operators/airbyte.rst +++ b/docs/apache-airflow-providers-airbyte/operators/airbyte.rst @@ -47,14 +47,14 @@ This Operator will initiate the Airbyte job, and the Operator manages the job st An example using the synchronous way: -.. exampleinclude:: /../../tests/system/providers/airbyte/example_airbyte_trigger_job.py +.. exampleinclude:: /../../providers/tests/system/airbyte/example_airbyte_trigger_job.py :language: python :start-after: [START howto_operator_airbyte_synchronous] :end-before: [END howto_operator_airbyte_synchronous] An example using the async way: -.. exampleinclude:: /../../tests/system/providers/airbyte/example_airbyte_trigger_job.py +.. exampleinclude:: /../../providers/tests/system/airbyte/example_airbyte_trigger_job.py :language: python :start-after: [START howto_operator_airbyte_asynchronous] :end-before: [END howto_operator_airbyte_asynchronous] diff --git a/docs/apache-airflow-providers-alibaba/changelog.rst b/docs/apache-airflow-providers-alibaba/changelog.rst index 5cb1e797e2ae..f4c779dbf501 100644 --- a/docs/apache-airflow-providers-alibaba/changelog.rst +++ b/docs/apache-airflow-providers-alibaba/changelog.rst @@ -22,4 +22,4 @@ .. IF YOU WANT TO MODIFY THIS FILE, YOU SHOULD MODIFY THE TEMPLATE `PROVIDER_CHANGELOG_TEMPLATE.rst.jinja2` IN the `dev/breeze/src/airflow_breeze/templates` DIRECTORY -.. include:: ../../airflow/providers/alibaba/CHANGELOG.rst +.. include:: ../../providers/src/airflow/providers/alibaba/CHANGELOG.rst diff --git a/docs/apache-airflow-providers-alibaba/index.rst b/docs/apache-airflow-providers-alibaba/index.rst index ab2b244861b1..4529cd7c02bb 100644 --- a/docs/apache-airflow-providers-alibaba/index.rst +++ b/docs/apache-airflow-providers-alibaba/index.rst @@ -49,14 +49,14 @@ :maxdepth: 1 :caption: System tests - System Tests <_api/tests/system/providers/alibaba/index> + System Tests <_api/tests/system/alibaba/index> .. toctree:: :hidden: :maxdepth: 1 :caption: Resources - Example DAGs + Example DAGs PyPI Repository Installing from sources diff --git a/docs/apache-airflow-providers-alibaba/operators/analyticdb_spark.rst b/docs/apache-airflow-providers-alibaba/operators/analyticdb_spark.rst index ac3f0638ad24..952eb6c2c603 100644 --- a/docs/apache-airflow-providers-alibaba/operators/analyticdb_spark.rst +++ b/docs/apache-airflow-providers-alibaba/operators/analyticdb_spark.rst @@ -39,7 +39,7 @@ Defining tasks In the following code we submit Spark Pi and Spark Logistic regression applications. -.. exampleinclude:: /../../tests/system/providers/alibaba/example_adb_spark_batch.py +.. 
exampleinclude:: /../../providers/tests/system/alibaba/example_adb_spark_batch.py :language: python :start-after: [START howto_operator_adb_spark_batch] :end-before: [END howto_operator_adb_spark_batch] diff --git a/docs/apache-airflow-providers-alibaba/operators/oss.rst b/docs/apache-airflow-providers-alibaba/operators/oss.rst index d09949be0be0..17d3829e3742 100644 --- a/docs/apache-airflow-providers-alibaba/operators/oss.rst +++ b/docs/apache-airflow-providers-alibaba/operators/oss.rst @@ -45,7 +45,7 @@ Defining tasks In the following code we create a new bucket and then delete the bucket. -.. exampleinclude:: /../../tests/system/providers/alibaba/example_oss_bucket.py +.. exampleinclude:: /../../providers/tests/system/alibaba/example_oss_bucket.py :language: python :start-after: [START howto_operator_oss_bucket] :end-before: [END howto_operator_oss_bucket] diff --git a/docs/apache-airflow-providers-amazon/changelog.rst b/docs/apache-airflow-providers-amazon/changelog.rst index abd12152cfae..8138c8db39b2 100644 --- a/docs/apache-airflow-providers-amazon/changelog.rst +++ b/docs/apache-airflow-providers-amazon/changelog.rst @@ -22,4 +22,4 @@ .. IF YOU WANT TO MODIFY THIS FILE, YOU SHOULD MODIFY THE TEMPLATE `PROVIDER_CHANGELOG_TEMPLATE.rst.jinja2` IN the `dev/breeze/src/airflow_breeze/templates` DIRECTORY -.. include:: ../../airflow/providers/amazon/CHANGELOG.rst +.. include:: ../../providers/src/airflow/providers/amazon/CHANGELOG.rst diff --git a/docs/apache-airflow-providers-amazon/example-dags.rst b/docs/apache-airflow-providers-amazon/example-dags.rst index c01939ad25af..2762597cb4ba 100644 --- a/docs/apache-airflow-providers-amazon/example-dags.rst +++ b/docs/apache-airflow-providers-amazon/example-dags.rst @@ -20,4 +20,4 @@ Example DAGs You can learn how to use Amazon AWS integrations by analyzing the source code of the example DAGs: -* `Amazon AWS `__ +* `Amazon AWS `__ diff --git a/docs/apache-airflow-providers-amazon/index.rst b/docs/apache-airflow-providers-amazon/index.rst index 6d354b1a5f89..870bbe562632 100644 --- a/docs/apache-airflow-providers-amazon/index.rst +++ b/docs/apache-airflow-providers-amazon/index.rst @@ -57,7 +57,7 @@ :maxdepth: 1 :caption: System tests - System Tests <_api/tests/system/providers/amazon/index> + System Tests <_api/tests/system/amazon/index> System Tests Dashboard .. toctree:: diff --git a/docs/apache-airflow-providers-amazon/operators/appflow.rst b/docs/apache-airflow-providers-amazon/operators/appflow.rst index c28cfb001b5a..d54d79df75cb 100644 --- a/docs/apache-airflow-providers-amazon/operators/appflow.rst +++ b/docs/apache-airflow-providers-amazon/operators/appflow.rst @@ -49,7 +49,7 @@ Run Flow To run an AppFlow flow keeping as is, use: :class:`~airflow.providers.amazon.aws.operators.appflow.AppflowRunOperator`. -.. exampleinclude:: /../../tests/system/providers/amazon/aws/example_appflow_run.py +.. exampleinclude:: /../../providers/tests/system/amazon/aws/example_appflow_run.py :language: python :dedent: 4 :start-after: [START howto_operator_appflow_run] @@ -66,7 +66,7 @@ Run Flow Full To run an AppFlow flow removing all filters, use: :class:`~airflow.providers.amazon.aws.operators.appflow.AppflowRunFullOperator`. -.. exampleinclude:: /../../tests/system/providers/amazon/aws/example_appflow.py +.. 
exampleinclude:: /../../providers/tests/system/amazon/aws/example_appflow.py :language: python :dedent: 4 :start-after: [START howto_operator_appflow_run_full] @@ -83,7 +83,7 @@ Run Flow Daily To run an AppFlow flow filtering daily records, use: :class:`~airflow.providers.amazon.aws.operators.appflow.AppflowRunDailyOperator`. -.. exampleinclude:: /../../tests/system/providers/amazon/aws/example_appflow.py +.. exampleinclude:: /../../providers/tests/system/amazon/aws/example_appflow.py :language: python :dedent: 4 :start-after: [START howto_operator_appflow_run_daily] @@ -100,7 +100,7 @@ Run Flow Before To run an AppFlow flow filtering future records and selecting the past ones, use: :class:`~airflow.providers.amazon.aws.operators.appflow.AppflowRunBeforeOperator`. -.. exampleinclude:: /../../tests/system/providers/amazon/aws/example_appflow.py +.. exampleinclude:: /../../providers/tests/system/amazon/aws/example_appflow.py :language: python :dedent: 4 :start-after: [START howto_operator_appflow_run_before] @@ -117,7 +117,7 @@ Run Flow After To run an AppFlow flow filtering past records and selecting the future ones, use: :class:`~airflow.providers.amazon.aws.operators.appflow.AppflowRunAfterOperator`. -.. exampleinclude:: /../../tests/system/providers/amazon/aws/example_appflow.py +.. exampleinclude:: /../../providers/tests/system/amazon/aws/example_appflow.py :language: python :dedent: 4 :start-after: [START howto_operator_appflow_run_after] @@ -134,7 +134,7 @@ Skipping Tasks For Empty Runs To skip tasks when an AppFlow run returns zero records, use: :class:`~airflow.providers.amazon.aws.operators.appflow.AppflowRecordsShortCircuitOperator`. -.. exampleinclude:: /../../tests/system/providers/amazon/aws/example_appflow.py +.. exampleinclude:: /../../providers/tests/system/amazon/aws/example_appflow.py :language: python :dedent: 4 :start-after: [START howto_operator_appflow_shortcircuit] diff --git a/docs/apache-airflow-providers-amazon/operators/athena/athena_boto.rst b/docs/apache-airflow-providers-amazon/operators/athena/athena_boto.rst index e789290a6ba0..295da63f9170 100644 --- a/docs/apache-airflow-providers-amazon/operators/athena/athena_boto.rst +++ b/docs/apache-airflow-providers-amazon/operators/athena/athena_boto.rst @@ -48,9 +48,9 @@ to run a query in Amazon Athena. In the following example, we query an existing Athena table and send the results to an existing Amazon S3 bucket. For more examples of how to use this operator, please -see the `Sample DAG `__. +see the `Sample DAG `__. -.. exampleinclude:: /../../tests/system/providers/amazon/aws/example_athena.py +.. exampleinclude:: /../../providers/tests/system/amazon/aws/example_athena.py :language: python :start-after: [START howto_operator_athena] :dedent: 4 @@ -67,7 +67,7 @@ Wait on Amazon Athena query results Use the :class:`~airflow.providers.amazon.aws.sensors.athena.AthenaSensor` to wait for the results of a query in Amazon Athena. -..
exampleinclude:: /../../providers/tests/system/amazon/aws/example_athena.py :language: python :start-after: [START howto_sensor_athena] :dedent: 4 diff --git a/docs/apache-airflow-providers-amazon/operators/athena/athena_sql.rst b/docs/apache-airflow-providers-amazon/operators/athena/athena_sql.rst index c29f89063fcd..9f92e17e010e 100644 --- a/docs/apache-airflow-providers-amazon/operators/athena/athena_sql.rst +++ b/docs/apache-airflow-providers-amazon/operators/athena/athena_sql.rst @@ -41,7 +41,7 @@ The generic ``SQLExecuteQueryOperator`` can be used to execute SQL queries again To execute a single SQL query against Amazon Athena without bringing the results back to Airflow, please use ``AthenaOperator`` instead. -.. exampleinclude:: /../../tests/system/providers/common/sql/example_sql_execute_query.py +.. exampleinclude:: /../../providers/tests/system/common/sql/example_sql_execute_query.py :language: python :dedent: 4 :start-after: [START howto_operator_sql_execute_query] @@ -51,7 +51,7 @@ Also, if you need to do simple data quality tests with Amazon Athena, you can us The below example demonstrates how to instantiate the SQLTableCheckOperator task. -.. exampleinclude:: /../../tests/system/providers/common/sql/example_sql_column_table_check.py +.. exampleinclude:: /../../providers/tests/system/common/sql/example_sql_column_table_check.py :language: python :dedent: 4 :start-after: [START howto_operator_sql_table_check] diff --git a/docs/apache-airflow-providers-amazon/operators/batch.rst b/docs/apache-airflow-providers-amazon/operators/batch.rst index 4cc2a2b0cced..efb71fafe804 100644 --- a/docs/apache-airflow-providers-amazon/operators/batch.rst +++ b/docs/apache-airflow-providers-amazon/operators/batch.rst @@ -40,7 +40,7 @@ Submit a new AWS Batch job To submit a new AWS Batch job and monitor it until it reaches a terminal state you can use :class:`~airflow.providers.amazon.aws.operators.batch.BatchOperator`. -.. exampleinclude:: /../../tests/system/providers/amazon/aws/example_batch.py +.. exampleinclude:: /../../providers/tests/system/amazon/aws/example_batch.py :language: python :dedent: 4 :start-after: [START howto_operator_batch] @@ -54,7 +54,7 @@ Create an AWS Batch compute environment To create a new AWS Batch compute environment you can use :class:`~airflow.providers.amazon.aws.operators.batch.BatchCreateComputeEnvironmentOperator`. -.. exampleinclude:: /../../tests/system/providers/amazon/aws/example_batch.py +.. exampleinclude:: /../../providers/tests/system/amazon/aws/example_batch.py :language: python :dedent: 4 :start-after: [START howto_operator_batch_create_compute_environment] @@ -71,7 +71,7 @@ Wait on an AWS Batch job state To wait on the state of an AWS Batch Job until it reaches a terminal state you can use :class:`~airflow.providers.amazon.aws.sensors.batch.BatchSensor`. -.. exampleinclude:: /../../tests/system/providers/amazon/aws/example_batch.py +.. exampleinclude:: /../../providers/tests/system/amazon/aws/example_batch.py :language: python :dedent: 4 :start-after: [START howto_sensor_batch] @@ -94,7 +94,7 @@ Wait on an AWS Batch compute environment status To wait on the status of an AWS Batch compute environment until it reaches a terminal status you can use :class:`~airflow.providers.amazon.aws.sensors.batch.BatchComputeEnvironmentSensor`. -..
exampleinclude:: /../../providers/tests/system/amazon/aws/example_batch.py :language: python :dedent: 4 :start-after: [START howto_sensor_batch_compute_environment] @@ -108,7 +108,7 @@ Wait on an AWS Batch job queue status To wait on the status of an AWS Batch job queue until it reaches a terminal status you can use :class:`~airflow.providers.amazon.aws.sensors.batch.BatchJobQueueSensor`. -.. exampleinclude:: /../../tests/system/providers/amazon/aws/example_batch.py +.. exampleinclude:: /../../providers/tests/system/amazon/aws/example_batch.py :language: python :dedent: 4 :start-after: [START howto_sensor_batch_job_queue] diff --git a/docs/apache-airflow-providers-amazon/operators/bedrock.rst b/docs/apache-airflow-providers-amazon/operators/bedrock.rst index daf930156517..6a2af21f2ff9 100644 --- a/docs/apache-airflow-providers-amazon/operators/bedrock.rst +++ b/docs/apache-airflow-providers-amazon/operators/bedrock.rst @@ -52,7 +52,7 @@ for details on the different formats, see For example, to invoke a Meta Llama model you would use: -.. exampleinclude:: /../../tests/system/providers/amazon/aws/example_bedrock.py +.. exampleinclude:: /../../providers/tests/system/amazon/aws/example_bedrock.py :language: python :dedent: 4 :start-after: [START howto_operator_invoke_llama_model] @@ -60,7 +60,7 @@ For example, to invoke a Meta Llama model you would use: To invoke an Amazon Titan model you would use: -.. exampleinclude:: /../../tests/system/providers/amazon/aws/example_bedrock.py +.. exampleinclude:: /../../providers/tests/system/amazon/aws/example_bedrock.py :language: python :dedent: 4 :start-after: [START howto_operator_invoke_titan_model] @@ -68,7 +68,7 @@ To invoke an Amazon Titan model you would use: To invoke a Claude V2 model using the Completions API you would use: -.. exampleinclude:: /../../tests/system/providers/amazon/aws/example_bedrock_retrieve_and_generate.py +.. exampleinclude:: /../../providers/tests/system/amazon/aws/example_bedrock_retrieve_and_generate.py :language: python :dedent: 4 :start-after: [START howto_operator_invoke_claude_model] @@ -90,7 +90,7 @@ and the training/validation data size. To monitor the state of the job, you can or the :class:`~airflow.providers.amazon.aws.triggers.BedrockCustomizeModelCompletedTrigger` Trigger. -.. exampleinclude:: /../../tests/system/providers/amazon/aws/example_bedrock.py +.. exampleinclude:: /../../providers/tests/system/amazon/aws/example_bedrock.py :language: python :dedent: 4 :start-after: [START howto_operator_customize_model] @@ -112,7 +112,7 @@ or the :class:`~airflow.providers.amazon.aws.triggers.BedrockProvisionModelThrou Trigger. -.. exampleinclude:: /../../tests/system/providers/amazon/aws/example_bedrock.py +.. exampleinclude:: /../../providers/tests/system/amazon/aws/example_bedrock.py :language: python :dedent: 4 :start-after: [START howto_operator_provision_throughput] @@ -129,7 +129,7 @@ To create an Amazon Bedrock Knowledge Base, you can use For more information on which models support embedding data into a vector store, see https://docs.aws.amazon.com/bedrock/latest/userguide/knowledge-base-supported.html -.. exampleinclude:: /../../tests/system/providers/amazon/aws/example_bedrock_retrieve_and_generate.py +.. 
exampleinclude:: /../../providers/tests/system/amazon/aws/example_bedrock_retrieve_and_generate.py :language: python :dedent: 4 :start-after: [START howto_operator_bedrock_create_knowledge_base] @@ -142,7 +142,7 @@ Delete an Amazon Bedrock Knowledge Base Deleting a Knowledge Base is a simple boto API call and can be done in a TaskFlow task like the example below. -.. exampleinclude:: /../../tests/system/providers/amazon/aws/example_bedrock_retrieve_and_generate.py +.. exampleinclude:: /../../providers/tests/system/amazon/aws/example_bedrock_retrieve_and_generate.py :language: python :start-after: [START howto_operator_bedrock_delete_knowledge_base] :end-before: [END howto_operator_bedrock_delete_knowledge_base] @@ -155,7 +155,7 @@ Create an Amazon Bedrock Data Source To create an Amazon Bedrock Data Source, you can use :class:`~airflow.providers.amazon.aws.operators.bedrock.BedrockCreateDataSourceOperator`. -.. exampleinclude:: /../../tests/system/providers/amazon/aws/example_bedrock_retrieve_and_generate.py +.. exampleinclude:: /../../providers/tests/system/amazon/aws/example_bedrock_retrieve_and_generate.py :language: python :dedent: 4 :start-after: [START howto_operator_bedrock_create_data_source] @@ -168,7 +168,7 @@ Delete an Amazon Bedrock Data Source Deleting a Data Source is a simple boto API call and can be done in a TaskFlow task like the example below. -.. exampleinclude:: /../../tests/system/providers/amazon/aws/example_bedrock_retrieve_and_generate.py +.. exampleinclude:: /../../providers/tests/system/amazon/aws/example_bedrock_retrieve_and_generate.py :language: python :start-after: [START howto_operator_bedrock_delete_data_source] :end-before: [END howto_operator_bedrock_delete_data_source] @@ -181,7 +181,7 @@ Ingest data into an Amazon Bedrock Data Source To add data from an Amazon S3 bucket into an Amazon Bedrock Data Source, you can use :class:`~airflow.providers.amazon.aws.operators.bedrock.BedrockIngestDataOperator`. -.. exampleinclude:: /../../tests/system/providers/amazon/aws/example_bedrock_retrieve_and_generate.py +.. exampleinclude:: /../../providers/tests/system/amazon/aws/example_bedrock_retrieve_and_generate.py :language: python :dedent: 4 :start-after: [START howto_operator_bedrock_ingest_data] @@ -201,7 +201,7 @@ would like to pass the results through an LLM in order to generate a text respon For more information on which models support retrieving information from a knowledge base, see https://docs.aws.amazon.com/bedrock/latest/userguide/knowledge-base-supported.html -.. exampleinclude:: /../../tests/system/providers/amazon/aws/example_bedrock_retrieve_and_generate.py +.. exampleinclude:: /../../providers/tests/system/amazon/aws/example_bedrock_retrieve_and_generate.py :language: python :dedent: 4 :start-after: [START howto_operator_bedrock_retrieve] @@ -223,7 +223,7 @@ NOTE: Support for "external sources" was added in boto 1.34.90 Example using an Amazon Bedrock Knowledge Base: -.. exampleinclude:: /../../tests/system/providers/amazon/aws/example_bedrock_retrieve_and_generate.py +.. exampleinclude:: /../../providers/tests/system/amazon/aws/example_bedrock_retrieve_and_generate.py :language: python :dedent: 4 :start-after: [START howto_operator_bedrock_knowledge_base_rag] @@ -231,7 +231,7 @@ Example using an Amazon Bedrock Knowledge Base: Example using a PDF file in an Amazon S3 Bucket: -.. exampleinclude:: /../../tests/system/providers/amazon/aws/example_bedrock_retrieve_and_generate.py +.. 
exampleinclude:: /../../providers/tests/system/amazon/aws/example_bedrock_retrieve_and_generate.py :language: python :dedent: 4 :start-after: [START howto_operator_bedrock_external_sources_rag] @@ -249,7 +249,7 @@ Wait for an Amazon Bedrock customize model job To wait on the state of an Amazon Bedrock customize model job until it reaches a terminal state you can use :class:`~airflow.providers.amazon.aws.sensors.bedrock.BedrockCustomizeModelCompletedSensor` -.. exampleinclude:: /../../tests/system/providers/amazon/aws/example_bedrock.py +.. exampleinclude:: /../../providers/tests/system/amazon/aws/example_bedrock.py :language: python :dedent: 4 :start-after: [START howto_sensor_customize_model] @@ -264,7 +264,7 @@ To wait on the state of an Amazon Bedrock provision model throughput job until i terminal state you can use :class:`~airflow.providers.amazon.aws.sensors.bedrock.BedrockProvisionModelThroughputCompletedSensor` -.. exampleinclude:: /../../tests/system/providers/amazon/aws/example_bedrock.py +.. exampleinclude:: /../../providers/tests/system/amazon/aws/example_bedrock.py :language: python :dedent: 4 :start-after: [START howto_sensor_provision_throughput] @@ -278,7 +278,7 @@ Wait for an Amazon Bedrock Knowledge Base To wait on the state of an Amazon Bedrock Knowledge Base until it reaches a terminal state you can use :class:`~airflow.providers.amazon.aws.sensors.bedrock.BedrockKnowledgeBaseActiveSensor` -.. exampleinclude:: /../../tests/system/providers/amazon/aws/example_bedrock_retrieve_and_generate.py +.. exampleinclude:: /../../providers/tests/system/amazon/aws/example_bedrock_retrieve_and_generate.py :language: python :dedent: 4 :start-after: [START howto_sensor_bedrock_knowledge_base_active] @@ -292,7 +292,7 @@ Wait for an Amazon Bedrock ingestion job to finish To wait on the state of an Amazon Bedrock data ingestion job until it reaches a terminal state you can use :class:`~airflow.providers.amazon.aws.sensors.bedrock.BedrockIngestionJobSensor` -.. exampleinclude:: /../../tests/system/providers/amazon/aws/example_bedrock_retrieve_and_generate.py +.. exampleinclude:: /../../providers/tests/system/amazon/aws/example_bedrock_retrieve_and_generate.py :language: python :dedent: 4 :start-after: [START howto_sensor_bedrock_ingest_data] diff --git a/docs/apache-airflow-providers-amazon/operators/cloudformation.rst b/docs/apache-airflow-providers-amazon/operators/cloudformation.rst index ff45efcdb645..40ab46bcf732 100644 --- a/docs/apache-airflow-providers-amazon/operators/cloudformation.rst +++ b/docs/apache-airflow-providers-amazon/operators/cloudformation.rst @@ -47,7 +47,7 @@ Create an AWS CloudFormation stack To create a new AWS CloudFormation stack use :class:`~airflow.providers.amazon.aws.operators.cloud_formation.CloudFormationCreateStackOperator`. -.. exampleinclude:: /../../tests/system/providers/amazon/aws/example_cloudformation.py +.. exampleinclude:: /../../providers/tests/system/amazon/aws/example_cloudformation.py :language: python :dedent: 4 :start-after: [START howto_operator_cloudformation_create_stack] @@ -61,7 +61,7 @@ Delete an AWS CloudFormation stack To delete an AWS CloudFormation stack you can use :class:`~airflow.providers.amazon.aws.operators.cloud_formation.CloudFormationDeleteStackOperator`. -.. exampleinclude:: /../../tests/system/providers/amazon/aws/example_cloudformation.py +.. 
exampleinclude:: /../../providers/tests/system/amazon/aws/example_cloudformation.py :language: python :dedent: 4 :start-after: [START howto_operator_cloudformation_delete_stack] @@ -78,7 +78,7 @@ Wait on an AWS CloudFormation stack creation state To wait on the state of an AWS CloudFormation stack creation until it reaches a terminal state you can use :class:`~airflow.providers.amazon.aws.sensors.cloud_formation.CloudFormationCreateStackSensor` -.. exampleinclude:: /../../tests/system/providers/amazon/aws/example_cloudformation.py +.. exampleinclude:: /../../providers/tests/system/amazon/aws/example_cloudformation.py :language: python :dedent: 4 :start-after: [START howto_sensor_cloudformation_create_stack] @@ -92,7 +92,7 @@ Wait on an AWS CloudFormation stack deletion state To wait on the state of an AWS CloudFormation stack deletion until it reaches a terminal state you can use :class:`~airflow.providers.amazon.aws.sensors.cloud_formation.CloudFormationDeleteStackSensor` -.. exampleinclude:: /../../tests/system/providers/amazon/aws/example_cloudformation.py +.. exampleinclude:: /../../providers/tests/system/amazon/aws/example_cloudformation.py :language: python :dedent: 4 :start-after: [START howto_sensor_cloudformation_delete_stack] diff --git a/docs/apache-airflow-providers-amazon/operators/comprehend.rst b/docs/apache-airflow-providers-amazon/operators/comprehend.rst index af6ed023fe05..c273643651bb 100644 --- a/docs/apache-airflow-providers-amazon/operators/comprehend.rst +++ b/docs/apache-airflow-providers-amazon/operators/comprehend.rst @@ -44,7 +44,7 @@ Create an Amazon Comprehend Start PII Entities Detection Job To create an Amazon Comprehend Start PII Entities Detection Job, you can use :class:`~airflow.providers.amazon.aws.operators.comprehend.ComprehendStartPiiEntitiesDetectionJobOperator`. -.. exampleinclude:: /../../tests/system/providers/amazon/aws/example_comprehend.py +.. exampleinclude:: /../../providers/tests/system/amazon/aws/example_comprehend.py :language: python :dedent: 4 :start-after: [START howto_operator_start_pii_entities_detection_job] @@ -58,7 +58,7 @@ Create an Amazon Comprehend Document Classifier To create an Amazon Comprehend Document Classifier, you can use :class:`~airflow.providers.amazon.aws.operators.comprehend.ComprehendCreateDocumentClassifierOperator`. -.. exampleinclude:: /../../tests/system/providers/amazon/aws/example_comprehend_document_classifier.py +.. exampleinclude:: /../../providers/tests/system/amazon/aws/example_comprehend_document_classifier.py :language: python :dedent: 4 :start-after: [START howto_operator_create_document_classifier] @@ -76,7 +76,7 @@ To wait on the state of an Amazon Comprehend Start PII Entities Detection Job un state you can use :class:`~airflow.providers.amazon.aws.sensors.comprehend.ComprehendStartPiiEntitiesDetectionJobCompletedSensor`. -.. exampleinclude:: /../../tests/system/providers/amazon/aws/example_comprehend.py +.. exampleinclude:: /../../providers/tests/system/amazon/aws/example_comprehend.py :language: python :dedent: 4 :start-after: [START howto_sensor_start_pii_entities_detection_job] @@ -91,7 +91,7 @@ To wait on the state of an Amazon Comprehend Document Classifier until it reache state you can use :class:`~airflow.providers.amazon.aws.sensors.comprehend.ComprehendCreateDocumentClassifierCompletedSensor`. -..
exampleinclude:: /../../providers/tests/system/amazon/aws/example_comprehend_document_classifier.py :language: python :dedent: 4 :start-after: [START howto_sensor_create_document_classifier] diff --git a/docs/apache-airflow-providers-amazon/operators/datasync.rst b/docs/apache-airflow-providers-amazon/operators/datasync.rst index 16e65db42dbd..26db89022b5d 100644 --- a/docs/apache-airflow-providers-amazon/operators/datasync.rst +++ b/docs/apache-airflow-providers-amazon/operators/datasync.rst @@ -64,7 +64,7 @@ Execute a task To execute a specific task, you can pass the ``task_arn`` to the operator. -.. exampleinclude:: /../../tests/system/providers/amazon/aws/example_datasync.py +.. exampleinclude:: /../../providers/tests/system/amazon/aws/example_datasync.py :language: python :dedent: 4 :start-after: [START howto_operator_datasync_specific_task] @@ -78,7 +78,7 @@ If one task is found, this one will be executed. If more than one task is found, the operator will raise an Exception. To avoid this, you can set ``allow_random_task_choice`` to ``True`` to randomly choose from candidate tasks. -.. exampleinclude:: /../../tests/system/providers/amazon/aws/example_datasync.py +.. exampleinclude:: /../../providers/tests/system/amazon/aws/example_datasync.py :language: python :dedent: 4 :start-after: [START howto_operator_datasync_search_task] @@ -97,7 +97,7 @@ existing Task was found. If these are left to their default value (None) then no Also, because ``delete_task_after_execution`` is set to ``True``, the task will be deleted from AWS DataSync after it completes successfully. -.. exampleinclude:: /../../tests/system/providers/amazon/aws/example_datasync.py +.. exampleinclude:: /../../providers/tests/system/amazon/aws/example_datasync.py :language: python :dedent: 4 :start-after: [START howto_operator_datasync_create_task] diff --git a/docs/apache-airflow-providers-amazon/operators/dms.rst b/docs/apache-airflow-providers-amazon/operators/dms.rst index 2c30e3ca6ec8..b80fb9e2b3ac 100644 --- a/docs/apache-airflow-providers-amazon/operators/dms.rst +++ b/docs/apache-airflow-providers-amazon/operators/dms.rst @@ -52,7 +52,7 @@ Create a replication task To create a replication task you can use :class:`~airflow.providers.amazon.aws.operators.dms.DmsCreateTaskOperator`. -.. exampleinclude:: /../../tests/system/providers/amazon/aws/example_dms.py +.. exampleinclude:: /../../providers/tests/system/amazon/aws/example_dms.py :language: python :dedent: 4 :start-after: [START howto_operator_dms_create_task] @@ -66,7 +66,7 @@ Start a replication task To start a replication task you can use :class:`~airflow.providers.amazon.aws.operators.dms.DmsStartTaskOperator`. -.. exampleinclude:: /../../tests/system/providers/amazon/aws/example_dms.py +.. exampleinclude:: /../../providers/tests/system/amazon/aws/example_dms.py :language: python :dedent: 4 :start-after: [START howto_operator_dms_start_task] @@ -80,7 +80,7 @@ Get details of replication tasks To retrieve the details for a list of replication tasks you can use :class:`~airflow.providers.amazon.aws.operators.dms.DmsDescribeTasksOperator`. -.. exampleinclude:: /../../tests/system/providers/amazon/aws/example_dms.py +.. exampleinclude:: /../../providers/tests/system/amazon/aws/example_dms.py :language: python :dedent: 4 :start-after: [START howto_operator_dms_describe_tasks] @@ -94,7 +94,7 @@ Stop a replication task To stop a replication task you can use :class:`~airflow.providers.amazon.aws.operators.dms.DmsStopTaskOperator`. -.. 
exampleinclude:: /../../tests/system/providers/amazon/aws/example_dms.py +.. exampleinclude:: /../../providers/tests/system/amazon/aws/example_dms.py :language: python :dedent: 4 :start-after: [START howto_operator_dms_stop_task] @@ -108,7 +108,7 @@ Delete a replication task To delete a replication task you can use :class:`~airflow.providers.amazon.aws.operators.dms.DmsDeleteTaskOperator`. -.. exampleinclude:: /../../tests/system/providers/amazon/aws/example_dms.py +.. exampleinclude:: /../../providers/tests/system/amazon/aws/example_dms.py :language: python :dedent: 4 :start-after: [START howto_operator_dms_delete_task] @@ -125,7 +125,7 @@ Wait for a replication task to complete To check the state of a replication task until it is completed, you can use :class:`~airflow.providers.amazon.aws.sensors.dms.DmsTaskCompletedSensor`. -.. exampleinclude:: /../../tests/system/providers/amazon/aws/example_dms.py +.. exampleinclude:: /../../providers/tests/system/amazon/aws/example_dms.py :language: python :dedent: 4 :start-after: [START howto_sensor_dms_task_completed] diff --git a/docs/apache-airflow-providers-amazon/operators/dynamodb.rst b/docs/apache-airflow-providers-amazon/operators/dynamodb.rst index d899f0de9223..aab6626109d0 100644 --- a/docs/apache-airflow-providers-amazon/operators/dynamodb.rst +++ b/docs/apache-airflow-providers-amazon/operators/dynamodb.rst @@ -51,7 +51,7 @@ Wait for a Single Attribute Value Match: This example shows how to use ``DynamoDBValueSensor`` to wait for a specific attribute/value pair in a DynamoDB item. -.. exampleinclude:: /../../tests/system/providers/amazon/aws/example_dynamodb.py +.. exampleinclude:: /../../providers/tests/system/amazon/aws/example_dynamodb.py :language: python :start-after: [START howto_sensor_dynamodb_value] :dedent: 4 @@ -62,7 +62,7 @@ Wait for Any Value from a List of Attribute Values: In this example, the sensor waits for the DynamoDB item to have an attribute that matches any value from a provided list. -.. exampleinclude:: /../../tests/system/providers/amazon/aws/example_dynamodb.py +.. exampleinclude:: /../../providers/tests/system/amazon/aws/example_dynamodb.py :language: python :start-after: [START howto_sensor_dynamodb_any_value] :dedent: 4 diff --git a/docs/apache-airflow-providers-amazon/operators/ec2.rst b/docs/apache-airflow-providers-amazon/operators/ec2.rst index e5462b32a18f..4cfeb17d2465 100644 --- a/docs/apache-airflow-providers-amazon/operators/ec2.rst +++ b/docs/apache-airflow-providers-amazon/operators/ec2.rst @@ -38,7 +38,7 @@ Start an Amazon EC2 instance To start an Amazon EC2 instance you can use :class:`~airflow.providers.amazon.aws.operators.ec2.EC2StartInstanceOperator`. -.. exampleinclude:: /../../tests/system/providers/amazon/aws/example_ec2.py +.. exampleinclude:: /../../providers/tests/system/amazon/aws/example_ec2.py :language: python :dedent: 4 :start-after: [START howto_operator_ec2_start_instance] @@ -52,7 +52,7 @@ Stop an Amazon EC2 instance To stop an Amazon EC2 instance you can use :class:`~airflow.providers.amazon.aws.operators.ec2.EC2StopInstanceOperator`. -.. exampleinclude:: /../../tests/system/providers/amazon/aws/example_ec2.py +.. exampleinclude:: /../../providers/tests/system/amazon/aws/example_ec2.py :language: python :dedent: 4 :start-after: [START howto_operator_ec2_stop_instance] @@ -66,7 +66,7 @@ Create and start an Amazon EC2 instance To create and start an Amazon EC2 instance you can use :class:`~airflow.providers.amazon.aws.operators.ec2.EC2CreateInstanceOperator`. -.. 
exampleinclude:: /../../tests/system/providers/amazon/aws/example_ec2.py +.. exampleinclude:: /../../providers/tests/system/amazon/aws/example_ec2.py :language: python :dedent: 4 :start-after: [START howto_operator_ec2_create_instance] @@ -80,7 +80,7 @@ Terminate an Amazon EC2 instance To terminate an Amazon EC2 instance you can use :class:`~airflow.providers.amazon.aws.operators.ec2.EC2TerminateInstanceOperator`. -.. exampleinclude:: /../../tests/system/providers/amazon/aws/example_ec2.py +.. exampleinclude:: /../../providers/tests/system/amazon/aws/example_ec2.py :language: python :dedent: 4 :start-after: [START howto_operator_ec2_terminate_instance] @@ -94,7 +94,7 @@ Reboot an Amazon EC2 instance To reboot an Amazon EC2 instance you can use :class:`~airflow.providers.amazon.aws.operators.ec2.EC2RebootInstanceOperator`. -.. exampleinclude:: /../../tests/system/providers/amazon/aws/example_ec2.py +.. exampleinclude:: /../../providers/tests/system/amazon/aws/example_ec2.py :language: python :dedent: 4 :start-after: [START howto_operator_ec2_reboot_instance] @@ -108,7 +108,7 @@ Hibernate an Amazon EC2 instance To hibernate an Amazon EC2 instance you can use :class:`~airflow.providers.amazon.aws.operators.ec2.EC2HibernateInstanceOperator`. -.. exampleinclude:: /../../tests/system/providers/amazon/aws/example_ec2.py +.. exampleinclude:: /../../providers/tests/system/amazon/aws/example_ec2.py :language: python :dedent: 4 :start-after: [START howto_operator_ec2_hibernate_instance] @@ -125,7 +125,7 @@ Wait on an Amazon EC2 instance state To check the state of an Amazon EC2 instance and wait until it reaches the target state you can use :class:`~airflow.providers.amazon.aws.sensors.ec2.EC2InstanceStateSensor`. -.. exampleinclude:: /../../tests/system/providers/amazon/aws/example_ec2.py +.. exampleinclude:: /../../providers/tests/system/amazon/aws/example_ec2.py :language: python :dedent: 4 :start-after: [START howto_sensor_ec2_instance_state] diff --git a/docs/apache-airflow-providers-amazon/operators/ecs.rst b/docs/apache-airflow-providers-amazon/operators/ecs.rst index 2c4b3c7e7f58..8cb02d539e7d 100644 --- a/docs/apache-airflow-providers-amazon/operators/ecs.rst +++ b/docs/apache-airflow-providers-amazon/operators/ecs.rst @@ -48,7 +48,7 @@ To create an Amazon ECS cluster you can use All optional parameters to be passed to the Create Cluster API should be passed in the 'create_cluster_kwargs' dict. -.. exampleinclude:: /../../tests/system/providers/amazon/aws/example_ecs.py +.. exampleinclude:: /../../providers/tests/system/amazon/aws/example_ecs.py :language: python :dedent: 4 :start-after: [START howto_operator_ecs_create_cluster] @@ -63,7 +63,7 @@ To delete an Amazon ECS cluster you can use :class:`~airflow.providers.amazon.aws.operators.ecs.EcsDeleteClusterOperator`. -.. exampleinclude:: /../../tests/system/providers/amazon/aws/example_ecs.py +.. exampleinclude:: /../../providers/tests/system/amazon/aws/example_ecs.py :language: python :dedent: 4 :start-after: [START howto_operator_ecs_delete_cluster] @@ -81,7 +81,7 @@ All optional parameters to be passed to the Register Task Definition API should passed in the 'register_task_kwargs' dict. -.. exampleinclude:: /../../tests/system/providers/amazon/aws/example_ecs.py +.. 
exampleinclude:: /../../providers/tests/system/amazon/aws/example_ecs.py :language: python :dedent: 4 :start-after: [START howto_operator_ecs_register_task_definition] @@ -96,7 +96,7 @@ To deregister a task definition you can use :class:`~airflow.providers.amazon.aws.operators.ecs.EcsDeregisterTaskDefinitionOperator`. -.. exampleinclude:: /../../tests/system/providers/amazon/aws/example_ecs.py +.. exampleinclude:: /../../providers/tests/system/amazon/aws/example_ecs.py :language: python :dedent: 4 :start-after: [START howto_operator_ecs_deregister_task_definition] @@ -125,14 +125,14 @@ The parameters you need to configure for this Operator will depend upon which `` * If you are using EC2 as the compute resources in your ECS Cluster, set the parameter to EC2. * If you have integrated external resources in your ECS Cluster, for example using ECS Anywhere, and want to run your containers on those external resources, set the parameter to EXTERNAL. -.. exampleinclude:: /../../tests/system/providers/amazon/aws/example_ecs.py +.. exampleinclude:: /../../providers/tests/system/amazon/aws/example_ecs.py :language: python :dedent: 4 :start-after: [START howto_operator_ecs_run_task] :end-before: [END howto_operator_ecs_run_task] -.. exampleinclude:: /../../tests/system/providers/amazon/aws/example_ecs_fargate.py +.. exampleinclude:: /../../providers/tests/system/amazon/aws/example_ecs_fargate.py :language: python :dedent: 4 :start-after: [START howto_operator_ecs] @@ -145,7 +145,7 @@ To stream logs to AWS CloudWatch, you need to define the parameters below. Using the example above, we would add these additional parameters to enable logging to CloudWatch. You need to ensure that you have the appropriate level of permissions (see next section). -.. exampleinclude:: /../../tests/system/providers/amazon/aws/example_ecs.py +.. exampleinclude:: /../../providers/tests/system/amazon/aws/example_ecs.py :language: python :dedent: 8 :start-after: [START howto_awslogs_ecs] @@ -228,7 +228,7 @@ the failure reason if a failed state is provided and that state is reached before the target state. -.. exampleinclude:: /../../tests/system/providers/amazon/aws/example_ecs.py +.. exampleinclude:: /../../providers/tests/system/amazon/aws/example_ecs.py :language: python :dedent: 4 :start-after: [START howto_sensor_ecs_cluster_state] @@ -248,7 +248,7 @@ to change that. Raises an AirflowException with the failure reason if the faile is reached before the target state. -.. exampleinclude:: /../../tests/system/providers/amazon/aws/example_ecs.py +.. exampleinclude:: /../../providers/tests/system/amazon/aws/example_ecs.py :language: python :dedent: 4 :start-after: [START howto_sensor_ecs_task_definition_state] @@ -267,7 +267,7 @@ both can be overridden with provided values. Raises an AirflowException with the failure reason if a failed state is provided and that state is reached before the target state. -.. exampleinclude:: /../../tests/system/providers/amazon/aws/example_ecs_fargate.py +.. 
exampleinclude:: /../../providers/tests/system/amazon/aws/example_ecs_fargate.py :language: python :dedent: 4 :start-after: [START howto_sensor_ecs_task_state] diff --git a/docs/apache-airflow-providers-amazon/operators/eks.rst b/docs/apache-airflow-providers-amazon/operators/eks.rst index 9f1cc9df61ee..aa774b909595 100644 --- a/docs/apache-airflow-providers-amazon/operators/eks.rst +++ b/docs/apache-airflow-providers-amazon/operators/eks.rst @@ -46,7 +46,7 @@ Note: An AWS IAM role with the following permissions is required: ``eks.amazonaws.com`` must be added to the Trusted Relationships ``AmazonEKSClusterPolicy`` IAM Policy must be attached -.. exampleinclude:: /../../tests/system/providers/amazon/aws/example_eks_with_nodegroups.py +.. exampleinclude:: /../../providers/tests/system/amazon/aws/example_eks_with_nodegroups.py :language: python :dedent: 4 :start-after: [START howto_operator_eks_create_cluster] @@ -65,7 +65,7 @@ Note: An AWS IAM role with the following permissions is required: ``AmazonEKSClusterPolicy`` IAM Policy must be attached ``AmazonEKSWorkerNodePolicy`` IAM Policy must be attached -.. exampleinclude:: /../../tests/system/providers/amazon/aws/example_eks_with_nodegroup_in_one_step.py +.. exampleinclude:: /../../providers/tests/system/amazon/aws/example_eks_with_nodegroup_in_one_step.py :language: python :dedent: 4 :start-after: [START howto_operator_eks_create_cluster_with_nodegroup] @@ -85,7 +85,7 @@ Note: An AWS IAM role with the following permissions is required: ``AmazonEKSClusterPolicy`` IAM Policy must be attached ``AmazonEKSWorkerNodePolicy`` IAM Policy must be attached -.. exampleinclude:: /../../tests/system/providers/amazon/aws/example_eks_with_fargate_in_one_step.py +.. exampleinclude:: /../../providers/tests/system/amazon/aws/example_eks_with_fargate_in_one_step.py :language: python :dedent: 4 :start-after: [START howto_operator_eks_create_cluster_with_fargate_profile] @@ -100,7 +100,7 @@ To delete an existing Amazon EKS Cluster you can use :class:`~airflow.providers.amazon.aws.operators.eks.EksDeleteClusterOperator`. You can also run this operator in deferrable mode by setting ``deferrable`` param to ``True``. -.. exampleinclude:: /../../tests/system/providers/amazon/aws/example_eks_with_nodegroups.py +.. exampleinclude:: /../../providers/tests/system/amazon/aws/example_eks_with_nodegroups.py :language: python :dedent: 4 :start-after: [START howto_operator_eks_delete_cluster] @@ -110,7 +110,7 @@ Note: If the cluster has any attached resources, such as an Amazon EKS Nodegroup or AWS Fargate profile, the cluster cannot be deleted. Using the ``force`` parameter will attempt to delete any attached resources first. -.. exampleinclude:: /../../tests/system/providers/amazon/aws/example_eks_with_nodegroup_in_one_step.py +.. exampleinclude:: /../../providers/tests/system/amazon/aws/example_eks_with_nodegroup_in_one_step.py :language: python :dedent: 4 :start-after: [START howto_operator_eks_force_delete_cluster] @@ -130,7 +130,7 @@ Note: An AWS IAM role with the following permissions is required: ``AmazonEC2ContainerRegistryReadOnly`` IAM Policy must be attached ``AmazonEKSWorkerNodePolicy`` IAM Policy must be attached -.. exampleinclude:: /../../tests/system/providers/amazon/aws/example_eks_with_nodegroups.py +.. 
exampleinclude:: /../../providers/tests/system/amazon/aws/example_eks_with_nodegroups.py :language: python :dedent: 4 :start-after: [START howto_operator_eks_create_nodegroup] @@ -145,7 +145,7 @@ To delete an existing Amazon EKS managed node group you can use :class:`~airflow.providers.amazon.aws.operators.eks.EksDeleteNodegroupOperator`. You can also run this operator in deferrable mode by setting ``deferrable`` param to ``True``. -.. exampleinclude:: /../../tests/system/providers/amazon/aws/example_eks_with_nodegroups.py +.. exampleinclude:: /../../providers/tests/system/amazon/aws/example_eks_with_nodegroups.py :language: python :dedent: 4 :start-after: [START howto_operator_eks_delete_nodegroup] @@ -164,7 +164,7 @@ Note: An AWS IAM role with the following permissions is required: ``AmazonEC2ContainerRegistryReadOnly`` IAM Policy must be attached ``AmazonEKSWorkerNodePolicy`` IAM Policy must be attached -.. exampleinclude:: /../../tests/system/providers/amazon/aws/example_eks_with_fargate_profile.py +.. exampleinclude:: /../../providers/tests/system/amazon/aws/example_eks_with_fargate_profile.py :language: python :dedent: 4 :start-after: [START howto_operator_eks_create_fargate_profile] @@ -178,7 +178,7 @@ Delete an AWS Fargate Profile To delete an existing AWS Fargate Profile you can use :class:`~airflow.providers.amazon.aws.operators.eks.EksDeleteFargateProfileOperator`. -.. exampleinclude:: /../../tests/system/providers/amazon/aws/example_eks_with_fargate_profile.py +.. exampleinclude:: /../../providers/tests/system/amazon/aws/example_eks_with_fargate_profile.py :language: python :dedent: 4 :start-after: [START howto_operator_eks_delete_fargate_profile] @@ -194,7 +194,7 @@ To run a pod on an existing Amazon EKS Cluster, you can use Note: An Amazon EKS Cluster with underlying compute infrastructure is required. -.. exampleinclude:: /../../tests/system/providers/amazon/aws/example_eks_with_nodegroups.py +.. exampleinclude:: /../../providers/tests/system/amazon/aws/example_eks_with_nodegroups.py :language: python :dedent: 4 :start-after: [START howto_operator_eks_pod_operator] @@ -211,7 +211,7 @@ Wait on an Amazon EKS cluster state To check the state of an Amazon EKS Cluster until it reaches the target state or another terminal state you can use :class:`~airflow.providers.amazon.aws.sensors.eks.EksClusterStateSensor`. -.. exampleinclude:: /../../tests/system/providers/amazon/aws/example_eks_with_nodegroups.py +.. exampleinclude:: /../../providers/tests/system/amazon/aws/example_eks_with_nodegroups.py :language: python :dedent: 4 :start-after: [START howto_sensor_eks_cluster] @@ -225,7 +225,7 @@ Wait on an Amazon EKS managed node group state To check the state of an Amazon EKS managed node group until it reaches the target state or another terminal state you can use :class:`~airflow.providers.amazon.aws.sensors.eks.EksNodegroupStateSensor`. -.. exampleinclude:: /../../tests/system/providers/amazon/aws/example_eks_with_nodegroups.py +.. exampleinclude:: /../../providers/tests/system/amazon/aws/example_eks_with_nodegroups.py :language: python :dedent: 4 :start-after: [START howto_sensor_eks_nodegroup] @@ -239,7 +239,7 @@ Wait on an AWS Fargate profile state To check the state of an AWS Fargate profile until it reaches the target state or another terminal state you can use :class:`~airflow.providers.amazon.aws.sensors.eks.EksFargateProfileSensor`. -.. exampleinclude:: /../../tests/system/providers/amazon/aws/example_eks_with_fargate_profile.py +.. 
exampleinclude:: /../../providers/tests/system/amazon/aws/example_eks_with_fargate_profile.py :language: python :dedent: 4 :start-after: [START howto_sensor_eks_fargate] diff --git a/docs/apache-airflow-providers-amazon/operators/emr/emr.rst b/docs/apache-airflow-providers-amazon/operators/emr/emr.rst index 8a2255ddbf4c..5e32baa151c4 100644 --- a/docs/apache-airflow-providers-amazon/operators/emr/emr.rst +++ b/docs/apache-airflow-providers-amazon/operators/emr/emr.rst @@ -57,7 +57,7 @@ JobFlow configuration To create a job flow on EMR, you need to specify the configuration for the EMR cluster: -.. exampleinclude:: /../../tests/system/providers/amazon/aws/example_emr.py +.. exampleinclude:: /../../providers/tests/system/amazon/aws/example_emr.py :language: python :start-after: [START howto_operator_emr_steps_config] :end-before: [END howto_operator_emr_steps_config] @@ -80,7 +80,7 @@ Create the Job Flow In the following code we are creating a new job flow using the configuration as explained above. -.. exampleinclude:: /../../tests/system/providers/amazon/aws/example_emr.py +.. exampleinclude:: /../../providers/tests/system/amazon/aws/example_emr.py :language: python :dedent: 4 :start-after: [START howto_operator_emr_create_job_flow] @@ -98,7 +98,7 @@ Using ``deferrable`` mode will release worker slots and lead to more efficient utilization of resources within the Airflow cluster. However, this mode will need the Airflow triggerer to be available in your deployment. -.. exampleinclude:: /../../tests/system/providers/amazon/aws/example_emr.py +.. exampleinclude:: /../../providers/tests/system/amazon/aws/example_emr.py :language: python :dedent: 4 :start-after: [START howto_operator_emr_add_steps] @@ -116,7 +116,7 @@ Using ``deferrable`` mode will release worker slots and lead to more efficient utilization of resources within the Airflow cluster. However, this mode will need the Airflow triggerer to be available in your deployment. -.. exampleinclude:: /../../tests/system/providers/amazon/aws/example_emr.py +.. exampleinclude:: /../../providers/tests/system/amazon/aws/example_emr.py :language: python :dedent: 4 :start-after: [START howto_operator_emr_terminate_job_flow] @@ -130,7 +130,7 @@ Modify Amazon EMR cluster To modify an existing EMR cluster you can use :class:`~airflow.providers.amazon.aws.operators.emr.EmrModifyClusterOperator`. -.. exampleinclude:: /../../tests/system/providers/amazon/aws/example_emr.py +.. exampleinclude:: /../../providers/tests/system/amazon/aws/example_emr.py :language: python :dedent: 4 :start-after: [START howto_operator_emr_modify_cluster] @@ -144,7 +144,7 @@ Start an EMR notebook execution You can use :class:`~airflow.providers.amazon.aws.operators.emr.EmrStartNotebookExecutionOperator` to start a notebook execution on an existing notebook attached to a running cluster. -.. exampleinclude:: /../../tests/system/providers/amazon/aws/example_emr_notebook_execution.py +.. exampleinclude:: /../../providers/tests/system/amazon/aws/example_emr_notebook_execution.py :language: python :dedent: 4 :start-after: [START howto_operator_emr_start_notebook_execution] @@ -158,7 +158,7 @@ Stop an EMR notebook execution You can use :class:`~airflow.providers.amazon.aws.operators.emr.EmrStopNotebookExecutionOperator` to stop a running notebook execution. -.. exampleinclude:: /../../tests/system/providers/amazon/aws/example_emr_notebook_execution.py +.. exampleinclude:: /../../providers/tests/system/amazon/aws/example_emr_notebook_execution.py :language: python :dedent: 4 :start-after: [START howto_operator_emr_stop_notebook_execution]
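As a minimal sketch of how these two notebook operators pair up (the editor, cluster, and role identifiers below are hypothetical placeholders, not values taken from the relocated example file):

.. code-block:: python

    from airflow.providers.amazon.aws.operators.emr import (
        EmrStartNotebookExecutionOperator,
        EmrStopNotebookExecutionOperator,
    )

    # All identifiers here are hypothetical, for illustration only.
    start_execution = EmrStartNotebookExecutionOperator(
        task_id="start_execution",
        editor_id="e-EXAMPLEEDITORID",
        cluster_id="j-EXAMPLECLUSTERID",
        relative_path="demo.ipynb",
        service_role="EMR_Notebooks_DefaultRole",
    )

    # The start operator returns the notebook execution id, so the stop task
    # can consume it from XCom to stop that same execution.
    stop_execution = EmrStopNotebookExecutionOperator(
        task_id="stop_execution",
        notebook_execution_id=start_execution.output,
    )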
@@ -175,7 +175,7 @@ Wait on an EMR notebook execution state To monitor the state of an EMR notebook execution you can use :class:`~airflow.providers.amazon.aws.sensors.emr.EmrNotebookExecutionSensor`. -.. exampleinclude:: /../../tests/system/providers/amazon/aws/example_emr_notebook_execution.py +.. exampleinclude:: /../../providers/tests/system/amazon/aws/example_emr_notebook_execution.py :language: python :dedent: 4 :start-after: [START howto_sensor_emr_notebook_execution] @@ -189,7 +189,7 @@ Wait on an Amazon EMR job flow state To monitor the state of an EMR job flow you can use :class:`~airflow.providers.amazon.aws.sensors.emr.EmrJobFlowSensor`. -.. exampleinclude:: /../../tests/system/providers/amazon/aws/example_emr.py +.. exampleinclude:: /../../providers/tests/system/amazon/aws/example_emr.py :language: python :dedent: 4 :start-after: [START howto_sensor_emr_job_flow] @@ -203,7 +203,7 @@ Wait on an Amazon EMR step state To monitor the state of an EMR job step you can use :class:`~airflow.providers.amazon.aws.sensors.emr.EmrStepSensor`. -.. exampleinclude:: /../../tests/system/providers/amazon/aws/example_emr.py +.. exampleinclude:: /../../providers/tests/system/amazon/aws/example_emr.py :language: python :dedent: 4 :start-after: [START howto_sensor_emr_step] diff --git a/docs/apache-airflow-providers-amazon/operators/emr/emr_eks.rst b/docs/apache-airflow-providers-amazon/operators/emr/emr_eks.rst index dc31c8f98461..122b22fa0484 100644 --- a/docs/apache-airflow-providers-amazon/operators/emr/emr_eks.rst +++ b/docs/apache-airflow-providers-amazon/operators/emr/emr_eks.rst @@ -48,7 +48,7 @@ the EKS cluster that you would like to use, and an EKS namespace. Refer to the `EMR on EKS Development guide `__ for more details. -.. exampleinclude:: /../../tests/system/providers/amazon/aws/example_emr_eks.py +.. exampleinclude:: /../../providers/tests/system/amazon/aws/example_emr_eks.py :language: python :start-after: [START howto_operator_emr_eks_create_cluster] :end-before: [END howto_operator_emr_eks_create_cluster] @@ -81,7 +81,7 @@ and ``monitoringConfiguration`` to send logs to the ``/aws/emr-eks-spark`` log group. Refer to the `EMR on EKS guide `__ for more details on job configuration. -.. exampleinclude:: /../../tests/system/providers/amazon/aws/example_emr_eks.py +.. exampleinclude:: /../../providers/tests/system/amazon/aws/example_emr_eks.py :language: python :dedent: 4 :start-after: [START howto_operator_emr_eks_config] @@ -92,7 +92,7 @@ can store them in a connection or provide them in the DAG. Your AWS region should be defined in the ``aws_default`` connection as ``{"region_name": "us-east-1"}`` or a custom connection name that gets passed to the operator with the ``aws_conn_id`` parameter. The operator returns the Job ID of the job run. -.. exampleinclude:: /../../tests/system/providers/amazon/aws/example_emr_eks.py +.. exampleinclude:: /../../providers/tests/system/amazon/aws/example_emr_eks.py :language: python :dedent: 4 :start-after: [START howto_operator_emr_container] @@ -110,7 +110,7 @@ Wait on an Amazon EMR virtual cluster job To wait on the status of an Amazon EMR virtual cluster job to reach a terminal state, you can use :class:`~airflow.providers.amazon.aws.sensors.emr.EmrContainerSensor` -.. exampleinclude:: /../../tests/system/providers/amazon/aws/example_emr_eks.py +.. 
exampleinclude:: /../../providers/tests/system/amazon/aws/example_emr_eks.py :language: python :dedent: 4 :start-after: [START howto_sensor_emr_container] diff --git a/docs/apache-airflow-providers-amazon/operators/emr/emr_serverless.rst b/docs/apache-airflow-providers-amazon/operators/emr/emr_serverless.rst index 9915763e19e0..28c7bb4720a0 100644 --- a/docs/apache-airflow-providers-amazon/operators/emr/emr_serverless.rst +++ b/docs/apache-airflow-providers-amazon/operators/emr/emr_serverless.rst @@ -43,7 +43,7 @@ create a new EMR Serverless Application. This operator can be run in deferrable mode by passing ``deferrable=True`` as a parameter. This requires the aiobotocore module to be installed. -.. exampleinclude:: /../../tests/system/providers/amazon/aws/example_emr_serverless.py +.. exampleinclude:: /../../providers/tests/system/amazon/aws/example_emr_serverless.py :language: python :dedent: 4 :start-after: [START howto_operator_emr_serverless_create_application] @@ -59,7 +59,7 @@ start an EMR Serverless Job. This operator can be run in deferrable mode by passing ``deferrable=True`` as a parameter. This requires the aiobotocore module to be installed. -.. exampleinclude:: /../../tests/system/providers/amazon/aws/example_emr_serverless.py +.. exampleinclude:: /../../providers/tests/system/amazon/aws/example_emr_serverless.py :language: python :dedent: 4 :start-after: [START howto_operator_emr_serverless_start_job] @@ -93,7 +93,7 @@ stop an EMR Serverless Application. This operator can be run in deferrable mode by passing ``deferrable=True`` as a parameter. This requires the aiobotocore module to be installed. -.. exampleinclude:: /../../tests/system/providers/amazon/aws/example_emr_serverless.py +.. exampleinclude:: /../../providers/tests/system/amazon/aws/example_emr_serverless.py :language: python :dedent: 4 :start-after: [START howto_operator_emr_serverless_stop_application] @@ -109,7 +109,7 @@ delete an EMR Serverless Application. This operator can be run in deferrable mode by passing ``deferrable=True`` as a parameter. This requires the aiobotocore module to be installed. -.. exampleinclude:: /../../tests/system/providers/amazon/aws/example_emr_serverless.py +.. exampleinclude:: /../../providers/tests/system/amazon/aws/example_emr_serverless.py :language: python :dedent: 4 :start-after: [START howto_operator_emr_serverless_delete_application] @@ -126,7 +126,7 @@ Wait on an EMR Serverless Job state To monitor the state of an EMR Serverless Job you can use :class:`~airflow.providers.amazon.aws.sensors.emr.EmrServerlessJobSensor`. -.. exampleinclude:: /../../tests/system/providers/amazon/aws/example_emr_serverless.py +.. exampleinclude:: /../../providers/tests/system/amazon/aws/example_emr_serverless.py :language: python :dedent: 4 :start-after: [START howto_sensor_emr_serverless_job] @@ -140,7 +140,7 @@ Wait on an EMR Serverless Application state To monitor the state of an EMR Serverless Application you can use :class:`~airflow.providers.amazon.aws.sensors.emr.EmrServerlessApplicationSensor`. -.. exampleinclude:: /../../tests/system/providers/amazon/aws/example_emr_serverless.py +.. 
exampleinclude:: /../../providers/tests/system/amazon/aws/example_emr_serverless.py :language: python :dedent: 4 :start-after: [START howto_sensor_emr_serverless_application] diff --git a/docs/apache-airflow-providers-amazon/operators/eventbridge.rst b/docs/apache-airflow-providers-amazon/operators/eventbridge.rst index 453e5af31070..a39c88414925 100644 --- a/docs/apache-airflow-providers-amazon/operators/eventbridge.rst +++ b/docs/apache-airflow-providers-amazon/operators/eventbridge.rst @@ -48,7 +48,7 @@ Send events to Amazon EventBridge To send custom events to Amazon EventBridge, use :class:`~airflow.providers.amazon.aws.operators.eventbridge.EventBridgePutEventsOperator`. -.. exampleinclude:: /../../tests/system/providers/amazon/aws/example_eventbridge.py +.. exampleinclude:: /../../providers/tests/system/amazon/aws/example_eventbridge.py :language: python :dedent: 4 :start-after: [START howto_operator_eventbridge_put_events] @@ -63,7 +63,7 @@ Create or update a rule on Amazon EventBridge To create or update a rule on EventBridge, use :class:`~airflow.providers.amazon.aws.operators.eventbridge.EventBridgePutRuleOperator`. -.. exampleinclude:: /../../tests/system/providers/amazon/aws/example_eventbridge.py +.. exampleinclude:: /../../providers/tests/system/amazon/aws/example_eventbridge.py :language: python :dedent: 4 :start-after: [START howto_operator_eventbridge_put_rule] @@ -78,7 +78,7 @@ Enable a rule on Amazon EventBridge To enable an existing rule on EventBridge, use :class:`~airflow.providers.amazon.aws.operators.eventbridge.EventBridgeEnableRuleOperator`. -.. exampleinclude:: /../../tests/system/providers/amazon/aws/example_eventbridge.py +.. exampleinclude:: /../../providers/tests/system/amazon/aws/example_eventbridge.py :language: python :dedent: 4 :start-after: [START howto_operator_eventbridge_enable_rule] @@ -93,7 +93,7 @@ Disable a rule on Amazon EventBridge To disable an existing rule on EventBridge, use :class:`~airflow.providers.amazon.aws.operators.eventbridge.EventBridgeDisableRuleOperator`. -.. exampleinclude:: /../../tests/system/providers/amazon/aws/example_eventbridge.py +.. exampleinclude:: /../../providers/tests/system/amazon/aws/example_eventbridge.py :language: python :dedent: 4 :start-after: [START howto_operator_eventbridge_disable_rule] diff --git a/docs/apache-airflow-providers-amazon/operators/glue.rst b/docs/apache-airflow-providers-amazon/operators/glue.rst index 18a9b887d8b4..88b4d3374dbf 100644 --- a/docs/apache-airflow-providers-amazon/operators/glue.rst +++ b/docs/apache-airflow-providers-amazon/operators/glue.rst @@ -46,7 +46,7 @@ AWS Glue Crawlers allow you to easily extract data from various data sources. To create a new AWS Glue Crawler or run an existing one you can use :class:`~airflow.providers.amazon.aws.operators.glue_crawler.GlueCrawlerOperator`. -.. exampleinclude:: /../../tests/system/providers/amazon/aws/example_glue.py +.. exampleinclude:: /../../providers/tests/system/amazon/aws/example_glue.py :language: python :dedent: 4 :start-after: [START howto_operator_glue_crawler] @@ -64,7 +64,7 @@ Submit an AWS Glue job To submit a new AWS Glue job you can use :class:`~airflow.providers.amazon.aws.operators.glue.GlueJobOperator`. -.. exampleinclude:: /../../tests/system/providers/amazon/aws/example_glue.py +.. exampleinclude:: /../../providers/tests/system/amazon/aws/example_glue.py :language: python :dedent: 4 :start-after: [START howto_operator_glue] @@ -84,7 +84,7 @@ of your data so that you can make good business decisions. 
To create a new AWS Glue Data Quality ruleset or update an existing one you can use :class:`~airflow.providers.amazon.aws.operators.glue.GlueDataQualityOperator`. -.. exampleinclude:: /../../tests/system/providers/amazon/aws/example_glue_data_quality.py +.. exampleinclude:: /../../providers/tests/system/amazon/aws/example_glue_data_quality.py :language: python :dedent: 4 :start-after: [START howto_operator_glue_data_quality_operator] @@ -98,7 +98,7 @@ Start an AWS Glue Data Quality Evaluation Run To start an AWS Glue Data Quality ruleset evaluation run you can use :class:`~airflow.providers.amazon.aws.operators.glue.GlueDataQualityRuleSetEvaluationRunOperator`. -.. exampleinclude:: /../../tests/system/providers/amazon/aws/example_glue_data_quality.py +.. exampleinclude:: /../../providers/tests/system/amazon/aws/example_glue_data_quality.py :language: python :dedent: 4 :start-after: [START howto_operator_glue_data_quality_ruleset_evaluation_run_operator] @@ -112,7 +112,7 @@ Start an AWS Glue Data Quality Recommendation Run To start an AWS Glue Data Quality rule recommendation run you can use :class:`~airflow.providers.amazon.aws.operators.glue.GlueDataQualityRuleRecommendationRunOperator`. -.. exampleinclude:: /../../tests/system/providers/amazon/aws/example_glue_data_quality_with_recommendation.py +.. exampleinclude:: /../../providers/tests/system/amazon/aws/example_glue_data_quality_with_recommendation.py :language: python :dedent: 4 :start-after: [START howto_operator_glue_data_quality_rule_recommendation_run] @@ -129,7 +129,7 @@ Wait on an AWS Glue crawler state To wait on the state of an AWS Glue crawler execution until it reaches a terminal state you can use :class:`~airflow.providers.amazon.aws.sensors.glue_crawler.GlueCrawlerSensor`. -.. exampleinclude:: /../../tests/system/providers/amazon/aws/example_glue.py +.. exampleinclude:: /../../providers/tests/system/amazon/aws/example_glue.py :language: python :dedent: 4 :start-after: [START howto_sensor_glue_crawler] @@ -143,7 +143,7 @@ Wait on an AWS Glue job state To wait on the state of an AWS Glue Job until it reaches a terminal state you can use :class:`~airflow.providers.amazon.aws.sensors.glue.GlueJobSensor` -.. exampleinclude:: /../../tests/system/providers/amazon/aws/example_glue.py +.. exampleinclude:: /../../providers/tests/system/amazon/aws/example_glue.py :language: python :dedent: 4 :start-after: [START howto_sensor_glue] @@ -157,7 +157,7 @@ Wait on an AWS Glue Data Quality Evaluation Run To wait on the state of an AWS Glue Data Quality RuleSet Evaluation Run until it reaches a terminal state you can use :class:`~airflow.providers.amazon.aws.sensors.glue.GlueDataQualityRuleSetEvaluationRunSensor` -.. exampleinclude:: /../../tests/system/providers/amazon/aws/example_glue_data_quality.py +.. exampleinclude:: /../../providers/tests/system/amazon/aws/example_glue_data_quality.py :language: python :dedent: 4 :start-after: [START howto_sensor_glue_data_quality_ruleset_evaluation_run] @@ -171,7 +171,7 @@ Wait on an AWS Glue Data Quality Recommendation Run To wait on the state of an AWS Glue Data Quality recommendation run until it reaches a terminal state you can use :class:`~airflow.providers.amazon.aws.sensors.glue.GlueDataQualityRuleRecommendationRunSensor` -.. exampleinclude:: /../../tests/system/providers/amazon/aws/example_glue_data_quality_with_recommendation.py +.. 
exampleinclude:: /../../providers/tests/system/amazon/aws/example_glue_data_quality_with_recommendation.py :language: python :dedent: 4 :start-after: [START howto_sensor_glue_data_quality_rule_recommendation_run] @@ -185,7 +185,7 @@ Wait on an AWS Glue Catalog Partition To wait for a partition to show up in AWS Glue Catalog until it reaches a terminal state you can use :class:`~airflow.providers.amazon.aws.sensors.glue_catalog_partition.GlueCatalogPartitionSensor` -.. exampleinclude:: /../../tests/system/providers/amazon/aws/example_glue.py +.. exampleinclude:: /../../providers/tests/system/amazon/aws/example_glue.py :language: python :dedent: 4 :start-after: [START howto_sensor_glue_catalog_partition] diff --git a/docs/apache-airflow-providers-amazon/operators/glue_databrew.rst b/docs/apache-airflow-providers-amazon/operators/glue_databrew.rst index be654335ea1d..2286a5146a59 100644 --- a/docs/apache-airflow-providers-amazon/operators/glue_databrew.rst +++ b/docs/apache-airflow-providers-amazon/operators/glue_databrew.rst @@ -46,7 +46,7 @@ Start an AWS Glue DataBrew job To submit a new AWS Glue DataBrew job you can use :class:`~airflow.providers.amazon.aws.operators.glue_databrew.GlueDataBrewStartJobOperator`. -.. exampleinclude:: /../../tests/system/providers/amazon/aws/example_glue_databrew.py +.. exampleinclude:: /../../providers/tests/system/amazon/aws/example_glue_databrew.py :language: python :dedent: 4 :start-after: [START howto_operator_glue_databrew_start] diff --git a/docs/apache-airflow-providers-amazon/operators/kinesis_analytics.rst b/docs/apache-airflow-providers-amazon/operators/kinesis_analytics.rst index d7156feabb77..dc351fc1c340 100644 --- a/docs/apache-airflow-providers-amazon/operators/kinesis_analytics.rst +++ b/docs/apache-airflow-providers-amazon/operators/kinesis_analytics.rst @@ -44,7 +44,7 @@ Create an Amazon Managed Service for Apache Flink Application To create an Amazon Managed Service for Apache Flink application, you can use :class:`~airflow.providers.amazon.aws.operators.kinesis_analytics.KinesisAnalyticsV2CreateApplicationOperator`. -.. exampleinclude:: /../../tests/system/providers/amazon/aws/example_kinesis_analytics.py +.. exampleinclude:: /../../providers/tests/system/amazon/aws/example_kinesis_analytics.py :language: python :dedent: 4 :start-after: [START howto_operator_create_application] @@ -58,7 +58,7 @@ Start an Amazon Managed Service for Apache Flink Application To start an Amazon Managed Service for Apache Flink application, you can use :class:`~airflow.providers.amazon.aws.operators.kinesis_analytics.KinesisAnalyticsV2StartApplicationOperator`. -.. exampleinclude:: /../../tests/system/providers/amazon/aws/example_kinesis_analytics.py +.. exampleinclude:: /../../providers/tests/system/amazon/aws/example_kinesis_analytics.py :language: python :dedent: 4 :start-after: [START howto_operator_start_application] @@ -72,7 +72,7 @@ Stop an Amazon Managed Service for Apache Flink Application To stop an Amazon Managed Service for Apache Flink application, you can use :class:`~airflow.providers.amazon.aws.operators.kinesis_analytics.KinesisAnalyticsV2StopApplicationOperator`. -.. exampleinclude:: /../../tests/system/providers/amazon/aws/example_kinesis_analytics.py +.. 
exampleinclude:: /../../providers/tests/system/amazon/aws/example_kinesis_analytics.py :language: python :dedent: 4 :start-after: [START howto_operator_stop_application] @@ -89,7 +89,7 @@ Wait for an Amazon Managed Service for Apache Flink Application to start To wait on the state of an Amazon Managed Service for Apache Flink Application to start you can use :class:`~airflow.providers.amazon.aws.sensors.kinesis_analytics.KinesisAnalyticsV2StartApplicationCompletedSensor`. -.. exampleinclude:: /../../tests/system/providers/amazon/aws/example_kinesis_analytics.py +.. exampleinclude:: /../../providers/tests/system/amazon/aws/example_kinesis_analytics.py :language: python :dedent: 4 :start-after: [START howto_sensor_start_application] @@ -103,7 +103,7 @@ Wait for an Amazon Managed Service for Apache Flink Application to stop To wait on the state of an Amazon Managed Service for Apache Flink Application to stop you can use :class:`~airflow.providers.amazon.aws.sensors.kinesis_analytics.KinesisAnalyticsV2StopApplicationCompletedSensor`. -.. exampleinclude:: /../../tests/system/providers/amazon/aws/example_kinesis_analytics.py +.. exampleinclude:: /../../providers/tests/system/amazon/aws/example_kinesis_analytics.py :language: python :dedent: 4 :start-after: [START howto_sensor_stop_application] diff --git a/docs/apache-airflow-providers-amazon/operators/lambda.rst b/docs/apache-airflow-providers-amazon/operators/lambda.rst index ef2576a1f541..3a9f1a013245 100644 --- a/docs/apache-airflow-providers-amazon/operators/lambda.rst +++ b/docs/apache-airflow-providers-amazon/operators/lambda.rst @@ -48,7 +48,7 @@ To create an AWS lambda function you can use This operator can be run in deferrable mode by passing ``deferrable=True`` as a parameter. This requires the aiobotocore module to be installed. -.. exampleinclude:: /../../tests/system/providers/amazon/aws/example_lambda.py +.. exampleinclude:: /../../providers/tests/system/amazon/aws/example_lambda.py :language: python :dedent: 4 :start-after: [START howto_operator_create_lambda_function] @@ -111,7 +111,7 @@ To invoke an AWS lambda function you can use The only way is `configuring destinations for asynchronous invocation `_ and sensing destination. -.. exampleinclude:: /../../tests/system/providers/amazon/aws/example_lambda.py +.. exampleinclude:: /../../providers/tests/system/amazon/aws/example_lambda.py :language: python :dedent: 4 :start-after: [START howto_operator_invoke_lambda_function] @@ -128,7 +128,7 @@ Wait on an AWS Lambda function deployment state To check the deployment state of an AWS Lambda function until it reaches the target state or another terminal state you can use :class:`~airflow.providers.amazon.aws.sensors.lambda_function.LambdaFunctionStateSensor`. -.. exampleinclude:: /../../tests/system/providers/amazon/aws/example_lambda.py +.. exampleinclude:: /../../providers/tests/system/amazon/aws/example_lambda.py :language: python :dedent: 4 :start-after: [START howto_sensor_lambda_function_state] diff --git a/docs/apache-airflow-providers-amazon/operators/neptune.rst b/docs/apache-airflow-providers-amazon/operators/neptune.rst index 98c0d7dd57c4..7b9204e1c027 100644 --- a/docs/apache-airflow-providers-amazon/operators/neptune.rst +++ b/docs/apache-airflow-providers-amazon/operators/neptune.rst @@ -49,7 +49,7 @@ the aiobotocore module to be installed. .. note:: This operator only starts an existing Neptune database cluster, it does not create a cluster. -.. 
exampleinclude:: /../../tests/system/providers/amazon/aws/example_neptune.py +.. exampleinclude:: /../../providers/tests/system/amazon/aws/example_neptune.py :language: python :dedent: 4 :start-after: [START howto_operator_start_neptune_cluster] @@ -65,7 +65,7 @@ To stop a running Neptune database cluster, you can use This operator can be run in deferrable mode by passing ``deferrable=True`` as a parameter. This requires the aiobotocore module to be installed. -.. exampleinclude:: /../../tests/system/providers/amazon/aws/example_neptune.py +.. exampleinclude:: /../../providers/tests/system/amazon/aws/example_neptune.py :language: python :dedent: 4 :start-after: [START howto_operator_stop_neptune_cluster] diff --git a/docs/apache-airflow-providers-amazon/operators/opensearchserverless.rst b/docs/apache-airflow-providers-amazon/operators/opensearchserverless.rst index 4fb0b4db9363..4c6f36865581 100644 --- a/docs/apache-airflow-providers-amazon/operators/opensearchserverless.rst +++ b/docs/apache-airflow-providers-amazon/operators/opensearchserverless.rst @@ -46,7 +46,7 @@ Wait for an Amazon OpenSearch Serverless Collection to become active To wait on the state of an Amazon OpenSearch Serverless Collection until it reaches a terminal state you can use :class:`~airflow.providers.amazon.aws.sensors.bedrock.OpenSearchServerlessCollectionActiveSensor` -.. exampleinclude:: /../../tests/system/providers/amazon/aws/example_bedrock_retrieve_and_generate.py +.. exampleinclude:: /../../providers/tests/system/amazon/aws/example_bedrock_retrieve_and_generate.py :language: python :dedent: 4 :start-after: [START howto_sensor_opensearch_collection_active] diff --git a/docs/apache-airflow-providers-amazon/operators/quicksight.rst b/docs/apache-airflow-providers-amazon/operators/quicksight.rst index 9cc0abe337e4..4ae412da6e3a 100644 --- a/docs/apache-airflow-providers-amazon/operators/quicksight.rst +++ b/docs/apache-airflow-providers-amazon/operators/quicksight.rst @@ -46,7 +46,7 @@ Amazon QuickSight create ingestion The ``QuickSightCreateIngestionOperator`` creates and starts a new SPICE ingestion for a dataset. The operator also refreshes existing SPICE datasets. -.. exampleinclude:: /../../tests/system/providers/amazon/aws/example_quicksight.py +.. exampleinclude:: /../../providers/tests/system/amazon/aws/example_quicksight.py :language: python :dedent: 4 :start-after: [START howto_operator_quicksight_create_ingestion] @@ -62,7 +62,7 @@ Amazon QuickSight ingestion sensor The ``QuickSightSensor`` waits for an Amazon QuickSight create ingestion until it reaches a terminal state. -.. exampleinclude:: /../../tests/system/providers/amazon/aws/example_quicksight.py +.. exampleinclude:: /../../providers/tests/system/amazon/aws/example_quicksight.py :language: python :dedent: 4 :start-after: [START howto_sensor_quicksight] diff --git a/docs/apache-airflow-providers-amazon/operators/rds.rst b/docs/apache-airflow-providers-amazon/operators/rds.rst index e27bbc2d2f69..9b06c1a04830 100644 --- a/docs/apache-airflow-providers-amazon/operators/rds.rst +++ b/docs/apache-airflow-providers-amazon/operators/rds.rst @@ -41,7 +41,7 @@ To create a snapshot of an Amazon RDS database instance or cluster you can use :class:`~airflow.providers.amazon.aws.operators.rds.RdsCreateDbSnapshotOperator`. The source database instance must be in the ``available`` or ``storage-optimization`` state.
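A minimal sketch of creating such a snapshot with this operator; the database and snapshot identifiers are placeholders, not values from the relocated example file:

.. code-block:: python

    from airflow.providers.amazon.aws.operators.rds import RdsCreateDbSnapshotOperator

    # "my-db" and "my-db-snapshot" are hypothetical identifiers.
    create_snapshot = RdsCreateDbSnapshotOperator(
        task_id="create_snapshot",
        db_type="instance",  # use "cluster" to snapshot a DB cluster instead
        db_identifier="my-db",
        db_snapshot_identifier="my-db-snapshot",
    )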
-.. exampleinclude:: /../../tests/system/providers/amazon/aws/example_rds_snapshot.py +.. exampleinclude:: /../../providers/tests/system/amazon/aws/example_rds_snapshot.py :language: python :dedent: 4 :start-after: [START howto_operator_rds_create_db_snapshot] @@ -56,7 +56,7 @@ To copy a snapshot of an Amazon RDS database instance or cluster you can use :class:`~airflow.providers.amazon.aws.operators.rds.RdsCopyDbSnapshotOperator`. The source database snapshot must be in the ``available`` state. -.. exampleinclude:: /../../tests/system/providers/amazon/aws/example_rds_snapshot.py +.. exampleinclude:: /../../providers/tests/system/amazon/aws/example_rds_snapshot.py :language: python :dedent: 4 :start-after: [START howto_operator_rds_copy_snapshot] @@ -71,7 +71,7 @@ To delete a snapshot of an Amazon RDS database instance or cluster you can use :class:`~airflow.providers.amazon.aws.operators.rds.RdsDeleteDbSnapshotOperator`. The database snapshot must be in the ``available`` state to be deleted. -.. exampleinclude:: /../../tests/system/providers/amazon/aws/example_rds_snapshot.py +.. exampleinclude:: /../../providers/tests/system/amazon/aws/example_rds_snapshot.py :language: python :dedent: 4 :start-after: [START howto_operator_rds_delete_snapshot] @@ -86,7 +86,7 @@ To export an Amazon RDS snapshot to Amazon S3 you can use :class:`~airflow.providers.amazon.aws.operators.rds.RdsStartExportTaskOperator`. The provided IAM role must have access to the S3 bucket. -.. exampleinclude:: /../../tests/system/providers/amazon/aws/example_rds_export.py +.. exampleinclude:: /../../providers/tests/system/amazon/aws/example_rds_export.py :language: python :dedent: 4 :start-after: [START howto_operator_rds_start_export_task] @@ -101,7 +101,7 @@ To cancel an Amazon RDS export task to S3 you can use :class:`~airflow.providers.amazon.aws.operators.rds.RdsCancelExportTaskOperator`. Any data that has already been written to the S3 bucket isn't removed. -.. exampleinclude:: /../../tests/system/providers/amazon/aws/example_rds_export.py +.. exampleinclude:: /../../providers/tests/system/amazon/aws/example_rds_export.py :language: python :dedent: 4 :start-after: [START howto_operator_rds_cancel_export] @@ -118,7 +118,7 @@ This action requires an Amazon SNS topic Amazon Resource Name (ARN). Amazon RDS event notification is only available for unencrypted SNS topics. If you specify an encrypted SNS topic, event notifications are not sent for the topic. -.. exampleinclude:: /../../tests/system/providers/amazon/aws/example_rds_event.py +.. exampleinclude:: /../../providers/tests/system/amazon/aws/example_rds_event.py :language: python :dedent: 4 :start-after: [START howto_operator_rds_create_event_subscription] @@ -132,7 +132,7 @@ Unsubscribe from an Amazon RDS event notification To delete an Amazon RDS event subscription you can use :class:`~airflow.providers.amazon.aws.operators.rds.RdsDeleteEventSubscriptionOperator`. -.. exampleinclude:: /../../tests/system/providers/amazon/aws/example_rds_event.py +.. exampleinclude:: /../../providers/tests/system/amazon/aws/example_rds_event.py :language: python :dedent: 4 :start-after: [START howto_operator_rds_delete_event_subscription] @@ -147,7 +147,7 @@ To create an AWS DB instance you can use :class:`~airflow.providers.amazon.aws.operators.rds.RdsCreateDbInstanceOperator`. You can also run this operator in deferrable mode by setting ``deferrable`` param to ``True``.
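A minimal sketch of such a call, assuming placeholder identifiers and sizing; in a real DAG the master password should come from a secrets backend rather than being inlined:

.. code-block:: python

    from airflow.providers.amazon.aws.operators.rds import RdsCreateDbInstanceOperator

    # Identifier, engine, and sizing values are hypothetical.
    create_db_instance = RdsCreateDbInstanceOperator(
        task_id="create_db_instance",
        db_instance_identifier="my-instance",
        db_instance_class="db.t4g.micro",
        engine="postgres",
        rds_kwargs={
            "MasterUsername": "db_admin",
            "MasterUserPassword": "change_me",  # placeholder only
            "AllocatedStorage": 20,
        },
        deferrable=True,  # hand the wait over to the triggerer
    )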
-.. exampleinclude:: /../../tests/system/providers/amazon/aws/example_rds_instance.py +.. exampleinclude:: /../../providers/tests/system/amazon/aws/example_rds_instance.py :language: python :dedent: 4 :start-after: [START howto_operator_rds_create_db_instance] @@ -162,7 +162,7 @@ To delete an AWS DB instance you can use :class:`~airflow.providers.amazon.aws.operators.rds.RdsDeleteDbInstanceOperator`. You can also run this operator in deferrable mode by setting ``deferrable`` param to ``True``. -.. exampleinclude:: /../../tests/system/providers/amazon/aws/example_rds_instance.py +.. exampleinclude:: /../../providers/tests/system/amazon/aws/example_rds_instance.py :language: python :dedent: 4 :start-after: [START howto_operator_rds_delete_db_instance] @@ -176,7 +176,7 @@ Start a database instance or cluster To start an Amazon RDS DB instance or cluster you can use :class:`~airflow.providers.amazon.aws.operators.rds.RdsStartDbOperator`. -.. exampleinclude:: /../../tests/system/providers/amazon/aws/example_rds_instance.py +.. exampleinclude:: /../../providers/tests/system/amazon/aws/example_rds_instance.py :language: python :dedent: 4 :start-after: [START howto_operator_rds_start_db] @@ -191,7 +191,7 @@ Stop a database instance or cluster To stop an Amazon RDS DB instance or cluster you can use :class:`~airflow.providers.amazon.aws.operators.rds.RdsStopDbOperator`. -.. exampleinclude:: /../../tests/system/providers/amazon/aws/example_rds_instance.py +.. exampleinclude:: /../../providers/tests/system/amazon/aws/example_rds_instance.py :language: python :dedent: 4 :start-after: [START howto_operator_rds_stop_db] @@ -209,7 +209,7 @@ To wait for an Amazon RDS instance or cluster to reach a specific status you can use :class:`~airflow.providers.amazon.aws.sensors.rds.RdsDbSensor`. By default, the sensor waits for a database instance to reach the ``available`` state. -.. exampleinclude:: /../../tests/system/providers/amazon/aws/example_rds_instance.py +.. exampleinclude:: /../../providers/tests/system/amazon/aws/example_rds_instance.py :language: python :dedent: 4 :start-after: [START howto_sensor_rds_instance] @@ -225,7 +225,7 @@ To wait for an Amazon RDS snapshot with specific statuses you can use :class:`~airflow.providers.amazon.aws.sensors.rds.RdsSnapshotExistenceSensor`. By default, the sensor waits for the existence of a snapshot with status ``available``. -.. exampleinclude:: /../../tests/system/providers/amazon/aws/example_rds_snapshot.py +.. exampleinclude:: /../../providers/tests/system/amazon/aws/example_rds_snapshot.py :language: python :dedent: 4 :start-after: [START howto_sensor_rds_snapshot_existence] @@ -241,7 +241,7 @@ To wait for an Amazon RDS snapshot export task with specific statuses you can use :class:`~airflow.providers.amazon.aws.sensors.rds.RdsExportTaskExistenceSensor`. By default, the sensor waits for the existence of an export task with status ``available``.
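A minimal sketch of waiting on such an export task; the export task identifier and target status are placeholders:

.. code-block:: python

    from airflow.providers.amazon.aws.sensors.rds import RdsExportTaskExistenceSensor

    # "my-snapshot-export" is a hypothetical export task identifier.
    wait_for_export = RdsExportTaskExistenceSensor(
        task_id="wait_for_export",
        export_task_identifier="my-snapshot-export",
        target_statuses=["complete"],
    )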
-.. exampleinclude:: /../../tests/system/providers/amazon/aws/example_rds_export.py +.. exampleinclude:: /../../providers/tests/system/amazon/aws/example_rds_export.py :language: python :dedent: 4 :start-after: [START howto_sensor_rds_export_task_existence] diff --git a/docs/apache-airflow-providers-amazon/operators/redshift/redshift_cluster.rst b/docs/apache-airflow-providers-amazon/operators/redshift/redshift_cluster.rst index a50e56a760bb..bc710b56eecc 100644 --- a/docs/apache-airflow-providers-amazon/operators/redshift/redshift_cluster.rst +++ b/docs/apache-airflow-providers-amazon/operators/redshift/redshift_cluster.rst @@ -40,7 +40,7 @@ Create an Amazon Redshift cluster To create an Amazon Redshift Cluster with the specified parameters you can use :class:`~airflow.providers.amazon.aws.operators.redshift_cluster.RedshiftCreateClusterOperator`. -.. exampleinclude:: /../../tests/system/providers/amazon/aws/example_redshift.py +.. exampleinclude:: /../../providers/tests/system/amazon/aws/example_redshift.py :language: python :dedent: 4 :start-after: [START howto_operator_redshift_cluster] @@ -56,7 +56,7 @@ To resume a 'paused' Amazon Redshift cluster you can use You can also run this operator in deferrable mode by setting ``deferrable`` param to ``True``. This will ensure that the task is deferred from the Airflow worker slot and polling for the task status happens on the triggerer. -.. exampleinclude:: /../../tests/system/providers/amazon/aws/example_redshift.py +.. exampleinclude:: /../../providers/tests/system/amazon/aws/example_redshift.py :language: python :dedent: 4 :start-after: [START howto_operator_redshift_resume_cluster] @@ -71,7 +71,7 @@ To pause an ``available`` Amazon Redshift cluster you can use :class:`RedshiftPauseClusterOperator `. You can also run this operator in deferrable mode by setting ``deferrable`` param to ``True`` -.. exampleinclude:: /../../tests/system/providers/amazon/aws/example_redshift.py +.. exampleinclude:: /../../providers/tests/system/amazon/aws/example_redshift.py :language: python :dedent: 4 :start-after: [START howto_operator_redshift_pause_cluster] @@ -85,7 +85,7 @@ Create an Amazon Redshift cluster snapshot To create an Amazon Redshift cluster snapshot you can use :class:`RedshiftCreateClusterSnapshotOperator ` -.. exampleinclude:: /../../tests/system/providers/amazon/aws/example_redshift.py +.. exampleinclude:: /../../providers/tests/system/amazon/aws/example_redshift.py :language: python :dedent: 4 :start-after: [START howto_operator_redshift_create_cluster_snapshot] @@ -99,7 +99,7 @@ Delete an Amazon Redshift cluster snapshot To delete an Amazon Redshift cluster snapshot you can use :class:`RedshiftDeleteClusterSnapshotOperator ` -.. exampleinclude:: /../../tests/system/providers/amazon/aws/example_redshift.py +.. exampleinclude:: /../../providers/tests/system/amazon/aws/example_redshift.py :language: python :dedent: 4 :start-after: [START howto_operator_redshift_delete_cluster_snapshot] @@ -114,7 +114,7 @@ To delete an Amazon Redshift cluster you can use :class:`RedshiftDeleteClusterOperator `. You can also run this operator in deferrable mode by setting ``deferrable`` param to ``True`` -.. exampleinclude:: /../../tests/system/providers/amazon/aws/example_redshift.py +.. 
exampleinclude:: /../../providers/tests/system/amazon/aws/example_redshift.py :language: python :dedent: 4 :start-after: [START howto_operator_redshift_delete_cluster] @@ -131,7 +131,7 @@ Wait on an Amazon Redshift cluster state To check the state of an Amazon Redshift Cluster until it reaches the target state or another terminal state you can use :class:`~airflow.providers.amazon.aws.sensors.redshift_cluster.RedshiftClusterSensor`. -.. exampleinclude:: /../../tests/system/providers/amazon/aws/example_redshift.py +.. exampleinclude:: /../../providers/tests/system/amazon/aws/example_redshift.py :language: python :dedent: 4 :start-after: [START howto_sensor_redshift_cluster] diff --git a/docs/apache-airflow-providers-amazon/operators/redshift/redshift_data.rst b/docs/apache-airflow-providers-amazon/operators/redshift/redshift_data.rst index 2638e1732cd6..762eced74fa6 100644 --- a/docs/apache-airflow-providers-amazon/operators/redshift/redshift_data.rst +++ b/docs/apache-airflow-providers-amazon/operators/redshift/redshift_data.rst @@ -48,7 +48,7 @@ statements against an Amazon Redshift cluster. This differs from ``RedshiftSQLOperator`` in that it allows users to query and retrieve data via the AWS API and avoid the necessity of a Postgres connection. -.. exampleinclude:: /../../tests/system/providers/amazon/aws/example_redshift.py +.. exampleinclude:: /../../providers/tests/system/amazon/aws/example_redshift.py :language: python :dedent: 4 :start-after: [START howto_operator_redshift_data] @@ -60,7 +60,7 @@ Reuse a session when executing multiple statements Specify the ``session_keep_alive_seconds`` parameter on an upstream task. In a downstream task, get the session ID from the XCom and pass it to the ``session_id`` parameter. This is useful when you work with temporary tables. -.. exampleinclude:: /../../tests/system/providers/amazon/aws/example_redshift.py +.. exampleinclude:: /../../providers/tests/system/amazon/aws/example_redshift.py :language: python :dedent: 4 :start-after: [START howto_operator_redshift_data_session_reuse] diff --git a/docs/apache-airflow-providers-amazon/operators/redshift/redshift_sql.rst b/docs/apache-airflow-providers-amazon/operators/redshift/redshift_sql.rst index 86444d664f3a..5f4f28b38c56 100644 --- a/docs/apache-airflow-providers-amazon/operators/redshift/redshift_sql.rst +++ b/docs/apache-airflow-providers-amazon/operators/redshift/redshift_sql.rst @@ -40,7 +40,7 @@ The generic ``SQLExecuteQueryOperator`` can be used to execute SQL queries again To execute a SQL query against an Amazon Redshift cluster without using a Redshift connection, please check ``RedshiftDataOperator``. -.. exampleinclude:: /../../tests/system/providers/common/sql/example_sql_execute_query.py +.. exampleinclude:: /../../providers/tests/system/common/sql/example_sql_execute_query.py :language: python :dedent: 4 :start-after: [START howto_operator_sql_execute_query] diff --git a/docs/apache-airflow-providers-amazon/operators/s3/glacier.rst b/docs/apache-airflow-providers-amazon/operators/s3/glacier.rst index c85e7ac294c7..ab5b6acbe704 100644 --- a/docs/apache-airflow-providers-amazon/operators/s3/glacier.rst +++ b/docs/apache-airflow-providers-amazon/operators/s3/glacier.rst @@ -45,7 +45,7 @@ use :class:`~airflow.providers.amazon.aws.transfers.glacier_to_gcs.GlacierCreate This Operator returns a dictionary of information related to the initiated job such as *jobId*, which is required for subsequent tasks. -.. 
exampleinclude:: /../../tests/system/providers/amazon/aws/example_glacier_to_gcs.py +.. exampleinclude:: /../../providers/tests/system/amazon/aws/example_glacier_to_gcs.py :language: python :dedent: 4 :start-after: [START howto_operator_glacier_create_job] @@ -59,7 +59,7 @@ Upload archive to an Amazon Glacier To add an archive to an Amazon S3 Glacier vault use :class:`~airflow.providers.amazon.aws.transfers.glacier_to_gcs.GlacierUploadArchiveOperator` -.. exampleinclude:: /../../tests/system/providers/amazon/aws/example_glacier_to_gcs.py +.. exampleinclude:: /../../providers/tests/system/amazon/aws/example_glacier_to_gcs.py :language: python :dedent: 4 :start-after: [START howto_operator_glacier_upload_archive] @@ -76,7 +76,7 @@ Wait on an Amazon Glacier job state To wait on the status of an Amazon Glacier Job to reach a terminal state use :class:`~airflow.providers.amazon.aws.sensors.glacier.GlacierJobOperationSensor` -.. exampleinclude:: /../../tests/system/providers/amazon/aws/example_glacier_to_gcs.py +.. exampleinclude:: /../../providers/tests/system/amazon/aws/example_glacier_to_gcs.py :language: python :dedent: 4 :start-after: [START howto_sensor_glacier_job_operation] diff --git a/docs/apache-airflow-providers-amazon/operators/s3/s3.rst b/docs/apache-airflow-providers-amazon/operators/s3/s3.rst index 41e5c7149bb1..ff48c1d5d15b 100644 --- a/docs/apache-airflow-providers-amazon/operators/s3/s3.rst +++ b/docs/apache-airflow-providers-amazon/operators/s3/s3.rst @@ -38,7 +38,7 @@ Create an Amazon S3 bucket To create an Amazon S3 bucket you can use :class:`~airflow.providers.amazon.aws.operators.s3.S3CreateBucketOperator`. -.. exampleinclude:: /../../tests/system/providers/amazon/aws/example_s3.py +.. exampleinclude:: /../../providers/tests/system/amazon/aws/example_s3.py :language: python :dedent: 4 :start-after: [START howto_operator_s3_create_bucket] @@ -52,7 +52,7 @@ Delete an Amazon S3 bucket To delete an Amazon S3 bucket you can use :class:`~airflow.providers.amazon.aws.operators.s3.S3DeleteBucketOperator`. -.. exampleinclude:: /../../tests/system/providers/amazon/aws/example_s3.py +.. exampleinclude:: /../../providers/tests/system/amazon/aws/example_s3.py :language: python :dedent: 4 :start-after: [START howto_operator_s3_delete_bucket] @@ -66,7 +66,7 @@ Set the tags for an Amazon S3 bucket To set the tags for an Amazon S3 bucket you can use :class:`~airflow.providers.amazon.aws.operators.s3.S3PutBucketTaggingOperator`. -.. exampleinclude:: /../../tests/system/providers/amazon/aws/example_s3.py +.. exampleinclude:: /../../providers/tests/system/amazon/aws/example_s3.py :language: python :dedent: 4 :start-after: [START howto_operator_s3_put_bucket_tagging] @@ -80,7 +80,7 @@ Get the tag of an Amazon S3 bucket To get the tag set associated with an Amazon S3 bucket you can use :class:`~airflow.providers.amazon.aws.operators.s3.S3GetBucketTaggingOperator`. -.. exampleinclude:: /../../tests/system/providers/amazon/aws/example_s3.py +.. exampleinclude:: /../../providers/tests/system/amazon/aws/example_s3.py :language: python :dedent: 4 :start-after: [START howto_operator_s3_get_bucket_tagging] @@ -94,7 +94,7 @@ Delete the tags of an Amazon S3 bucket To delete the tags of an Amazon S3 bucket you can use :class:`~airflow.providers.amazon.aws.operators.s3.S3DeleteBucketTaggingOperator`. -.. exampleinclude:: /../../tests/system/providers/amazon/aws/example_s3.py +.. 
exampleinclude:: /../../providers/tests/system/amazon/aws/example_s3.py :language: python :dedent: 4 :start-after: [START howto_operator_s3_delete_bucket_tagging] @@ -108,7 +108,7 @@ Create an Amazon S3 object To create a new (or replace) Amazon S3 object you can use :class:`~airflow.providers.amazon.aws.operators.s3.S3CreateObjectOperator`. -.. exampleinclude:: /../../tests/system/providers/amazon/aws/example_s3.py +.. exampleinclude:: /../../providers/tests/system/amazon/aws/example_s3.py :language: python :dedent: 4 :start-after: [START howto_operator_s3_create_object] @@ -123,7 +123,7 @@ To copy an Amazon S3 object from one bucket to another you can use :class:`~airflow.providers.amazon.aws.operators.s3.S3CopyObjectOperator`. The Amazon S3 connection used here needs to have access to both source and destination bucket/key. -.. exampleinclude:: /../../tests/system/providers/amazon/aws/example_s3.py +.. exampleinclude:: /../../providers/tests/system/amazon/aws/example_s3.py :language: python :dedent: 4 :start-after: [START howto_operator_s3_copy_object] @@ -137,7 +137,7 @@ Delete Amazon S3 objects To delete one or multiple Amazon S3 objects you can use :class:`~airflow.providers.amazon.aws.operators.s3.S3DeleteObjectsOperator`. -.. exampleinclude:: /../../tests/system/providers/amazon/aws/example_s3.py +.. exampleinclude:: /../../providers/tests/system/amazon/aws/example_s3.py :language: python :dedent: 4 :start-after: [START howto_operator_s3_delete_objects] @@ -153,7 +153,7 @@ To transform the data from one Amazon S3 object and save it to another object yo You can also apply an optional `Amazon S3 Select expression `_ to select the data you want to retrieve from ``source_s3_key`` using ``select_expression``. -.. exampleinclude:: /../../tests/system/providers/amazon/aws/example_s3.py +.. exampleinclude:: /../../providers/tests/system/amazon/aws/example_s3.py :language: python :dedent: 4 :start-after: [START howto_operator_s3_file_transform] @@ -169,7 +169,7 @@ To list all Amazon S3 prefixes within an Amazon S3 bucket you can use See `here `__ for more information about Amazon S3 prefixes. -.. exampleinclude:: /../../tests/system/providers/amazon/aws/example_s3.py +.. exampleinclude:: /../../providers/tests/system/amazon/aws/example_s3.py :language: python :dedent: 4 :start-after: [START howto_operator_s3_list_prefixes] @@ -184,7 +184,7 @@ To list all Amazon S3 objects within an Amazon S3 bucket you can use :class:`~airflow.providers.amazon.aws.operators.s3.S3ListOperator`. You can specify a ``prefix`` to filter the objects whose name begins with such prefix. -.. exampleinclude:: /../../tests/system/providers/amazon/aws/example_s3.py +.. exampleinclude:: /../../providers/tests/system/amazon/aws/example_s3.py :language: python :dedent: 4 :start-after: [START howto_operator_s3_list] @@ -208,7 +208,7 @@ Please keep in mind, especially when used to check a large volume of keys, that To check one file: -.. exampleinclude:: /../../tests/system/providers/amazon/aws/example_s3.py +.. exampleinclude:: /../../providers/tests/system/amazon/aws/example_s3.py :language: python :dedent: 4 :start-after: [START howto_sensor_s3_key_single_key] @@ -216,7 +216,7 @@ To check one file: To check multiple files: -.. exampleinclude:: /../../tests/system/providers/amazon/aws/example_s3.py +.. 
exampleinclude:: /../../providers/tests/system/amazon/aws/example_s3.py :language: python :dedent: 4 :start-after: [START howto_sensor_s3_key_multiple_keys] @@ -224,7 +224,7 @@ To check multiple files: To check a file with regular expression: -.. exampleinclude:: /../../tests/system/providers/amazon/aws/example_s3.py +.. exampleinclude:: /../../providers/tests/system/amazon/aws/example_s3.py :language: python :dedent: 4 :start-after: [START howto_sensor_s3_key_regex] @@ -244,13 +244,13 @@ multiple files can match one key. The list of matched S3 object attributes conta [{"Size": int}] -.. exampleinclude:: /../../tests/system/providers/amazon/aws/example_s3.py +.. exampleinclude:: /../../providers/tests/system/amazon/aws/example_s3.py :language: python :dedent: 4 :start-after: [START howto_sensor_s3_key_function_definition] :end-before: [END howto_sensor_s3_key_function_definition] -.. exampleinclude:: /../../tests/system/providers/amazon/aws/example_s3.py +.. exampleinclude:: /../../providers/tests/system/amazon/aws/example_s3.py :language: python :dedent: 4 :start-after: [START howto_sensor_s3_key_function] @@ -262,7 +262,7 @@ the triggerer asynchronously. Note that this will need triggerer to be available To check one file: -.. exampleinclude:: /../../tests/system/providers/amazon/aws/example_s3.py +.. exampleinclude:: /../../providers/tests/system/amazon/aws/example_s3.py :language: python :dedent: 4 :start-after: [START howto_sensor_s3_key_single_key_deferrable] @@ -270,7 +270,7 @@ To check one file: To check multiple files: -.. exampleinclude:: /../../tests/system/providers/amazon/aws/example_s3.py +.. exampleinclude:: /../../providers/tests/system/amazon/aws/example_s3.py :language: python :dedent: 4 :start-after: [START howto_sensor_s3_key_multiple_keys_deferrable] @@ -278,7 +278,7 @@ To check multiple files: To check a file with regular expression: -.. exampleinclude:: /../../tests/system/providers/amazon/aws/example_s3.py +.. exampleinclude:: /../../providers/tests/system/amazon/aws/example_s3.py :language: python :dedent: 4 :start-after: [START howto_sensor_s3_key_regex_deferrable] @@ -295,7 +295,7 @@ the inactivity period has passed with no increase in the number of objects you c Note, this sensor will not behave correctly in reschedule mode, as the state of the listed objects in the Amazon S3 bucket will be lost between rescheduled invocations. -.. exampleinclude:: /../../tests/system/providers/amazon/aws/example_s3.py +.. exampleinclude:: /../../providers/tests/system/amazon/aws/example_s3.py :language: python :dedent: 4 :start-after: [START howto_sensor_s3_keys_unchanged] diff --git a/docs/apache-airflow-providers-amazon/operators/sagemaker.rst b/docs/apache-airflow-providers-amazon/operators/sagemaker.rst index c77b689693b0..c2f433267c30 100644 --- a/docs/apache-airflow-providers-amazon/operators/sagemaker.rst +++ b/docs/apache-airflow-providers-amazon/operators/sagemaker.rst @@ -42,7 +42,7 @@ Create an Amazon SageMaker processing job To create an Amazon Sagemaker processing job to sanitize your dataset you can use :class:`~airflow.providers.amazon.aws.operators.sagemaker.SageMakerProcessingOperator`. -.. exampleinclude:: /../../tests/system/providers/amazon/aws/example_sagemaker.py +.. 
exampleinclude:: /../../providers/tests/system/amazon/aws/example_sagemaker.py :language: python :dedent: 4 :start-after: [START howto_operator_sagemaker_processing] @@ -56,7 +56,7 @@ Create an Amazon SageMaker training job To create an Amazon Sagemaker training job you can use :class:`~airflow.providers.amazon.aws.operators.sagemaker.SageMakerTrainingOperator`. -.. exampleinclude:: /../../tests/system/providers/amazon/aws/example_sagemaker.py +.. exampleinclude:: /../../providers/tests/system/amazon/aws/example_sagemaker.py :language: python :dedent: 4 :start-after: [START howto_operator_sagemaker_training] @@ -70,7 +70,7 @@ Create an Amazon SageMaker model To create an Amazon Sagemaker model you can use :class:`~airflow.providers.amazon.aws.operators.sagemaker.SageMakerModelOperator`. -.. exampleinclude:: /../../tests/system/providers/amazon/aws/example_sagemaker.py +.. exampleinclude:: /../../providers/tests/system/amazon/aws/example_sagemaker.py :language: python :dedent: 4 :start-after: [START howto_operator_sagemaker_model] @@ -84,7 +84,7 @@ Start a hyperparameter tuning job To start a hyperparameter tuning job for an Amazon Sagemaker model you can use :class:`~airflow.providers.amazon.aws.operators.sagemaker.SageMakerTuningOperator`. -.. exampleinclude:: /../../tests/system/providers/amazon/aws/example_sagemaker.py +.. exampleinclude:: /../../providers/tests/system/amazon/aws/example_sagemaker.py :language: python :dedent: 4 :start-after: [START howto_operator_sagemaker_tuning] @@ -98,7 +98,7 @@ Delete an Amazon SageMaker model To delete an Amazon Sagemaker model you can use :class:`~airflow.providers.amazon.aws.operators.sagemaker.SageMakerDeleteModelOperator`. -.. exampleinclude:: /../../tests/system/providers/amazon/aws/example_sagemaker.py +.. exampleinclude:: /../../providers/tests/system/amazon/aws/example_sagemaker.py :language: python :dedent: 4 :start-after: [START howto_operator_sagemaker_delete_model] @@ -112,7 +112,7 @@ Create an Amazon SageMaker transform job To create an Amazon Sagemaker transform job you can use :class:`~airflow.providers.amazon.aws.operators.sagemaker.SageMakerTransformOperator`. -.. exampleinclude:: /../../tests/system/providers/amazon/aws/example_sagemaker.py +.. exampleinclude:: /../../providers/tests/system/amazon/aws/example_sagemaker.py :language: python :dedent: 4 :start-after: [START howto_operator_sagemaker_transform] @@ -126,7 +126,7 @@ Create an Amazon SageMaker endpoint config job To create an Amazon Sagemaker endpoint config job you can use :class:`~airflow.providers.amazon.aws.operators.sagemaker.SageMakerEndpointConfigOperator`. -.. exampleinclude:: /../../tests/system/providers/amazon/aws/example_sagemaker_endpoint.py +.. exampleinclude:: /../../providers/tests/system/amazon/aws/example_sagemaker_endpoint.py :language: python :dedent: 4 :start-after: [START howto_operator_sagemaker_endpoint_config] @@ -140,7 +140,7 @@ Create an Amazon SageMaker endpoint job To create an Amazon Sagemaker endpoint you can use :class:`~airflow.providers.amazon.aws.operators.sagemaker.SageMakerEndpointOperator`. -.. exampleinclude:: /../../tests/system/providers/amazon/aws/example_sagemaker_endpoint.py +.. 
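As a sketch of the SageMaker operators above: a training job followed by model cleanup. The ``config`` dict mirrors the corresponding boto3 ``create_training_job`` payload; the fields shown here are a trimmed, hypothetical subset, and the ARN and names are placeholders.

.. code-block:: python

    from airflow.providers.amazon.aws.operators.sagemaker import (
        SageMakerDeleteModelOperator,
        SageMakerTrainingOperator,
    )

    train_model = SageMakerTrainingOperator(
        task_id="train_model",
        config={
            "TrainingJobName": "my-training-job",  # placeholder
            "RoleArn": "arn:aws:iam::123456789012:role/sagemaker-role",
            "AlgorithmSpecification": {
                "TrainingImage": "123456789012.dkr.ecr.us-east-1.amazonaws.com/my-image:latest",
                "TrainingInputMode": "File",
            },
            # InputDataConfig, OutputDataConfig, ResourceConfig and
            # StoppingCondition are required by the API but elided here.
        },
        wait_for_completion=True,
    )

    delete_model = SageMakerDeleteModelOperator(
        task_id="delete_model",
        config={"ModelName": "my-model"},  # placeholder model name
    )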
exampleinclude:: /../../providers/tests/system/amazon/aws/example_sagemaker_endpoint.py :language: python :dedent: 4 :start-after: [START howto_operator_sagemaker_endpoint] @@ -154,7 +154,7 @@ Start an Amazon SageMaker pipeline execution To trigger an execution run for an already-defined Amazon Sagemaker pipeline, you can use :class:`~airflow.providers.amazon.aws.operators.sagemaker.SageMakerStartPipelineOperator`. -.. exampleinclude:: /../../tests/system/providers/amazon/aws/example_sagemaker_pipeline.py +.. exampleinclude:: /../../providers/tests/system/amazon/aws/example_sagemaker_pipeline.py :language: python :dedent: 4 :start-after: [START howto_operator_sagemaker_start_pipeline] @@ -168,7 +168,7 @@ Stop an Amazon SageMaker pipeline execution To stop an Amazon Sagemaker pipeline execution that is currently running, you can use :class:`~airflow.providers.amazon.aws.operators.sagemaker.SageMakerStopPipelineOperator`. -.. exampleinclude:: /../../tests/system/providers/amazon/aws/example_sagemaker_pipeline.py +.. exampleinclude:: /../../providers/tests/system/amazon/aws/example_sagemaker_pipeline.py :language: python :dedent: 4 :start-after: [START howto_operator_sagemaker_stop_pipeline] @@ -186,7 +186,7 @@ It consists of an inference specification that defines the inference image to us A model package group is a collection of model packages. You can use this operator to add a new version and model package to the group for every DAG run. -.. exampleinclude:: /../../tests/system/providers/amazon/aws/example_sagemaker.py +.. exampleinclude:: /../../providers/tests/system/amazon/aws/example_sagemaker.py :language: python :dedent: 4 :start-after: [START howto_operator_sagemaker_register] @@ -202,7 +202,7 @@ An AutoML experiment will take some input data in CSV and the column it should l and train models on it without needing human supervision. The output is placed in an S3 bucket, and automatically deployed if configured for it. -.. exampleinclude:: /../../tests/system/providers/amazon/aws/example_sagemaker.py +.. exampleinclude:: /../../providers/tests/system/amazon/aws/example_sagemaker.py :language: python :dedent: 4 :start-after: [START howto_operator_sagemaker_auto_ml] @@ -216,7 +216,7 @@ Create an Experiment for later use To create a SageMaker experiment, you can use :class:`~airflow.providers.amazon.aws.operators.sagemaker.SageMakerCreateExperimentOperator`. This creates an experiment so that it's ready to be associated with processing, training and transform jobs. -.. exampleinclude:: /../../tests/system/providers/amazon/aws/example_sagemaker.py +.. exampleinclude:: /../../providers/tests/system/amazon/aws/example_sagemaker.py :language: python :dedent: 4 :start-after: [START howto_operator_sagemaker_experiment] @@ -230,7 +230,7 @@ Create a SageMaker Notebook Instance To create a SageMaker Notebook Instance, you can use :class:`~airflow.providers.amazon.aws.operators.sagemaker.SageMakerCreateNotebookOperator`. This creates a SageMaker Notebook Instance ready to run Jupyter notebooks. -.. exampleinclude:: /../../tests/system/providers/amazon/aws/example_sagemaker_notebook.py +.. exampleinclude:: /../../providers/tests/system/amazon/aws/example_sagemaker_notebook.py :language: python :dedent: 4 :start-after: [START howto_operator_sagemaker_notebook_create] @@ -244,7 +244,7 @@ Stop a SageMaker Notebook Instance To terminate a SageMaker Notebook Instance, you can use :class:`~airflow.providers.amazon.aws.operators.sagemaker.SageMakerStopNotebookOperator`.
This terminates the ML compute instance and disconnects the ML storage volume. -.. exampleinclude:: /../../tests/system/providers/amazon/aws/example_sagemaker_notebook.py +.. exampleinclude:: /../../providers/tests/system/amazon/aws/example_sagemaker_notebook.py :language: python :dedent: 4 :start-after: [START howto_operator_sagemaker_notebook_stop] @@ -258,7 +258,7 @@ Start a SageMaker Notebook Instance To launch a SageMaker Notebook Instance and re-attach an ML storage volume, you can use :class:`~airflow.providers.amazon.aws.operators.sagemaker.SageMakerStartNotebookOperator`. This launches a new ML compute instance with the latest version of the libraries and attaches your ML storage volume. -.. exampleinclude:: /../../tests/system/providers/amazon/aws/example_sagemaker_notebook.py +.. exampleinclude:: /../../providers/tests/system/amazon/aws/example_sagemaker_notebook.py :language: python :dedent: 4 :start-after: [START howto_operator_sagemaker_notebook_start] @@ -273,7 +273,7 @@ Delete a SageMaker Notebook Instance To delete a SageMaker Notebook Instance, you can use :class:`~airflow.providers.amazon.aws.operators.sagemaker.SageMakerDeleteNotebookOperator`. This terminates the instance and deletes the ML storage volume and network interface associated with the instance. The instance must be stopped before it can be deleted. -.. exampleinclude:: /../../tests/system/providers/amazon/aws/example_sagemaker_notebook.py +.. exampleinclude:: /../../providers/tests/system/amazon/aws/example_sagemaker_notebook.py :language: python :dedent: 4 :start-after: [START howto_operator_sagemaker_notebook_delete] @@ -290,7 +290,7 @@ Wait on an Amazon SageMaker training job state To check the state of an Amazon Sagemaker training job until it reaches a terminal state you can use :class:`~airflow.providers.amazon.aws.sensors.sagemaker.SageMakerTrainingSensor`. -.. exampleinclude:: /../../tests/system/providers/amazon/aws/example_sagemaker.py +.. exampleinclude:: /../../providers/tests/system/amazon/aws/example_sagemaker.py :language: python :dedent: 4 :start-after: [START howto_sensor_sagemaker_training] @@ -304,7 +304,7 @@ Wait on an Amazon SageMaker transform job state To check the state of an Amazon Sagemaker transform job until it reaches a terminal state you can use :class:`~airflow.providers.amazon.aws.sensors.sagemaker.SageMakerTransformSensor`. -.. exampleinclude:: /../../tests/system/providers/amazon/aws/example_sagemaker.py +.. exampleinclude:: /../../providers/tests/system/amazon/aws/example_sagemaker.py :language: python :dedent: 4 :start-after: [START howto_sensor_sagemaker_transform] @@ -318,7 +318,7 @@ Wait on an Amazon SageMaker tuning job state To check the state of an Amazon Sagemaker hyperparameter tuning job until it reaches a terminal state you can use :class:`~airflow.providers.amazon.aws.sensors.sagemaker.SageMakerTuningSensor`. -.. exampleinclude:: /../../tests/system/providers/amazon/aws/example_sagemaker.py +.. exampleinclude:: /../../providers/tests/system/amazon/aws/example_sagemaker.py :language: python :dedent: 4 :start-after: [START howto_sensor_sagemaker_tuning] @@ -332,7 +332,7 @@ Wait on an Amazon SageMaker endpoint state To check the state of an Amazon Sagemaker endpoint until it reaches a terminal state you can use :class:`~airflow.providers.amazon.aws.sensors.sagemaker.SageMakerEndpointSensor`. -.. exampleinclude:: /../../tests/system/providers/amazon/aws/example_sagemaker_endpoint.py +..
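A minimal, hypothetical sketch of the SageMaker sensors discussed above; the job names are placeholders for jobs started earlier in the same DAG.

.. code-block:: python

    from airflow.providers.amazon.aws.sensors.sagemaker import (
        SageMakerTrainingSensor,
        SageMakerTuningSensor,
    )

    # Block until the training job reaches a terminal state.
    await_training = SageMakerTrainingSensor(
        task_id="await_training",
        job_name="my-training-job",  # placeholder
    )

    # Likewise for a hyperparameter tuning job.
    await_tuning = SageMakerTuningSensor(
        task_id="await_tuning",
        job_name="my-tuning-job",  # placeholder
    )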
exampleinclude:: /../../providers/tests/system/amazon/aws/example_sagemaker_endpoint.py :language: python :dedent: 4 :start-after: [START howto_sensor_sagemaker_endpoint] @@ -346,7 +346,7 @@ Wait on an Amazon SageMaker pipeline execution state To check the state of an Amazon Sagemaker pipeline execution until it reaches a terminal state you can use :class:`~airflow.providers.amazon.aws.sensors.sagemaker.SageMakerPipelineSensor`. -.. exampleinclude:: /../../tests/system/providers/amazon/aws/example_sagemaker_pipeline.py +.. exampleinclude:: /../../providers/tests/system/amazon/aws/example_sagemaker_pipeline.py :language: python :dedent: 4 :start-after: [START howto_sensor_sagemaker_pipeline] @@ -360,7 +360,7 @@ Wait on an Amazon SageMaker AutoML experiment state To check the state of an Amazon Sagemaker AutoML job until it reaches a terminal state you can use :class:`~airflow.providers.amazon.aws.sensors.sagemaker.SageMakerAutoMLSensor`. -.. exampleinclude:: /../../tests/system/providers/amazon/aws/example_sagemaker.py +.. exampleinclude:: /../../providers/tests/system/amazon/aws/example_sagemaker.py :language: python :dedent: 4 :start-after: [START howto_operator_sagemaker_auto_ml] diff --git a/docs/apache-airflow-providers-amazon/operators/sns.rst b/docs/apache-airflow-providers-amazon/operators/sns.rst index e589e38f89b0..f4eb699e5280 100644 --- a/docs/apache-airflow-providers-amazon/operators/sns.rst +++ b/docs/apache-airflow-providers-amazon/operators/sns.rst @@ -48,7 +48,7 @@ Publish a message to an existing SNS topic To publish a message to an Amazon SNS Topic you can use :class:`~airflow.providers.amazon.aws.operators.sns.SnsPublishOperator`. -.. exampleinclude:: /../../tests/system/providers/amazon/aws/example_sns.py +.. exampleinclude:: /../../providers/tests/system/amazon/aws/example_sns.py :language: python :dedent: 4 :start-after: [START howto_operator_sns_publish_operator] diff --git a/docs/apache-airflow-providers-amazon/operators/sqs.rst b/docs/apache-airflow-providers-amazon/operators/sqs.rst index 77fb38e2d856..3eb0087079c3 100644 --- a/docs/apache-airflow-providers-amazon/operators/sqs.rst +++ b/docs/apache-airflow-providers-amazon/operators/sqs.rst @@ -50,7 +50,7 @@ To publish a message to an Amazon SQS queue you can use the In the following example, the task ``publish_to_queue`` publishes a message containing the task instance and the execution date to a queue with a default name of ``Airflow-Example-Queue``. -.. exampleinclude:: /../../tests/system/providers/amazon/aws/example_sqs.py +.. exampleinclude:: /../../providers/tests/system/amazon/aws/example_sqs.py :language: python :dedent: 4 :start-after: [START howto_operator_sqs] @@ -68,7 +68,7 @@ To read messages from an Amazon SQS queue until exhausted use the :class:`~airflow.providers.amazon.aws.sensors.sqs.SqsSensor` This sensor can also be run in deferrable mode by setting ``deferrable`` param to ``True``. -.. exampleinclude:: /../../tests/system/providers/amazon/aws/example_sqs.py +.. 
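For the SNS publish operator above, a minimal sketch; the topic ARN is a placeholder, and the message shows that the field is Jinja-templated.

.. code-block:: python

    from airflow.providers.amazon.aws.operators.sns import SnsPublishOperator

    publish_message = SnsPublishOperator(
        task_id="publish_message",
        target_arn="arn:aws:sns:us-east-1:123456789012:my-topic",  # placeholder ARN
        subject="Airflow notification",
        message="DAG {{ dag.dag_id }} finished for {{ ds }}.",  # templated
    )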
exampleinclude:: /../../providers/tests/system/amazon/aws/example_sqs.py :language: python :dedent: 4 :start-after: [START howto_sensor_sqs] diff --git a/docs/apache-airflow-providers-amazon/operators/step_functions.rst b/docs/apache-airflow-providers-amazon/operators/step_functions.rst index 5ab5d19e6829..a83eafa6ae7a 100644 --- a/docs/apache-airflow-providers-amazon/operators/step_functions.rst +++ b/docs/apache-airflow-providers-amazon/operators/step_functions.rst @@ -45,7 +45,7 @@ To start a new AWS Step Functions state machine execution you can use :class:`~airflow.providers.amazon.aws.operators.step_function.StepFunctionStartExecutionOperator`. You can also run this operator in deferrable mode by setting ``deferrable`` param to ``True``. -.. exampleinclude:: /../../tests/system/providers/amazon/aws/example_step_functions.py +.. exampleinclude:: /../../providers/tests/system/amazon/aws/example_step_functions.py :language: python :dedent: 4 :start-after: [START howto_operator_step_function_start_execution] @@ -59,7 +59,7 @@ Get an AWS Step Functions execution output To fetch the output from an AWS Step Function state machine execution you can use :class:`~airflow.providers.amazon.aws.operators.step_function.StepFunctionGetExecutionOutputOperator`. -.. exampleinclude:: /../../tests/system/providers/amazon/aws/example_step_functions.py +.. exampleinclude:: /../../providers/tests/system/amazon/aws/example_step_functions.py :language: python :dedent: 4 :start-after: [START howto_operator_step_function_get_execution_output] @@ -76,7 +76,7 @@ Wait on an AWS Step Functions state machine execution state To wait on the state of an AWS Step Function state machine execution until it reaches a terminal state you can use :class:`~airflow.providers.amazon.aws.sensors.step_function.StepFunctionExecutionSensor`. -.. exampleinclude:: /../../tests/system/providers/amazon/aws/example_step_functions.py +.. exampleinclude:: /../../providers/tests/system/amazon/aws/example_step_functions.py :language: python :dedent: 4 :start-after: [START howto_sensor_step_function_execution] diff --git a/docs/apache-airflow-providers-amazon/transfer/azure_blob_to_s3.rst b/docs/apache-airflow-providers-amazon/transfer/azure_blob_to_s3.rst index a3b9df5eb1ee..cf68049f9516 100644 --- a/docs/apache-airflow-providers-amazon/transfer/azure_blob_to_s3.rst +++ b/docs/apache-airflow-providers-amazon/transfer/azure_blob_to_s3.rst @@ -39,7 +39,7 @@ To copy data from an Azure Blob Storage container to an Amazon S3 bucket you can Example usage: -.. exampleinclude:: /../../tests/system/providers/amazon/aws/example_azure_blob_to_s3.py +.. exampleinclude:: /../../providers/tests/system/amazon/aws/example_azure_blob_to_s3.py :language: python :dedent: 4 :start-after: [START howto_transfer_azure_blob_to_s3] diff --git a/docs/apache-airflow-providers-amazon/transfer/dynamodb_to_s3.rst b/docs/apache-airflow-providers-amazon/transfer/dynamodb_to_s3.rst index 74c4b78cef49..c8d18f43c3c2 100644 --- a/docs/apache-airflow-providers-amazon/transfer/dynamodb_to_s3.rst +++ b/docs/apache-airflow-providers-amazon/transfer/dynamodb_to_s3.rst @@ -48,7 +48,7 @@ To get more information visit: Example usage: -.. exampleinclude:: /../../tests/system/providers/amazon/aws/example_dynamodb_to_s3.py +.. 
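A sketch of the Step Functions start/wait/fetch pattern described above. The state machine ARN is a placeholder; whether ``state_machine_input`` is given as a dict or a pre-serialized JSON string is an assumption that may vary by provider version, and the start operator is assumed to push the execution ARN to XCom (exposed via ``.output``).

.. code-block:: python

    from airflow.providers.amazon.aws.operators.step_function import (
        StepFunctionGetExecutionOutputOperator,
        StepFunctionStartExecutionOperator,
    )
    from airflow.providers.amazon.aws.sensors.step_function import (
        StepFunctionExecutionSensor,
    )

    start_execution = StepFunctionStartExecutionOperator(
        task_id="start_execution",
        state_machine_arn="arn:aws:states:us-east-1:123456789012:stateMachine:my-sm",
        state_machine_input={"run_date": "{{ ds }}"},  # assumption: dict accepted
    )

    wait_for_execution = StepFunctionExecutionSensor(
        task_id="wait_for_execution",
        execution_arn=start_execution.output,  # execution ARN from XCom
    )

    get_output = StepFunctionGetExecutionOutputOperator(
        task_id="get_output",
        execution_arn=start_execution.output,
    )

    start_execution >> wait_for_execution >> get_output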
exampleinclude:: /../../providers/tests/system/amazon/aws/example_dynamodb_to_s3.py :language: python :dedent: 4 :start-after: [START howto_transfer_dynamodb_to_s3] @@ -57,7 +57,7 @@ Example usage: To parallelize the replication, users can create multiple ``DynamoDBToS3Operator`` tasks using the ``TotalSegments`` parameter. For instance to replicate with parallelism of 2, create two tasks: -.. exampleinclude:: /../../tests/system/providers/amazon/aws/example_dynamodb_to_s3.py +.. exampleinclude:: /../../providers/tests/system/amazon/aws/example_dynamodb_to_s3.py :language: python :dedent: 4 :start-after: [START howto_transfer_dynamodb_to_s3_segmented] @@ -67,7 +67,7 @@ Users can also pass in ``point_in_time_export`` boolean param to ``DynamoDBToS3O Full export example usage: -.. exampleinclude:: /../../tests/system/providers/amazon/aws/example_dynamodb_to_s3.py +.. exampleinclude:: /../../providers/tests/system/amazon/aws/example_dynamodb_to_s3.py :language: python :dedent: 4 :start-after: [START howto_transfer_dynamodb_to_s3_in_some_point_in_time_full_export] @@ -75,7 +75,7 @@ Full export example usage: Incremental export example usage: -.. exampleinclude:: /../../tests/system/providers/amazon/aws/example_dynamodb_to_s3.py +.. exampleinclude:: /../../providers/tests/system/amazon/aws/example_dynamodb_to_s3.py :language: python :dedent: 4 :start-after: [START howto_transfer_dynamodb_to_s3_in_some_point_in_time_incremental_export] diff --git a/docs/apache-airflow-providers-amazon/transfer/ftp_to_s3.rst b/docs/apache-airflow-providers-amazon/transfer/ftp_to_s3.rst index 0f1904208538..13166a753232 100644 --- a/docs/apache-airflow-providers-amazon/transfer/ftp_to_s3.rst +++ b/docs/apache-airflow-providers-amazon/transfer/ftp_to_s3.rst @@ -41,7 +41,7 @@ To get more information about this operator visit: Example usage: -.. exampleinclude:: /../../tests/system/providers/amazon/aws/example_ftp_to_s3.py +.. exampleinclude:: /../../providers/tests/system/amazon/aws/example_ftp_to_s3.py :language: python :dedent: 4 :start-after: [START howto_transfer_ftp_to_s3] diff --git a/docs/apache-airflow-providers-amazon/transfer/gcs_to_s3.rst b/docs/apache-airflow-providers-amazon/transfer/gcs_to_s3.rst index 0f44f477016b..eb032d813c1c 100644 --- a/docs/apache-airflow-providers-amazon/transfer/gcs_to_s3.rst +++ b/docs/apache-airflow-providers-amazon/transfer/gcs_to_s3.rst @@ -39,7 +39,7 @@ To copy data from a Google Cloud Storage bucket to an Amazon S3 bucket you can u Example usage: -.. exampleinclude:: /../../tests/system/providers/amazon/aws/example_gcs_to_s3.py +.. exampleinclude:: /../../providers/tests/system/amazon/aws/example_gcs_to_s3.py :language: python :dedent: 4 :start-after: [START howto_transfer_gcs_to_s3] diff --git a/docs/apache-airflow-providers-amazon/transfer/glacier_to_gcs.rst b/docs/apache-airflow-providers-amazon/transfer/glacier_to_gcs.rst index 33ffa8033e2f..775fda6491c4 100644 --- a/docs/apache-airflow-providers-amazon/transfer/glacier_to_gcs.rst +++ b/docs/apache-airflow-providers-amazon/transfer/glacier_to_gcs.rst @@ -38,7 +38,7 @@ Amazon S3 Glacier To GCS transfer operator To transfer data from an Amazon Glacier vault to Google Cloud Storage you can use :class:`~airflow.providers.amazon.aws.transfers.glacier_to_gcs.GlacierToGCSOperator` -.. exampleinclude:: /../../tests/system/providers/amazon/aws/example_glacier_to_gcs.py +.. 
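To illustrate the ``TotalSegments`` parallelism described above, a sketch creating two tasks that scan disjoint segments of the same table; table and bucket names are placeholders.

.. code-block:: python

    from airflow.providers.amazon.aws.transfers.dynamodb_to_s3 import DynamoDBToS3Operator

    # Two tasks replicating the table in parallel, one segment each.
    backup_segments = [
        DynamoDBToS3Operator(
            task_id=f"backup_segment_{segment}",
            dynamodb_table_name="my-table",  # placeholder
            s3_bucket_name="my-backup-bucket",  # placeholder
            file_size=1_000_000,  # flush a file to S3 roughly every 1 MB
            dynamodb_scan_kwargs={"TotalSegments": 2, "Segment": segment},
        )
        for segment in range(2)
    ]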
exampleinclude:: /../../providers/tests/system/amazon/aws/example_glacier_to_gcs.py :language: python :dedent: 4 :start-after: [START howto_transfer_glacier_to_gcs] diff --git a/docs/apache-airflow-providers-amazon/transfer/google_api_to_s3.rst b/docs/apache-airflow-providers-amazon/transfer/google_api_to_s3.rst index b5593f9232b1..a1d2c8ea99bf 100644 --- a/docs/apache-airflow-providers-amazon/transfer/google_api_to_s3.rst +++ b/docs/apache-airflow-providers-amazon/transfer/google_api_to_s3.rst @@ -38,7 +38,7 @@ Google Sheets to Amazon S3 transfer operator This example loads data from Google Sheets and saves it to an Amazon S3 file. -.. exampleinclude:: /../../tests/system/providers/amazon/aws/example_google_api_sheets_to_s3.py +.. exampleinclude:: /../../providers/tests/system/amazon/aws/example_google_api_sheets_to_s3.py :language: python :dedent: 4 :start-after: [START howto_transfer_google_api_sheets_to_s3] @@ -57,7 +57,7 @@ It searches for up to 50 videos (due to pagination) in a given time range (``YOUTUBE_VIDEO_PUBLISHED_AFTER``, ``YOUTUBE_VIDEO_PUBLISHED_BEFORE``) on a YouTube channel (``YOUTUBE_CHANNEL_ID``), saves the response in Amazon S3, and also pushes the data to XCom. -.. exampleinclude:: /../../tests/system/providers/amazon/aws/example_google_api_youtube_to_s3.py +.. exampleinclude:: /../../providers/tests/system/amazon/aws/example_google_api_youtube_to_s3.py :language: python :dedent: 4 :start-after: [START howto_transfer_google_api_youtube_search_to_s3] @@ -66,7 +66,7 @@ saves the response in Amazon S3 and also pushes the data to XCom. It passes over the YouTube IDs to the next request which then gets the information (``YOUTUBE_VIDEO_FIELDS``) for the requested videos and saves them in Amazon S3 (``S3_BUCKET_NAME``). -.. exampleinclude:: /../../tests/system/providers/amazon/aws/example_google_api_youtube_to_s3.py +.. exampleinclude:: /../../providers/tests/system/amazon/aws/example_google_api_youtube_to_s3.py :language: python :dedent: 4 :start-after: [START howto_transfer_google_api_youtube_list_to_s3] diff --git a/docs/apache-airflow-providers-amazon/transfer/hive_to_dynamodb.rst b/docs/apache-airflow-providers-amazon/transfer/hive_to_dynamodb.rst index 733527e1acf5..a13cf012b4b9 100644 --- a/docs/apache-airflow-providers-amazon/transfer/hive_to_dynamodb.rst +++ b/docs/apache-airflow-providers-amazon/transfer/hive_to_dynamodb.rst @@ -44,7 +44,7 @@ To get more information visit: Example usage: -.. exampleinclude:: /../../tests/system/providers/amazon/aws/example_hive_to_dynamodb.py +.. exampleinclude:: /../../providers/tests/system/amazon/aws/example_hive_to_dynamodb.py :language: python :dedent: 4 :start-after: [START howto_transfer_hive_to_dynamodb] diff --git a/docs/apache-airflow-providers-amazon/transfer/http_to_s3.rst b/docs/apache-airflow-providers-amazon/transfer/http_to_s3.rst index d40e66f861b2..d28c4508aae1 100644 --- a/docs/apache-airflow-providers-amazon/transfer/http_to_s3.rst +++ b/docs/apache-airflow-providers-amazon/transfer/http_to_s3.rst @@ -41,7 +41,7 @@ To get more information about this operator visit: Example usage: -.. exampleinclude:: /../../tests/system/providers/amazon/aws/example_http_to_s3.py +..
exampleinclude:: /../../providers/tests/system/amazon/aws/example_http_to_s3.py :language: python :dedent: 4 :start-after: [START howto_transfer_http_to_s3] diff --git a/docs/apache-airflow-providers-amazon/transfer/imap_attachment_to_s3.rst b/docs/apache-airflow-providers-amazon/transfer/imap_attachment_to_s3.rst index 5787587eb018..cb6f46d338ef 100644 --- a/docs/apache-airflow-providers-amazon/transfer/imap_attachment_to_s3.rst +++ b/docs/apache-airflow-providers-amazon/transfer/imap_attachment_to_s3.rst @@ -38,7 +38,7 @@ Imap Attachment To Amazon S3 transfer operator To save an email attachment via IMAP protocol from an email server to an Amazon S3 Bucket you can use :class:`~airflow.providers.amazon.aws.transfers.imap_attachment_to_s3.ImapAttachmentToS3Operator` -.. exampleinclude:: /../../tests/system/providers/amazon/aws/example_imap_attachment_to_s3.py +.. exampleinclude:: /../../providers/tests/system/amazon/aws/example_imap_attachment_to_s3.py :language: python :dedent: 4 :start-after: [START howto_transfer_imap_attachment_to_s3] diff --git a/docs/apache-airflow-providers-amazon/transfer/local_to_s3.rst b/docs/apache-airflow-providers-amazon/transfer/local_to_s3.rst index 403db3ea9a82..d1ca16784446 100644 --- a/docs/apache-airflow-providers-amazon/transfer/local_to_s3.rst +++ b/docs/apache-airflow-providers-amazon/transfer/local_to_s3.rst @@ -42,7 +42,7 @@ To get more information about this operator visit: Example usage: -.. exampleinclude:: /../../tests/system/providers/amazon/aws/example_local_to_s3.py +.. exampleinclude:: /../../providers/tests/system/amazon/aws/example_local_to_s3.py :language: python :dedent: 4 :start-after: [START howto_transfer_local_to_s3] diff --git a/docs/apache-airflow-providers-amazon/transfer/mongo_to_s3.rst b/docs/apache-airflow-providers-amazon/transfer/mongo_to_s3.rst index 8ad30e583071..a9d8a441e670 100644 --- a/docs/apache-airflow-providers-amazon/transfer/mongo_to_s3.rst +++ b/docs/apache-airflow-providers-amazon/transfer/mongo_to_s3.rst @@ -43,7 +43,7 @@ To get more information about this operator visit: Example usage: -.. exampleinclude:: /../../tests/system/providers/amazon/aws/example_mongo_to_s3.py +.. exampleinclude:: /../../providers/tests/system/amazon/aws/example_mongo_to_s3.py :language: python :dedent: 4 :start-after: [START howto_transfer_mongo_to_s3] diff --git a/docs/apache-airflow-providers-amazon/transfer/redshift_to_s3.rst b/docs/apache-airflow-providers-amazon/transfer/redshift_to_s3.rst index 980d44d48b75..f2b32d35c52d 100644 --- a/docs/apache-airflow-providers-amazon/transfer/redshift_to_s3.rst +++ b/docs/apache-airflow-providers-amazon/transfer/redshift_to_s3.rst @@ -42,7 +42,7 @@ To get more information about this operator visit: Example usage: -.. exampleinclude:: /../../tests/system/providers/amazon/aws/example_redshift_s3_transfers.py +.. exampleinclude:: /../../providers/tests/system/amazon/aws/example_redshift_s3_transfers.py :language: python :dedent: 4 :start-after: [START howto_transfer_redshift_to_s3] diff --git a/docs/apache-airflow-providers-amazon/transfer/s3_to_dynamodb.rst b/docs/apache-airflow-providers-amazon/transfer/s3_to_dynamodb.rst index 630863547d7a..002d47728c24 100644 --- a/docs/apache-airflow-providers-amazon/transfer/s3_to_dynamodb.rst +++ b/docs/apache-airflow-providers-amazon/transfer/s3_to_dynamodb.rst @@ -48,7 +48,7 @@ To get more information visit: Example usage: -.. exampleinclude:: /../../tests/system/providers/amazon/aws/example_s3_to_dynamodb.py +.. 
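Among the transfers above, the local-filesystem upload is the simplest to sketch; paths, keys, and bucket names here are placeholders.

.. code-block:: python

    from airflow.providers.amazon.aws.transfers.local_to_s3 import (
        LocalFilesystemToS3Operator,
    )

    upload_file = LocalFilesystemToS3Operator(
        task_id="upload_file",
        filename="/tmp/report.csv",  # local file to upload (placeholder)
        dest_key="reports/{{ ds }}/report.csv",  # templated S3 key
        dest_bucket="my-bucket",  # placeholder
        replace=True,
    )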
exampleinclude:: /../../providers/tests/system/amazon/aws/example_s3_to_dynamodb.py :language: python :dedent: 4 :start-after: [START howto_transfer_s3_to_dynamodb] @@ -57,7 +57,7 @@ Example usage: To load S3 data into an existing DynamoDB table use: -.. exampleinclude:: /../../tests/system/providers/amazon/aws/example_s3_to_dynamodb.py +.. exampleinclude:: /../../providers/tests/system/amazon/aws/example_s3_to_dynamodb.py :language: python :dedent: 4 :start-after: [START howto_transfer_s3_to_dynamodb_existing_table] diff --git a/docs/apache-airflow-providers-amazon/transfer/s3_to_ftp.rst b/docs/apache-airflow-providers-amazon/transfer/s3_to_ftp.rst index 3db4f321b212..ecb33de0aa5f 100644 --- a/docs/apache-airflow-providers-amazon/transfer/s3_to_ftp.rst +++ b/docs/apache-airflow-providers-amazon/transfer/s3_to_ftp.rst @@ -42,7 +42,7 @@ To get more information about this operator visit: Example usage: -.. exampleinclude:: /../../tests/system/providers/amazon/aws/example_s3_to_ftp.py +.. exampleinclude:: /../../providers/tests/system/amazon/aws/example_s3_to_ftp.py :language: python :dedent: 4 :start-after: [START howto_transfer_s3_to_ftp] diff --git a/docs/apache-airflow-providers-amazon/transfer/s3_to_redshift.rst b/docs/apache-airflow-providers-amazon/transfer/s3_to_redshift.rst index 431973653e1a..bcef5b272ac7 100644 --- a/docs/apache-airflow-providers-amazon/transfer/s3_to_redshift.rst +++ b/docs/apache-airflow-providers-amazon/transfer/s3_to_redshift.rst @@ -42,7 +42,7 @@ To get more information about this operator visit: Example usage: -.. exampleinclude:: /../../tests/system/providers/amazon/aws/example_redshift_s3_transfers.py +.. exampleinclude:: /../../providers/tests/system/amazon/aws/example_redshift_s3_transfers.py :language: python :dedent: 4 :start-after: [START howto_transfer_s3_to_redshift] @@ -50,7 +50,7 @@ Example usage: Example of ingesting multiple keys: -.. exampleinclude:: /../../tests/system/providers/amazon/aws/example_redshift_s3_transfers.py +.. exampleinclude:: /../../providers/tests/system/amazon/aws/example_redshift_s3_transfers.py :language: python :dedent: 4 :start-after: [START howto_transfer_s3_to_redshift_multiple_keys] diff --git a/docs/apache-airflow-providers-amazon/transfer/s3_to_sftp.rst b/docs/apache-airflow-providers-amazon/transfer/s3_to_sftp.rst index 9353721fa509..56391b634c76 100644 --- a/docs/apache-airflow-providers-amazon/transfer/s3_to_sftp.rst +++ b/docs/apache-airflow-providers-amazon/transfer/s3_to_sftp.rst @@ -43,7 +43,7 @@ To get more information about this operator visit: Example usage: -.. exampleinclude:: /../../tests/system/providers/amazon/aws/example_s3_to_sftp.py +.. exampleinclude:: /../../providers/tests/system/amazon/aws/example_s3_to_sftp.py :language: python :dedent: 4 :start-after: [START howto_transfer_s3_to_sftp] diff --git a/docs/apache-airflow-providers-amazon/transfer/s3_to_sql.rst b/docs/apache-airflow-providers-amazon/transfer/s3_to_sql.rst index 590b625413cd..90899d6a399d 100644 --- a/docs/apache-airflow-providers-amazon/transfer/s3_to_sql.rst +++ b/docs/apache-airflow-providers-amazon/transfer/s3_to_sql.rst @@ -43,7 +43,7 @@ To get more information about this operator visit: Example usage with a parser for a csv file. This parser loads the file into memory and returns a list of rows: -.. exampleinclude:: /../../tests/system/providers/amazon/aws/example_s3_to_sql.py +.. 
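As a sketch of the S3-to-Redshift transfer referenced above, issuing a ``COPY`` from a key into an existing table; all identifiers and the connection id are placeholders.

.. code-block:: python

    from airflow.providers.amazon.aws.transfers.s3_to_redshift import S3ToRedshiftOperator

    load_into_redshift = S3ToRedshiftOperator(
        task_id="load_into_redshift",
        schema="public",
        table="my_table",  # placeholder
        s3_bucket="my-bucket",  # placeholder
        s3_key="reports/{{ ds }}/report.csv",
        copy_options=["CSV", "IGNOREHEADER 1"],  # forwarded to the COPY statement
        redshift_conn_id="redshift_default",
    )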
exampleinclude:: /../../providers/tests/system/amazon/aws/example_s3_to_sql.py :language: python :dedent: 4 :start-after: [START howto_transfer_s3_to_sql] @@ -52,7 +52,7 @@ file into memory and returns a list of rows: Example usage with a parser function that returns a generator. -.. exampleinclude:: /../../tests/system/providers/amazon/aws/example_s3_to_sql.py +.. exampleinclude:: /../../providers/tests/system/amazon/aws/example_s3_to_sql.py :language: python :dedent: 4 :start-after: [START howto_transfer_s3_to_sql_generator] diff --git a/docs/apache-airflow-providers-amazon/transfer/salesforce_to_s3.rst b/docs/apache-airflow-providers-amazon/transfer/salesforce_to_s3.rst index 7e08226d3921..b0a41c16699b 100644 --- a/docs/apache-airflow-providers-amazon/transfer/salesforce_to_s3.rst +++ b/docs/apache-airflow-providers-amazon/transfer/salesforce_to_s3.rst @@ -38,7 +38,7 @@ Extract data from Salesforce to Amazon S3 transfer operator The following example demonstrates a use case of extracting account data from a Salesforce instance and upload to an Amazon S3 bucket. -.. exampleinclude:: /../../tests/system/providers/amazon/aws/example_salesforce_to_s3.py +.. exampleinclude:: /../../providers/tests/system/amazon/aws/example_salesforce_to_s3.py :language: python :dedent: 4 :start-after: [START howto_transfer_salesforce_to_s3] diff --git a/docs/apache-airflow-providers-amazon/transfer/sftp_to_s3.rst b/docs/apache-airflow-providers-amazon/transfer/sftp_to_s3.rst index 0f8dfd79ed4b..821bf211097b 100644 --- a/docs/apache-airflow-providers-amazon/transfer/sftp_to_s3.rst +++ b/docs/apache-airflow-providers-amazon/transfer/sftp_to_s3.rst @@ -42,7 +42,7 @@ To get more information about this operator visit: Example usage: -.. exampleinclude:: /../../tests/system/providers/amazon/aws/example_sftp_to_s3.py +.. exampleinclude:: /../../providers/tests/system/amazon/aws/example_sftp_to_s3.py :language: python :dedent: 4 :start-after: [START howto_transfer_sftp_to_s3] diff --git a/docs/apache-airflow-providers-amazon/transfer/sql_to_s3.rst b/docs/apache-airflow-providers-amazon/transfer/sql_to_s3.rst index e01d41594293..5e088f3a1275 100644 --- a/docs/apache-airflow-providers-amazon/transfer/sql_to_s3.rst +++ b/docs/apache-airflow-providers-amazon/transfer/sql_to_s3.rst @@ -44,7 +44,7 @@ To get more information about this operator visit: Example usage: -.. exampleinclude:: /../../tests/system/providers/amazon/aws/example_sql_to_s3.py +.. exampleinclude:: /../../providers/tests/system/amazon/aws/example_sql_to_s3.py :language: python :dedent: 4 :start-after: [START howto_transfer_sql_to_s3] @@ -57,7 +57,7 @@ We can group the data in the table by passing the ``groupby_kwargs`` param. This Example usage: -.. exampleinclude:: /../../tests/system/providers/amazon/aws/example_sql_to_s3.py +.. exampleinclude:: /../../providers/tests/system/amazon/aws/example_sql_to_s3.py :language: python :dedent: 4 :start-after: [START howto_transfer_sql_to_s3_with_groupby_param] diff --git a/docs/apache-airflow-providers-apache-beam/changelog.rst b/docs/apache-airflow-providers-apache-beam/changelog.rst index 4c423b292bd9..7728c11ed6a6 100644 --- a/docs/apache-airflow-providers-apache-beam/changelog.rst +++ b/docs/apache-airflow-providers-apache-beam/changelog.rst @@ -22,4 +22,4 @@ .. IF YOU WANT TO MODIFY THIS FILE, YOU SHOULD MODIFY THE TEMPLATE `PROVIDER_CHANGELOG_TEMPLATE.rst.jinja2` IN the `dev/breeze/src/airflow_breeze/templates` DIRECTORY -.. include:: ../../airflow/providers/apache/beam/CHANGELOG.rst +.. 
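To make the parser contract above concrete: a sketch of a CSV parser that loads the whole file into memory and returns a list of rows, passed to the S3-to-SQL transfer. The bucket, key, table, and connection id are placeholders.

.. code-block:: python

    import csv

    from airflow.providers.amazon.aws.transfers.s3_to_sql import S3ToSqlOperator


    def parse_csv_to_list(filepath: str) -> list:
        """Load the whole file into memory and return a list of rows."""
        with open(filepath, newline="") as f:
            return list(csv.reader(f))


    transfer_s3_to_sql = S3ToSqlOperator(
        task_id="transfer_s3_to_sql",
        s3_bucket="my-bucket",  # placeholder
        s3_key="reports/report.csv",
        table="my_table",
        parser=parse_csv_to_list,  # called with the downloaded file's path
        sql_conn_id="sql_default",
    )

For very large files, a parser that yields rows one at a time (a generator) keeps memory use flat, as the generator variant above notes.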
include:: ../../providers/src/airflow/providers/apache/beam/CHANGELOG.rst diff --git a/docs/apache-airflow-providers-apache-beam/index.rst b/docs/apache-airflow-providers-apache-beam/index.rst index a8b7396b6706..7b311c10fcf4 100644 --- a/docs/apache-airflow-providers-apache-beam/index.rst +++ b/docs/apache-airflow-providers-apache-beam/index.rst @@ -40,7 +40,7 @@ :maxdepth: 1 :caption: System tests - System Tests <_api/tests/system/providers/apache/beam/index> + System Tests <_api/tests/system/apache/beam/index> .. toctree:: :hidden: @@ -48,7 +48,7 @@ :caption: Resources PyPI Repository - Example DAGs + Example DAGs .. toctree:: :hidden: diff --git a/docs/apache-airflow-providers-apache-beam/operators.rst b/docs/apache-airflow-providers-apache-beam/operators.rst index b536c514f785..da4e696d2ba7 100644 --- a/docs/apache-airflow-providers-apache-beam/operators.rst +++ b/docs/apache-airflow-providers-apache-beam/operators.rst @@ -54,13 +54,13 @@ recommend avoiding unless the Dataflow job requires it. Python Pipelines with DirectRunner ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ -.. exampleinclude:: /../../tests/system/providers/apache/beam/example_python.py +.. exampleinclude:: /../../providers/tests/system/apache/beam/example_python.py :language: python :dedent: 4 :start-after: [START howto_operator_start_python_direct_runner_pipeline_local_file] :end-before: [END howto_operator_start_python_direct_runner_pipeline_local_file] -.. exampleinclude:: /../../tests/system/providers/apache/beam/example_python.py +.. exampleinclude:: /../../providers/tests/system/apache/beam/example_python.py :language: python :dedent: 4 :start-after: [START howto_operator_start_python_direct_runner_pipeline_gcs_file] @@ -71,13 +71,13 @@ possibility to free up the worker when it knows it has to wait, and hand off the As a result, while it is suspended (deferred), it is not taking up a worker slot and your cluster will have a lot less resources wasted on idle Operators or Sensors: -.. exampleinclude:: /../../tests/system/providers/apache/beam/example_python_async.py +.. exampleinclude:: /../../providers/tests/system/apache/beam/example_python_async.py :language: python :dedent: 4 :start-after: [START howto_operator_start_python_direct_runner_pipeline_local_file_async] :end-before: [END howto_operator_start_python_direct_runner_pipeline_local_file_async] -.. exampleinclude:: /../../tests/system/providers/apache/beam/example_python_async.py +.. exampleinclude:: /../../providers/tests/system/apache/beam/example_python_async.py :language: python :dedent: 4 :start-after: [START howto_operator_start_python_direct_runner_pipeline_gcs_file_async] @@ -86,13 +86,13 @@ lot less resources wasted on idle Operators or Sensors: Python Pipelines with DataflowRunner ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ -.. exampleinclude:: /../../tests/system/providers/apache/beam/example_python.py +.. exampleinclude:: /../../providers/tests/system/apache/beam/example_python.py :language: python :dedent: 4 :start-after: [START howto_operator_start_python_dataflow_runner_pipeline_gcs_file] :end-before: [END howto_operator_start_python_dataflow_runner_pipeline_gcs_file] -.. exampleinclude:: /../../tests/system/providers/apache/beam/example_python_dataflow.py +.. 
exampleinclude:: /../../providers/tests/system/apache/beam/example_python_dataflow.py :language: python :dedent: 4 :start-after: [START howto_operator_start_python_dataflow_runner_pipeline_async_gcs_file] @@ -104,7 +104,7 @@ possibility to free up the worker when it knows it has to wait, and hand off the As a result, while it is suspended (deferred), it is not taking up a worker slot and your cluster will have a lot less resources wasted on idle Operators or Sensors: -.. exampleinclude:: /../../tests/system/providers/apache/beam/example_python_async.py +.. exampleinclude:: /../../providers/tests/system/apache/beam/example_python_async.py :language: python :dedent: 4 :start-after: [START howto_operator_start_python_dataflow_runner_pipeline_gcs_file_async] @@ -126,7 +126,7 @@ has the ability to download or available on the local filesystem (provide the ab Java Pipelines with DirectRunner ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ -.. exampleinclude:: /../../tests/system/providers/apache/beam/example_beam.py +.. exampleinclude:: /../../providers/tests/system/apache/beam/example_beam.py :language: python :dedent: 4 :start-after: [START howto_operator_start_java_direct_runner_pipeline] @@ -135,7 +135,7 @@ Java Pipelines with DirectRunner Java Pipelines with DataflowRunner ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ -.. exampleinclude:: /../../tests/system/providers/apache/beam/example_java_dataflow.py +.. exampleinclude:: /../../providers/tests/system/apache/beam/example_java_dataflow.py :language: python :dedent: 4 :start-after: [START howto_operator_start_java_dataflow_runner_pipeline] @@ -159,13 +159,13 @@ init the module and install dependencies with ``go run init example.com/main`` a Go Pipelines with DirectRunner ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ -.. exampleinclude:: /../../tests/system/providers/apache/beam/example_go.py +.. exampleinclude:: /../../providers/tests/system/apache/beam/example_go.py :language: python :dedent: 4 :start-after: [START howto_operator_start_go_direct_runner_pipeline_local_file] :end-before: [END howto_operator_start_go_direct_runner_pipeline_local_file] -.. exampleinclude:: /../../tests/system/providers/apache/beam/example_go.py +.. exampleinclude:: /../../providers/tests/system/apache/beam/example_go.py :language: python :dedent: 4 :start-after: [START howto_operator_start_go_direct_runner_pipeline_gcs_file] @@ -174,13 +174,13 @@ Go Pipelines with DirectRunner Go Pipelines with DataflowRunner ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ -.. exampleinclude:: /../../tests/system/providers/apache/beam/example_go.py +.. exampleinclude:: /../../providers/tests/system/apache/beam/example_go.py :language: python :dedent: 4 :start-after: [START howto_operator_start_go_dataflow_runner_pipeline_gcs_file] :end-before: [END howto_operator_start_go_dataflow_runner_pipeline_gcs_file] -.. exampleinclude:: /../../tests/system/providers/apache/beam/example_go_dataflow.py +.. exampleinclude:: /../../providers/tests/system/apache/beam/example_go_dataflow.py :language: python :dedent: 4 :start-after: [START howto_operator_start_go_dataflow_runner_pipeline_async_gcs_file] diff --git a/docs/apache-airflow-providers-apache-cassandra/changelog.rst b/docs/apache-airflow-providers-apache-cassandra/changelog.rst index 3fce287c1c07..2119d2c00267 100644 --- a/docs/apache-airflow-providers-apache-cassandra/changelog.rst +++ b/docs/apache-airflow-providers-apache-cassandra/changelog.rst @@ -22,4 +22,4 @@ .. 
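For the Beam Python pipelines above, a sketch of running the stock wordcount module with the DirectRunner; the requirements pin and output path are illustrative assumptions, and ``pipeline_options`` entries are forwarded to the pipeline as ``--key=value`` flags.

.. code-block:: python

    from airflow.providers.apache.beam.operators.beam import (
        BeamRunPythonPipelineOperator,
    )

    run_wordcount = BeamRunPythonPipelineOperator(
        task_id="run_wordcount",
        py_file="apache_beam.examples.wordcount",  # run as a module via -m
        py_options=["-m"],
        runner="DirectRunner",
        pipeline_options={"output": "/tmp/beam_wordcount"},  # placeholder path
        py_requirements=["apache-beam[gcp]==2.59.0"],  # hypothetical pin
        py_interpreter="python3",
        py_system_site_packages=False,
    )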
IF YOU WANT TO MODIFY THIS FILE, YOU SHOULD MODIFY THE TEMPLATE `PROVIDER_CHANGELOG_TEMPLATE.rst.jinja2` IN the `dev/breeze/src/airflow_breeze/templates` DIRECTORY -.. include:: ../../airflow/providers/apache/cassandra/CHANGELOG.rst +.. include:: ../../providers/src/airflow/providers/apache/cassandra/CHANGELOG.rst diff --git a/docs/apache-airflow-providers-apache-cassandra/index.rst b/docs/apache-airflow-providers-apache-cassandra/index.rst index 0ca1a4f13804..0a3e52999e40 100644 --- a/docs/apache-airflow-providers-apache-cassandra/index.rst +++ b/docs/apache-airflow-providers-apache-cassandra/index.rst @@ -48,14 +48,14 @@ :maxdepth: 1 :caption: System tests - System Tests <_api/tests/system/providers/apache/cassandra/index> + System Tests <_api/tests/system/apache/cassandra/index> .. toctree:: :hidden: :maxdepth: 1 :caption: Resources - Example DAGs + Example DAGs PyPI Repository Installing from sources diff --git a/docs/apache-airflow-providers-apache-cassandra/operators.rst b/docs/apache-airflow-providers-apache-cassandra/operators.rst index e0a1e10b559a..24bc51ef9027 100644 --- a/docs/apache-airflow-providers-apache-cassandra/operators.rst +++ b/docs/apache-airflow-providers-apache-cassandra/operators.rst @@ -50,7 +50,7 @@ Use the ``keys`` parameter to poke until the provided record is found. The exist Example use of these sensors ^^^^^^^^^^^^^^^^^^^^^^^^^^^^ -.. exampleinclude:: /../../tests/system/providers/apache/cassandra/example_cassandra_dag.py +.. exampleinclude:: /../../providers/tests/system/apache/cassandra/example_cassandra_dag.py :language: python :start-after: [START howto_operator_cassandra_sensors] :end-before: [END howto_operator_cassandra_sensors] diff --git a/docs/apache-airflow-providers-apache-drill/changelog.rst b/docs/apache-airflow-providers-apache-drill/changelog.rst index f2795b435a72..79971613d2f6 100644 --- a/docs/apache-airflow-providers-apache-drill/changelog.rst +++ b/docs/apache-airflow-providers-apache-drill/changelog.rst @@ -22,4 +22,4 @@ .. IF YOU WANT TO MODIFY THIS FILE, YOU SHOULD MODIFY THE TEMPLATE `PROVIDER_CHANGELOG_TEMPLATE.rst.jinja2` IN the `dev/breeze/src/airflow_breeze/templates` DIRECTORY -.. include:: ../../airflow/providers/apache/drill/CHANGELOG.rst +.. include:: ../../providers/src/airflow/providers/apache/drill/CHANGELOG.rst diff --git a/docs/apache-airflow-providers-apache-drill/index.rst b/docs/apache-airflow-providers-apache-drill/index.rst index cf05944534b7..8ba9ef37a667 100644 --- a/docs/apache-airflow-providers-apache-drill/index.rst +++ b/docs/apache-airflow-providers-apache-drill/index.rst @@ -48,14 +48,14 @@ :maxdepth: 1 :caption: System tests - System Tests <_api/tests/system/providers/apache/drill/index> + System Tests <_api/tests/system/apache/drill/index> .. toctree:: :hidden: :maxdepth: 1 :caption: Resources - Example DAGs + Example DAGs PyPI Repository Installing from sources diff --git a/docs/apache-airflow-providers-apache-drill/operators.rst b/docs/apache-airflow-providers-apache-drill/operators.rst index 7b647c0a2efe..47784b67fedc 100644 --- a/docs/apache-airflow-providers-apache-drill/operators.rst +++ b/docs/apache-airflow-providers-apache-drill/operators.rst @@ -39,7 +39,7 @@ The ``sql`` parameter can be templated and be an external ``.sql`` file. Using the operator """""""""""""""""" -.. exampleinclude:: /../../tests/system/providers/apache/drill/example_drill_dag.py +.. 
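A minimal sketch of the two Cassandra sensors described above; the keyspace-qualified table name, the ``keys`` mapping of primary-key columns, and the connection id are placeholders.

.. code-block:: python

    from airflow.providers.apache.cassandra.sensors.record import CassandraRecordSensor
    from airflow.providers.apache.cassandra.sensors.table import CassandraTableSensor

    wait_for_table = CassandraTableSensor(
        task_id="wait_for_table",
        table="my_keyspace.my_table",  # keyspace-qualified (placeholder)
        cassandra_conn_id="cassandra_default",
    )

    wait_for_record = CassandraRecordSensor(
        task_id="wait_for_record",
        table="my_keyspace.my_table",
        keys={"p1": "v1", "p2": "v2"},  # primary-key columns to poke for
        cassandra_conn_id="cassandra_default",
    )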
exampleinclude:: /../../providers/tests/system/apache/drill/example_drill_dag.py :language: python :dedent: 4 :start-after: [START howto_operator_drill] diff --git a/docs/apache-airflow-providers-apache-druid/changelog.rst b/docs/apache-airflow-providers-apache-druid/changelog.rst index f1d8377ab560..652948c8ee8c 100644 --- a/docs/apache-airflow-providers-apache-druid/changelog.rst +++ b/docs/apache-airflow-providers-apache-druid/changelog.rst @@ -22,4 +22,4 @@ .. IF YOU WANT TO MODIFY THIS FILE, YOU SHOULD MODIFY THE TEMPLATE `PROVIDER_CHANGELOG_TEMPLATE.rst.jinja2` IN the `dev/breeze/src/airflow_breeze/templates` DIRECTORY -.. include:: ../../airflow/providers/apache/druid/CHANGELOG.rst +.. include:: ../../providers/src/airflow/providers/apache/druid/CHANGELOG.rst diff --git a/docs/apache-airflow-providers-apache-druid/index.rst b/docs/apache-airflow-providers-apache-druid/index.rst index a351b7a40e35..0b920743839c 100644 --- a/docs/apache-airflow-providers-apache-druid/index.rst +++ b/docs/apache-airflow-providers-apache-druid/index.rst @@ -47,7 +47,7 @@ :maxdepth: 1 :caption: System tests - System Tests <_api/tests/system/providers/apache/druid/index> + System Tests <_api/tests/system/apache/druid/index> .. toctree:: @@ -57,7 +57,7 @@ PyPI Repository Installing from sources - Example DAGs + Example DAGs .. THE REMAINDER OF THE FILE IS AUTOMATICALLY GENERATED. IT WILL BE OVERWRITTEN AT RELEASE TIME! diff --git a/docs/apache-airflow-providers-apache-druid/operators.rst b/docs/apache-airflow-providers-apache-druid/operators.rst index 6930e7b4d3ae..758c51c53853 100644 --- a/docs/apache-airflow-providers-apache-druid/operators.rst +++ b/docs/apache-airflow-providers-apache-druid/operators.rst @@ -38,7 +38,7 @@ For parameter definition take a look at :class:`~airflow.providers.apache.druid. Using the operator """""""""""""""""" -.. exampleinclude:: /../../tests/system/providers/apache/druid/example_druid_dag.py +.. exampleinclude:: /../../providers/tests/system/apache/druid/example_druid_dag.py :language: python :dedent: 4 :start-after: [START howto_operator_druid_submit] diff --git a/docs/apache-airflow-providers-apache-flink/changelog.rst b/docs/apache-airflow-providers-apache-flink/changelog.rst index c6c5d10cb7eb..07ffea0939e6 100644 --- a/docs/apache-airflow-providers-apache-flink/changelog.rst +++ b/docs/apache-airflow-providers-apache-flink/changelog.rst @@ -22,4 +22,4 @@ .. IF YOU WANT TO MODIFY THIS FILE, YOU SHOULD MODIFY THE TEMPLATE `PROVIDER_CHANGELOG_TEMPLATE.rst.jinja2` IN the `dev/breeze/src/airflow_breeze/templates` DIRECTORY -.. include:: ../../airflow/providers/apache/flink/CHANGELOG.rst +.. include:: ../../providers/src/airflow/providers/apache/flink/CHANGELOG.rst diff --git a/docs/apache-airflow-providers-apache-flink/index.rst b/docs/apache-airflow-providers-apache-flink/index.rst index 09ede32f377f..a909ecfb7400 100644 --- a/docs/apache-airflow-providers-apache-flink/index.rst +++ b/docs/apache-airflow-providers-apache-flink/index.rst @@ -47,7 +47,7 @@ :maxdepth: 1 :caption: Resources - Example DAGs + Example DAGs PyPI Repository Installing from sources diff --git a/docs/apache-airflow-providers-apache-hdfs/changelog.rst b/docs/apache-airflow-providers-apache-hdfs/changelog.rst index 5504c6928783..3c984d0e1129 100644 --- a/docs/apache-airflow-providers-apache-hdfs/changelog.rst +++ b/docs/apache-airflow-providers-apache-hdfs/changelog.rst @@ -22,4 +22,4 @@ .. 
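For the Druid submit operator above, a sketch pointing at a templated ingestion spec; the spec path and connection id are placeholders.

.. code-block:: python

    from airflow.providers.apache.druid.operators.druid import DruidOperator

    submit_ingestion = DruidOperator(
        task_id="submit_ingestion",
        json_index_file="ingestion_spec.json",  # templated spec file (placeholder)
        druid_ingest_conn_id="druid_ingest_default",
    )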
IF YOU WANT TO MODIFY THIS FILE, YOU SHOULD MODIFY THE TEMPLATE `PROVIDER_CHANGELOG_TEMPLATE.rst.jinja2` IN the `dev/breeze/src/airflow_breeze/templates` DIRECTORY -.. include:: ../../airflow/providers/apache/hdfs/CHANGELOG.rst +.. include:: ../../providers/src/airflow/providers/apache/hdfs/CHANGELOG.rst diff --git a/docs/apache-airflow-providers-apache-hive/changelog.rst b/docs/apache-airflow-providers-apache-hive/changelog.rst index 59bdc59e2d44..838d1ce4de31 100644 --- a/docs/apache-airflow-providers-apache-hive/changelog.rst +++ b/docs/apache-airflow-providers-apache-hive/changelog.rst @@ -22,4 +22,4 @@ .. IF YOU WANT TO MODIFY THIS FILE, YOU SHOULD MODIFY THE TEMPLATE `PROVIDER_CHANGELOG_TEMPLATE.rst.jinja2` IN the `dev/breeze/src/airflow_breeze/templates` DIRECTORY -.. include:: ../../airflow/providers/apache/hive/CHANGELOG.rst +.. include:: ../../providers/src/airflow/providers/apache/hive/CHANGELOG.rst diff --git a/docs/apache-airflow-providers-apache-hive/index.rst b/docs/apache-airflow-providers-apache-hive/index.rst index f99065fba4aa..7a78a85eff54 100644 --- a/docs/apache-airflow-providers-apache-hive/index.rst +++ b/docs/apache-airflow-providers-apache-hive/index.rst @@ -50,14 +50,14 @@ :maxdepth: 1 :caption: System tests - System Tests <_api/tests/system/providers/apache/hive/index> + System Tests <_api/tests/system/apache/hive/index> .. toctree:: :hidden: :maxdepth: 1 :caption: Resources - Example DAGs + Example DAGs PyPI Repository Installing from sources Macros diff --git a/docs/apache-airflow-providers-apache-hive/operators.rst b/docs/apache-airflow-providers-apache-hive/operators.rst index 7a92cba9f2dd..d6d65a893a9d 100644 --- a/docs/apache-airflow-providers-apache-hive/operators.rst +++ b/docs/apache-airflow-providers-apache-hive/operators.rst @@ -27,7 +27,7 @@ HiveOperator This operator executes hql code or hive script in a specific Hive database. -.. exampleinclude:: /../../tests/system/providers/apache/hive/example_twitter_dag.py +.. exampleinclude:: /../../providers/tests/system/apache/hive/example_twitter_dag.py :language: python :dedent: 4 :start-after: [START create_hive] diff --git a/docs/apache-airflow-providers-apache-iceberg/changelog.rst b/docs/apache-airflow-providers-apache-iceberg/changelog.rst index daefe9612ee6..220a77a762b6 100644 --- a/docs/apache-airflow-providers-apache-iceberg/changelog.rst +++ b/docs/apache-airflow-providers-apache-iceberg/changelog.rst @@ -22,4 +22,4 @@ .. IF YOU WANT TO MODIFY THIS FILE, YOU SHOULD MODIFY THE TEMPLATE `PROVIDER_CHANGELOG_TEMPLATE.rst.jinja2` IN the `dev/breeze/src/airflow_breeze/templates` DIRECTORY -.. include:: ../../airflow/providers/apache/iceberg/CHANGELOG.rst +.. include:: ../../providers/src/airflow/providers/apache/iceberg/CHANGELOG.rst diff --git a/docs/apache-airflow-providers-apache-iceberg/index.rst b/docs/apache-airflow-providers-apache-iceberg/index.rst index da33da6e4dd3..6bcfd228a8a8 100644 --- a/docs/apache-airflow-providers-apache-iceberg/index.rst +++ b/docs/apache-airflow-providers-apache-iceberg/index.rst @@ -41,7 +41,7 @@ :maxdepth: 1 :caption: System tests - System Tests <_api/tests/system/providers/apache/iceberg/index> + System Tests <_api/tests/system/apache/iceberg/index> .. 
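A minimal sketch of the ``HiveOperator`` described above, running inline HQL; the statement and connection id are placeholders.

.. code-block:: python

    from airflow.providers.apache.hive.operators.hive import HiveOperator

    run_hql = HiveOperator(
        task_id="run_hql",
        hql="CREATE TABLE IF NOT EXISTS my_table (id INT, name STRING);",
        hive_cli_conn_id="hive_cli_default",
    )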
toctree:: @@ -49,7 +49,7 @@ :maxdepth: 1 :caption: Resources - Example DAGs + Example DAGs PyPI Repository Installing from sources Python API <_api/airflow/providers/apache/iceberg/index> diff --git a/docs/apache-airflow-providers-apache-impala/changelog.rst b/docs/apache-airflow-providers-apache-impala/changelog.rst index 8f7e22d89897..ad7e0972ce92 100644 --- a/docs/apache-airflow-providers-apache-impala/changelog.rst +++ b/docs/apache-airflow-providers-apache-impala/changelog.rst @@ -22,4 +22,4 @@ .. IF YOU WANT TO MODIFY THIS FILE, YOU SHOULD MODIFY THE TEMPLATE `PROVIDER_CHANGELOG_TEMPLATE.rst.jinja2` IN the `dev/breeze/src/airflow_breeze/templates` DIRECTORY -.. include:: ../../airflow/providers/apache/impala/CHANGELOG.rst +.. include:: ../../providers/src/airflow/providers/apache/impala/CHANGELOG.rst diff --git a/docs/apache-airflow-providers-apache-kafka/changelog.rst b/docs/apache-airflow-providers-apache-kafka/changelog.rst index 258e2231535f..62b89d1ca33e 100644 --- a/docs/apache-airflow-providers-apache-kafka/changelog.rst +++ b/docs/apache-airflow-providers-apache-kafka/changelog.rst @@ -22,4 +22,4 @@ .. IF YOU WANT TO MODIFY THIS FILE, YOU SHOULD MODIFY THE TEMPLATE `PROVIDER_CHANGELOG_TEMPLATE.rst.jinja2` IN the `dev/breeze/src/airflow_breeze/templates` DIRECTORY -.. include:: ../../airflow/providers/apache/kafka/CHANGELOG.rst +.. include:: ../../providers/src/airflow/providers/apache/kafka/CHANGELOG.rst diff --git a/docs/apache-airflow-providers-apache-kafka/index.rst b/docs/apache-airflow-providers-apache-kafka/index.rst index 0879b57fb7f1..9575d35490b5 100644 --- a/docs/apache-airflow-providers-apache-kafka/index.rst +++ b/docs/apache-airflow-providers-apache-kafka/index.rst @@ -53,7 +53,7 @@ :maxdepth: 1 :caption: System tests - System Tests <_api/tests/system/providers/apache/kafka/index> + System Tests <_api/tests/system/apache/kafka/index> .. toctree:: @@ -61,7 +61,7 @@ :maxdepth: 1 :caption: Resources - Example DAGs + Example DAGs PyPI Repository Installing from sources diff --git a/docs/apache-airflow-providers-apache-kafka/operators/index.rst b/docs/apache-airflow-providers-apache-kafka/operators/index.rst index da5e99b4b7ec..47ae3d63615f 100644 --- a/docs/apache-airflow-providers-apache-kafka/operators/index.rst +++ b/docs/apache-airflow-providers-apache-kafka/operators/index.rst @@ -33,7 +33,7 @@ For parameter definitions take a look at :class:`~airflow.providers.apache.kafka Using the operator """""""""""""""""" -.. exampleinclude:: /../../tests/system/providers/apache/kafka/example_dag_hello_kafka.py +.. exampleinclude:: /../../providers/tests/system/apache/kafka/example_dag_hello_kafka.py :language: python :dedent: 4 :start-after: [START howto_operator_consume_from_topic] @@ -58,7 +58,7 @@ For parameter definitions take a look at :class:`~airflow.providers.apache.kafka Using the operator """""""""""""""""" -.. exampleinclude:: /../../tests/system/providers/apache/kafka/example_dag_hello_kafka.py +.. exampleinclude:: /../../providers/tests/system/apache/kafka/example_dag_hello_kafka.py :language: python :dedent: 4 :start-after: [START howto_operator_produce_to_topic] diff --git a/docs/apache-airflow-providers-apache-kafka/sensors.rst b/docs/apache-airflow-providers-apache-kafka/sensors.rst index 4014154640fe..02fd89e5ed64 100644 --- a/docs/apache-airflow-providers-apache-kafka/sensors.rst +++ b/docs/apache-airflow-providers-apache-kafka/sensors.rst @@ -35,7 +35,7 @@ Using the sensor """""""""""""""""" -.. 
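To illustrate the Kafka produce operator above, a sketch where the producer function yields ``(key, value)`` pairs to be written to the topic; the topic name and connection id are placeholders.

.. code-block:: python

    import json

    from airflow.providers.apache.kafka.operators.produce import ProduceToTopicOperator


    def producer_function():
        # Yield (key, value) pairs; values are JSON-encoded strings here.
        for i in range(5):
            yield (json.dumps(i), json.dumps({"payload": i}))


    produce = ProduceToTopicOperator(
        task_id="produce",
        topic="my_topic",  # placeholder
        producer_function=producer_function,
        kafka_config_id="kafka_default",
    )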
exampleinclude:: /../../tests/system/providers/apache/kafka/example_dag_hello_kafka.py +.. exampleinclude:: /../../providers/tests/system/apache/kafka/example_dag_hello_kafka.py :language: python :dedent: 4 :start-after: [START howto_sensor_await_message] @@ -62,7 +62,7 @@ For parameter definitions take a look at :class:`~airflow.providers.apache.kafka Using the sensor """""""""""""""""" -.. exampleinclude:: /../../tests/system/providers/apache/kafka/example_dag_event_listener.py +.. exampleinclude:: /../../providers/tests/system/apache/kafka/example_dag_event_listener.py :language: python :dedent: 4 :start-after: [START howto_sensor_await_message_trigger_function] diff --git a/docs/apache-airflow-providers-apache-kylin/changelog.rst b/docs/apache-airflow-providers-apache-kylin/changelog.rst index 2fbe89478c99..af326254ace9 100644 --- a/docs/apache-airflow-providers-apache-kylin/changelog.rst +++ b/docs/apache-airflow-providers-apache-kylin/changelog.rst @@ -22,4 +22,4 @@ .. IF YOU WANT TO MODIFY THIS FILE, YOU SHOULD MODIFY THE TEMPLATE `PROVIDER_CHANGELOG_TEMPLATE.rst.jinja2` IN the `dev/breeze/src/airflow_breeze/templates` DIRECTORY -.. include:: ../../airflow/providers/apache/kylin/CHANGELOG.rst +.. include:: ../../providers/src/airflow/providers/apache/kylin/CHANGELOG.rst diff --git a/docs/apache-airflow-providers-apache-kylin/index.rst b/docs/apache-airflow-providers-apache-kylin/index.rst index f38208c5bab4..b496932307d0 100644 --- a/docs/apache-airflow-providers-apache-kylin/index.rst +++ b/docs/apache-airflow-providers-apache-kylin/index.rst @@ -48,14 +48,14 @@ :maxdepth: 1 :caption: System tests - System Tests <_api/tests/system/providers/apache/kylin/index> + System Tests <_api/tests/system/apache/kylin/index> .. toctree:: :hidden: :maxdepth: 1 :caption: Resources - Example DAGs + Example DAGs PyPI Repository Installing from sources diff --git a/docs/apache-airflow-providers-apache-livy/changelog.rst b/docs/apache-airflow-providers-apache-livy/changelog.rst index 5b0b68391eb1..cda7f99ad754 100644 --- a/docs/apache-airflow-providers-apache-livy/changelog.rst +++ b/docs/apache-airflow-providers-apache-livy/changelog.rst @@ -22,4 +22,4 @@ .. IF YOU WANT TO MODIFY THIS FILE, YOU SHOULD MODIFY THE TEMPLATE `PROVIDER_CHANGELOG_TEMPLATE.rst.jinja2` IN the `dev/breeze/src/airflow_breeze/templates` DIRECTORY -.. include:: ../../airflow/providers/apache/livy/CHANGELOG.rst +.. include:: ../../providers/src/airflow/providers/apache/livy/CHANGELOG.rst diff --git a/docs/apache-airflow-providers-apache-livy/index.rst b/docs/apache-airflow-providers-apache-livy/index.rst index f662f0efdbfd..818766c0dd3a 100644 --- a/docs/apache-airflow-providers-apache-livy/index.rst +++ b/docs/apache-airflow-providers-apache-livy/index.rst @@ -48,14 +48,14 @@ :maxdepth: 1 :caption: System tests - System Tests <_api/tests/system/providers/apache/livy/index> + System Tests <_api/tests/system/apache/livy/index> .. toctree:: :hidden: :maxdepth: 1 :caption: Resources - Example DAGs + Example DAGs PyPI Repository Installing from sources diff --git a/docs/apache-airflow-providers-apache-livy/operators.rst b/docs/apache-airflow-providers-apache-livy/operators.rst index 5ef0e0b71a59..851f524c2a84 100644 --- a/docs/apache-airflow-providers-apache-livy/operators.rst +++ b/docs/apache-airflow-providers-apache-livy/operators.rst @@ -29,7 +29,7 @@ LivyOperator This operator wraps the Apache Livy batch REST API, allowing to submit a Spark application to the underlying cluster. -.. 
exampleinclude:: /../../tests/system/providers/apache/livy/example_livy.py +.. exampleinclude:: /../../providers/tests/system/apache/livy/example_livy.py :language: python :start-after: [START create_livy] :end-before: [END create_livy] @@ -38,7 +38,7 @@ You can also run this operator in deferrable mode by setting the parameter ``def This will lead to efficient utilization of Airflow workers as polling for job status happens on the triggerer asynchronously. Note that this requires the triggerer to be available on your Airflow deployment. -.. exampleinclude:: /../../tests/system/providers/apache/livy/example_livy.py +.. exampleinclude:: /../../providers/tests/system/apache/livy/example_livy.py :language: python :start-after: [START create_livy_deferrable] :end-before: [END create_livy_deferrable] diff --git a/docs/apache-airflow-providers-apache-pig/changelog.rst b/docs/apache-airflow-providers-apache-pig/changelog.rst index d1b5cc0aa478..e6c7d50b406b 100644 --- a/docs/apache-airflow-providers-apache-pig/changelog.rst +++ b/docs/apache-airflow-providers-apache-pig/changelog.rst @@ -22,4 +22,4 @@ .. IF YOU WANT TO MODIFY THIS FILE, YOU SHOULD MODIFY THE TEMPLATE `PROVIDER_CHANGELOG_TEMPLATE.rst.jinja2` IN the `dev/breeze/src/airflow_breeze/templates` DIRECTORY -.. include:: ../../airflow/providers/apache/pig/CHANGELOG.rst +.. include:: ../../providers/src/airflow/providers/apache/pig/CHANGELOG.rst diff --git a/docs/apache-airflow-providers-apache-pig/index.rst b/docs/apache-airflow-providers-apache-pig/index.rst index 672c07de9a8b..efab88e3f964 100644 --- a/docs/apache-airflow-providers-apache-pig/index.rst +++ b/docs/apache-airflow-providers-apache-pig/index.rst @@ -47,14 +47,14 @@ :maxdepth: 1 :caption: System tests - System Tests <_api/tests/system/providers/apache/pig/index> + System Tests <_api/tests/system/apache/pig/index> .. toctree:: :hidden: :maxdepth: 1 :caption: Resources - Example DAGs + Example DAGs PyPI Repository Installing from sources diff --git a/docs/apache-airflow-providers-apache-pig/operators.rst b/docs/apache-airflow-providers-apache-pig/operators.rst index 42be0eeac496..601970051abc 100644 --- a/docs/apache-airflow-providers-apache-pig/operators.rst +++ b/docs/apache-airflow-providers-apache-pig/operators.rst @@ -26,7 +26,7 @@ Pig programs are amenable to substantial parallelization, which in turn enables Use the :class:`~airflow.providers.apache.pig.operators.pig.PigOperator` to execute a pig script. -.. exampleinclude:: /../../tests/system/providers/apache/pig/example_pig.py +.. exampleinclude:: /../../providers/tests/system/apache/pig/example_pig.py :language: python :start-after: [START create_pig] :end-before: [END create_pig] diff --git a/docs/apache-airflow-providers-apache-pinot/changelog.rst b/docs/apache-airflow-providers-apache-pinot/changelog.rst index 82c9498bc3dd..6ec0eddf207d 100644 --- a/docs/apache-airflow-providers-apache-pinot/changelog.rst +++ b/docs/apache-airflow-providers-apache-pinot/changelog.rst @@ -22,4 +22,4 @@ .. IF YOU WANT TO MODIFY THIS FILE, YOU SHOULD MODIFY THE TEMPLATE `PROVIDER_CHANGELOG_TEMPLATE.rst.jinja2` IN the `dev/breeze/src/airflow_breeze/templates` DIRECTORY -.. include:: ../../airflow/providers/apache/pinot/CHANGELOG.rst +..
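To make the deferrable-mode point above concrete, here is a minimal ``LivyOperator`` sketch (the jar path and class name are the stock Spark examples, assumed for illustration):

.. code-block:: python

    from airflow.providers.apache.livy.operators.livy import LivyOperator

    livy_java_task = LivyOperator(
        task_id="pi_java_task",
        file="/spark-examples.jar",
        class_name="org.apache.spark.examples.SparkPi",
        args=["10"],
        polling_interval=60,  # the worker polls the batch state every 60 seconds
    )

    livy_java_task_deferrable = LivyOperator(
        task_id="pi_java_task_deferrable",
        file="/spark-examples.jar",
        class_name="org.apache.spark.examples.SparkPi",
        deferrable=True,  # polling moves to the triggerer, freeing the worker slot
    )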
include:: ../../providers/src/airflow/providers/apache/pinot/CHANGELOG.rst diff --git a/docs/apache-airflow-providers-apache-pinot/index.rst b/docs/apache-airflow-providers-apache-pinot/index.rst index 32cf0f675af5..4eb704a92341 100644 --- a/docs/apache-airflow-providers-apache-pinot/index.rst +++ b/docs/apache-airflow-providers-apache-pinot/index.rst @@ -40,7 +40,7 @@ :maxdepth: 1 :caption: References - Example DAGs + Example DAGs Python API <_api/airflow/providers/apache/pinot/index> PyPI Repository Installing from sources @@ -50,7 +50,7 @@ :maxdepth: 1 :caption: System tests - System Tests <_api/tests/system/providers/apache/pinot/index> + System Tests <_api/tests/system/apache/pinot/index> .. THE REMAINDER OF THE FILE IS AUTOMATICALLY GENERATED. IT WILL BE OVERWRITTEN AT RELEASE TIME! diff --git a/docs/apache-airflow-providers-apache-pinot/operators.rst b/docs/apache-airflow-providers-apache-pinot/operators.rst index dba3b2a05959..0882d856ca1c 100644 --- a/docs/apache-airflow-providers-apache-pinot/operators.rst +++ b/docs/apache-airflow-providers-apache-pinot/operators.rst @@ -40,7 +40,7 @@ Parameters For parameter definition, take a look at :class:`~airflow.providers.apache.pinot.hooks.pinot.PinotAdminHook` -.. exampleinclude:: /../../tests/system/providers/apache/pinot/example_pinot_dag.py +.. exampleinclude:: /../../providers/tests/system/apache/pinot/example_pinot_dag.py :language: python :dedent: 4 :start-after: [START howto_operator_pinot_admin_hook] @@ -61,7 +61,7 @@ Parameters For parameter definition, take a look at :class:`~airflow.providers.apache.pinot.hooks.pinot.PinotDbApiHook` -.. exampleinclude:: /../../tests/system/providers/apache/pinot/example_pinot_dag.py +.. exampleinclude:: /../../providers/tests/system/apache/pinot/example_pinot_dag.py :language: python :dedent: 4 :start-after: [START howto_operator_pinot_dbapi_example] diff --git a/docs/apache-airflow-providers-apache-spark/changelog.rst b/docs/apache-airflow-providers-apache-spark/changelog.rst index a21049f9e838..7714d7d18e43 100644 --- a/docs/apache-airflow-providers-apache-spark/changelog.rst +++ b/docs/apache-airflow-providers-apache-spark/changelog.rst @@ -22,4 +22,4 @@ .. IF YOU WANT TO MODIFY THIS FILE, YOU SHOULD MODIFY THE TEMPLATE `PROVIDER_CHANGELOG_TEMPLATE.rst.jinja2` IN the `dev/breeze/src/airflow_breeze/templates` DIRECTORY -.. include:: ../../airflow/providers/apache/spark/CHANGELOG.rst +.. include:: ../../providers/src/airflow/providers/apache/spark/CHANGELOG.rst diff --git a/docs/apache-airflow-providers-apache-spark/decorators/pyspark.rst b/docs/apache-airflow-providers-apache-spark/decorators/pyspark.rst index 1755e079b921..ba61f31e9d99 100644 --- a/docs/apache-airflow-providers-apache-spark/decorators/pyspark.rst +++ b/docs/apache-airflow-providers-apache-spark/decorators/pyspark.rst @@ -44,7 +44,7 @@ Example The following example shows how to use the ``@task.pyspark`` decorator. Note that the ``spark`` and ``sc`` objects are injected into the function. -.. exampleinclude:: /../../tests/system/providers/apache/spark/example_pyspark.py +.. 
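As a companion to the relocated Pinot example, a sketch of querying Pinot through the DB-API hook (the ``baseballStats`` table is the Pinot quickstart dataset, assumed here purely for illustration):

.. code-block:: python

    from airflow.decorators import task
    from airflow.providers.apache.pinot.hooks.pinot import PinotDbApiHook


    @task
    def pinot_select():
        # Uses the default Pinot broker connection to run a SQL query.
        hook = PinotDbApiHook()
        return hook.get_records("SELECT playerName FROM baseballStats LIMIT 5")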
exampleinclude:: /../../providers/tests/system/apache/spark/example_pyspark.py :language: python :dedent: 4 :start-after: [START task_pyspark] diff --git a/docs/apache-airflow-providers-apache-spark/index.rst b/docs/apache-airflow-providers-apache-spark/index.rst index 6e36428a4629..4d96f64dfc93 100644 --- a/docs/apache-airflow-providers-apache-spark/index.rst +++ b/docs/apache-airflow-providers-apache-spark/index.rst @@ -49,14 +49,14 @@ :maxdepth: 1 :caption: System tests - System Tests <_api/tests/system/providers/apache/spark/index> + System Tests <_api/tests/system/apache/spark/index> .. toctree:: :hidden: :maxdepth: 1 :caption: Resources - Example DAGs + Example DAGs PyPI Repository Installing from sources diff --git a/docs/apache-airflow-providers-apache-spark/operators.rst b/docs/apache-airflow-providers-apache-spark/operators.rst index f6c20985f24c..63e728b57332 100644 --- a/docs/apache-airflow-providers-apache-spark/operators.rst +++ b/docs/apache-airflow-providers-apache-spark/operators.rst @@ -44,7 +44,7 @@ Using the operator Using the ``cmd_type`` parameter, it is possible to transfer data from Spark to a database (``spark_to_jdbc``) or from a database to Spark (``jdbc_to_spark``), which will write the table using the Spark command ``saveAsTable``. -.. exampleinclude:: /../../tests/system/providers/apache/spark/example_spark_dag.py +.. exampleinclude:: /../../providers/tests/system/apache/spark/example_spark_dag.py :language: python :dedent: 4 :start-after: [START howto_operator_spark_jdbc] @@ -69,7 +69,7 @@ For parameter definition take a look at :class:`~airflow.providers.apache.spark. Using the operator """""""""""""""""" -.. exampleinclude:: /../../tests/system/providers/apache/spark/example_spark_dag.py +.. exampleinclude:: /../../providers/tests/system/apache/spark/example_spark_dag.py :language: python :dedent: 4 :start-after: [START howto_operator_spark_sql] @@ -92,7 +92,7 @@ For parameter definition take a look at :class:`~airflow.providers.apache.spark. Using the operator """""""""""""""""" -.. exampleinclude:: /../../tests/system/providers/apache/spark/example_spark_dag.py +.. exampleinclude:: /../../providers/tests/system/apache/spark/example_spark_dag.py :language: python :dedent: 4 :start-after: [START howto_operator_spark_submit] diff --git a/docs/apache-airflow-providers-apprise/changelog.rst b/docs/apache-airflow-providers-apprise/changelog.rst index 3634dd238a43..202bf0b8520c 100644 --- a/docs/apache-airflow-providers-apprise/changelog.rst +++ b/docs/apache-airflow-providers-apprise/changelog.rst @@ -22,4 +22,4 @@ .. IF YOU WANT TO MODIFY THIS FILE, YOU SHOULD MODIFY THE TEMPLATE `PROVIDER_CHANGELOG_TEMPLATE.rst.jinja2` IN the `dev/breeze/src/airflow_breeze/templates` DIRECTORY -.. include:: ../../airflow/providers/apprise/CHANGELOG.rst +.. include:: ../../providers/src/airflow/providers/apprise/CHANGELOG.rst diff --git a/docs/apache-airflow-providers-arangodb/changelog.rst b/docs/apache-airflow-providers-arangodb/changelog.rst index c3ddda82157e..c4229fd8a9c8 100644 --- a/docs/apache-airflow-providers-arangodb/changelog.rst +++ b/docs/apache-airflow-providers-arangodb/changelog.rst @@ -22,4 +22,4 @@ .. IF YOU WANT TO MODIFY THIS FILE, YOU SHOULD MODIFY THE TEMPLATE `PROVIDER_CHANGELOG_TEMPLATE.rst.jinja2` IN the `dev/breeze/src/airflow_breeze/templates` DIRECTORY -.. include:: ../../airflow/providers/arangodb/CHANGELOG.rst +..
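A minimal sketch of the ``cmd_type`` usage described above, closely following the relocated ``example_spark_dag.py`` (the PostgreSQL jar, driver, and table names are assumptions):

.. code-block:: python

    from airflow.providers.apache.spark.operators.spark_jdbc import SparkJDBCOperator

    jdbc_to_spark_job = SparkJDBCOperator(
        task_id="jdbc_to_spark_job",
        cmd_type="jdbc_to_spark",  # direction: database -> Spark
        jdbc_table="foo",  # source JDBC table
        spark_jars="${SPARK_HOME}/jars/postgresql-42.2.12.jar",
        jdbc_driver="org.postgresql.Driver",
        metastore_table="bar",  # target table, written via saveAsTable
        save_mode="overwrite",
    )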
include:: ../../providers/src/airflow/providers/arangodb/CHANGELOG.rst diff --git a/docs/apache-airflow-providers-arangodb/operators/index.rst b/docs/apache-airflow-providers-arangodb/operators/index.rst index 33b4e10064c4..186e000a984b 100644 --- a/docs/apache-airflow-providers-arangodb/operators/index.rst +++ b/docs/apache-airflow-providers-arangodb/operators/index.rst @@ -32,7 +32,7 @@ Callable as you like. An example of listing all documents in the **students** collection can be implemented as follows: -.. exampleinclude:: /../../airflow/providers/arangodb/example_dags/example_arangodb.py +.. exampleinclude:: /../../providers/src/airflow/providers/arangodb/example_dags/example_arangodb.py :language: python :start-after: [START howto_aql_operator_arangodb] :end-before: [END howto_aql_operator_arangodb] @@ -40,7 +40,7 @@ An example of Listing all Documents in **students** collection can be implemente You can also provide a file template (.sql) to load the query; remember that the path is relative to the **dags/** folder, and if you want to use any other path, set **template_searchpath** when creating the **DAG** object: -.. exampleinclude:: /../../airflow/providers/arangodb/example_dags/example_arangodb.py +.. exampleinclude:: /../../providers/src/airflow/providers/arangodb/example_dags/example_arangodb.py :language: python :start-after: [START howto_aql_operator_template_file_arangodb] :end-before: [END howto_aql_operator_template_file_arangodb] @@ -53,14 +53,14 @@ AQL query in `ArangoDB `__. An example of waiting for a document in the **students** collection with the student name **judy** can be implemented as follows: -.. exampleinclude:: /../../airflow/providers/arangodb/example_dags/example_arangodb.py +.. exampleinclude:: /../../providers/src/airflow/providers/arangodb/example_dags/example_arangodb.py :language: python :start-after: [START howto_aql_sensor_arangodb] :end-before: [END howto_aql_sensor_arangodb] Similar to **AQLOperator**, you can also provide a file template to load the query: -.. exampleinclude:: /../../airflow/providers/arangodb/example_dags/example_arangodb.py +.. exampleinclude:: /../../providers/src/airflow/providers/arangodb/example_dags/example_arangodb.py :language: python :start-after: [START howto_aql_sensor_template_file_arangodb] :end-before: [END howto_aql_sensor_template_file_arangodb] diff --git a/docs/apache-airflow-providers-asana/changelog.rst b/docs/apache-airflow-providers-asana/changelog.rst index 92a6d94d0f8d..ae927055eb9f 100644 --- a/docs/apache-airflow-providers-asana/changelog.rst +++ b/docs/apache-airflow-providers-asana/changelog.rst @@ -22,4 +22,4 @@ .. IF YOU WANT TO MODIFY THIS FILE, YOU SHOULD MODIFY THE TEMPLATE `PROVIDER_CHANGELOG_TEMPLATE.rst.jinja2` IN the `dev/breeze/src/airflow_breeze/templates` DIRECTORY -.. include:: ../../airflow/providers/asana/CHANGELOG.rst +.. include:: ../../providers/src/airflow/providers/asana/CHANGELOG.rst diff --git a/docs/apache-airflow-providers-asana/index.rst b/docs/apache-airflow-providers-asana/index.rst index ce04f8b4ae87..edc88f080c3f 100644 --- a/docs/apache-airflow-providers-asana/index.rst +++ b/docs/apache-airflow-providers-asana/index.rst @@ -49,14 +49,14 @@ :maxdepth: 1 :caption: System tests - System Tests <_api/tests/system/providers/asana/index> + System Tests <_api/tests/system/asana/index> ..
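A hedged sketch of the ``AQLOperator`` usage that the relocated ArangoDB example documents (the collection name and query are illustrative):

.. code-block:: python

    from airflow.providers.arangodb.operators.arangodb import AQLOperator

    list_students = AQLOperator(
        task_id="aql_operator",
        query="FOR doc IN students RETURN doc",
        # Optional: post-process the result cursor returned by the hook.
        result_processor=lambda cursor: print([doc for doc in cursor]),
    )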
toctree:: :hidden: :maxdepth: 1 :caption: Resources - Example DAGs + Example DAGs PyPI Repository Installing from sources diff --git a/docs/apache-airflow-providers-asana/operators/asana.rst b/docs/apache-airflow-providers-asana/operators/asana.rst index aff9f9f51e3d..5c9cdd0def8a 100644 --- a/docs/apache-airflow-providers-asana/operators/asana.rst +++ b/docs/apache-airflow-providers-asana/operators/asana.rst @@ -86,7 +86,7 @@ the Asana connection to use to connect to your account (``conn_id``). There are `task attributes you can overwrite `_ through the ``task_parameters``. -.. exampleinclude:: /../../tests/system/providers/asana/example_asana.py +.. exampleinclude:: /../../providers/tests/system/asana/example_asana.py :language: python :dedent: 4 :start-after: [START asana_example_dag] diff --git a/docs/apache-airflow-providers-atlassian-jira/changelog.rst b/docs/apache-airflow-providers-atlassian-jira/changelog.rst index 6e1c12bdf403..5ea7dfff3dea 100644 --- a/docs/apache-airflow-providers-atlassian-jira/changelog.rst +++ b/docs/apache-airflow-providers-atlassian-jira/changelog.rst @@ -22,4 +22,4 @@ .. IF YOU WANT TO MODIFY THIS FILE, YOU SHOULD MODIFY THE TEMPLATE `PROVIDER_CHANGELOG_TEMPLATE.rst.jinja2` IN the `dev/breeze/src/airflow_breeze/templates` DIRECTORY -.. include:: ../../airflow/providers/atlassian/jira/CHANGELOG.rst +.. include:: ../../providers/src/airflow/providers/atlassian/jira/CHANGELOG.rst diff --git a/docs/apache-airflow-providers-celery/changelog.rst b/docs/apache-airflow-providers-celery/changelog.rst index b9abe5dbf53e..c1b38edc7d93 100644 --- a/docs/apache-airflow-providers-celery/changelog.rst +++ b/docs/apache-airflow-providers-celery/changelog.rst @@ -22,4 +22,4 @@ .. IF YOU WANT TO MODIFY THIS FILE, YOU SHOULD MODIFY THE TEMPLATE `PROVIDER_CHANGELOG_TEMPLATE.rst.jinja2` IN the `dev/breeze/src/airflow_breeze/templates` DIRECTORY -.. include:: ../../airflow/providers/celery/CHANGELOG.rst +.. include:: ../../providers/src/airflow/providers/celery/CHANGELOG.rst diff --git a/docs/apache-airflow-providers-cloudant/changelog.rst b/docs/apache-airflow-providers-cloudant/changelog.rst index 854f5fda01e2..d969e082c17b 100644 --- a/docs/apache-airflow-providers-cloudant/changelog.rst +++ b/docs/apache-airflow-providers-cloudant/changelog.rst @@ -22,4 +22,4 @@ .. IF YOU WANT TO MODIFY THIS FILE, YOU SHOULD MODIFY THE TEMPLATE `PROVIDER_CHANGELOG_TEMPLATE.rst.jinja2` IN the `dev/breeze/src/airflow_breeze/templates` DIRECTORY -.. include:: ../../airflow/providers/cloudant/CHANGELOG.rst +.. include:: ../../providers/src/airflow/providers/cloudant/CHANGELOG.rst diff --git a/docs/apache-airflow-providers-cncf-kubernetes/changelog.rst b/docs/apache-airflow-providers-cncf-kubernetes/changelog.rst index 493abbcdb848..6ad86cec6753 100644 --- a/docs/apache-airflow-providers-cncf-kubernetes/changelog.rst +++ b/docs/apache-airflow-providers-cncf-kubernetes/changelog.rst @@ -22,4 +22,4 @@ .. IF YOU WANT TO MODIFY THIS FILE, YOU SHOULD MODIFY THE TEMPLATE `PROVIDER_CHANGELOG_TEMPLATE.rst.jinja2` IN the `dev/breeze/src/airflow_breeze/templates` DIRECTORY -.. include:: ../../airflow/providers/cncf/kubernetes/CHANGELOG.rst +.. 
include:: ../../providers/src/airflow/providers/cncf/kubernetes/CHANGELOG.rst diff --git a/docs/apache-airflow-providers-cncf-kubernetes/index.rst b/docs/apache-airflow-providers-cncf-kubernetes/index.rst index 1db81c09a3f6..e0da7d26e1b4 100644 --- a/docs/apache-airflow-providers-cncf-kubernetes/index.rst +++ b/docs/apache-airflow-providers-cncf-kubernetes/index.rst @@ -59,14 +59,14 @@ :maxdepth: 1 :caption: System tests - System Tests <_api/tests/system/providers/cncf/kubernetes/index> + System Tests <_api/tests/system/cncf/kubernetes/index> .. toctree:: :hidden: :maxdepth: 1 :caption: Resources - Example DAGs + Example DAGs PyPI Repository Installing from sources diff --git a/docs/apache-airflow-providers-cncf-kubernetes/kubernetes_executor.rst b/docs/apache-airflow-providers-cncf-kubernetes/kubernetes_executor.rst index 57d0a2674221..a85a79371288 100644 --- a/docs/apache-airflow-providers-cncf-kubernetes/kubernetes_executor.rst +++ b/docs/apache-airflow-providers-cncf-kubernetes/kubernetes_executor.rst @@ -108,21 +108,21 @@ With these requirements in mind, here are some examples of basic ``pod_template_ Storing DAGs in the image: -.. literalinclude:: /../../airflow/providers/cncf/kubernetes/pod_template_file_examples/dags_in_image_template.yaml +.. literalinclude:: /../../providers/src/airflow/providers/cncf/kubernetes/pod_template_file_examples/dags_in_image_template.yaml :language: yaml :start-after: [START template_with_dags_in_image] :end-before: [END template_with_dags_in_image] Storing DAGs in a ``persistentVolume``: -.. literalinclude:: /../../airflow/providers/cncf/kubernetes/pod_template_file_examples/dags_in_volume_template.yaml +.. literalinclude:: /../../providers/src/airflow/providers/cncf/kubernetes/pod_template_file_examples/dags_in_volume_template.yaml :language: yaml :start-after: [START template_with_dags_in_volume] :end-before: [END template_with_dags_in_volume] Pulling DAGs from ``git``: -.. literalinclude:: /../../airflow/providers/cncf/kubernetes/pod_template_file_examples/git_sync_template.yaml +.. literalinclude:: /../../providers/src/airflow/providers/cncf/kubernetes/pod_template_file_examples/git_sync_template.yaml :language: yaml :start-after: [START git_sync_template] :end-before: [END git_sync_template] diff --git a/docs/apache-airflow-providers-cncf-kubernetes/operators.rst b/docs/apache-airflow-providers-cncf-kubernetes/operators.rst index 6d5b1414488a..2268a8655c98 100644 --- a/docs/apache-airflow-providers-cncf-kubernetes/operators.rst +++ b/docs/apache-airflow-providers-cncf-kubernetes/operators.rst @@ -102,7 +102,7 @@ Using this method will ensure correctness and type safety. While we have removed almost all Kubernetes convenience classes, we have kept the :class:`~airflow.providers.cncf.kubernetes.secret.Secret` class to simplify the process of generating secret volumes/env variables. -.. exampleinclude:: /../../tests/system/providers/cncf/kubernetes/example_kubernetes.py +.. exampleinclude:: /../../providers/tests/system/cncf/kubernetes/example_kubernetes.py :language: python :start-after: [START howto_operator_k8s_cluster_resources] :end-before: [END howto_operator_k8s_cluster_resources] @@ -135,21 +135,21 @@ Create the Secret using ``kubectl``: Then use it in your pod like so: -.. exampleinclude:: /../../tests/system/providers/cncf/kubernetes/example_kubernetes.py +.. 
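To illustrate the ``Secret`` convenience class mentioned above, a minimal sketch (the secret and key names are hypothetical):

.. code-block:: python

    from airflow.providers.cncf.kubernetes.operators.pod import KubernetesPodOperator
    from airflow.providers.cncf.kubernetes.secret import Secret

    # Expose one key of a Kubernetes Secret as an environment variable in the pod.
    secret_env = Secret(
        deploy_type="env",
        deploy_target="SQL_CONN",  # env var name inside the container
        secret="airflow-secrets",  # Kubernetes Secret name
        key="sql_alchemy_conn",  # key within that Secret
    )

    pod_task = KubernetesPodOperator(
        task_id="pod_with_secret",
        name="pod-with-secret",
        image="ubuntu:22.04",
        cmds=["bash", "-cx"],
        arguments=["echo $SQL_CONN"],
        secrets=[secret_env],
    )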
exampleinclude:: /../../providers/tests/system/cncf/kubernetes/example_kubernetes.py :language: python :start-after: [START howto_operator_k8s_private_image] :end-before: [END howto_operator_k8s_private_image] For this action you can also use the operator in deferrable mode: -.. exampleinclude:: /../../tests/system/providers/cncf/kubernetes/example_kubernetes_async.py +.. exampleinclude:: /../../providers/tests/system/cncf/kubernetes/example_kubernetes_async.py :language: python :start-after: [START howto_operator_k8s_private_image_async] :end-before: [END howto_operator_k8s_private_image_async] Example of fetching and displaying the container log periodically: -.. exampleinclude:: /../../tests/system/providers/cncf/kubernetes/example_kubernetes_async.py +.. exampleinclude:: /../../providers/tests/system/cncf/kubernetes/example_kubernetes_async.py :language: python :start-after: [START howto_operator_async_log] :end-before: [END howto_operator_async_log] @@ -168,7 +168,7 @@ alongside the Pod. The Pod must write the XCom value into this location at the ` See the following example on how this occurs: -.. exampleinclude:: /../../tests/system/providers/cncf/kubernetes/example_kubernetes.py +.. exampleinclude:: /../../providers/tests/system/cncf/kubernetes/example_kubernetes.py :language: python :start-after: [START howto_operator_k8s_write_xcom] :end-before: [END howto_operator_k8s_write_xcom] @@ -177,7 +177,7 @@ See the following example on how this occurs: For this action you can also use the operator in deferrable mode: -.. exampleinclude:: /../../tests/system/providers/cncf/kubernetes/example_kubernetes_async.py +.. exampleinclude:: /../../providers/tests/system/cncf/kubernetes/example_kubernetes_async.py :language: python :start-after: [START howto_operator_k8s_write_xcom_async] :end-before: [END howto_operator_k8s_write_xcom_async] @@ -621,7 +621,7 @@ request that dynamically launches this Job. Users can specify a kubeconfig file using the ``config_file`` parameter, otherwise the operator will default to ``~/.kube/config``. It also allows users to supply a template YAML file using the ``job_template_file`` parameter. -.. exampleinclude:: /../../tests/system/providers/cncf/kubernetes/example_kubernetes_job.py +.. exampleinclude:: /../../providers/tests/system/cncf/kubernetes/example_kubernetes_job.py :language: python :dedent: 4 :start-after: [START howto_operator_k8s_job] @@ -629,7 +629,7 @@ to ``~/.kube/config``. It also allows users to supply a template YAML file using The :class:`~airflow.providers.cncf.kubernetes.operators.job.KubernetesJobOperator` also supports deferrable mode: -.. exampleinclude:: /../../tests/system/providers/cncf/kubernetes/example_kubernetes_job.py +.. exampleinclude:: /../../providers/tests/system/cncf/kubernetes/example_kubernetes_job.py :language: python :dedent: 4 :start-after: [START howto_operator_k8s_job_deferrable] @@ -656,7 +656,7 @@ KubernetesDeleteJobOperator The :class:`~airflow.providers.cncf.kubernetes.operators.job.KubernetesDeleteJobOperator` allows you to delete Jobs on a Kubernetes cluster. -.. exampleinclude:: /../../tests/system/providers/cncf/kubernetes/example_kubernetes_job.py +.. exampleinclude:: /../../providers/tests/system/cncf/kubernetes/example_kubernetes_job.py :language: python :dedent: 4 :start-after: [START howto_operator_delete_k8s_job] @@ -671,7 +671,7 @@ KubernetesPatchJobOperator The :class:`~airflow.providers.cncf.kubernetes.operators.job.KubernetesPatchJobOperator` allows you to update Jobs on a Kubernetes cluster. -..
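A minimal ``KubernetesJobOperator`` sketch matching the description above (the image and command mirror the classic pi Job; ``config_file`` is shown commented out because the operator defaults to ``~/.kube/config``):

.. code-block:: python

    from airflow.providers.cncf.kubernetes.operators.job import KubernetesJobOperator

    k8s_job = KubernetesJobOperator(
        task_id="job-task",
        namespace="default",
        name="test-pi",
        image="perl:5.34.0",
        cmds=["perl", "-Mbignum=bpi", "-wle", "print bpi(2000)"],
        wait_until_job_complete=True,
        # config_file="/path/to/kubeconfig",
    )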
exampleinclude:: /../../tests/system/providers/cncf/kubernetes/example_kubernetes_job.py +.. exampleinclude:: /../../providers/tests/system/cncf/kubernetes/example_kubernetes_job.py :language: python :dedent: 4 :start-after: [START howto_operator_update_job] diff --git a/docs/apache-airflow-providers-cohere/changelog.rst b/docs/apache-airflow-providers-cohere/changelog.rst index f4e96e1909fb..aa7753b85cd6 100644 --- a/docs/apache-airflow-providers-cohere/changelog.rst +++ b/docs/apache-airflow-providers-cohere/changelog.rst @@ -22,4 +22,4 @@ .. IF YOU WANT TO MODIFY THIS FILE, YOU SHOULD MODIFY THE TEMPLATE `PROVIDER_CHANGELOG_TEMPLATE.rst.jinja2` IN the `dev/breeze/src/airflow_breeze/templates` DIRECTORY -.. include:: ../../airflow/providers/cohere/CHANGELOG.rst +.. include:: ../../providers/src/airflow/providers/cohere/CHANGELOG.rst diff --git a/docs/apache-airflow-providers-cohere/index.rst b/docs/apache-airflow-providers-cohere/index.rst index 8909c2bd72db..3ae62208a98f 100644 --- a/docs/apache-airflow-providers-cohere/index.rst +++ b/docs/apache-airflow-providers-cohere/index.rst @@ -51,7 +51,7 @@ :maxdepth: 1 :caption: System tests - System Tests <_api/tests/system/providers/cohere/index> + System Tests <_api/tests/system/cohere/index> .. THE REMAINDER OF THE FILE IS AUTOMATICALLY GENERATED. IT WILL BE OVERWRITTEN AT RELEASE TIME! diff --git a/docs/apache-airflow-providers-cohere/operators/embedding.rst b/docs/apache-airflow-providers-cohere/operators/embedding.rst index b765fe8e9d07..18adf619ce6d 100644 --- a/docs/apache-airflow-providers-cohere/operators/embedding.rst +++ b/docs/apache-airflow-providers-cohere/operators/embedding.rst @@ -33,7 +33,7 @@ connect to your account. Example Code: ------------- -.. exampleinclude:: /../../tests/system/providers/cohere/example_cohere_embedding_operator.py +.. exampleinclude:: /../../providers/tests/system/cohere/example_cohere_embedding_operator.py :language: python :dedent: 4 :start-after: [START howto_operator_cohere_embedding] diff --git a/docs/apache-airflow-providers-common-compat/changelog.rst b/docs/apache-airflow-providers-common-compat/changelog.rst index 32438992010a..074a237efcc1 100644 --- a/docs/apache-airflow-providers-common-compat/changelog.rst +++ b/docs/apache-airflow-providers-common-compat/changelog.rst @@ -22,4 +22,4 @@ .. IF YOU WANT TO MODIFY THIS FILE, YOU SHOULD MODIFY THE TEMPLATE `PROVIDER_CHANGELOG_TEMPLATE.rst.jinja2` IN the `dev/breeze/src/airflow_breeze/templates` DIRECTORY -.. include:: ../../airflow/providers/common/compat/CHANGELOG.rst +.. include:: ../../providers/src/airflow/providers/common/compat/CHANGELOG.rst diff --git a/docs/apache-airflow-providers-common-io/changelog.rst b/docs/apache-airflow-providers-common-io/changelog.rst index 16dec7d266c3..8662eabd816e 100644 --- a/docs/apache-airflow-providers-common-io/changelog.rst +++ b/docs/apache-airflow-providers-common-io/changelog.rst @@ -22,4 +22,4 @@ .. IF YOU WANT TO MODIFY THIS FILE, YOU SHOULD MODIFY THE TEMPLATE `PROVIDER_CHANGELOG_TEMPLATE.rst.jinja2` IN the `dev/breeze/src/airflow_breeze/templates` DIRECTORY -.. include:: ../../airflow/providers/common/io/CHANGELOG.rst +.. 
include:: ../../providers/src/airflow/providers/common/io/CHANGELOG.rst diff --git a/docs/apache-airflow-providers-common-io/index.rst b/docs/apache-airflow-providers-common-io/index.rst index 606b6c5161bc..ac0f1433d20c 100644 --- a/docs/apache-airflow-providers-common-io/index.rst +++ b/docs/apache-airflow-providers-common-io/index.rst @@ -51,14 +51,14 @@ :maxdepth: 1 :caption: System tests - System Tests <_api/tests/system/providers/common/io/index> + System Tests <_api/tests/system/common/io/index> .. toctree:: :hidden: :maxdepth: 1 :caption: Resources - Example DAGs + Example DAGs PyPI Repository Installing from sources diff --git a/docs/apache-airflow-providers-common-io/operators.rst b/docs/apache-airflow-providers-common-io/operators.rst index 12b4a1c207ff..fee5bb8eac5f 100644 --- a/docs/apache-airflow-providers-common-io/operators.rst +++ b/docs/apache-airflow-providers-common-io/operators.rst @@ -40,7 +40,7 @@ Otherwise the data will be streamed from the source to the destination. The example below shows how to instantiate the FileTransferOperator task. -.. exampleinclude:: /../../tests/system/providers/common/io/example_file_transfer_local_to_s3.py +.. exampleinclude:: /../../providers/tests/system/common/io/example_file_transfer_local_to_s3.py :language: python :dedent: 4 :start-after: [START howto_transfer_local_to_s3] diff --git a/docs/apache-airflow-providers-common-sql/changelog.rst b/docs/apache-airflow-providers-common-sql/changelog.rst index b1f08fee1ab6..d071c9fc1a1e 100644 --- a/docs/apache-airflow-providers-common-sql/changelog.rst +++ b/docs/apache-airflow-providers-common-sql/changelog.rst @@ -22,4 +22,4 @@ .. IF YOU WANT TO MODIFY THIS FILE, YOU SHOULD MODIFY THE TEMPLATE `PROVIDER_CHANGELOG_TEMPLATE.rst.jinja2` IN the `dev/breeze/src/airflow_breeze/templates` DIRECTORY -.. include:: ../../airflow/providers/common/sql/CHANGELOG.rst +.. include:: ../../providers/src/airflow/providers/common/sql/CHANGELOG.rst diff --git a/docs/apache-airflow-providers-common-sql/index.rst b/docs/apache-airflow-providers-common-sql/index.rst index f764b57a8b68..2d554bf3da26 100644 --- a/docs/apache-airflow-providers-common-sql/index.rst +++ b/docs/apache-airflow-providers-common-sql/index.rst @@ -49,14 +49,14 @@ :maxdepth: 1 :caption: System tests - System Tests <_api/tests/system/providers/common/sql/index> + System Tests <_api/tests/system/common/sql/index> .. toctree:: :hidden: :maxdepth: 1 :caption: Resources - Example DAGs + Example DAGs PyPI Repository Installing from sources diff --git a/docs/apache-airflow-providers-common-sql/operators.rst b/docs/apache-airflow-providers-common-sql/operators.rst index bc725be418c0..e6e24eefb001 100644 --- a/docs/apache-airflow-providers-common-sql/operators.rst +++ b/docs/apache-airflow-providers-common-sql/operators.rst @@ -38,7 +38,7 @@ different databases. Parameters of the operators are: The example below shows how to instantiate the SQLExecuteQueryOperator task. -.. exampleinclude:: /../../tests/system/providers/common/sql/example_sql_execute_query.py +.. exampleinclude:: /../../providers/tests/system/common/sql/example_sql_execute_query.py :language: python :dedent: 4 :start-after: [START howto_operator_sql_execute_query] @@ -101,7 +101,7 @@ empty tables to return valid integers. The below example demonstrates how to instantiate the SQLColumnCheckOperator task. -.. exampleinclude:: /../../tests/system/providers/common/sql/example_sql_column_table_check.py +.. 
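A sketch of the generic ``SQLExecuteQueryOperator`` instantiation discussed above (the connection id is a placeholder for any Database-API-backed connection):

.. code-block:: python

    from airflow.providers.common.sql.operators.sql import SQLExecuteQueryOperator

    execute_query = SQLExecuteQueryOperator(
        task_id="execute_query",
        conn_id="my_db_conn",
        sql="SELECT 1; SELECT 2;",
        split_statements=True,  # run the statements one by one
        return_last=False,  # return results of all statements, not only the last
    )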
exampleinclude:: /../../providers/tests/system/common/sql/example_sql_column_table_check.py :language: python :dedent: 4 :start-after: [START howto_operator_sql_column_check] @@ -141,7 +141,7 @@ checks. The below example demonstrates how to instantiate the SQLTableCheckOperator task. -.. exampleinclude:: /../../tests/system/providers/common/sql/example_sql_column_table_check.py +.. exampleinclude:: /../../providers/tests/system/common/sql/example_sql_column_table_check.py :language: python :dedent: 4 :start-after: [START howto_operator_sql_table_check] diff --git a/docs/apache-airflow-providers-databricks/changelog.rst b/docs/apache-airflow-providers-databricks/changelog.rst index 87771d0f5199..8e4f5f126efd 100644 --- a/docs/apache-airflow-providers-databricks/changelog.rst +++ b/docs/apache-airflow-providers-databricks/changelog.rst @@ -22,4 +22,4 @@ .. IF YOU WANT TO MODIFY THIS FILE, YOU SHOULD MODIFY THE TEMPLATE `PROVIDER_CHANGELOG_TEMPLATE.rst.jinja2` IN the `dev/breeze/src/airflow_breeze/templates` DIRECTORY -.. include:: ../../airflow/providers/databricks/CHANGELOG.rst +.. include:: ../../providers/src/airflow/providers/databricks/CHANGELOG.rst diff --git a/docs/apache-airflow-providers-databricks/index.rst b/docs/apache-airflow-providers-databricks/index.rst index 5a124818ea4d..32cd75c0ef2b 100644 --- a/docs/apache-airflow-providers-databricks/index.rst +++ b/docs/apache-airflow-providers-databricks/index.rst @@ -50,14 +50,14 @@ :maxdepth: 1 :caption: System tests - System Tests <_api/tests/system/providers/databricks/index> + System Tests <_api/tests/system/databricks/index> .. toctree:: :hidden: :maxdepth: 1 :caption: Resources - Example DAGs + Example DAGs PyPI Repository Installing from sources diff --git a/docs/apache-airflow-providers-databricks/operators/copy_into.rst b/docs/apache-airflow-providers-databricks/operators/copy_into.rst index 71a3fa9e89ad..56eb20c66228 100644 --- a/docs/apache-airflow-providers-databricks/operators/copy_into.rst +++ b/docs/apache-airflow-providers-databricks/operators/copy_into.rst @@ -46,7 +46,7 @@ Importing CSV data An example usage of the DatabricksCopyIntoOperator to import CSV data into a table is as follows: -.. exampleinclude:: /../../tests/system/providers/databricks/example_databricks_sql.py +.. exampleinclude:: /../../providers/tests/system/databricks/example_databricks_sql.py :language: python :start-after: [START howto_operator_databricks_copy_into] :end-before: [END howto_operator_databricks_copy_into] diff --git a/docs/apache-airflow-providers-databricks/operators/jobs_create.rst b/docs/apache-airflow-providers-databricks/operators/jobs_create.rst index 7e6765eba420..621423f83f32 100644 --- a/docs/apache-airflow-providers-databricks/operators/jobs_create.rst +++ b/docs/apache-airflow-providers-databricks/operators/jobs_create.rst @@ -66,7 +66,7 @@ Specifying parameters as JSON An example usage of the DatabricksCreateJobsOperator is as follows: -.. exampleinclude:: /../../tests/system/providers/databricks/example_databricks.py +.. exampleinclude:: /../../providers/tests/system/databricks/example_databricks.py :language: python :start-after: [START howto_operator_databricks_jobs_create_json] :end-before: [END howto_operator_databricks_jobs_create_json] @@ -76,7 +76,7 @@ Using named parameters You can also use named parameters to initialize the operator and run the job. -.. exampleinclude:: /../../tests/system/providers/databricks/example_databricks.py +.. 
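A combined sketch of the column-level and table-level checks described above (the table, column, and thresholds are hypothetical):

.. code-block:: python

    from airflow.providers.common.sql.operators.sql import (
        SQLColumnCheckOperator,
        SQLTableCheckOperator,
    )

    column_check = SQLColumnCheckOperator(
        task_id="column_check",
        conn_id="my_db_conn",
        table="employees",
        column_mapping={
            "id": {
                "null_check": {"equal_to": 0},  # no NULLs allowed
                "distinct_check": {"geq_to": 1},  # at least one distinct value
            },
        },
    )

    table_check = SQLTableCheckOperator(
        task_id="table_check",
        conn_id="my_db_conn",
        table="employees",
        checks={"row_count_check": {"check_statement": "COUNT(*) >= 10"}},
    )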
exampleinclude:: /../../providers/tests/system/databricks/example_databricks.py :language: python :start-after: [START howto_operator_databricks_jobs_create_named] :end-before: [END howto_operator_databricks_jobs_create_named] @@ -87,7 +87,7 @@ Pairing with DatabricksRunNowOperator You can use the ``job_id`` that is returned by the DatabricksCreateJobsOperator in the return_value XCom as an argument to the DatabricksRunNowOperator to run the job. -.. exampleinclude:: /../../tests/system/providers/databricks/example_databricks.py +.. exampleinclude:: /../../providers/tests/system/databricks/example_databricks.py :language: python :start-after: [START howto_operator_databricks_run_now] :end-before: [END howto_operator_databricks_run_now] diff --git a/docs/apache-airflow-providers-databricks/operators/notebook.rst b/docs/apache-airflow-providers-databricks/operators/notebook.rst index b87d0d20e6f5..bf7b04ca74ca 100644 --- a/docs/apache-airflow-providers-databricks/operators/notebook.rst +++ b/docs/apache-airflow-providers-databricks/operators/notebook.rst @@ -31,14 +31,14 @@ Examples Running a notebook in Databricks on a new cluster ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ -.. exampleinclude:: /../../tests/system/providers/databricks/example_databricks.py +.. exampleinclude:: /../../providers/tests/system/databricks/example_databricks.py :language: python :start-after: [START howto_operator_databricks_notebook_new_cluster] :end-before: [END howto_operator_databricks_notebook_new_cluster] Running a notebook in Databricks on an existing cluster ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ -.. exampleinclude:: /../../tests/system/providers/databricks/example_databricks.py +.. exampleinclude:: /../../providers/tests/system/databricks/example_databricks.py :language: python :start-after: [START howto_operator_databricks_notebook_existing_cluster] :end-before: [END howto_operator_databricks_notebook_existing_cluster] diff --git a/docs/apache-airflow-providers-databricks/operators/repos_create.rst b/docs/apache-airflow-providers-databricks/operators/repos_create.rst index 6611a51cd6c1..6b60eae27870 100644 --- a/docs/apache-airflow-providers-databricks/operators/repos_create.rst +++ b/docs/apache-airflow-providers-databricks/operators/repos_create.rst @@ -63,7 +63,7 @@ Create a Databricks Repo An example usage of the DatabricksReposCreateOperator is as follows: -.. exampleinclude:: /../../tests/system/providers/databricks/example_databricks_repos.py +.. exampleinclude:: /../../providers/tests/system/databricks/example_databricks_repos.py :language: python :start-after: [START howto_operator_databricks_repo_create] :end-before: [END howto_operator_databricks_repo_create] diff --git a/docs/apache-airflow-providers-databricks/operators/repos_delete.rst b/docs/apache-airflow-providers-databricks/operators/repos_delete.rst index 74d4b62972a1..3186dd131dff 100644 --- a/docs/apache-airflow-providers-databricks/operators/repos_delete.rst +++ b/docs/apache-airflow-providers-databricks/operators/repos_delete.rst @@ -55,7 +55,7 @@ Deleting Databricks Repo by specifying path An example usage of the DatabricksReposDeleteOperator is as follows: -.. exampleinclude:: /../../tests/system/providers/databricks/example_databricks_repos.py +.. 
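A sketch of the create-then-run pairing described above (the cluster spec and notebook path are assumptions; the create operator pushes the resulting ``job_id`` to XCom):

.. code-block:: python

    from airflow.providers.databricks.operators.databricks import (
        DatabricksCreateJobsOperator,
        DatabricksRunNowOperator,
    )

    jobs_create = DatabricksCreateJobsOperator(
        task_id="jobs_create",
        name="example-job",
        tasks=[
            {
                "task_key": "notebook_task",
                "notebook_task": {"notebook_path": "/Shared/example"},
                "new_cluster": {
                    "spark_version": "11.3.x-scala2.12",
                    "node_type_id": "i3.xlarge",
                    "num_workers": 1,
                },
            }
        ],
    )

    # Feed the job_id returned via XCom straight into DatabricksRunNowOperator.
    run_now = DatabricksRunNowOperator(task_id="run_now", job_id=jobs_create.output)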
exampleinclude:: /../../providers/tests/system/databricks/example_databricks_repos.py :language: python :start-after: [START howto_operator_databricks_repo_delete] :end-before: [END howto_operator_databricks_repo_delete] diff --git a/docs/apache-airflow-providers-databricks/operators/repos_update.rst b/docs/apache-airflow-providers-databricks/operators/repos_update.rst index 56af4edabbcf..6893ee0107e8 100644 --- a/docs/apache-airflow-providers-databricks/operators/repos_update.rst +++ b/docs/apache-airflow-providers-databricks/operators/repos_update.rst @@ -60,7 +60,7 @@ Updating Databricks Repo by specifying path An example usage of the DatabricksReposUpdateOperator is as follows: -.. exampleinclude:: /../../tests/system/providers/databricks/example_databricks_repos.py +.. exampleinclude:: /../../providers/tests/system/databricks/example_databricks_repos.py :language: python :start-after: [START howto_operator_databricks_repo_update] :end-before: [END howto_operator_databricks_repo_update] diff --git a/docs/apache-airflow-providers-databricks/operators/sql.rst b/docs/apache-airflow-providers-databricks/operators/sql.rst index 55bbf6475856..33acacae1c6b 100644 --- a/docs/apache-airflow-providers-databricks/operators/sql.rst +++ b/docs/apache-airflow-providers-databricks/operators/sql.rst @@ -49,7 +49,7 @@ Selecting data An example usage of the DatabricksSqlOperator to select data from a table is as follows: -.. exampleinclude:: /../../tests/system/providers/databricks/example_databricks_sql.py +.. exampleinclude:: /../../providers/tests/system/databricks/example_databricks_sql.py :language: python :start-after: [START howto_operator_databricks_sql_select] :end-before: [END howto_operator_databricks_sql_select] @@ -59,7 +59,7 @@ Selecting data into a file An example usage of the DatabricksSqlOperator to select data from a table and store in a file is as follows: -.. exampleinclude:: /../../tests/system/providers/databricks/example_databricks_sql.py +.. exampleinclude:: /../../providers/tests/system/databricks/example_databricks_sql.py :language: python :start-after: [START howto_operator_databricks_sql_select_file] :end-before: [END howto_operator_databricks_sql_select_file] @@ -69,7 +69,7 @@ Executing multiple statements An example usage of the DatabricksSqlOperator to perform multiple SQL statements is as follows: -.. exampleinclude:: /../../tests/system/providers/databricks/example_databricks_sql.py +.. exampleinclude:: /../../providers/tests/system/databricks/example_databricks_sql.py :language: python :start-after: [START howto_operator_databricks_sql_multiple] :end-before: [END howto_operator_databricks_sql_multiple] @@ -80,7 +80,7 @@ Executing multiple statements from a file An example usage of the DatabricksSqlOperator to perform statements from a file is as follows: -.. exampleinclude:: /../../tests/system/providers/databricks/example_databricks_sql.py +.. exampleinclude:: /../../providers/tests/system/databricks/example_databricks_sql.py :language: python :start-after: [START howto_operator_databricks_sql_multiple_file] :end-before: [END howto_operator_databricks_sql_multiple_file] @@ -107,7 +107,7 @@ Examples -------- Configuring Databricks connection to be used with the Sensor. -.. exampleinclude:: /../../tests/system/providers/databricks/example_databricks_sensors.py +.. 
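A sketch of the ``DatabricksSqlOperator`` usages covered by the hunks above (the endpoint name, table, and output path are illustrative):

.. code-block:: python

    from airflow.providers.databricks.operators.databricks_sql import DatabricksSqlOperator

    select = DatabricksSqlOperator(
        task_id="select_data",
        sql_endpoint_name="test_endpoint",
        sql="SELECT * FROM default.my_airflow_table",
    )

    select_into_file = DatabricksSqlOperator(
        task_id="select_data_file",
        sql_endpoint_name="test_endpoint",
        sql="SELECT * FROM default.my_airflow_table",
        output_path="/tmp/result.csv",  # write the result set to a file
        output_format="csv",
    )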
exampleinclude:: /../../providers/tests/system/databricks/example_databricks_sensors.py :language: python :dedent: 4 :start-after: [START howto_sensor_databricks_connection_setup] @@ -115,7 +115,7 @@ Configuring Databricks connection to be used with the Sensor. Poking the specific table with the SQL statement: -.. exampleinclude:: /../../tests/system/providers/databricks/example_databricks_sensors.py +.. exampleinclude:: /../../providers/tests/system/databricks/example_databricks_sensors.py :language: python :dedent: 4 :start-after: [START howto_sensor_databricks_sql] @@ -154,7 +154,7 @@ Examples -------- Configuring Databricks connection to be used with the Sensor. -.. exampleinclude:: /../../tests/system/providers/databricks/example_databricks_sensors.py +.. exampleinclude:: /../../providers/tests/system/databricks/example_databricks_sensors.py :language: python :dedent: 4 :start-after: [START howto_sensor_databricks_connection_setup] @@ -162,7 +162,7 @@ Configuring Databricks connection to be used with the Sensor. Poking the specific table for existence of data/partition: -.. exampleinclude:: /../../tests/system/providers/databricks/example_databricks_sensors.py +.. exampleinclude:: /../../providers/tests/system/databricks/example_databricks_sensors.py :language: python :dedent: 4 :start-after: [START howto_sensor_databricks_partition] diff --git a/docs/apache-airflow-providers-databricks/operators/submit_run.rst b/docs/apache-airflow-providers-databricks/operators/submit_run.rst index 706920458c64..10548583cfa3 100644 --- a/docs/apache-airflow-providers-databricks/operators/submit_run.rst +++ b/docs/apache-airflow-providers-databricks/operators/submit_run.rst @@ -113,7 +113,7 @@ Specifying parameters as JSON An example usage of the DatabricksSubmitRunOperator is as follows: -.. exampleinclude:: /../../tests/system/providers/databricks/example_databricks.py +.. exampleinclude:: /../../providers/tests/system/databricks/example_databricks.py :language: python :start-after: [START howto_operator_databricks_json] :end-before: [END howto_operator_databricks_json] @@ -123,7 +123,7 @@ Using named parameters You can also use named parameters to initialize the operator and run the job. -.. exampleinclude:: /../../tests/system/providers/databricks/example_databricks.py +.. exampleinclude:: /../../providers/tests/system/databricks/example_databricks.py :language: python :start-after: [START howto_operator_databricks_named] :end-before: [END howto_operator_databricks_named] diff --git a/docs/apache-airflow-providers-databricks/operators/task.rst b/docs/apache-airflow-providers-databricks/operators/task.rst index 476e72c494b9..331481d915c4 100644 --- a/docs/apache-airflow-providers-databricks/operators/task.rst +++ b/docs/apache-airflow-providers-databricks/operators/task.rst @@ -33,14 +33,14 @@ Examples Running a notebook in Databricks using DatabricksTaskOperator ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ -.. exampleinclude:: /../../tests/system/providers/databricks/example_databricks.py +.. exampleinclude:: /../../providers/tests/system/databricks/example_databricks.py :language: python :start-after: [START howto_operator_databricks_task_notebook] :end-before: [END howto_operator_databricks_task_notebook] Running a SQL query in Databricks using DatabricksTaskOperator ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ -.. exampleinclude:: /../../tests/system/providers/databricks/example_databricks.py +.. 
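The JSON and named-parameter forms described above, as a brief sketch (the cluster spec and notebook path are assumptions):

.. code-block:: python

    from airflow.providers.databricks.operators.databricks import DatabricksSubmitRunOperator

    new_cluster = {
        "spark_version": "11.3.x-scala2.12",
        "node_type_id": "i3.xlarge",
        "num_workers": 2,
    }
    notebook_task = {"notebook_path": "/Users/airflow@example.com/PrepareData"}

    # JSON form: everything inside one ``json`` parameter.
    notebook_run = DatabricksSubmitRunOperator(
        task_id="notebook_run",
        json={"new_cluster": new_cluster, "notebook_task": notebook_task},
    )

    # Equivalent named-parameter form.
    notebook_run_named = DatabricksSubmitRunOperator(
        task_id="notebook_run_named",
        new_cluster=new_cluster,
        notebook_task=notebook_task,
    )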
exampleinclude:: /../../providers/tests/system/databricks/example_databricks.py :language: python :start-after: [START howto_operator_databricks_task_sql] :end-before: [END howto_operator_databricks_task_sql] diff --git a/docs/apache-airflow-providers-databricks/operators/workflow.rst b/docs/apache-airflow-providers-databricks/operators/workflow.rst index 6da38add6669..b5c81050143a 100644 --- a/docs/apache-airflow-providers-databricks/operators/workflow.rst +++ b/docs/apache-airflow-providers-databricks/operators/workflow.rst @@ -45,7 +45,7 @@ Examples Example of what a DAG looks like with a DatabricksWorkflowTaskGroup ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ -.. exampleinclude:: /../../tests/system/providers/databricks/example_databricks_workflow.py +.. exampleinclude:: /../../providers/tests/system/databricks/example_databricks_workflow.py :language: python :start-after: [START howto_databricks_workflow_notebook] :end-before: [END howto_databricks_workflow_notebook] diff --git a/docs/apache-airflow-providers-datadog/changelog.rst b/docs/apache-airflow-providers-datadog/changelog.rst index e392f2d04075..43f0ce3e63a0 100644 --- a/docs/apache-airflow-providers-datadog/changelog.rst +++ b/docs/apache-airflow-providers-datadog/changelog.rst @@ -22,4 +22,4 @@ .. IF YOU WANT TO MODIFY THIS FILE, YOU SHOULD MODIFY THE TEMPLATE `PROVIDER_CHANGELOG_TEMPLATE.rst.jinja2` IN the `dev/breeze/src/airflow_breeze/templates` DIRECTORY -.. include:: ../../airflow/providers/datadog/CHANGELOG.rst +.. include:: ../../providers/src/airflow/providers/datadog/CHANGELOG.rst diff --git a/docs/apache-airflow-providers-dbt-cloud/changelog.rst b/docs/apache-airflow-providers-dbt-cloud/changelog.rst index f4b14c26b003..be4203ad0c94 100644 --- a/docs/apache-airflow-providers-dbt-cloud/changelog.rst +++ b/docs/apache-airflow-providers-dbt-cloud/changelog.rst @@ -22,4 +22,4 @@ .. IF YOU WANT TO MODIFY THIS FILE, YOU SHOULD MODIFY THE TEMPLATE `PROVIDER_CHANGELOG_TEMPLATE.rst.jinja2` IN the `dev/breeze/src/airflow_breeze/templates` DIRECTORY -.. include:: ../../airflow/providers/dbt/cloud/CHANGELOG.rst +.. include:: ../../providers/src/airflow/providers/dbt/cloud/CHANGELOG.rst diff --git a/docs/apache-airflow-providers-dbt-cloud/index.rst b/docs/apache-airflow-providers-dbt-cloud/index.rst index f9f214af8323..82d72999eaee 100644 --- a/docs/apache-airflow-providers-dbt-cloud/index.rst +++ b/docs/apache-airflow-providers-dbt-cloud/index.rst @@ -53,14 +53,14 @@ an Integrated Developer Environment (IDE). :maxdepth: 1 :caption: System tests - System Tests <_api/tests/system/providers/dbt/cloud/index> + System Tests <_api/tests/system/dbt/cloud/index> .. toctree:: :hidden: :maxdepth: 1 :caption: Resources - Example DAGs + Example DAGs PyPI Repository Installing from sources diff --git a/docs/apache-airflow-providers-dbt-cloud/operators.rst b/docs/apache-airflow-providers-dbt-cloud/operators.rst index af5b900d2340..eaa285f6d408 100644 --- a/docs/apache-airflow-providers-dbt-cloud/operators.rst +++ b/docs/apache-airflow-providers-dbt-cloud/operators.rst @@ -67,7 +67,7 @@ The below examples demonstrate how to instantiate DbtCloudRunJobOperator tasks w asynchronous waiting for run termination, respectively. To note, the ``account_id`` for the operators is referenced within the ``default_args`` of the example DAG. -.. exampleinclude:: /../../tests/system/providers/dbt/cloud/example_dbt_cloud.py +.. 
exampleinclude:: /../../providers/tests/system/dbt/cloud/example_dbt_cloud.py :language: python :dedent: 4 :start-after: [START howto_operator_dbt_cloud_run_job] @@ -76,7 +76,7 @@ referenced within the ``default_args`` of the example DAG. This next example also shows how to pass in custom runtime configuration (in this case for ``threads_override``) via the ``additional_run_config`` dictionary. -.. exampleinclude:: /../../tests/system/providers/dbt/cloud/example_dbt_cloud.py +.. exampleinclude:: /../../providers/tests/system/dbt/cloud/example_dbt_cloud.py :language: python :dedent: 4 :start-after: [START howto_operator_dbt_cloud_run_job_async] @@ -95,7 +95,7 @@ In the example below, the ``run_id`` value in the example below comes from the o DbtCloudRunJobOperator task by utilizing the ``.output`` property exposed for all operators. Also, to note, the ``account_id`` for the task is referenced within the ``default_args`` of the example DAG. -.. exampleinclude:: /../../tests/system/providers/dbt/cloud/example_dbt_cloud.py +.. exampleinclude:: /../../providers/tests/system/dbt/cloud/example_dbt_cloud.py :language: python :dedent: 4 :start-after: [START howto_operator_dbt_cloud_run_job_sensor] @@ -104,7 +104,7 @@ the ``account_id`` for the task is referenced within the ``default_args`` of the Also, you can poll for status of the job run asynchronously using ``deferrable`` mode. In this mode, worker slots are freed up while the sensor is running. -.. exampleinclude:: /../../tests/system/providers/dbt/cloud/example_dbt_cloud.py +.. exampleinclude:: /../../providers/tests/system/dbt/cloud/example_dbt_cloud.py :language: python :dedent: 4 :start-after: [START howto_operator_dbt_cloud_run_job_sensor_deferred] @@ -125,7 +125,7 @@ downloaded. For more information on dbt Cloud artifacts, reference `this documentation `__. -.. exampleinclude:: /../../tests/system/providers/dbt/cloud/example_dbt_cloud.py +.. exampleinclude:: /../../providers/tests/system/dbt/cloud/example_dbt_cloud.py :language: python :dedent: 4 :start-after: [START howto_operator_dbt_cloud_get_artifact] @@ -146,7 +146,7 @@ If a ``project_id`` is supplied, only jobs pertaining to this project id will be For more information on dbt Cloud list jobs, reference `this documentation `__. -.. exampleinclude:: /../../tests/system/providers/dbt/cloud/example_dbt_cloud.py +.. exampleinclude:: /../../providers/tests/system/dbt/cloud/example_dbt_cloud.py :language: python :dedent: 4 :start-after: [START howto_operator_dbt_cloud_list_jobs] diff --git a/docs/apache-airflow-providers-dingding/changelog.rst b/docs/apache-airflow-providers-dingding/changelog.rst index 43f13ec33466..7bf7561b35db 100644 --- a/docs/apache-airflow-providers-dingding/changelog.rst +++ b/docs/apache-airflow-providers-dingding/changelog.rst @@ -22,4 +22,4 @@ .. IF YOU WANT TO MODIFY THIS FILE, YOU SHOULD MODIFY THE TEMPLATE `PROVIDER_CHANGELOG_TEMPLATE.rst.jinja2` IN the `dev/breeze/src/airflow_breeze/templates` DIRECTORY -.. include:: ../../airflow/providers/dingding/CHANGELOG.rst +.. 
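A sketch of triggering a dbt Cloud job and waiting for it separately, as the text above describes (the ``job_id`` is hypothetical; the ``account_id`` is assumed to come from ``default_args``):

.. code-block:: python

    from airflow.providers.dbt.cloud.operators.dbt import DbtCloudRunJobOperator
    from airflow.providers.dbt.cloud.sensors.dbt import DbtCloudJobRunSensor

    trigger_job = DbtCloudRunJobOperator(
        task_id="trigger_dbt_cloud_job_run",
        job_id=48617,  # hypothetical dbt Cloud job id
        wait_for_termination=False,  # fire and forget; the sensor below waits
        check_interval=10,
    )

    job_run_sensor = DbtCloudJobRunSensor(
        task_id="job_run_sensor",
        run_id=trigger_job.output,  # run id pushed to XCom by the operator
        timeout=3600,
    )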
include:: ../../providers/src/airflow/providers/dingding/CHANGELOG.rst diff --git a/docs/apache-airflow-providers-dingding/index.rst b/docs/apache-airflow-providers-dingding/index.rst index be557b9bc5a1..fd799c323c74 100644 --- a/docs/apache-airflow-providers-dingding/index.rst +++ b/docs/apache-airflow-providers-dingding/index.rst @@ -48,14 +48,14 @@ :maxdepth: 1 :caption: System tests - System Tests <_api/tests/system/providers/dingding/index> + System Tests <_api/tests/system/dingding/index> .. toctree:: :hidden: :maxdepth: 1 :caption: Resources - Example DAGs + Example DAGs PyPI Repository Installing from sources diff --git a/docs/apache-airflow-providers-dingding/operators.rst b/docs/apache-airflow-providers-dingding/operators.rst index dc3ca1b5ffe9..600c321b9f1f 100644 --- a/docs/apache-airflow-providers-dingding/operators.rst +++ b/docs/apache-airflow-providers-dingding/operators.rst @@ -38,7 +38,7 @@ Basic Usage Use the :class:`~airflow.providers.dingding.operators.dingding.DingdingOperator` to send messages through `DingTalk Custom Robot `__: -.. exampleinclude:: /../../tests/system/providers/dingding/example_dingding.py +.. exampleinclude:: /../../providers/tests/system/dingding/example_dingding.py :language: python :dedent: 4 :start-after: [START howto_operator_dingding] @@ -51,7 +51,7 @@ Remind users in message Use the ``at_mobiles`` and ``at_all`` parameters to remind specific users when you send a message; ``at_mobiles`` will be ignored when ``at_all`` is set to ``True``: -.. exampleinclude:: /../../tests/system/providers/dingding/example_dingding.py +.. exampleinclude:: /../../providers/tests/system/dingding/example_dingding.py :language: python :dedent: 4 :start-after: [START howto_operator_dingding_remind_users] @@ -66,7 +66,7 @@ can send rich text messages including link, markdown, actionCard and feedCard through `DingTalk Custom Robot `__. A rich text message cannot remind specific users except by using a markdown-type message: -.. exampleinclude:: /../../tests/system/providers/dingding/example_dingding.py +.. exampleinclude:: /../../providers/tests/system/dingding/example_dingding.py :language: python :dedent: 4 :start-after: [START howto_operator_dingding_rich_text] @@ -80,7 +80,7 @@ Sending messages from a Task callback and then pass the function to ``sla_miss_callback``, ``on_success_callback``, ``on_failure_callback``, or ``on_retry_callback``. Here we use ``on_failure_callback`` as an example: -.. exampleinclude:: /../../tests/system/providers/dingding/example_dingding.py +.. exampleinclude:: /../../providers/tests/system/dingding/example_dingding.py :language: python :start-after: [START howto_operator_dingding_failure_callback] :end-before: [END howto_operator_dingding_failure_callback] diff --git a/docs/apache-airflow-providers-discord/changelog.rst b/docs/apache-airflow-providers-discord/changelog.rst index c5056231dabe..aec69ca9c5b1 100644 --- a/docs/apache-airflow-providers-discord/changelog.rst +++ b/docs/apache-airflow-providers-discord/changelog.rst @@ -22,4 +22,4 @@ .. IF YOU WANT TO MODIFY THIS FILE, YOU SHOULD MODIFY THE TEMPLATE `PROVIDER_CHANGELOG_TEMPLATE.rst.jinja2` IN the `dev/breeze/src/airflow_breeze/templates` DIRECTORY -.. include:: ../../airflow/providers/discord/CHANGELOG.rst +..
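To make the callback pattern above concrete, a hedged sketch of sending a DingTalk message from ``on_failure_callback``, mirroring the relocated ``example_dingding.py``:

.. code-block:: python

    from airflow.providers.dingding.operators.dingding import DingdingOperator


    def failure_callback(context):
        # Build and send a text message when the task fails.
        message = f"The task {context['ti'].task_id} failed"
        DingdingOperator(
            task_id="dingding_failure_callback",
            message_type="text",
            message=message,
            at_all=True,
        ).execute(context)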
include:: ../../providers/src/airflow/providers/discord/CHANGELOG.rst diff --git a/docs/apache-airflow-providers-docker/changelog.rst b/docs/apache-airflow-providers-docker/changelog.rst index 46a39571be6e..910d96f6cbe0 100644 --- a/docs/apache-airflow-providers-docker/changelog.rst +++ b/docs/apache-airflow-providers-docker/changelog.rst @@ -22,4 +22,4 @@ .. IF YOU WANT TO MODIFY THIS FILE, YOU SHOULD MODIFY THE TEMPLATE `PROVIDER_CHANGELOG_TEMPLATE.rst.jinja2` IN the `dev/breeze/src/airflow_breeze/templates` DIRECTORY -.. include:: ../../airflow/providers/docker/CHANGELOG.rst +.. include:: ../../providers/src/airflow/providers/docker/CHANGELOG.rst diff --git a/docs/apache-airflow-providers-docker/decorators/docker.rst b/docs/apache-airflow-providers-docker/decorators/docker.rst index cfcd51860920..df5b237bfa71 100644 --- a/docs/apache-airflow-providers-docker/decorators/docker.rst +++ b/docs/apache-airflow-providers-docker/decorators/docker.rst @@ -162,7 +162,7 @@ ulimits Usage Example ------------- -.. exampleinclude:: /../../tests/system/providers/docker/example_taskflow_api_docker_virtualenv.py +.. exampleinclude:: /../../providers/tests/system/docker/example_taskflow_api_docker_virtualenv.py :language: python :start-after: [START transform_docker] :end-before: [END transform_docker] diff --git a/docs/apache-airflow-providers-docker/index.rst b/docs/apache-airflow-providers-docker/index.rst index 24593b875588..4f5f263f263d 100644 --- a/docs/apache-airflow-providers-docker/index.rst +++ b/docs/apache-airflow-providers-docker/index.rst @@ -42,14 +42,14 @@ :maxdepth: 1 :caption: System tests - System Tests <_api/tests/system/providers/docker/index> + System Tests <_api/tests/system/docker/index> .. toctree:: :hidden: :maxdepth: 1 :caption: Resources - Example DAGs + Example DAGs PyPI Repository Installing from sources diff --git a/docs/apache-airflow-providers-edge/changelog.rst b/docs/apache-airflow-providers-edge/changelog.rst index 4a87ccc753b0..46bd53ed4ccc 100644 --- a/docs/apache-airflow-providers-edge/changelog.rst +++ b/docs/apache-airflow-providers-edge/changelog.rst @@ -22,4 +22,4 @@ .. IF YOU WANT TO MODIFY THIS FILE, YOU SHOULD MODIFY THE TEMPLATE `PROVIDER_CHANGELOG_TEMPLATE.rst.jinja2` IN the `dev/breeze/src/airflow_breeze/templates` DIRECTORY -.. include:: ../../airflow/providers/edge/CHANGELOG.rst +.. include:: ../../providers/src/airflow/providers/edge/CHANGELOG.rst diff --git a/docs/apache-airflow-providers-elasticsearch/changelog.rst b/docs/apache-airflow-providers-elasticsearch/changelog.rst index 496bc18ce366..840359e3b0a4 100644 --- a/docs/apache-airflow-providers-elasticsearch/changelog.rst +++ b/docs/apache-airflow-providers-elasticsearch/changelog.rst @@ -22,4 +22,4 @@ .. IF YOU WANT TO MODIFY THIS FILE, YOU SHOULD MODIFY THE TEMPLATE `PROVIDER_CHANGELOG_TEMPLATE.rst.jinja2` IN the `dev/breeze/src/airflow_breeze/templates` DIRECTORY -.. include:: ../../airflow/providers/elasticsearch/CHANGELOG.rst +.. 
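A minimal sketch of the ``@task.docker`` decorator documented by the relocated TaskFlow example (the image tag is an assumption):

.. code-block:: python

    from airflow.decorators import task


    @task.docker(image="python:3.9-slim", multiple_outputs=True)
    def transform(order_data_dict: dict):
        # Runs inside a fresh container; the return value is pushed to XCom.
        total_order_value = sum(order_data_dict.values())
        return {"total_order_value": total_order_value}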
include:: ../../providers/src/airflow/providers/elasticsearch/CHANGELOG.rst diff --git a/docs/apache-airflow-providers-elasticsearch/connections/elasticsearch.rst b/docs/apache-airflow-providers-elasticsearch/connections/elasticsearch.rst index c068abe1a3c7..8097b8bd61c4 100644 --- a/docs/apache-airflow-providers-elasticsearch/connections/elasticsearch.rst +++ b/docs/apache-airflow-providers-elasticsearch/connections/elasticsearch.rst @@ -72,7 +72,7 @@ For example: export AIRFLOW_CONN_ELASTICSEARCH_DEFAULT='elasticsearch://elasticsearchlogin:elasticsearchpassword@elastic.co:80/http' -.. exampleinclude:: /../../tests/system/providers/elasticsearch/example_elasticsearch_query.py +.. exampleinclude:: /../../providers/tests/system/elasticsearch/example_elasticsearch_query.py :language: python :dedent: 4 :start-after: [START howto_elasticsearch_query] diff --git a/docs/apache-airflow-providers-elasticsearch/hooks/elasticsearch_python_hook.rst b/docs/apache-airflow-providers-elasticsearch/hooks/elasticsearch_python_hook.rst index ff1f925182ce..537b4973b41c 100644 --- a/docs/apache-airflow-providers-elasticsearch/hooks/elasticsearch_python_hook.rst +++ b/docs/apache-airflow-providers-elasticsearch/hooks/elasticsearch_python_hook.rst @@ -36,7 +36,7 @@ es_conn_args Usage Example --------------------- -.. exampleinclude:: /../../tests/system/providers/elasticsearch/example_elasticsearch_query.py +.. exampleinclude:: /../../providers/tests/system/elasticsearch/example_elasticsearch_query.py :language: python :start-after: [START howto_elasticsearch_python_hook] :end-before: [END howto_elasticsearch_python_hook] diff --git a/docs/apache-airflow-providers-elasticsearch/hooks/elasticsearch_sql_hook.rst b/docs/apache-airflow-providers-elasticsearch/hooks/elasticsearch_sql_hook.rst index 6021f4ac5dfe..084d445cb0ba 100644 --- a/docs/apache-airflow-providers-elasticsearch/hooks/elasticsearch_sql_hook.rst +++ b/docs/apache-airflow-providers-elasticsearch/hooks/elasticsearch_sql_hook.rst @@ -26,7 +26,7 @@ Elasticsearch Hook that interact with Elasticsearch through the elasticsearch-db Usage Example --------------------- -.. exampleinclude:: /../../tests/system/providers/elasticsearch/example_elasticsearch_query.py +.. exampleinclude:: /../../providers/tests/system/elasticsearch/example_elasticsearch_query.py :language: python :start-after: [START howto_elasticsearch_query] :end-before: [END howto_elasticsearch_query] diff --git a/docs/apache-airflow-providers-elasticsearch/index.rst b/docs/apache-airflow-providers-elasticsearch/index.rst index 727d852d7cf2..6aeca1e89e97 100644 --- a/docs/apache-airflow-providers-elasticsearch/index.rst +++ b/docs/apache-airflow-providers-elasticsearch/index.rst @@ -51,7 +51,7 @@ :maxdepth: 1 :caption: System tests - System Tests <_api/tests/system/providers/elasticsearch/index> + System Tests <_api/tests/system/elasticsearch/index> .. toctree:: :hidden: diff --git a/docs/apache-airflow-providers-exasol/changelog.rst b/docs/apache-airflow-providers-exasol/changelog.rst index 998d537e49b4..b1f41f01d8ee 100644 --- a/docs/apache-airflow-providers-exasol/changelog.rst +++ b/docs/apache-airflow-providers-exasol/changelog.rst @@ -22,4 +22,4 @@ .. IF YOU WANT TO MODIFY THIS FILE, YOU SHOULD MODIFY THE TEMPLATE `PROVIDER_CHANGELOG_TEMPLATE.rst.jinja2` IN the `dev/breeze/src/airflow_breeze/templates` DIRECTORY -.. include:: ../../airflow/providers/exasol/CHANGELOG.rst +.. 
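A hedged sketch of the Python hook usage that the relocated Elasticsearch example documents (the host URL is a local-cluster assumption):

.. code-block:: python

    from airflow.providers.elasticsearch.hooks.elasticsearch import ElasticsearchPythonHook

    es_hook = ElasticsearchPythonHook(hosts=["http://localhost:9200"])
    query = {"query": {"match_all": {}}}
    result = es_hook.search(query=query)  # runs the search against the cluster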
include:: ../../providers/src/airflow/providers/exasol/CHANGELOG.rst diff --git a/docs/apache-airflow-providers-fab/auth-manager/access-control.rst b/docs/apache-airflow-providers-fab/auth-manager/access-control.rst index 5292c3f1f629..914635942b7c 100644 --- a/docs/apache-airflow-providers-fab/auth-manager/access-control.rst +++ b/docs/apache-airflow-providers-fab/auth-manager/access-control.rst @@ -46,7 +46,7 @@ Viewer ^^^^^^ ``Viewer`` users have limited read permissions: -.. exampleinclude:: /../../airflow/providers/fab/auth_manager/security_manager/override.py +.. exampleinclude:: /../../providers/src/airflow/providers/fab/auth_manager/security_manager/override.py :language: python :start-after: [START security_viewer_perms] :end-before: [END security_viewer_perms] @@ -55,7 +55,7 @@ User ^^^^ ``User`` users have ``Viewer`` permissions plus additional permissions: -.. exampleinclude:: /../../airflow/providers/fab/auth_manager/security_manager/override.py +.. exampleinclude:: /../../providers/src/airflow/providers/fab/auth_manager/security_manager/override.py :language: python :start-after: [START security_user_perms] :end-before: [END security_user_perms] @@ -64,7 +64,7 @@ Op ^^ ``Op`` users have ``User`` permissions plus additional permissions: -.. exampleinclude:: /../../airflow/providers/fab/auth_manager/security_manager/override.py +.. exampleinclude:: /../../providers/src/airflow/providers/fab/auth_manager/security_manager/override.py :language: python :start-after: [START security_op_perms] :end-before: [END security_op_perms] @@ -74,7 +74,7 @@ Admin ``Admin`` users have all possible permissions, including granting or revoking permissions from other users. ``Admin`` users have ``Op`` permission plus additional permissions: -.. exampleinclude:: /../../airflow/providers/fab/auth_manager/security_manager/override.py +.. exampleinclude:: /../../providers/src/airflow/providers/fab/auth_manager/security_manager/override.py :language: python :start-after: [START security_admin_perms] :end-before: [END security_admin_perms] diff --git a/docs/apache-airflow-providers-fab/changelog.rst b/docs/apache-airflow-providers-fab/changelog.rst index c6bdcaa11e75..41fc2e14c26a 100644 --- a/docs/apache-airflow-providers-fab/changelog.rst +++ b/docs/apache-airflow-providers-fab/changelog.rst @@ -22,4 +22,4 @@ .. IF YOU WANT TO MODIFY THIS FILE, YOU SHOULD MODIFY THE TEMPLATE `PROVIDER_CHANGELOG_TEMPLATE.rst.jinja2` IN the `dev/breeze/src/airflow_breeze/templates` DIRECTORY -.. include:: ../../airflow/providers/fab/CHANGELOG.rst +.. include:: ../../providers/src/airflow/providers/fab/CHANGELOG.rst diff --git a/docs/apache-airflow-providers-facebook/changelog.rst b/docs/apache-airflow-providers-facebook/changelog.rst index e6717758dc28..e84fbb0ce309 100644 --- a/docs/apache-airflow-providers-facebook/changelog.rst +++ b/docs/apache-airflow-providers-facebook/changelog.rst @@ -22,4 +22,4 @@ .. IF YOU WANT TO MODIFY THIS FILE, YOU SHOULD MODIFY THE TEMPLATE `PROVIDER_CHANGELOG_TEMPLATE.rst.jinja2` IN the `dev/breeze/src/airflow_breeze/templates` DIRECTORY -.. include:: ../../airflow/providers/facebook/CHANGELOG.rst +.. include:: ../../providers/src/airflow/providers/facebook/CHANGELOG.rst diff --git a/docs/apache-airflow-providers-ftp/changelog.rst b/docs/apache-airflow-providers-ftp/changelog.rst index c9c69b3a7d2a..36f8c3d14951 100644 --- a/docs/apache-airflow-providers-ftp/changelog.rst +++ b/docs/apache-airflow-providers-ftp/changelog.rst @@ -22,4 +22,4 @@ .. 
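The role permissions excerpted above come from the FAB auth manager's ``override.py``; at the DAG level they are commonly combined with per-DAG access control. A hedged illustration (the role name and permission set below are placeholders, not taken from the provider source):

.. code-block:: python

    import pendulum

    from airflow import DAG

    with DAG(
        dag_id="example_access_control",
        start_date=pendulum.datetime(2024, 1, 1),
        schedule=None,
        # Grant the FAB 'User' role read/edit access to this DAG only.
        access_control={"User": {"can_read", "can_edit"}},
    ):
        ...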
IF YOU WANT TO MODIFY THIS FILE, YOU SHOULD MODIFY THE TEMPLATE `PROVIDER_CHANGELOG_TEMPLATE.rst.jinja2` IN the `dev/breeze/src/airflow_breeze/templates` DIRECTORY -.. include:: ../../airflow/providers/ftp/CHANGELOG.rst +.. include:: ../../providers/src/airflow/providers/ftp/CHANGELOG.rst diff --git a/docs/apache-airflow-providers-ftp/index.rst b/docs/apache-airflow-providers-ftp/index.rst index 269f712d8d25..25dfa5ef7d75 100644 --- a/docs/apache-airflow-providers-ftp/index.rst +++ b/docs/apache-airflow-providers-ftp/index.rst @@ -49,14 +49,14 @@ :maxdepth: 1 :caption: System tests - System Tests <_api/tests/system/providers/ftp/index> + System Tests <_api/tests/system/ftp/index> .. toctree:: :hidden: :maxdepth: 1 :caption: Resources - Example DAGs + Example DAGs PyPI Repository Installing from sources @@ -65,7 +65,7 @@ :maxdepth: 1 :caption: System tests - System Tests <_api/tests/system/providers/ftp/index> + System Tests <_api/tests/system/ftp/index> .. THE REMAINDER OF THE FILE IS AUTOMATICALLY GENERATED. IT WILL BE OVERWRITTEN AT RELEASE TIME! diff --git a/docs/apache-airflow-providers-ftp/operators/index.rst b/docs/apache-airflow-providers-ftp/operators/index.rst index 48933b9ed8a2..3e2f6592e1c3 100644 --- a/docs/apache-airflow-providers-ftp/operators/index.rst +++ b/docs/apache-airflow-providers-ftp/operators/index.rst @@ -33,7 +33,7 @@ For parameter definition take a look at :class:`~airflow.providers.ftp.operators The below example shows how to use the FTPFileTransmitOperator to transfer a locally stored file to a remote FTP Server: -.. exampleinclude:: /../../tests/system/providers/ftp/example_ftp.py +.. exampleinclude:: /../../providers/tests/system/ftp/example_ftp.py :language: python :dedent: 4 :start-after: [START howto_operator_ftp_put] @@ -41,7 +41,7 @@ The below example shows how to use the FTPFileTransmitOperator to transfer a loc The below example shows how to use the FTPFileTransmitOperator to pull a file from a remote FTP Server. -.. exampleinclude:: /../../tests/system/providers/ftp/example_ftp.py +.. exampleinclude:: /../../providers/tests/system/ftp/example_ftp.py :language: python :dedent: 4 :start-after: [START howto_operator_ftp_get] @@ -63,7 +63,7 @@ For parameter definition take a look at :class:`~airflow.providers.ftp.operators The below example shows how to use the FTPSFileTransmitOperator to transfer a locally stored file to a remote FTPS Server: -.. exampleinclude:: /../../tests/system/providers/ftp/example_ftp.py +.. exampleinclude:: /../../providers/tests/system/ftp/example_ftp.py :language: python :dedent: 4 :start-after: [START howto_operator_ftps_put] @@ -71,7 +71,7 @@ The below example shows how to use the FTPSFileTransmitOperator to transfer a lo The below example shows how to use the FTPSFileTransmitOperator to pull a file from a remote FTPS Server. -.. exampleinclude:: /../../tests/system/providers/ftp/example_ftp.py +.. exampleinclude:: /../../providers/tests/system/ftp/example_ftp.py :language: python :dedent: 4 :start-after: [START howto_operator_ftps_get] diff --git a/docs/apache-airflow-providers-github/changelog.rst b/docs/apache-airflow-providers-github/changelog.rst index 93bddd8d65fb..231425db490d 100644 --- a/docs/apache-airflow-providers-github/changelog.rst +++ b/docs/apache-airflow-providers-github/changelog.rst @@ -22,4 +22,4 @@ .. IF YOU WANT TO MODIFY THIS FILE, YOU SHOULD MODIFY THE TEMPLATE `PROVIDER_CHANGELOG_TEMPLATE.rst.jinja2` IN the `dev/breeze/src/airflow_breeze/templates` DIRECTORY -.. 
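As context for the relocated ``example_ftp.py``, a minimal sketch of the put/get pattern the FTP operator docs describe (the connection id and paths are placeholders):

.. code-block:: python

    from airflow.providers.ftp.operators.ftp import FTPFileTransmitOperator, FTPOperation

    put_file = FTPFileTransmitOperator(
        task_id="ftp_put",
        ftp_conn_id="ftp_default",
        local_filepath="/tmp/example.txt",
        remote_filepath="/remote/example.txt",
        operation=FTPOperation.PUT,  # use FTPOperation.GET to pull a file instead
    )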
include:: ../../airflow/providers/github/CHANGELOG.rst +.. include:: ../../providers/src/airflow/providers/github/CHANGELOG.rst diff --git a/docs/apache-airflow-providers-github/index.rst b/docs/apache-airflow-providers-github/index.rst index eb8fc3249f98..847dba31d106 100644 --- a/docs/apache-airflow-providers-github/index.rst +++ b/docs/apache-airflow-providers-github/index.rst @@ -49,14 +49,14 @@ :maxdepth: 1 :caption: System tests - System Tests <_api/tests/system/providers/github/index> + System Tests <_api/tests/system/github/index> .. toctree:: :hidden: :maxdepth: 1 :caption: Resources - Example DAGs + Example DAGs PyPI Repository Installing from sources diff --git a/docs/apache-airflow-providers-github/operators/index.rst b/docs/apache-airflow-providers-github/operators/index.rst index 1b2bf63894af..448fb8dc3dc2 100644 --- a/docs/apache-airflow-providers-github/operators/index.rst +++ b/docs/apache-airflow-providers-github/operators/index.rst @@ -33,7 +33,7 @@ You can further process the result using An example of Listing all Repositories owned by a user, **client.get_user().get_repos()** can be implemented as follows: -.. exampleinclude:: /../../tests/system/providers/github/example_github.py +.. exampleinclude:: /../../providers/tests/system/github/example_github.py :language: python :dedent: 4 :start-after: [START howto_operator_list_repos_github] @@ -43,7 +43,7 @@ An example of Listing all Repositories owned by a user, **client.get_user().get_ An example of Listing Tags in a Repository, **client.get_repo(full_name_or_id='apache/airflow').get_tags()** can be implemented as follows: -.. exampleinclude:: /../../tests/system/providers/github/example_github.py +.. exampleinclude:: /../../providers/tests/system/github/example_github.py :language: python :dedent: 4 :start-after: [START howto_operator_list_tags_github] @@ -64,7 +64,7 @@ a Tag in `GitHub `__. An example for tag **v1.0**: -.. exampleinclude:: /../../tests/system/providers/github/example_github.py +.. exampleinclude:: /../../providers/tests/system/github/example_github.py :language: python :dedent: 4 :start-after: [START howto_tag_sensor_github] @@ -73,7 +73,7 @@ An example for tag **v1.0**: Similar functionality can be achieved by directly using :class:`~airflow.providers.github.sensors.github.GithubSensor`. -.. exampleinclude:: /../../tests/system/providers/github/example_github.py +.. exampleinclude:: /../../providers/tests/system/github/example_github.py :language: python :dedent: 4 :start-after: [START howto_sensor_github] diff --git a/docs/apache-airflow-providers-google/changelog.rst b/docs/apache-airflow-providers-google/changelog.rst index d18b3ef2017a..f078efd1e3af 100644 --- a/docs/apache-airflow-providers-google/changelog.rst +++ b/docs/apache-airflow-providers-google/changelog.rst @@ -22,4 +22,4 @@ .. IF YOU WANT TO MODIFY THIS FILE, YOU SHOULD MODIFY THE TEMPLATE `PROVIDER_CHANGELOG_TEMPLATE.rst.jinja2` IN the `dev/breeze/src/airflow_breeze/templates` DIRECTORY -.. include:: ../../airflow/providers/google/CHANGELOG.rst +..
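The GithubOperator examples referenced above all follow the same shape: name a PyGithub client method and post-process its result. A rough sketch (the processing lambda is illustrative):

.. code-block:: python

    from airflow.providers.github.operators.github import GithubOperator

    list_repos = GithubOperator(
        task_id="list_repos",
        # Calls github.Github.get_user() and hands the result to result_processor.
        github_method="get_user",
        result_processor=lambda user: [repo.full_name for repo in user.get_repos()],
    )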
include:: ../../providers/src/airflow/providers/google/CHANGELOG.rst diff --git a/docs/apache-airflow-providers-google/example-dags.rst b/docs/apache-airflow-providers-google/example-dags.rst index 49f4fbf327f1..15245b948c6c 100644 --- a/docs/apache-airflow-providers-google/example-dags.rst +++ b/docs/apache-airflow-providers-google/example-dags.rst @@ -19,10 +19,10 @@ Example DAGs ============ You can learn how to use Google integrations by analyzing the source code of the example DAGs: -* `Google Ads `__ +* `Google Ads `__ * `Google Cloud (legacy) `__ -* `Google Cloud `__ -* `Google Firebase `__ +* `Google Cloud `__ +* `Google Firebase `__ * `Google Marketing Platform `__ -* `Google Workplace `__ (formerly Google Suite) -* `Google LevelDB `__ +* `Google Workplace `__ (formerly Google Suite) +* `Google LevelDB `__ diff --git a/docs/apache-airflow-providers-google/index.rst b/docs/apache-airflow-providers-google/index.rst index 05e05725aa17..172b8d0931e7 100644 --- a/docs/apache-airflow-providers-google/index.rst +++ b/docs/apache-airflow-providers-google/index.rst @@ -52,7 +52,7 @@ :maxdepth: 1 :caption: System tests - System Tests <_api/tests/system/providers/google/index> + System Tests <_api/tests/system/google/index> .. toctree:: :hidden: diff --git a/docs/apache-airflow-providers-google/operators/ads.rst b/docs/apache-airflow-providers-google/operators/ads.rst index 8e228bc0fde9..af5fe5913a9d 100644 --- a/docs/apache-airflow-providers-google/operators/ads.rst +++ b/docs/apache-airflow-providers-google/operators/ads.rst @@ -32,7 +32,7 @@ Google Ads to GCS To query the Google Ads API and generate a CSV report of the results use ``GoogleAdsToGcsOperator``. -.. exampleinclude:: /../../tests/system/providers/google/ads/example_ads.py +.. exampleinclude:: /../../providers/tests/system/google/ads/example_ads.py :language: python :dedent: 4 :start-after: [START howto_google_ads_to_gcs_operator] @@ -58,7 +58,7 @@ Upload Google Ads Accounts to GCS To upload Google Ads accounts to Google Cloud Storage bucket use the ``GoogleAdsListAccountsOperator``. -.. exampleinclude:: /../../tests/system/providers/google/ads/example_ads.py +.. exampleinclude:: /../../providers/tests/system/google/ads/example_ads.py :language: python :dedent: 4 :start-after: [START howto_ads_list_accounts_operator] diff --git a/docs/apache-airflow-providers-google/operators/cloud/automl.rst b/docs/apache-airflow-providers-google/operators/cloud/automl.rst index 8a92f49ac34f..ebfe3ca501f4 100644 --- a/docs/apache-airflow-providers-google/operators/cloud/automl.rst +++ b/docs/apache-airflow-providers-google/operators/cloud/automl.rst @@ -46,7 +46,7 @@ All the functionality of legacy AutoML Natural Language, Vision, Video Intellige available on the Vertex AI platform. Please use :class:`~airflow.providers.google.cloud.operators.vertex_ai.dataset.CreateDatasetOperator` -.. exampleinclude:: /../../tests/system/providers/google/cloud/automl/example_automl_dataset.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/automl/example_automl_dataset.py :language: python :dedent: 4 :start-after: [START howto_operator_automl_create_dataset] @@ -55,7 +55,7 @@ available on the Vertex AI platform. Please use After creating a dataset you can use it to import some data using :class:`~airflow.providers.google.cloud.operators.automl.AutoMLImportDataOperator`. -.. exampleinclude:: /../../tests/system/providers/google/cloud/automl/example_automl_dataset.py +.. 
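For orientation, the relocated ``example_ads.py`` above centers on ``GoogleAdsToGcsOperator``. A hedged sketch (the client id, GAQL query, bucket and object name are placeholders; verify the parameter names against your provider version):

.. code-block:: python

    from airflow.providers.google.ads.transfers.ads_to_gcs import GoogleAdsToGcsOperator

    ads_to_gcs = GoogleAdsToGcsOperator(
        task_id="ads_to_gcs",
        client_ids=["1234567890"],
        query="SELECT campaign.id, campaign.name FROM campaign",
        attributes=["campaign.id", "campaign.name"],
        bucket="my-ads-bucket",
        obj="ads/campaigns.csv",
    )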
exampleinclude:: /../../providers/tests/system/google/cloud/automl/example_automl_dataset.py :language: python :dedent: 4 :start-after: [START howto_operator_automl_import_data] @@ -69,7 +69,7 @@ All the functionality of legacy AutoML Natural Language, Vision, Video Intellige available on the Vertex AI platform. Please use :class:`~airflow.providers.google.cloud.operators.vertex_ai.dataset.UpdateDatasetOperator` -.. exampleinclude:: /../../tests/system/providers/google/cloud/vertex_ai/example_vertex_ai_dataset.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/vertex_ai/example_vertex_ai_dataset.py :language: python :dedent: 4 :start-after: [START how_to_cloud_vertex_ai_update_dataset_operator] @@ -111,7 +111,7 @@ available on the Vertex AI platform. Please use You can find an example of how to use VertexAI operators for AutoML Vision classification here: -.. exampleinclude:: /../../tests/system/providers/google/cloud/automl/example_automl_vision_classification.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/automl/example_automl_vision_classification.py :language: python :dedent: 4 :start-after: [START howto_cloud_create_image_classification_training_job_operator] @@ -119,7 +119,7 @@ You can find example on how to use VertexAI operators for AutoML Vision classifi An example of how to use VertexAI operators for AutoML Video Intelligence classification can be found here: -.. exampleinclude:: /../../tests/system/providers/google/cloud/automl/example_automl_video_classification.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/automl/example_automl_video_classification.py :language: python :dedent: 4 :start-after: [START howto_cloud_create_video_classification_training_job_operator] @@ -131,7 +131,7 @@ datasets. To create and import data to the dataset please use and :class:`~airflow.providers.google.cloud.operators.vertex_ai.dataset.ImportDataOperator` -.. exampleinclude:: /../../tests/system/providers/google/cloud/automl/example_automl_translation.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/automl/example_automl_translation.py :language: python :dedent: 4 :start-after: [START howto_operator_automl_create_model] @@ -145,7 +145,7 @@ and will be removed after 31.03.2024. Please use :class:`airflow.providers.google.cloud.operators.vertex_ai.model_service.GetModelOperator` instead. You can find an example of how to use VertexAI operators here: -.. exampleinclude:: /../../tests/system/providers/google/cloud/vertex_ai/example_vertex_ai_model_service.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/vertex_ai/example_vertex_ai_model_service.py :language: python :dedent: 4 :start-after: [START how_to_cloud_vertex_ai_get_model_operator] @@ -159,7 +159,7 @@ and will be removed after 31.03.2024. Please use :class:`airflow.providers.google.cloud.operators.vertex_ai.endpoint_service.DeployModelOperator` instead. You can find an example of how to use VertexAI operators here: -.. exampleinclude:: /../../tests/system/providers/google/cloud/vertex_ai/example_vertex_ai_endpoint.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/vertex_ai/example_vertex_ai_endpoint.py :language: python :dedent: 4 :start-after: [START how_to_cloud_vertex_ai_deploy_model_operator] @@ -173,7 +173,7 @@ and will be removed after 31.03.2024. Please use :class:`airflow.providers.google.cloud.operators.vertex_ai.model_service.DeleteModelOperator` instead. You can find an example of how to use VertexAI operators here: -..
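Since the docs above repeatedly point from legacy AutoML operators to their Vertex AI replacements, a rough sketch of the recommended ``CreateDatasetOperator`` (the project, region and metadata schema are placeholders):

.. code-block:: python

    from google.cloud.aiplatform import schema

    from airflow.providers.google.cloud.operators.vertex_ai.dataset import CreateDatasetOperator

    create_image_dataset = CreateDatasetOperator(
        task_id="create_image_dataset",
        project_id="my-project",
        region="us-central1",
        dataset={
            "display_name": "example-image-dataset",
            "metadata_schema_uri": schema.dataset.metadata.image,
        },
    )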
exampleinclude:: /../../tests/system/providers/google/cloud/vertex_ai/example_vertex_ai_model_service.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/vertex_ai/example_vertex_ai_model_service.py :language: python :dedent: 4 :start-after: [START how_to_cloud_vertex_ai_delete_model_operator] @@ -190,13 +190,12 @@ To obtain predictions from Google Cloud AutoML model you can use :class:`~airflow.providers.google.cloud.operators.automl.AutoMLBatchPredictOperator`. In the first case the model must be deployed. -.. exampleinclude:: /../../tests/system/providers/google/cloud/automl/example_automl_translation.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/automl/example_automl_translation.py :language: python :dedent: 4 :start-after: [START howto_operator_prediction] :end-before: [END howto_operator_prediction] - The :class:`~airflow.providers.google.cloud.operators.automl.AutoMLBatchPredictOperator` for tables, video intelligence, vision and natural language is deprecated and will be removed after 31.03.2024. Please use :class:`airflow.providers.google.cloud.operators.vertex_ai.batch_prediction_job.CreateBatchPredictionJobOperator`, @@ -206,19 +205,19 @@ video intelligence, vision and natural language is deprecated and will be remove instead. You can find examples of how to use VertexAI operators here: -.. exampleinclude:: /../../tests/system/providers/google/cloud/vertex_ai/example_vertex_ai_batch_prediction_job.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/vertex_ai/example_vertex_ai_batch_prediction_job.py :language: python :dedent: 4 :start-after: [START how_to_cloud_vertex_ai_create_batch_prediction_job_operator] :end-before: [END how_to_cloud_vertex_ai_create_batch_prediction_job_operator] -.. exampleinclude:: /../../tests/system/providers/google/cloud/vertex_ai/example_vertex_ai_batch_prediction_job.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/vertex_ai/example_vertex_ai_batch_prediction_job.py :language: python :dedent: 4 :start-after: [START how_to_cloud_vertex_ai_list_batch_prediction_job_operator] :end-before: [END how_to_cloud_vertex_ai_list_batch_prediction_job_operator] -.. exampleinclude:: /../../tests/system/providers/google/cloud/vertex_ai/example_vertex_ai_batch_prediction_job.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/vertex_ai/example_vertex_ai_batch_prediction_job.py :language: python :dedent: 4 :start-after: [START how_to_cloud_vertex_ai_delete_batch_prediction_job_operator] @@ -239,7 +238,7 @@ and will be removed after 31.03.2024. Please use :class:`airflow.providers.google.cloud.operators.vertex_ai.dataset.ListDatasetsOperator` instead. You can find an example of how to use VertexAI operators here: -.. exampleinclude:: /../../tests/system/providers/google/cloud/vertex_ai/example_vertex_ai_dataset.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/vertex_ai/example_vertex_ai_dataset.py :language: python :dedent: 4 :start-after: [START how_to_cloud_vertex_ai_list_dataset_operator] @@ -253,7 +252,7 @@ and will be removed after 31.03.2024. Please use :class:`airflow.providers.google.cloud.operators.vertex_ai.dataset.DeleteDatasetOperator` instead. You can find an example of how to use VertexAI operators here: -..
exampleinclude:: /../../providers/tests/system/google/cloud/vertex_ai/example_vertex_ai_dataset.py :language: python :dedent: 4 :start-after: [START how_to_cloud_vertex_ai_delete_dataset_operator] diff --git a/docs/apache-airflow-providers-google/operators/cloud/bigquery.rst b/docs/apache-airflow-providers-google/operators/cloud/bigquery.rst index 1635372a35d7..5e2c6689c236 100644 --- a/docs/apache-airflow-providers-google/operators/cloud/bigquery.rst +++ b/docs/apache-airflow-providers-google/operators/cloud/bigquery.rst @@ -43,7 +43,7 @@ Create dataset To create an empty dataset in a BigQuery database you can use :class:`~airflow.providers.google.cloud.operators.bigquery.BigQueryCreateEmptyDatasetOperator`. -.. exampleinclude:: /../../tests/system/providers/google/cloud/bigquery/example_bigquery_dataset.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/bigquery/example_bigquery_dataset.py :language: python :dedent: 4 :start-after: [START howto_operator_bigquery_create_dataset] @@ -59,7 +59,7 @@ To get the details of an existing dataset you can use This operator returns a `Dataset Resource `__. -.. exampleinclude:: /../../tests/system/providers/google/cloud/bigquery/example_bigquery_dataset.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/bigquery/example_bigquery_dataset.py :language: python :dedent: 4 :start-after: [START howto_operator_bigquery_get_dataset] @@ -73,7 +73,7 @@ List tables in dataset To retrieve the list of tables in a given dataset use :class:`~airflow.providers.google.cloud.operators.bigquery.BigQueryGetDatasetTablesOperator`. -.. exampleinclude:: /../../tests/system/providers/google/cloud/bigquery/example_bigquery_tables.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/bigquery/example_bigquery_tables.py :language: python :dedent: 4 :start-after: [START howto_operator_bigquery_get_dataset_tables] @@ -90,7 +90,7 @@ To update a table in BigQuery you can use The update method replaces the entire Table resource, whereas the patch method only replaces fields that are provided in the submitted Table resource. -.. exampleinclude:: /../../tests/system/providers/google/cloud/bigquery/example_bigquery_tables.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/bigquery/example_bigquery_tables.py :language: python :dedent: 4 :start-after: [START howto_operator_bigquery_update_table] @@ -107,7 +107,7 @@ To update a dataset in BigQuery you can use The update method replaces the entire dataset resource, whereas the patch method only replaces fields that are provided in the submitted dataset resource. -.. exampleinclude:: /../../tests/system/providers/google/cloud/bigquery/example_bigquery_dataset.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/bigquery/example_bigquery_dataset.py :language: python :dedent: 4 :start-after: [START howto_operator_bigquery_update_dataset] @@ -121,7 +121,7 @@ Delete dataset To delete an existing dataset from a BigQuery database you can use :class:`~airflow.providers.google.cloud.operators.bigquery.BigQueryDeleteDatasetOperator`. -.. exampleinclude:: /../../tests/system/providers/google/cloud/bigquery/example_bigquery_dataset.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/bigquery/example_bigquery_dataset.py :language: python :dedent: 4 :start-after: [START howto_operator_bigquery_delete_dataset] @@ -144,7 +144,7 @@ ways. You may either directly pass the schema fields in, or you may point the operator to a Google Cloud Storage object name. 
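A sketch of the first variant, passing the schema fields inline (the dataset and table ids are placeholders):

.. code-block:: python

    from airflow.providers.google.cloud.operators.bigquery import BigQueryCreateEmptyTableOperator

    create_table = BigQueryCreateEmptyTableOperator(
        task_id="create_table",
        dataset_id="my_dataset",
        table_id="my_table",
        schema_fields=[
            {"name": "emp_name", "type": "STRING", "mode": "REQUIRED"},
            {"name": "salary", "type": "INTEGER", "mode": "NULLABLE"},
        ],
    )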
The object in Google Cloud Storage must be a JSON file with the schema fields in it. -.. exampleinclude:: /../../tests/system/providers/google/cloud/bigquery/example_bigquery_tables.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/bigquery/example_bigquery_tables.py :language: python :dedent: 4 :start-after: [START howto_operator_bigquery_create_table] @@ -152,7 +152,7 @@ Storage must be a JSON file with the schema fields in it. You can use this operator to create a view on top of an existing table. -.. exampleinclude:: /../../tests/system/providers/google/cloud/bigquery/example_bigquery_tables.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/bigquery/example_bigquery_tables.py :language: python :dedent: 4 :start-after: [START howto_operator_bigquery_create_view] @@ -161,7 +161,7 @@ You can use this operator to create a view on top of an existing table. You can also use this operator to create a materialized view that periodically cache results of a query for increased performance and efficiency. -.. exampleinclude:: /../../tests/system/providers/google/cloud/bigquery/example_bigquery_tables.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/bigquery/example_bigquery_tables.py :language: python :dedent: 4 :start-after: [START howto_operator_bigquery_create_materialized_view] @@ -180,7 +180,7 @@ Similarly to :class:`~airflow.providers.google.cloud.operators.bigquery.BigQueryCreateEmptyTableOperator` you can directly pass the schema fields in. -.. exampleinclude:: /../../tests/system/providers/google/cloud/bigquery/example_bigquery_operations.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/bigquery/example_bigquery_operations.py :language: python :dedent: 4 :start-after: [START howto_operator_bigquery_create_external_table] @@ -188,7 +188,7 @@ you can directly pass the schema fields in. Or you may point the operator to a Google Cloud Storage object name where the schema is stored. -.. exampleinclude:: /../../tests/system/providers/google/cloud/bigquery/example_bigquery_tables.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/bigquery/example_bigquery_tables.py :language: python :dedent: 4 :start-after: [START howto_operator_bigquery_create_table_schema_json] @@ -213,7 +213,7 @@ nesting will a nested list where elements would represent the column values for that row. ``True`` - A Python list of dictionaries, where each dictionary represents a row. In each dictionary, the keys are the column names and the values are the corresponding values for those columns. -.. exampleinclude:: /../../tests/system/providers/google/cloud/bigquery/example_bigquery_queries.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/bigquery/example_bigquery_queries.py :language: python :dedent: 8 :start-after: [START howto_operator_bigquery_get_data] @@ -224,7 +224,7 @@ The below example shows how to use in async (deferrable) mode. Note that a deferrable task requires the Triggerer to be running on your Airflow deployment. -.. exampleinclude:: /../../tests/system/providers/google/cloud/bigquery/example_bigquery_queries_async.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/bigquery/example_bigquery_queries_async.py :language: python :dedent: 4 :start-after: [START howto_operator_bigquery_get_data_async] @@ -241,7 +241,7 @@ To upsert a table you can use This operator either updates the existing table or creates a new, empty table in the given dataset. -.. 
exampleinclude:: /../../tests/system/providers/google/cloud/bigquery/example_bigquery_tables.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/bigquery/example_bigquery_tables.py :language: python :dedent: 4 :start-after: [START howto_operator_bigquery_upsert_table] @@ -258,7 +258,7 @@ To update the schema of a table you can use This operator updates the schema field values supplied, while leaving the rest unchanged. This is useful for instance to set new field descriptions on an existing table schema. -.. exampleinclude:: /../../tests/system/providers/google/cloud/bigquery/example_bigquery_tables.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/bigquery/example_bigquery_tables.py :language: python :dedent: 4 :start-after: [START howto_operator_bigquery_update_table_schema] @@ -272,7 +272,7 @@ Delete table To delete an existing table you can use :class:`~airflow.providers.google.cloud.operators.bigquery.BigQueryDeleteTableOperator`. -.. exampleinclude:: /../../tests/system/providers/google/cloud/bigquery/example_bigquery_tables.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/bigquery/example_bigquery_tables.py :language: python :dedent: 4 :start-after: [START howto_operator_bigquery_delete_table] @@ -280,7 +280,7 @@ To delete an existing table you can use You can also use this operator to delete a view. -.. exampleinclude:: /../../tests/system/providers/google/cloud/bigquery/example_bigquery_tables.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/bigquery/example_bigquery_tables.py :language: python :dedent: 4 :start-after: [START howto_operator_bigquery_delete_view] @@ -288,7 +288,7 @@ You can also use this operator to delete a view. You can also use this operator to delete a materialized view. -.. exampleinclude:: /../../tests/system/providers/google/cloud/bigquery/example_bigquery_tables.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/bigquery/example_bigquery_tables.py :language: python :dedent: 4 :start-after: [START howto_operator_bigquery_delete_materialized_view] @@ -301,7 +301,7 @@ Execute BigQuery jobs Let's say you would like to execute the following query. -.. exampleinclude:: /../../tests/system/providers/google/cloud/bigquery/example_bigquery_queries.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/bigquery/example_bigquery_queries.py :language: python :dedent: 0 :start-after: [START howto_operator_bigquery_query] @@ -311,7 +311,7 @@ To execute the SQL query in a specific BigQuery database you can use :class:`~airflow.providers.google.cloud.operators.bigquery.BigQueryInsertJobOperator` with proper query job configuration that can be Jinja templated. -.. exampleinclude:: /../../tests/system/providers/google/cloud/bigquery/example_bigquery_queries.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/bigquery/example_bigquery_queries.py :language: python :dedent: 8 :start-after: [START howto_operator_bigquery_insert_job] @@ -322,7 +322,7 @@ The below example shows how to use in async (deferrable) mode. Note that a deferrable task requires the Triggerer to be running on your Airflow deployment. -.. exampleinclude:: /../../tests/system/providers/google/cloud/bigquery/example_bigquery_queries_async.py +.. 
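A minimal sketch of the ``BigQueryInsertJobOperator`` query configuration discussed above (the SQL and table are placeholders):

.. code-block:: python

    from airflow.providers.google.cloud.operators.bigquery import BigQueryInsertJobOperator

    insert_query_job = BigQueryInsertJobOperator(
        task_id="insert_query_job",
        configuration={
            "query": {
                "query": "SELECT COUNT(*) FROM `my_dataset.my_table`",
                "useLegacySql": False,
            }
        },
    )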
exampleinclude:: /../../providers/tests/system/google/cloud/bigquery/example_bigquery_queries_async.py :language: python :dedent: 4 :start-after: [START howto_operator_bigquery_insert_job_async] @@ -334,7 +334,7 @@ For more information on types of BigQuery job please check If you want to include some files in your configuration you can use the ``include`` clause of the Jinja template language as follows: -.. exampleinclude:: /../../tests/system/providers/google/cloud/bigquery/example_bigquery_queries.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/bigquery/example_bigquery_queries.py :language: python :dedent: 8 :start-after: [START howto_operator_bigquery_select_job] @@ -350,7 +350,7 @@ then it will reattach to the existing job. Also, for all these actions you can use the operator in deferrable mode: -.. exampleinclude:: /../../tests/system/providers/google/cloud/bigquery/example_bigquery_queries_async.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/bigquery/example_bigquery_queries_async.py :language: python :dedent: 4 :start-after: [START howto_operator_bigquery_insert_job_async] @@ -371,7 +371,7 @@ This operator expects a SQL query that will return a single row. Each value on that first row is evaluated using Python ``bool`` casting. If any of the values return ``False``, the check fails and errors out. -.. exampleinclude:: /../../tests/system/providers/google/cloud/bigquery/example_bigquery_queries.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/bigquery/example_bigquery_queries.py :language: python :dedent: 8 :start-after: [START howto_operator_bigquery_check] @@ -379,7 +379,7 @@ return ``False`` the check is failed and errors out. Also you can use deferrable mode in this operator -.. exampleinclude:: /../../tests/system/providers/google/cloud/bigquery/example_bigquery_queries_async.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/bigquery/example_bigquery_queries_async.py :language: python :dedent: 4 :start-after: [START howto_operator_bigquery_check_async] @@ -397,7 +397,7 @@ These operators expect a SQL query that will return a single row. Each value on that first row is evaluated against ``pass_value`` which can be either a string or numeric value. If numeric, you can also specify ``tolerance``. -.. exampleinclude:: /../../tests/system/providers/google/cloud/bigquery/example_bigquery_queries.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/bigquery/example_bigquery_queries.py :language: python :dedent: 8 :start-after: [START howto_operator_bigquery_value_check] @@ -405,7 +405,7 @@ or numeric value. If numeric, you can also specify ``tolerance``. Also you can use deferrable mode in this operator -.. exampleinclude:: /../../tests/system/providers/google/cloud/bigquery/example_bigquery_queries_async.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/bigquery/example_bigquery_queries_async.py :language: python :dedent: 4 :start-after: [START howto_operator_bigquery_value_check_async] @@ -421,7 +421,7 @@ tolerance of the ones from ``days_back`` before you can either use :class:`~airflow.providers.google.cloud.operators.bigquery.BigQueryIntervalCheckOperator` or :class:`~airflow.providers.google.cloud.operators.bigquery.BigQueryIntervalCheckAsyncOperator` -..
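A sketch of the single-row check pattern described above (the SQL is a placeholder):

.. code-block:: python

    from airflow.providers.google.cloud.operators.bigquery import BigQueryCheckOperator

    check_count = BigQueryCheckOperator(
        task_id="check_count",
        # The check fails if any value in the first returned row is falsy.
        sql="SELECT COUNT(*) FROM `my_dataset.my_table`",
        use_legacy_sql=False,
    )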
exampleinclude:: /../../providers/tests/system/google/cloud/bigquery/example_bigquery_queries.py :language: python :dedent: 8 :start-after: [START howto_operator_bigquery_interval_check] @@ -429,7 +429,7 @@ tolerance of the ones from ``days_back`` before you can either use Also you can use deferrable mode in this operator -.. exampleinclude:: /../../tests/system/providers/google/cloud/bigquery/example_bigquery_queries_async.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/bigquery/example_bigquery_queries_async.py :language: python :dedent: 4 :start-after: [START howto_operator_bigquery_interval_check_async] @@ -443,7 +443,7 @@ Check columns with predefined tests To check that columns pass user-configurable tests you can use :class:`~airflow.providers.google.cloud.operators.bigquery.BigQueryColumnCheckOperator` -.. exampleinclude:: /../../tests/system/providers/google/cloud/bigquery/example_bigquery_queries.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/bigquery/example_bigquery_queries.py :language: python :dedent: 4 :start-after: [START howto_operator_bigquery_column_check] @@ -457,7 +457,7 @@ Check table level data quality To check that tables pass user-defined tests you can use :class:`~airflow.providers.google.cloud.operators.bigquery.BigQueryTableCheckOperator` -.. exampleinclude:: /../../tests/system/providers/google/cloud/bigquery/example_bigquery_queries.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/bigquery/example_bigquery_queries.py :language: python :dedent: 4 :start-after: [START howto_operator_bigquery_table_check] @@ -475,7 +475,7 @@ use the ``{{ ds_nodash }}`` macro as the table name suffix. :class:`~airflow.providers.google.cloud.sensors.bigquery.BigQueryTableExistenceSensor`. -.. exampleinclude:: /../../tests/system/providers/google/cloud/bigquery/example_bigquery_sensors.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/bigquery/example_bigquery_sensors.py :language: python :dedent: 4 :start-after: [START howto_sensor_bigquery_table] @@ -483,13 +483,13 @@ use the ``{{ ds_nodash }}`` macro as the table name suffix. Also you can use deferrable mode in this operator if you would like to free up the worker slots while the sensor is running. -.. exampleinclude:: /../../tests/system/providers/google/cloud/bigquery/example_bigquery_sensors.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/bigquery/example_bigquery_sensors.py :language: python :dedent: 4 :start-after: [START howto_sensor_bigquery_table_defered] :end-before: [END howto_sensor_bigquery_table_defered] -.. exampleinclude:: /../../tests/system/providers/google/cloud/bigquery/example_bigquery_sensors.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/bigquery/example_bigquery_sensors.py :language: python :dedent: 4 :start-after: [START howto_sensor_async_bigquery_table] @@ -501,7 +501,7 @@ Check that a Table Partition exists To check that a table exists and has a partition you can use. :class:`~airflow.providers.google.cloud.sensors.bigquery.BigQueryTablePartitionExistenceSensor`. -.. exampleinclude:: /../../tests/system/providers/google/cloud/bigquery/example_bigquery_sensors.py +.. 
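A hedged sketch of the ``column_mapping`` structure used by the column check operator above (the table and column names are placeholders):

.. code-block:: python

    from airflow.providers.google.cloud.operators.bigquery import BigQueryColumnCheckOperator

    column_check = BigQueryColumnCheckOperator(
        task_id="column_check",
        table="my_dataset.my_table",
        # Assert that the 'salary' column contains no NULLs.
        column_mapping={"salary": {"null_check": {"equal_to": 0}}},
    )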
exampleinclude:: /../../providers/tests/system/google/cloud/bigquery/example_bigquery_sensors.py :language: python :dedent: 4 :start-after: [START howto_sensor_bigquery_table_partition] @@ -511,13 +511,13 @@ For DAY partitioned tables, the partition_id parameter is a string on the "%Y%m% Also you can use deferrable mode in this operator if you would like to free up the worker slots while the sensor is running. -.. exampleinclude:: /../../tests/system/providers/google/cloud/bigquery/example_bigquery_sensors.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/bigquery/example_bigquery_sensors.py :language: python :dedent: 4 :start-after: [START howto_sensor_bigquery_table_partition_defered] :end-before: [END howto_sensor_bigquery_table_partition_defered] -.. exampleinclude:: /../../tests/system/providers/google/cloud/bigquery/example_bigquery_sensors.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/bigquery/example_bigquery_sensors.py :language: python :dedent: 4 :start-after: [START howto_sensor_bigquery_table_partition_async] diff --git a/docs/apache-airflow-providers-google/operators/cloud/bigquery_dts.rst b/docs/apache-airflow-providers-google/operators/cloud/bigquery_dts.rst index 0d3663419baa..002716c8ef63 100644 --- a/docs/apache-airflow-providers-google/operators/cloud/bigquery_dts.rst +++ b/docs/apache-airflow-providers-google/operators/cloud/bigquery_dts.rst @@ -47,7 +47,7 @@ for example :class:`~airflow.providers.google.cloud.operators.bigquery_dts.BigQu scheduling option is present in passed configuration. If present then nothing is done, otherwise its value is set to ``True``. -.. exampleinclude:: /../../tests/system/providers/google/cloud/bigquery/example_bigquery_dts.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/bigquery/example_bigquery_dts.py :language: python :start-after: [START howto_bigquery_dts_create_args] :end-before: [END howto_bigquery_dts_create_args] @@ -55,7 +55,7 @@ set to ``True``. You can create the operator with or without project id. If project id is missing it will be retrieved from the Google Cloud connection used. Basic usage of the operator: -.. exampleinclude:: /../../tests/system/providers/google/cloud/bigquery/example_bigquery_dts.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/bigquery/example_bigquery_dts.py :language: python :dedent: 4 :start-after: [START howto_bigquery_create_data_transfer] @@ -78,7 +78,7 @@ To delete DTS transfer configuration you can use Basic usage of the operator: -.. exampleinclude:: /../../tests/system/providers/google/cloud/bigquery/example_bigquery_dts.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/bigquery/example_bigquery_dts.py :language: python :dedent: 4 :start-after: [START howto_bigquery_delete_data_transfer] @@ -99,7 +99,7 @@ Start manual transfer runs to be executed now with schedule_time equal to curren Basic usage of the operator: -.. exampleinclude:: /../../tests/system/providers/google/cloud/bigquery/example_bigquery_dts.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/bigquery/example_bigquery_dts.py :language: python :dedent: 4 :start-after: [START howto_bigquery_start_transfer] @@ -112,7 +112,7 @@ parameters which allows you to dynamically determine values. To check if operation succeeded you can use :class:`~airflow.providers.google.cloud.sensors.bigquery_dts.BigQueryDataTransferServiceTransferRunSensor`. -.. 
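A sketch of the table-existence sensor in deferrable mode, as discussed above (the ids are placeholders):

.. code-block:: python

    from airflow.providers.google.cloud.sensors.bigquery import BigQueryTableExistenceSensor

    wait_for_table = BigQueryTableExistenceSensor(
        task_id="wait_for_table",
        project_id="my-project",
        dataset_id="my_dataset",
        table_id="my_table",
        deferrable=True,  # frees the worker slot while waiting
    )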
exampleinclude:: /../../tests/system/providers/google/cloud/bigquery/example_bigquery_dts.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/bigquery/example_bigquery_dts.py :language: python :dedent: 4 :start-after: [START howto_bigquery_dts_sensor] diff --git a/docs/apache-airflow-providers-google/operators/cloud/bigtable.rst b/docs/apache-airflow-providers-google/operators/cloud/bigtable.rst index 870cfe50fce8..d77a68944724 100644 --- a/docs/apache-airflow-providers-google/operators/cloud/bigtable.rst +++ b/docs/apache-airflow-providers-google/operators/cloud/bigtable.rst @@ -42,7 +42,7 @@ Using the operator You can create the operator with or without project id. If project id is missing it will be retrieved from the Google Cloud connection used. Both variants are shown: -.. exampleinclude:: /../../tests/system/providers/google/cloud/bigtable/example_bigtable.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/bigtable/example_bigtable.py :language: python :dedent: 4 :start-after: [START howto_operator_gcp_bigtable_instance_create] @@ -65,7 +65,7 @@ Using the operator You can create the operator with or without project id. If project id is missing it will be retrieved from the Google Cloud connection used. Both variants are shown: -.. exampleinclude:: /../../tests/system/providers/google/cloud/bigtable/example_bigtable.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/bigtable/example_bigtable.py :language: python :dedent: 4 :start-after: [START howto_operator_gcp_bigtable_instance_update] @@ -85,7 +85,7 @@ Using the operator You can create the operator with or without project id. If project id is missing it will be retrieved from the Google Cloud connection used. Both variants are shown: -.. exampleinclude:: /../../tests/system/providers/google/cloud/bigtable/example_bigtable.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/bigtable/example_bigtable.py :language: python :dedent: 4 :start-after: [START howto_operator_gcp_bigtable_instance_delete] @@ -105,7 +105,7 @@ Using the operator You can create the operator with or without project id. If project id is missing it will be retrieved from the Google Cloud connection used. Both variants are shown: -.. exampleinclude:: /../../tests/system/providers/google/cloud/bigtable/example_bigtable.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/bigtable/example_bigtable.py :language: python :dedent: 4 :start-after: [START howto_operator_gcp_bigtable_cluster_update] @@ -129,7 +129,7 @@ Using the operator You can create the operator with or without project id. If project id is missing it will be retrieved from the Google Cloud connection used. Both variants are shown: -.. exampleinclude:: /../../tests/system/providers/google/cloud/bigtable/example_bigtable.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/bigtable/example_bigtable.py :language: python :dedent: 4 :start-after: [START howto_operator_gcp_bigtable_table_create] @@ -157,7 +157,7 @@ Using the operator You can create the operator with or without project id. If project id is missing it will be retrieved from the Google Cloud connection used. Both variants are shown: -.. exampleinclude:: /../../tests/system/providers/google/cloud/bigtable/example_bigtable.py +.. 
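As a rough illustration of the "with or without project id" pattern the Bigtable docs above repeat, an instance-creation sketch (all ids and the zone are placeholders):

.. code-block:: python

    from airflow.providers.google.cloud.operators.bigtable import BigtableCreateInstanceOperator

    create_instance = BigtableCreateInstanceOperator(
        task_id="create_instance",
        project_id="my-project",  # optional; falls back to the project of the GCP connection
        instance_id="my-instance",
        main_cluster_id="my-cluster",
        main_cluster_zone="europe-west1-b",
    )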
exampleinclude:: /../../providers/tests/system/google/cloud/bigtable/example_bigtable.py :language: python :dedent: 4 :start-after: [START howto_operator_gcp_bigtable_table_delete] @@ -182,7 +182,7 @@ timeout hits and does not raise any exception. Using the operator """""""""""""""""" -.. exampleinclude:: /../../tests/system/providers/google/cloud/bigtable/example_bigtable.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/bigtable/example_bigtable.py :language: python :dedent: 4 :start-after: [START howto_operator_gcp_bigtable_table_wait_for_replication] diff --git a/docs/apache-airflow-providers-google/operators/cloud/cloud_batch.rst b/docs/apache-airflow-providers-google/operators/cloud/cloud_batch.rst index 2254ead25b1a..c4ae3db7de39 100644 --- a/docs/apache-airflow-providers-google/operators/cloud/cloud_batch.rst +++ b/docs/apache-airflow-providers-google/operators/cloud/cloud_batch.rst @@ -35,7 +35,7 @@ For more information about the Job object fields, visit `Google Cloud Batch Job A simple job configuration can look as follows: -.. exampleinclude:: /../../tests/system/providers/google/cloud/cloud_batch/example_cloud_batch.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/cloud_batch/example_cloud_batch.py :language: python :dedent: 0 :start-after: [START howto_operator_batch_job_creation] @@ -44,7 +44,7 @@ A simple job configuration can look as follows: With this configuration we can submit the job: :class:`~airflow.providers.google.cloud.operators.cloud_batch.CloudBatchSubmitJobOperator` -.. exampleinclude:: /../../tests/system/providers/google/cloud/cloud_batch/example_cloud_batch.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/cloud_batch/example_cloud_batch.py :language: python :dedent: 4 :start-after: [START howto_operator_batch_submit_job] @@ -53,7 +53,7 @@ With this configuration we can submit the job: or you can define the same operator in the deferrable mode: :class:`~airflow.providers.google.cloud.operators.cloud_batch.CloudBatchSubmitJobOperator` -.. exampleinclude:: /../../tests/system/providers/google/cloud/cloud_batch/example_cloud_batch.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/cloud_batch/example_cloud_batch.py :language: python :dedent: 4 :start-after: [START howto_operator_batch_submit_job_deferrable_mode] @@ -68,7 +68,7 @@ To list the tasks of a certain job, you can use: :class:`~airflow.providers.google.cloud.operators.cloud_batch.CloudBatchListTasksOperator` -.. exampleinclude:: /../../tests/system/providers/google/cloud/cloud_batch/example_cloud_batch.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/cloud_batch/example_cloud_batch.py :language: python :dedent: 4 :start-after: [START howto_operator_batch_list_tasks] @@ -83,7 +83,7 @@ To list the jobs, you can use: :class:`~airflow.providers.google.cloud.operators.cloud_batch.CloudBatchListJobsOperator` -.. exampleinclude:: /../../tests/system/providers/google/cloud/cloud_batch/example_cloud_batch.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/cloud_batch/example_cloud_batch.py :language: python :dedent: 4 :start-after: [START howto_operator_batch_list_jobs] @@ -98,7 +98,7 @@ To delete a job you can use: :class:`~airflow.providers.google.cloud.operators.cloud_batch.CloudBatchDeleteJobOperator` -.. exampleinclude:: /../../tests/system/providers/google/cloud/cloud_batch/example_cloud_batch.py +.. 
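A hedged sketch of the Job object plus submit call described above (the image, names and locations are placeholders; field names should be checked against the ``google-cloud-batch`` client in use):

.. code-block:: python

    from google.cloud import batch_v1

    from airflow.providers.google.cloud.operators.cloud_batch import CloudBatchSubmitJobOperator


    def _container_job() -> batch_v1.Job:
        # One task group running a single busybox container.
        runnable = batch_v1.Runnable()
        runnable.container = batch_v1.Runnable.Container()
        runnable.container.image_uri = "gcr.io/google-containers/busybox"
        task_spec = batch_v1.TaskSpec(runnables=[runnable])
        group = batch_v1.TaskGroup(task_count=1, task_spec=task_spec)
        return batch_v1.Job(task_groups=[group])


    submit_job = CloudBatchSubmitJobOperator(
        task_id="submit_batch_job",
        project_id="my-project",
        region="us-central1",
        job_name="example-job",
        job=_container_job(),
    )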
exampleinclude:: /../../providers/tests/system/google/cloud/cloud_batch/example_cloud_batch.py :language: python :dedent: 4 :start-after: [START howto_operator_delete_job] diff --git a/docs/apache-airflow-providers-google/operators/cloud/cloud_build.rst b/docs/apache-airflow-providers-google/operators/cloud/cloud_build.rst index f7d91154095f..e5e8e601b030 100644 --- a/docs/apache-airflow-providers-google/operators/cloud/cloud_build.rst +++ b/docs/apache-airflow-providers-google/operators/cloud/cloud_build.rst @@ -46,7 +46,7 @@ Using the operator Cancel a build in progress with the :class:`~airflow.providers.google.cloud.operators.cloud_build.CloudBuildCancelBuildOperator` operator. -.. exampleinclude:: ../../../../tests/system/providers/google/cloud/cloud_build/example_cloud_build.py +.. exampleinclude:: ../../../../providers/tests/system/google/cloud/cloud_build/example_cloud_build.py :language: python :dedent: 4 :start-after: [START howto_operator_cancel_build] @@ -74,7 +74,7 @@ Build configuration In order to trigger a build, it is necessary to pass the build configuration. -.. exampleinclude:: /../../tests/system/providers/google/cloud/cloud_build/example_cloud_build.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/cloud_build/example_cloud_build.py :language: python :dedent: 0 :start-after: [START howto_operator_gcp_create_build_from_storage_body] @@ -82,7 +82,7 @@ In order to trigger a build, it is necessary to pass the build configuration. In addition, a build can refer to source stored in `Google Cloud Source Repositories `__. -.. exampleinclude:: ../../../../tests/system/providers/google/cloud/cloud_build/example_cloud_build.py +.. exampleinclude:: ../../../../providers/tests/system/google/cloud/cloud_build/example_cloud_build.py :language: python :dedent: 0 :start-after: [START howto_operator_create_build_from_repo_body] @@ -96,7 +96,7 @@ Using the operator Triggering a build is performed with the :class:`~airflow.providers.google.cloud.operators.cloud_build.CloudBuildCreateBuildOperator` operator. -.. exampleinclude:: ../../../../tests/system/providers/google/cloud/cloud_build/example_cloud_build.py +.. exampleinclude:: ../../../../providers/tests/system/google/cloud/cloud_build/example_cloud_build.py :language: python :dedent: 4 :start-after: [START howto_operator_create_build_from_storage] @@ -104,7 +104,7 @@ Trigger a build is performed with the You can use deferrable mode for this action in order to run the operator asynchronously: -.. exampleinclude:: ../../../../tests/system/providers/google/cloud/cloud_build/example_cloud_build.py +.. exampleinclude:: ../../../../providers/tests/system/google/cloud/cloud_build/example_cloud_build.py :language: python :dedent: 4 :start-after: [START howto_operator_create_build_from_storage_async] @@ -115,7 +115,7 @@ You can use :ref:`Jinja templating ` with parameters which allows you to dynamically determine values. The result is saved to :ref:`XCom `, which allows it to be used by other operators. -.. exampleinclude:: ../../../../tests/system/providers/google/cloud/cloud_build/example_cloud_build.py +.. exampleinclude:: ../../../../providers/tests/system/google/cloud/cloud_build/example_cloud_build.py :language: python :dedent: 4 :start-after: [START howto_operator_create_build_from_storage_result] @@ -124,7 +124,7 @@ By default, after the build is created, it will wait for the build operation to complete.
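A minimal sketch of such a blocking build submission (the single build step is a placeholder; see the Cloud Build ``Build`` resource for the full schema):

.. code-block:: python

    from airflow.providers.google.cloud.operators.cloud_build import CloudBuildCreateBuildOperator

    create_build = CloudBuildCreateBuildOperator(
        task_id="create_build",
        # wait defaults to True, so the task blocks until the build finishes.
        build={"steps": [{"name": "ubuntu", "args": ["echo", "hello build"]}]},
    )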
If there is no need to wait for completion, you can pass ``wait=False`` as in the example shown below. -.. exampleinclude:: ../../../../tests/system/providers/google/cloud/cloud_build/example_cloud_build.py +.. exampleinclude:: ../../../../providers/tests/system/google/cloud/cloud_build/example_cloud_build.py :language: python :dedent: 4 :start-after: [START howto_operator_create_build_without_wait] @@ -132,7 +132,7 @@ you can pass wait=False as example shown below. You can use deferrable mode for this action in order to run the operator asynchronously: -.. exampleinclude:: ../../../../tests/system/providers/google/cloud/cloud_build/example_cloud_build.py +.. exampleinclude:: ../../../../providers/tests/system/google/cloud/cloud_build/example_cloud_build.py :language: python :dedent: 4 :start-after: [START howto_operator_create_build_without_wait_async] @@ -141,7 +141,7 @@ You can use deferrable mode for this action in order to run the operator asynchr In order to start a build on Cloud Build you can use a build configuration file. A build config file defines the fields that are needed for Cloud Build to perform your tasks. You can write the build config file using the YAML or the JSON syntax. -.. exampleinclude:: ../../../../tests/system/providers/google/cloud/cloud_build/example_cloud_build.py +.. exampleinclude:: ../../../../providers/tests/system/google/cloud/cloud_build/example_cloud_build.py :language: python :dedent: 4 :start-after: [START howto_operator_gcp_create_build_from_yaml_body] @@ -149,7 +149,7 @@ that are needed for Cloud Build to perform your tasks. You can write the build c You can use deferrable mode for this action in order to run the operator asynchronously: -.. exampleinclude:: ../../../../tests/system/providers/google/cloud/cloud_build/example_cloud_build.py +.. exampleinclude:: ../../../../providers/tests/system/google/cloud/cloud_build/example_cloud_build.py :language: python :dedent: 4 :start-after: [START howto_operator_gcp_create_build_from_yaml_body_async] @@ -158,7 +158,7 @@ You can use deferrable mode for this action in order to run the operator asynchr In addition, a Cloud Build can refer to source stored in `Google Cloud Source Repositories `__. Once the build has started, it will build the code in the source repositories. -.. exampleinclude:: ../../../../tests/system/providers/google/cloud/cloud_build/example_cloud_build.py +.. exampleinclude:: ../../../../providers/tests/system/google/cloud/cloud_build/example_cloud_build.py :language: python :dedent: 0 :start-after: [START howto_operator_create_build_from_repo] @@ -166,7 +166,7 @@ Once build has started, it ill build the code in source repositories. You can use deferrable mode for this action in order to run the operator asynchronously: -.. exampleinclude:: ../../../../tests/system/providers/google/cloud/cloud_build/example_cloud_build.py +.. exampleinclude:: ../../../../providers/tests/system/google/cloud/cloud_build/example_cloud_build.py :language: python :dedent: 4 :start-after: [START howto_operator_create_build_from_repo_async] @@ -189,7 +189,7 @@ Using the operator Creates a new Cloud Build trigger with the :class:`~airflow.providers.google.cloud.operators.cloud_build.CloudBuildCreateBuildTriggerOperator` operator. -..
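A hedged sketch of such a trigger definition (the repo, branch and filename are placeholders; see the Cloud Build ``BuildTrigger`` resource for the full schema):

.. code-block:: python

    from airflow.providers.google.cloud.operators.cloud_build import (
        CloudBuildCreateBuildTriggerOperator,
    )

    create_trigger = CloudBuildCreateBuildTriggerOperator(
        task_id="create_trigger",
        trigger={
            "name": "example-trigger",
            # Run cloudbuild.yaml on pushes to 'main' of a Cloud Source repository.
            "trigger_template": {"repo_name": "example-repo", "branch_name": "main"},
            "filename": "cloudbuild.yaml",
        },
    )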
exampleinclude:: /../../providers/tests/system/google/cloud/cloud_build/example_cloud_build_trigger.py :language: python :dedent: 4 :start-after: [START howto_operator_create_build_trigger] @@ -216,7 +216,7 @@ Using the operator Deletes a new Cloud Build trigger with the :class:`~airflow.providers.google.cloud.operators.cloud_build.CloudBuildDeleteBuildTriggerOperator` operator. -.. exampleinclude:: ../../../../tests/system/providers/google/cloud/cloud_build/example_cloud_build_trigger.py +.. exampleinclude:: ../../../../providers/tests/system/google/cloud/cloud_build/example_cloud_build_trigger.py :language: python :dedent: 4 :start-after: [START howto_operator_delete_build_trigger] @@ -243,7 +243,7 @@ Using the operator Returns information about a previously requested build with the :class:`~airflow.providers.google.cloud.operators.cloud_build.CloudBuildGetBuildOperator` operator. -.. exampleinclude:: ../../../../tests/system/providers/google/cloud/cloud_build/example_cloud_build.py +.. exampleinclude:: ../../../../providers/tests/system/google/cloud/cloud_build/example_cloud_build.py :language: python :dedent: 4 :start-after: [START howto_operator_get_build] @@ -270,7 +270,7 @@ Using the operator Returns information about a Cloud Build trigger with the :class:`~airflow.providers.google.cloud.operators.cloud_build.CloudBuildGetBuildTriggerOperator` operator. -.. exampleinclude:: ../../../../tests/system/providers/google/cloud/cloud_build/example_cloud_build_trigger.py +.. exampleinclude:: ../../../../providers/tests/system/google/cloud/cloud_build/example_cloud_build_trigger.py :language: python :dedent: 4 :start-after: [START howto_operator_get_build_trigger] @@ -297,7 +297,7 @@ Using the operator Lists all the existing Cloud Build triggers with the :class:`~airflow.providers.google.cloud.operators.cloud_build.CloudBuildListBuildTriggersOperator` operator. -.. exampleinclude:: ../../../../tests/system/providers/google/cloud/cloud_build/example_cloud_build_trigger.py +.. exampleinclude:: ../../../../providers/tests/system/google/cloud/cloud_build/example_cloud_build_trigger.py :language: python :dedent: 4 :start-after: [START howto_operator_list_build_triggers] @@ -324,7 +324,7 @@ Using the operator Lists previously requested builds with the :class:`~airflow.providers.google.cloud.operators.cloud_build.CloudBuildListBuildsOperator` operator. -.. exampleinclude:: ../../../../tests/system/providers/google/cloud/cloud_build/example_cloud_build.py +.. exampleinclude:: ../../../../providers/tests/system/google/cloud/cloud_build/example_cloud_build.py :language: python :dedent: 4 :start-after: [START howto_operator_list_builds] @@ -352,7 +352,7 @@ Using the operator Creates a new build based on the specified build with the :class:`~airflow.providers.google.cloud.operators.cloud_build.CloudBuildRetryBuildOperator` operator. -.. exampleinclude:: ../../../../tests/system/providers/google/cloud/cloud_build/example_cloud_build.py +.. exampleinclude:: ../../../../providers/tests/system/google/cloud/cloud_build/example_cloud_build.py :language: python :dedent: 4 :start-after: [START howto_operator_retry_build] @@ -379,7 +379,7 @@ Using the operator Runs a trigger at a particular source revision with the :class:`~airflow.providers.google.cloud.operators.cloud_build.CloudBuildRunBuildTriggerOperator` operator. -.. exampleinclude:: ../../../../tests/system/providers/google/cloud/cloud_build/example_cloud_build_trigger.py +.. 
exampleinclude:: ../../../../providers/tests/system/google/cloud/cloud_build/example_cloud_build_trigger.py :language: python :dedent: 4 :start-after: [START howto_operator_run_build_trigger] @@ -406,7 +406,7 @@ Using the operator Updates a Cloud Build trigger with the :class:`~airflow.providers.google.cloud.operators.cloud_build.CloudBuildUpdateBuildTriggerOperator` operator. -.. exampleinclude:: ../../../../tests/system/providers/google/cloud/cloud_build/example_cloud_build_trigger.py +.. exampleinclude:: ../../../../providers/tests/system/google/cloud/cloud_build/example_cloud_build_trigger.py :language: python :dedent: 4 :start-after: [START howto_operator_create_build_trigger] diff --git a/docs/apache-airflow-providers-google/operators/cloud/cloud_composer.rst b/docs/apache-airflow-providers-google/operators/cloud/cloud_composer.rst index f8f00fbe6c54..7b05bbc6baa0 100644 --- a/docs/apache-airflow-providers-google/operators/cloud/cloud_composer.rst +++ b/docs/apache-airflow-providers-google/operators/cloud/cloud_composer.rst @@ -39,7 +39,7 @@ For more information about the available fields to pass when creating a environm A simple environment configuration can look as follows: -.. exampleinclude:: /../../tests/system/providers/google/cloud/composer/example_cloud_composer.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/composer/example_cloud_composer.py :language: python :dedent: 0 :start-after: [START howto_operator_composer_simple_environment] @@ -48,7 +48,7 @@ A simple environment configuration can look as followed: With this configuration we can create the environment: :class:`~airflow.providers.google.cloud.operators.cloud_composer.CloudComposerCreateEnvironmentOperator` -.. exampleinclude:: /../../tests/system/providers/google/cloud/composer/example_cloud_composer.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/composer/example_cloud_composer.py :language: python :dedent: 4 :start-after: [START howto_operator_create_composer_environment] @@ -57,7 +57,7 @@ With this configuration we can create the environment: or you can define the same operator in the deferrable mode: :class:`~airflow.providers.google.cloud.operators.cloud_composer.CloudComposerCreateEnvironmentOperator` -.. exampleinclude:: /../../tests/system/providers/google/cloud/composer/example_cloud_composer.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/composer/example_cloud_composer.py :language: python :dedent: 4 :start-after: [START howto_operator_create_composer_environment_deferrable_mode] @@ -70,7 +70,7 @@ To get a environment you can use: :class:`~airflow.providers.google.cloud.operators.cloud_composer.CloudComposerGetEnvironmentOperator` -.. exampleinclude:: /../../tests/system/providers/google/cloud/composer/example_cloud_composer.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/composer/example_cloud_composer.py :language: python :dedent: 4 :start-after: [START howto_operator_get_composer_environment] @@ -83,7 +83,7 @@ To get a environment you can use: :class:`~airflow.providers.google.cloud.operators.cloud_composer.CloudComposerListEnvironmentsOperator` -.. exampleinclude:: /../../tests/system/providers/google/cloud/composer/example_cloud_composer.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/composer/example_cloud_composer.py :language: python :dedent: 4 :start-after: [START howto_operator_list_composer_environments]
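As a rough sketch (the project and region values below are hypothetical, not taken from the example file), listing environments needs little more than a location:

.. code-block:: python

    from airflow.providers.google.cloud.operators.cloud_composer import (
        CloudComposerListEnvironmentsOperator,
    )

    list_environments = CloudComposerListEnvironmentsOperator(
        task_id="list_environments",
        project_id="my-project",  # hypothetical
        region="us-central1",  # hypothetical
    )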
@@ -98,7 +98,7 @@ For more information on updateMask and other parameters take a look at `Cloud Co An example of a new service config and the updateMask: -.. exampleinclude:: /../../tests/system/providers/google/cloud/composer/example_cloud_composer.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/composer/example_cloud_composer.py :language: python :dedent: 0 :start-after: [START howto_operator_composer_update_environment] @@ -107,7 +107,7 @@ An example of a new service config and the updateMask: To update a service you can use: :class:`~airflow.providers.google.cloud.operators.cloud_composer.CloudComposerUpdateEnvironmentOperator` -.. exampleinclude:: /../../tests/system/providers/google/cloud/composer/example_cloud_composer.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/composer/example_cloud_composer.py :language: python :dedent: 4 :start-after: [START howto_operator_update_composer_environment] @@ -116,7 +116,7 @@ To update a service you can use: or you can define the same operator in the deferrable mode: :class:`~airflow.providers.google.cloud.operators.cloud_composer.CloudComposerUpdateEnvironmentOperator` -.. exampleinclude:: /../../tests/system/providers/google/cloud/composer/example_cloud_composer.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/composer/example_cloud_composer.py :language: python :dedent: 4 :start-after: [START howto_operator_update_composer_environment_deferrable_mode] @@ -129,7 +129,7 @@ To delete a service you can use: :class:`~airflow.providers.google.cloud.operators.cloud_composer.CloudComposerDeleteEnvironmentOperator` -.. exampleinclude:: /../../tests/system/providers/google/cloud/composer/example_cloud_composer.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/composer/example_cloud_composer.py :language: python :dedent: 4 :start-after: [START howto_operator_delete_composer_environment] @@ -138,7 +138,7 @@ To delete a service you can use: or you can define the same operator in the deferrable mode: :class:`~airflow.providers.google.cloud.operators.cloud_composer.CloudComposerDeleteEnvironmentOperator` -.. exampleinclude:: /../../tests/system/providers/google/cloud/composer/example_cloud_composer.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/composer/example_cloud_composer.py :language: python :dedent: 4 :start-after: [START howto_operator_delete_composer_environment_deferrable_mode] @@ -152,7 +152,7 @@ You can also list all supported Cloud Composer images: :class:`~airflow.providers.google.cloud.operators.cloud_composer.CloudComposerListImageVersionsOperator` -.. exampleinclude:: /../../tests/system/providers/google/cloud/composer/example_cloud_composer.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/composer/example_cloud_composer.py :language: python :dedent: 4 :start-after: [START howto_operator_composer_image_list] @@ -164,7 +164,7 @@ Run Airflow CLI commands To run Airflow CLI commands in your environments, use: :class:`~airflow.providers.google.cloud.operators.cloud_composer.CloudComposerRunAirflowCLICommandOperator` -.. exampleinclude:: /../../tests/system/providers/google/cloud/composer/example_cloud_composer.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/composer/example_cloud_composer.py :language: python :dedent: 4 :start-after: [START howto_operator_run_airflow_cli_command]
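A minimal sketch of running a CLI command through the operator (all identifiers below are hypothetical; the command string is passed as you would type it after ``airflow``):

.. code-block:: python

    from airflow.providers.google.cloud.operators.cloud_composer import (
        CloudComposerRunAirflowCLICommandOperator,
    )

    run_dags_list = CloudComposerRunAirflowCLICommandOperator(
        task_id="run_dags_list",
        project_id="my-project",  # hypothetical
        region="us-central1",  # hypothetical
        environment_id="my-environment",  # hypothetical
        command="dags list",  # any Airflow CLI command
    )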
@@ -172,7 +172,7 @@ You can run Airflow CLI commands in your environments, use: or you can define the same operator in the deferrable mode: -.. exampleinclude:: /../../tests/system/providers/google/cloud/composer/example_cloud_composer.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/composer/example_cloud_composer.py :language: python :dedent: 4 :start-after: [START howto_operator_run_airflow_cli_command_deferrable_mode] @@ -184,7 +184,7 @@ Check if a DAG run has completed You can use a sensor that checks whether a DAG run has completed in your environments: :class:`~airflow.providers.google.cloud.sensors.cloud_composer.CloudComposerDAGRunSensor` -.. exampleinclude:: /../../tests/system/providers/google/cloud/composer/example_cloud_composer.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/composer/example_cloud_composer.py :language: python :dedent: 4 :start-after: [START howto_sensor_dag_run] @@ -192,7 +192,7 @@ You can use sensor that checks if a DAG run has completed in your environments, or you can define the same sensor in the deferrable mode: -.. exampleinclude:: /../../tests/system/providers/google/cloud/composer/example_cloud_composer.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/composer/example_cloud_composer.py :language: python :dedent: 4 :start-after: [START howto_sensor_dag_run_deferrable_mode] diff --git a/docs/apache-airflow-providers-google/operators/cloud/cloud_memorystore.rst b/docs/apache-airflow-providers-google/operators/cloud/cloud_memorystore.rst index 7e7c04ba582d..9adeea88513a 100644 --- a/docs/apache-airflow-providers-google/operators/cloud/cloud_memorystore.rst +++ b/docs/apache-airflow-providers-google/operators/cloud/cloud_memorystore.rst @@ -41,7 +41,7 @@ presented as a compatible dictionary also. Here is an example of an instance -.. exampleinclude:: /../../tests/system/providers/google/cloud/cloud_memorystore/example_cloud_memorystore_redis.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/cloud_memorystore/example_cloud_memorystore_redis.py :language: python :start-after: [START howto_operator_instance] :end-before: [END howto_operator_instance] @@ -59,7 +59,7 @@ make a use of the service account listed under ``persistenceIamIdentity``. You can use the :class:`~airflow.providers.google.cloud.operators.gcs.GCSBucketCreateAclEntryOperator` operator to set permissions. -.. exampleinclude:: /../../tests/system/providers/google/cloud/cloud_memorystore/example_cloud_memorystore_redis.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/cloud_memorystore/example_cloud_memorystore_redis.py :language: python :dedent: 4 :start-after: [START howto_operator_set_acl_permission] @@ -76,7 +76,7 @@ Create instance Creating an instance is performed with the :class:`~airflow.providers.google.cloud.operators.cloud_memorystore.CloudMemorystoreCreateInstanceOperator` operator. -.. exampleinclude:: /../../tests/system/providers/google/cloud/cloud_memorystore/example_cloud_memorystore_redis.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/cloud_memorystore/example_cloud_memorystore_redis.py :language: python :dedent: 4 :start-after: [START howto_operator_create_instance] @@ -87,7 +87,7 @@ You can use :ref:`Jinja templating ` with parameters which allows you to dynamically determine values. The result is saved to :ref:`XCom `, which allows it to be used by other operators. -.. exampleinclude:: /../../tests/system/providers/google/cloud/cloud_memorystore/example_cloud_memorystore_redis.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/cloud_memorystore/example_cloud_memorystore_redis.py :language: python :dedent: 4 :start-after: [START howto_operator_create_instance_result]
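For instance, a downstream TaskFlow task can consume that XCom value; a generic sketch (all ids and the instance body below are hypothetical, not taken from the example file):

.. code-block:: python

    from airflow.decorators import task
    from airflow.providers.google.cloud.operators.cloud_memorystore import (
        CloudMemorystoreCreateInstanceOperator,
    )

    create_instance = CloudMemorystoreCreateInstanceOperator(
        task_id="create_instance",
        location="europe-north1",  # hypothetical
        instance_id="redis-instance",  # hypothetical
        instance={"tier": "BASIC", "memory_size_gb": 1},
        project_id="my-project",  # hypothetical
    )


    @task
    def report_instance(create_result):
        # The create operator pushes its return value to XCom;
        # TaskFlow argument passing pulls it here at runtime.
        print(create_result)


    report_instance(create_instance.output)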
@@ -102,7 +102,7 @@ Delete instance Deleting an instance is performed with the :class:`~airflow.providers.google.cloud.operators.cloud_memorystore.CloudMemorystoreDeleteInstanceOperator` operator. -.. exampleinclude:: /../../tests/system/providers/google/cloud/cloud_memorystore/example_cloud_memorystore_redis.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/cloud_memorystore/example_cloud_memorystore_redis.py :language: python :dedent: 4 :start-after: [START howto_operator_delete_instance] @@ -120,7 +120,7 @@ Export instance Exporting an instance is performed with the :class:`~airflow.providers.google.cloud.operators.cloud_memorystore.CloudMemorystoreExportInstanceOperator` operator. -.. exampleinclude:: /../../tests/system/providers/google/cloud/cloud_memorystore/example_cloud_memorystore_redis.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/cloud_memorystore/example_cloud_memorystore_redis.py :language: python :dedent: 4 :start-after: [START howto_operator_export_instance] @@ -138,7 +138,7 @@ Failover instance Failing over an instance is performed with the :class:`~airflow.providers.google.cloud.operators.cloud_memorystore.CloudMemorystoreFailoverInstanceOperator` operator. -.. exampleinclude:: /../../tests/system/providers/google/cloud/cloud_memorystore/example_cloud_memorystore_redis.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/cloud_memorystore/example_cloud_memorystore_redis.py :language: python :dedent: 4 :start-after: [START howto_operator_failover_instance] @@ -156,7 +156,7 @@ Get instance Getting an instance is performed with the :class:`~airflow.providers.google.cloud.operators.cloud_memorystore.CloudMemorystoreGetInstanceOperator` operator. -.. exampleinclude:: /../../tests/system/providers/google/cloud/cloud_memorystore/example_cloud_memorystore_redis.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/cloud_memorystore/example_cloud_memorystore_redis.py :language: python :dedent: 4 :start-after: [START howto_operator_get_instance] @@ -174,7 +174,7 @@ Import instance Importing an instance is performed with the :class:`~airflow.providers.google.cloud.operators.cloud_memorystore.CloudMemorystoreImportOperator` operator. -.. exampleinclude:: /../../tests/system/providers/google/cloud/cloud_memorystore/example_cloud_memorystore_redis.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/cloud_memorystore/example_cloud_memorystore_redis.py :language: python :dedent: 4 :start-after: [START howto_operator_import_instance] @@ -192,7 +192,7 @@ List instances Listing instances is performed with the :class:`~airflow.providers.google.cloud.operators.cloud_memorystore.CloudMemorystoreListInstancesOperator` operator. -.. exampleinclude:: /../../tests/system/providers/google/cloud/cloud_memorystore/example_cloud_memorystore_redis.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/cloud_memorystore/example_cloud_memorystore_redis.py :language: python :dedent: 4 :start-after: [START howto_operator_list_instances]
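A minimal sketch of the export call shape (the bucket, instance, and project names below are hypothetical, not taken from the example file):

.. code-block:: python

    from airflow.providers.google.cloud.operators.cloud_memorystore import (
        CloudMemorystoreExportInstanceOperator,
    )

    export_instance = CloudMemorystoreExportInstanceOperator(
        task_id="export_instance",
        location="europe-north1",  # hypothetical
        instance="redis-instance",  # hypothetical
        output_config={"gcs_destination": {"uri": "gs://my-bucket/my-export.rdb"}},
        project_id="my-project",  # hypothetical
    )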
@@ -203,7 +203,7 @@ You can use :ref:`Jinja templating ` with parameters which allows you to dynamically determine values. The result is saved to :ref:`XCom `, which allows it to be used by other operators. -.. exampleinclude:: /../../tests/system/providers/google/cloud/cloud_memorystore/example_cloud_memorystore_redis.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/cloud_memorystore/example_cloud_memorystore_redis.py :language: python :dedent: 4 :start-after: [START howto_operator_list_instances_result] @@ -217,7 +217,7 @@ Update instance Updating an instance is performed with the :class:`~airflow.providers.google.cloud.operators.cloud_memorystore.CloudMemorystoreUpdateInstanceOperator` operator. -.. exampleinclude:: /../../tests/system/providers/google/cloud/cloud_memorystore/example_cloud_memorystore_redis.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/cloud_memorystore/example_cloud_memorystore_redis.py :language: python :dedent: 4 :start-after: [START howto_operator_update_instance] @@ -236,7 +236,7 @@ Scale instance Scaling an instance is performed with the :class:`~airflow.providers.google.cloud.operators.cloud_memorystore.CloudMemorystoreScaleInstanceOperator` operator. -.. exampleinclude:: /../../tests/system/providers/google/cloud/cloud_memorystore/example_cloud_memorystore_redis.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/cloud_memorystore/example_cloud_memorystore_redis.py :language: python :dedent: 4 :start-after: [START howto_operator_scale_instance] diff --git a/docs/apache-airflow-providers-google/operators/cloud/cloud_memorystore_memcached.rst b/docs/apache-airflow-providers-google/operators/cloud/cloud_memorystore_memcached.rst index 2fe980be0215..6f52020c283b 100644 --- a/docs/apache-airflow-providers-google/operators/cloud/cloud_memorystore_memcached.rst +++ b/docs/apache-airflow-providers-google/operators/cloud/cloud_memorystore_memcached.rst @@ -41,7 +41,7 @@ The object can be presented as a compatible dictionary also. Here is an example of an instance -.. exampleinclude:: /../../tests/system/providers/google/cloud/cloud_memorystore/example_cloud_memorystore_memcached.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/cloud_memorystore/example_cloud_memorystore_memcached.py :language: python :start-after: [START howto_operator_memcached_instance] :end-before: [END howto_operator_memcached_instance] @@ -56,7 +56,7 @@ Create a instance is performed with the :class:`~airflow.providers.google.cloud.operators.cloud_memorystore.CloudMemorystoreMemcachedCreateInstanceOperator` operator. -.. exampleinclude:: /../../tests/system/providers/google/cloud/cloud_memorystore/example_cloud_memorystore_memcached.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/cloud_memorystore/example_cloud_memorystore_memcached.py :language: python :dedent: 4 :start-after: [START howto_operator_create_instance_memcached] @@ -72,7 +72,7 @@ Delete an instance is performed with the :class:`~airflow.providers.google.cloud.operators.cloud_memorystore.CloudMemorystoreMemcachedDeleteInstanceOperator` operator. -.. exampleinclude:: /../../tests/system/providers/google/cloud/cloud_memorystore/example_cloud_memorystore_memcached.py +..
exampleinclude:: /../../providers/tests/system/google/cloud/cloud_memorystore/example_cloud_memorystore_memcached.py :language: python :dedent: 4 :start-after: [START howto_operator_delete_instance_memcached] @@ -88,7 +88,7 @@ Get an instance is performed with the :class:`~airflow.providers.google.cloud.operators.cloud_memorystore.CloudMemorystoreMemcachedGetInstanceOperator` operator. -.. exampleinclude:: /../../tests/system/providers/google/cloud/cloud_memorystore/example_cloud_memorystore_memcached.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/cloud_memorystore/example_cloud_memorystore_memcached.py :language: python :dedent: 4 :start-after: [START howto_operator_get_instance_memcached] @@ -104,7 +104,7 @@ List instances is performed with the :class:`~airflow.providers.google.cloud.operators.cloud_memorystore.CloudMemorystoreMemcachedListInstancesOperator` operator. -.. exampleinclude:: /../../tests/system/providers/google/cloud/cloud_memorystore/example_cloud_memorystore_memcached.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/cloud_memorystore/example_cloud_memorystore_memcached.py :language: python :dedent: 4 :start-after: [START howto_operator_list_instances_memcached] @@ -120,7 +120,7 @@ Updating an instance is performed with the :class:`~airflow.providers.google.cloud.operators.cloud_memorystore.CloudMemorystoreMemcachedUpdateInstanceOperator` operator. -.. exampleinclude:: /../../tests/system/providers/google/cloud/cloud_memorystore/example_cloud_memorystore_memcached.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/cloud_memorystore/example_cloud_memorystore_memcached.py :language: python :dedent: 4 :start-after: [START howto_operator_update_instance_memcached] @@ -138,7 +138,7 @@ and :class:`~airflow.providers.google.cloud.operators.cloud_memorystore.CloudMemorystoreMemcachedApplyParametersOperator` operator. -.. exampleinclude:: /../../tests/system/providers/google/cloud/cloud_memorystore/example_cloud_memorystore_memcached.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/cloud_memorystore/example_cloud_memorystore_memcached.py :language: python :dedent: 4 :start-after: [START howto_operator_update_and_apply_parameters_memcached] diff --git a/docs/apache-airflow-providers-google/operators/cloud/cloud_run.rst b/docs/apache-airflow-providers-google/operators/cloud/cloud_run.rst index 95a3b3a67667..65a28ea4f42c 100644 --- a/docs/apache-airflow-providers-google/operators/cloud/cloud_run.rst +++ b/docs/apache-airflow-providers-google/operators/cloud/cloud_run.rst @@ -35,7 +35,7 @@ For more information about the Job object fields, visit `Google Cloud Run Job de A simple job configuration can be created with a Job object: -.. exampleinclude:: /../../tests/system/providers/google/cloud/cloud_run/example_cloud_run.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/cloud_run/example_cloud_run.py :language: python :dedent: 0 :start-after: [START howto_cloud_run_job_instance_creation] @@ -43,7 +43,7 @@ A simple job configuration can be created with a Job object: or with a Python dictionary: -.. exampleinclude:: /../../tests/system/providers/google/cloud/cloud_run/example_cloud_run.py +.. 
exampleinclude:: /../../tests/system/providers/google/cloud/cloud_run/example_cloud_run.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/cloud_run/example_cloud_run.py :language: python :dedent: 0 :start-after: [START howto_cloud_run_job_dict_creation] @@ -52,7 +52,7 @@ or with a Python dictionary: You can create a Cloud Run Job with any of these configurations: :class:`~airflow.providers.google.cloud.operators.cloud_run.CloudRunCreateJobOperator` -.. exampleinclude:: /../../tests/system/providers/google/cloud/cloud_run/example_cloud_run.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/cloud_run/example_cloud_run.py :language: python :dedent: 4 :start-after: [START howto_operator_cloud_run_create_job] @@ -69,7 +69,7 @@ For more information about the Service object fields, visit `Google Cloud Run Se A simple service configuration can look as follows: -.. exampleinclude:: /../../tests/system/providers/google/cloud/cloud_run/example_cloud_run_service.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/cloud_run/example_cloud_run_service.py :language: python :dedent: 0 :start-after: [START howto_operator_cloud_run_service_creation] @@ -79,7 +79,7 @@ A simple service configuration can look as follows: With this configuration we can create the service: :class:`~airflow.providers.google.cloud.operators.cloud_run.CloudRunCreateServiceOperator` -.. exampleinclude:: /../../tests/system/providers/google/cloud/cloud_run/example_cloud_run_service.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/cloud_run/example_cloud_run_service.py :language: python :dedent: 4 :start-after: [START howto_operator_cloud_run_create_service] @@ -94,7 +94,7 @@ Delete a service With this configuration we can delete the service: :class:`~airflow.providers.google.cloud.operators.cloud_run.CloudRunDeleteServiceOperator` -.. exampleinclude:: /../../tests/system/providers/google/cloud/cloud_run/example_cloud_run_service.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/cloud_run/example_cloud_run_service.py :language: python :dedent: 4 :start-after: [START howto_operator_cloud_run_delete_service] @@ -110,7 +110,7 @@ To execute a job, you can use: :class:`~airflow.providers.google.cloud.operators.cloud_run.CloudRunExecuteJobOperator` -.. exampleinclude:: /../../tests/system/providers/google/cloud/cloud_run/example_cloud_run.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/cloud_run/example_cloud_run.py :language: python :dedent: 4 :start-after: [START howto_operator_cloud_run_execute_job] @@ -120,7 +120,7 @@ or you can define the same operator in the deferrable mode: :class:`~airflow.providers.google.cloud.operators.cloud_run.CloudRunExecuteJobOperator` -.. exampleinclude:: /../../tests/system/providers/google/cloud/cloud_run/example_cloud_run.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/cloud_run/example_cloud_run.py :language: python :dedent: 4 :start-after: [START howto_operator_cloud_run_execute_job_deferrable_mode] @@ -130,7 +130,7 @@ You can also specify overrides that allow you to give a new entrypoint command t :class:`~airflow.providers.google.cloud.operators.cloud_run.CloudRunExecuteJobOperator` -.. exampleinclude:: /../../tests/system/providers/google/cloud/cloud_run/example_cloud_run.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/cloud_run/example_cloud_run.py :language: python :dedent: 4 :start-after: [START howto_operator_cloud_run_execute_job_with_overrides]
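A sketch of the overrides shape (the keys follow the Cloud Run v2 overrides message; all names and values below are hypothetical, not taken from the example file):

.. code-block:: python

    from airflow.providers.google.cloud.operators.cloud_run import (
        CloudRunExecuteJobOperator,
    )

    # Hypothetical override body: replace the container arguments, run a
    # single task, and cap the execution time.
    overrides = {
        "container_overrides": [{"args": ["python", "main.py"]}],
        "task_count": 1,
        "timeout": "300s",
    }

    execute_job_with_overrides = CloudRunExecuteJobOperator(
        task_id="execute_job_with_overrides",
        project_id="my-project",  # hypothetical
        region="us-central1",  # hypothetical
        job_name="my-job",  # hypothetical
        overrides=overrides,
    )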
@@ -144,7 +144,7 @@ To update a job, you can use: :class:`~airflow.providers.google.cloud.operators.cloud_run.CloudRunUpdateJobOperator` -.. exampleinclude:: /../../tests/system/providers/google/cloud/cloud_run/example_cloud_run.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/cloud_run/example_cloud_run.py :language: python :dedent: 4 :start-after: [START howto_operator_cloud_update_job] @@ -161,7 +161,7 @@ To list the jobs, you can use: :class:`~airflow.providers.google.cloud.operators.cloud_run.CloudRunListJobsOperator` -.. exampleinclude:: /../../tests/system/providers/google/cloud/cloud_run/example_cloud_run.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/cloud_run/example_cloud_run.py :language: python :dedent: 4 :start-after: [START howto_operator_cloud_run_list_jobs] @@ -177,7 +177,7 @@ To delete a job you can use: :class:`~airflow.providers.google.cloud.operators.cloud_run.CloudRunDeleteJobOperator` -.. exampleinclude:: /../../tests/system/providers/google/cloud/cloud_run/example_cloud_run.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/cloud_run/example_cloud_run.py :language: python :dedent: 4 :start-after: [START howto_operator_cloud_delete_job] diff --git a/docs/apache-airflow-providers-google/operators/cloud/cloud_sql.rst b/docs/apache-airflow-providers-google/operators/cloud/cloud_sql.rst index 42a32712867d..e1b572046d60 100644 --- a/docs/apache-airflow-providers-google/operators/cloud/cloud_sql.rst +++ b/docs/apache-airflow-providers-google/operators/cloud/cloud_sql.rst @@ -42,7 +42,7 @@ Using the operator You can create the operator with or without project id. If project id is missing it will be retrieved from the Google Cloud connection used. Both variants are shown: -.. exampleinclude:: /../../tests/system/providers/google/cloud/cloud_sql/example_cloud_sql.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/cloud_sql/example_cloud_sql.py :language: python :dedent: 4 :start-after: [START howto_operator_cloudsql_db_create] @@ -50,7 +50,7 @@ it will be retrieved from the Google Cloud connection used. Both variants are sh Example request body: -.. exampleinclude:: /../../tests/system/providers/google/cloud/cloud_sql/example_cloud_sql.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/cloud_sql/example_cloud_sql.py :language: python :start-after: [START howto_operator_cloudsql_db_create_body] :end-before: [END howto_operator_cloudsql_db_create_body] @@ -58,7 +58,7 @@ Example request body: Templating """""""""" -.. literalinclude:: /../../airflow/providers/google/cloud/operators/cloud_sql.py +.. literalinclude:: /../../providers/src/airflow/providers/google/cloud/operators/cloud_sql.py :language: python :dedent: 4 :start-after: [START gcp_sql_db_create_template_fields] @@ -87,7 +87,7 @@ Using the operator You can create the operator with or without project id. If project id is missing it will be retrieved from the Google Cloud connection used. Both variants are shown: -.. exampleinclude:: /../../tests/system/providers/google/cloud/cloud_sql/example_cloud_sql.py +..
exampleinclude:: /../../providers/tests/system/google/cloud/cloud_sql/example_cloud_sql.py :language: python :dedent: 4 :start-after: [START howto_operator_cloudsql_db_delete] @@ -96,7 +96,7 @@ it will be retrieved from the Google Cloud connection used. Both variants are sh Templating """""""""" -.. literalinclude:: /../../airflow/providers/google/cloud/operators/cloud_sql.py +.. literalinclude:: /../../providers/src/airflow/providers/google/cloud/operators/cloud_sql.py :language: python :dedent: 4 :start-after: [START gcp_sql_db_delete_template_fields] @@ -127,7 +127,7 @@ Using the operator You can create the operator with or without project id. If project id is missing it will be retrieved from the Google Cloud connection used. Both variants are shown: -.. exampleinclude:: /../../tests/system/providers/google/cloud/cloud_sql/example_cloud_sql.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/cloud_sql/example_cloud_sql.py :language: python :dedent: 4 :start-after: [START howto_operator_cloudsql_db_patch] @@ -135,7 +135,7 @@ it will be retrieved from the Google Cloud connection used. Both variants are sh Example request body: -.. exampleinclude:: /../../tests/system/providers/google/cloud/cloud_sql/example_cloud_sql.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/cloud_sql/example_cloud_sql.py :language: python :start-after: [START howto_operator_cloudsql_db_patch_body] :end-before: [END howto_operator_cloudsql_db_patch_body] @@ -143,7 +143,7 @@ Example request body: Templating """""""""" -.. literalinclude:: /../../airflow/providers/google/cloud/operators/cloud_sql.py +.. literalinclude:: /../../providers/src/airflow/providers/google/cloud/operators/cloud_sql.py :language: python :dedent: 4 :start-after: [START gcp_sql_db_patch_template_fields] @@ -174,7 +174,7 @@ Using the operator You can create the operator with or without project id. If project id is missing it will be retrieved from the Google Cloud connection used. Both variants are shown: -.. exampleinclude:: /../../tests/system/providers/google/cloud/cloud_sql/example_cloud_sql.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/cloud_sql/example_cloud_sql.py :language: python :dedent: 4 :start-after: [START howto_operator_cloudsql_delete] @@ -183,7 +183,7 @@ it will be retrieved from the Google Cloud connection used. Both variants are sh Templating """""""""" -.. literalinclude:: /../../airflow/providers/google/cloud/operators/cloud_sql.py +.. literalinclude:: /../../providers/src/airflow/providers/google/cloud/operators/cloud_sql.py :language: python :dedent: 4 :start-after: [START gcp_sql_delete_template_fields] @@ -215,7 +215,7 @@ Arguments Example body defining the export operation: -.. exampleinclude:: /../../tests/system/providers/google/cloud/cloud_sql/example_cloud_sql.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/cloud_sql/example_cloud_sql.py :language: python :start-after: [START howto_operator_cloudsql_export_body] :end-before: [END howto_operator_cloudsql_export_body] @@ -226,7 +226,7 @@ Using the operator You can create the operator with or without project id. If project id is missing it will be retrieved from the Google Cloud connection used. Both variants are shown: -.. exampleinclude:: /../../tests/system/providers/google/cloud/cloud_sql/example_cloud_sql.py +.. 
exampleinclude:: /../../tests/system/providers/google/cloud/cloud_sql/example_cloud_sql.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/cloud_sql/example_cloud_sql.py :language: python :dedent: 4 :start-after: [START howto_operator_cloudsql_export] @@ -234,7 +234,7 @@ it will be retrieved from the Google Cloud connection used. Both variants are sh For this action you can also use the operator in deferrable mode: -.. exampleinclude:: /../../tests/system/providers/google/cloud/cloud_sql/example_cloud_sql.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/cloud_sql/example_cloud_sql.py :language: python :dedent: 4 :start-after: [START howto_operator_cloudsql_export_async] @@ -243,7 +243,7 @@ Also for all this action you can use operator in the deferrable mode: Templating """""""""" -.. literalinclude:: /../../airflow/providers/google/cloud/operators/cloud_sql.py +.. literalinclude:: /../../providers/src/airflow/providers/google/cloud/operators/cloud_sql.py :language: python :dedent: 4 :start-after: [START gcp_sql_export_template_fields] @@ -268,7 +268,7 @@ To grant the service account with the appropriate WRITE permissions for the GCS you can use the :class:`~airflow.providers.google.cloud.operators.gcs.GCSBucketCreateAclEntryOperator`, as shown in the example: -.. exampleinclude:: /../../tests/system/providers/google/cloud/cloud_sql/example_cloud_sql.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/cloud_sql/example_cloud_sql.py :language: python :dedent: 4 :start-after: [START howto_operator_cloudsql_export_gcs_permissions] @@ -308,7 +308,7 @@ Arguments Example body defining the import operation: -.. exampleinclude:: /../../tests/system/providers/google/cloud/cloud_sql/example_cloud_sql.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/cloud_sql/example_cloud_sql.py :language: python :start-after: [START howto_operator_cloudsql_import_body] :end-before: [END howto_operator_cloudsql_import_body] @@ -319,7 +319,7 @@ Using the operator You can create the operator with or without project id. If project id is missing it will be retrieved from the Google Cloud connection used. Both variants are shown: -.. exampleinclude:: /../../tests/system/providers/google/cloud/cloud_sql/example_cloud_sql.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/cloud_sql/example_cloud_sql.py :language: python :dedent: 4 :start-after: [START howto_operator_cloudsql_import] @@ -328,7 +328,7 @@ it will be retrieved from the Google Cloud connection used. Both variants are sh Templating """""""""" -.. literalinclude:: /../../airflow/providers/google/cloud/operators/cloud_sql.py +.. literalinclude:: /../../providers/src/airflow/providers/google/cloud/operators/cloud_sql.py :language: python :dedent: 4 :start-after: [START gcp_sql_import_template_fields] @@ -353,7 +353,7 @@ To grant the service account with the appropriate READ permissions for the GCS o you can use the :class:`~airflow.providers.google.cloud.operators.gcs.GCSBucketCreateAclEntryOperator`, as shown in the example: -.. exampleinclude:: /../../tests/system/providers/google/cloud/cloud_sql/example_cloud_sql.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/cloud_sql/example_cloud_sql.py :language: python :dedent: 4 :start-after: [START howto_operator_cloudsql_import_gcs_permissions] @@ -379,7 +379,7 @@ Arguments Example body defining the instance with a failover replica: -.. exampleinclude:: /../../tests/system/providers/google/cloud/cloud_sql/example_cloud_sql.py +..
exampleinclude:: /../../providers/tests/system/google/cloud/cloud_sql/example_cloud_sql.py :language: python :start-after: [START howto_operator_cloudsql_create_body] :end-before: [END howto_operator_cloudsql_create_body] @@ -390,7 +390,7 @@ Using the operator You can create the operator with or without project id. If project id is missing it will be retrieved from the Google Cloud connection used. Both variants are shown: -.. exampleinclude:: /../../tests/system/providers/google/cloud/cloud_sql/example_cloud_sql.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/cloud_sql/example_cloud_sql.py :language: python :dedent: 4 :start-after: [START howto_operator_cloudsql_create] @@ -399,7 +399,7 @@ it will be retrieved from the Google Cloud connection used. Both variants are sh Templating """""""""" -.. literalinclude:: /../../airflow/providers/google/cloud/operators/cloud_sql.py +.. literalinclude:: /../../providers/src/airflow/providers/google/cloud/operators/cloud_sql.py :language: python :dedent: 4 :start-after: [START gcp_sql_create_template_fields] @@ -430,7 +430,7 @@ Arguments Example body defining the instance: -.. exampleinclude:: /../../tests/system/providers/google/cloud/cloud_sql/example_cloud_sql.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/cloud_sql/example_cloud_sql.py :language: python :start-after: [START howto_operator_cloudsql_patch_body] :end-before: [END howto_operator_cloudsql_patch_body] @@ -441,7 +441,7 @@ Using the operator You can create the operator with or without project id. If project id is missing it will be retrieved from the Google Cloud connection used. Both variants are shown: -.. exampleinclude:: /../../tests/system/providers/google/cloud/cloud_sql/example_cloud_sql.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/cloud_sql/example_cloud_sql.py :language: python :dedent: 4 :start-after: [START howto_operator_cloudsql_patch] @@ -450,7 +450,7 @@ it will be retrieved from the Google Cloud connection used. Both variants are sh Templating """""""""" -.. literalinclude:: /../../airflow/providers/google/cloud/operators/cloud_sql.py +.. literalinclude:: /../../providers/src/airflow/providers/google/cloud/operators/cloud_sql.py :language: python :dedent: 4 :start-after: [START gcp_sql_patch_template_fields] @@ -484,7 +484,7 @@ Using the operator You can create the operator with or without project id. If project id is missing it will be retrieved from the Google Cloud connection used. Both variants are shown: -.. exampleinclude:: /../../tests/system/providers/google/cloud/cloud_sql/example_cloud_sql.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/cloud_sql/example_cloud_sql.py :language: python :dedent: 4 :start-after: [START howto_operator_cloudsql_clone] @@ -493,7 +493,7 @@ Cloud connection used. Both variants are shown: Templating """""""""" -.. literalinclude:: /../../airflow/providers/google/cloud/operators/cloud_sql.py +.. literalinclude:: /../../providers/src/airflow/providers/google/cloud/operators/cloud_sql.py :language: python :dedent: 4 :start-after: [START gcp_sql_clone_template_fields] @@ -558,14 +558,14 @@ NFS-like volumes in the same path for all the workers. Example connection definitions for all non-SSL connectivity. Note that all the components of the connection URI should be URL-encoded: -.. exampleinclude:: /../../tests/system/providers/google/cloud/cloud_sql/example_cloud_sql_query.py +.. 
exampleinclude:: /../../tests/system/providers/google/cloud/cloud_sql/example_cloud_sql_query.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/cloud_sql/example_cloud_sql_query.py :language: python :start-after: [START howto_operator_cloudsql_query_connections] :end-before: [END howto_operator_cloudsql_query_connections] A similar connection definition for all SSL-enabled connectivity: -.. exampleinclude:: /../../tests/system/providers/google/cloud/cloud_sql/example_cloud_sql_query_ssl.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/cloud_sql/example_cloud_sql_query_ssl.py :language: python :start-after: [START howto_operator_cloudsql_query_connections] :end-before: [END howto_operator_cloudsql_query_connections] @@ -574,12 +574,12 @@ It is also possible to configure a connection via environment variable (note tha matches the :envvar:`AIRFLOW_CONN_{CONN_ID}` postfix uppercase if you are using a standard AIRFLOW notation for defining connections via environment variables): -.. exampleinclude:: /../../tests/system/providers/google/cloud/cloud_sql/example_cloud_sql_query.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/cloud_sql/example_cloud_sql_query.py :language: python :start-after: [START howto_operator_cloudsql_query_connections_env] :end-before: [END howto_operator_cloudsql_query_connections_env] -.. exampleinclude:: /../../tests/system/providers/google/cloud/cloud_sql/example_cloud_sql_query_ssl.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/cloud_sql/example_cloud_sql_query_ssl.py :language: python :start-after: [START howto_operator_cloudsql_query_connections_env] :end-before: [END howto_operator_cloudsql_query_connections_env] @@ -593,7 +593,7 @@ or the connection configured via environment variable (note that the connection :envvar:`AIRFLOW_CONN_{CONN_ID}` postfix uppercase if you are using a standard AIRFLOW notation for defining connections via environment variables): -.. exampleinclude:: /../../tests/system/providers/google/cloud/cloud_sql/example_cloud_sql_query.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/cloud_sql/example_cloud_sql_query.py :language: python :start-after: [START howto_operator_cloudsql_query_operators] :end-before: [END howto_operator_cloudsql_query_operators] @@ -603,7 +603,7 @@ will be overridden. One of the ways to do so is specifying paths to each certifi Note that these files will be copied into a temporary location with minimal required permissions for security reasons. -.. exampleinclude:: /../../tests/system/providers/google/cloud/cloud_sql/example_cloud_sql_query_ssl.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/cloud_sql/example_cloud_sql_query_ssl.py :language: python :start-after: [START howto_operator_cloudsql_query_operators_ssl] :end-before: [END howto_operator_cloudsql_query_operators_ssl] @@ -614,7 +614,7 @@ format is: {"sslcert": "", "sslkey": "", "sslrootcert": ""} -.. exampleinclude:: /../../tests/system/providers/google/cloud/cloud_sql/example_cloud_sql_query_ssl.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/cloud_sql/example_cloud_sql_query_ssl.py :language: python :start-after: [START howto_operator_cloudsql_query_operators_ssl_secret_id] :end-before: [END howto_operator_cloudsql_query_operators_ssl_secret_id] @@ -622,7 +622,7 @@ format is: Templating """""""""" -.. literalinclude:: /../../airflow/providers/google/cloud/operators/cloud_sql.py +..
literalinclude:: /../../providers/src/airflow/providers/google/cloud/operators/cloud_sql.py :language: python :dedent: 4 :start-after: [START gcp_sql_query_template_fields] diff --git a/docs/apache-airflow-providers-google/operators/cloud/cloud_storage_transfer_service.rst b/docs/apache-airflow-providers-google/operators/cloud/cloud_storage_transfer_service.rst index df3365631f1c..4779ce0dfcd9 100644 --- a/docs/apache-airflow-providers-google/operators/cloud/cloud_storage_transfer_service.rst +++ b/docs/apache-airflow-providers-google/operators/cloud/cloud_storage_transfer_service.rst @@ -62,17 +62,17 @@ For parameter definition, take a look at Using the operator """""""""""""""""" -.. exampleinclude:: /../../tests/system/providers/google/cloud/storage_transfer/example_cloud_storage_transfer_service_gcp.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/storage_transfer/example_cloud_storage_transfer_service_gcp.py :language: python :start-after: [START howto_operator_gcp_transfer_create_job_body_gcp] :end-before: [END howto_operator_gcp_transfer_create_job_body_gcp] -.. exampleinclude:: /../../tests/system/providers/google/cloud/storage_transfer/example_cloud_storage_transfer_service_aws.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/storage_transfer/example_cloud_storage_transfer_service_aws.py :language: python :start-after: [START howto_operator_gcp_transfer_create_job_body_aws] :end-before: [END howto_operator_gcp_transfer_create_job_body_aws] -.. exampleinclude:: /../../tests/system/providers/google/cloud/storage_transfer/example_cloud_storage_transfer_service_aws.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/storage_transfer/example_cloud_storage_transfer_service_aws.py :language: python :dedent: 4 :start-after: [START howto_operator_gcp_transfer_create_job] @@ -81,7 +81,7 @@ Using the operator Templating """""""""" -.. literalinclude:: /../../airflow/providers/google/cloud/operators/cloud_storage_transfer_service.py +.. literalinclude:: /../../providers/src/airflow/providers/google/cloud/operators/cloud_storage_transfer_service.py :language: python :dedent: 4 :start-after: [START gcp_transfer_job_create_template_fields] @@ -107,7 +107,7 @@ For parameter definition, take a look at Using the operator """""""""""""""""" -.. exampleinclude:: /../../tests/system/providers/google/cloud/storage_transfer/example_cloud_storage_transfer_service_aws.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/storage_transfer/example_cloud_storage_transfer_service_aws.py :language: python :dedent: 4 :start-after: [START howto_operator_gcp_transfer_delete_job] @@ -116,7 +116,7 @@ Using the operator Templating """""""""" -.. literalinclude:: /../../airflow/providers/google/cloud/operators/cloud_storage_transfer_service.py +.. literalinclude:: /../../providers/src/airflow/providers/google/cloud/operators/cloud_storage_transfer_service.py :language: python :dedent: 4 :start-after: [START gcp_transfer_job_delete_template_fields] @@ -142,7 +142,7 @@ For parameter definition, take a look at Using the operator """""""""""""""""" -.. exampleinclude:: /../../tests/system/providers/google/cloud/storage_transfer/example_cloud_storage_transfer_service_gcp.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/storage_transfer/example_cloud_storage_transfer_service_gcp.py :language: python :dedent: 4 :start-after: [START howto_operator_gcp_transfer_run_job] @@ -151,7 +151,7 @@ Using the operator Templating """""""""" -.. 
literalinclude:: /../../airflow/providers/google/cloud/operators/cloud_storage_transfer_service.py +.. literalinclude:: /../../providers/src/airflow/providers/google/cloud/operators/cloud_storage_transfer_service.py :language: python :dedent: 4 :start-after: [START gcp_transfer_job_run_template_fields] @@ -177,12 +177,12 @@ For parameter definition, take a look at Using the operator """""""""""""""""" -.. exampleinclude:: /../../tests/system/providers/google/cloud/storage_transfer/example_cloud_storage_transfer_service_gcp.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/storage_transfer/example_cloud_storage_transfer_service_gcp.py :language: python :start-after: [START howto_operator_gcp_transfer_update_job_body] :end-before: [END howto_operator_gcp_transfer_update_job_body] -.. exampleinclude:: /../../tests/system/providers/google/cloud/storage_transfer/example_cloud_storage_transfer_service_gcp.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/storage_transfer/example_cloud_storage_transfer_service_gcp.py :language: python :dedent: 4 :start-after: [START howto_operator_gcp_transfer_update_job] @@ -191,7 +191,7 @@ Using the operator Templating """""""""" -.. literalinclude:: /../../airflow/providers/google/cloud/operators/cloud_storage_transfer_service.py +.. literalinclude:: /../../providers/src/airflow/providers/google/cloud/operators/cloud_storage_transfer_service.py :language: python :dedent: 4 :start-after: [START gcp_transfer_job_update_template_fields] @@ -216,7 +216,7 @@ For parameter definition, take a look at Using the operator """""""""""""""""" -.. exampleinclude:: /../../tests/system/providers/google/cloud/storage_transfer/example_cloud_storage_transfer_service_aws.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/storage_transfer/example_cloud_storage_transfer_service_aws.py :language: python :dedent: 4 :start-after: [START howto_operator_gcp_transfer_cancel_operation] @@ -225,7 +225,7 @@ Using the operator Templating """""""""" -.. literalinclude:: /../../airflow/providers/google/cloud/operators/cloud_storage_transfer_service.py +.. literalinclude:: /../../providers/src/airflow/providers/google/cloud/operators/cloud_storage_transfer_service.py :language: python :dedent: 4 :start-after: [START gcp_transfer_operation_cancel_template_fields] @@ -252,7 +252,7 @@ For parameter definition, take a look at Using the operator """""""""""""""""" -.. exampleinclude:: /../../tests/system/providers/google/cloud/storage_transfer/example_cloud_storage_transfer_service_aws.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/storage_transfer/example_cloud_storage_transfer_service_aws.py :language: python :dedent: 4 :start-after: [START howto_operator_gcp_transfer_get_operation] @@ -261,7 +261,7 @@ Using the operator Templating """""""""" -.. literalinclude:: /../../airflow/providers/google/cloud/operators/cloud_storage_transfer_service.py +.. literalinclude:: /../../providers/src/airflow/providers/google/cloud/operators/cloud_storage_transfer_service.py :language: python :dedent: 4 :start-after: [START gcp_transfer_operation_get_template_fields] @@ -287,7 +287,7 @@ For parameter definition, take a look at Using the operator """""""""""""""""" -.. exampleinclude:: /../../tests/system/providers/google/cloud/storage_transfer/example_cloud_storage_transfer_service_aws.py +.. 
exampleinclude:: /../../providers/tests/system/google/cloud/storage_transfer/example_cloud_storage_transfer_service_aws.py :language: python :dedent: 4 :start-after: [START howto_operator_gcp_transfer_list_operations] @@ -296,7 +296,7 @@ Using the operator Templating """""""""" -.. literalinclude:: /../../airflow/providers/google/cloud/operators/cloud_storage_transfer_service.py +.. literalinclude:: /../../providers/src/airflow/providers/google/cloud/operators/cloud_storage_transfer_service.py :language: python :dedent: 4 :start-after: [START gcp_transfer_operations_list_template_fields] @@ -321,7 +321,7 @@ For parameter definition, take a look at Using the operator """""""""""""""""" -.. exampleinclude:: /../../tests/system/providers/google/cloud/storage_transfer/example_cloud_storage_transfer_service_aws.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/storage_transfer/example_cloud_storage_transfer_service_aws.py :language: python :dedent: 4 :start-after: [START howto_operator_gcp_transfer_pause_operation] @@ -330,7 +330,7 @@ Using the operator Templating """""""""" -.. literalinclude:: /../../airflow/providers/google/cloud/operators/cloud_storage_transfer_service.py +.. literalinclude:: /../../providers/src/airflow/providers/google/cloud/operators/cloud_storage_transfer_service.py :language: python :dedent: 4 :start-after: [START gcp_transfer_operation_pause_template_fields] @@ -355,7 +355,7 @@ For parameter definition, take a look at Using the operator """""""""""""""""" -.. exampleinclude:: /../../tests/system/providers/google/cloud/storage_transfer/example_cloud_storage_transfer_service_aws.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/storage_transfer/example_cloud_storage_transfer_service_aws.py :language: python :dedent: 4 :start-after: [START howto_operator_gcp_transfer_resume_operation] @@ -364,7 +364,7 @@ Using the operator Templating """""""""" -.. literalinclude:: /../../airflow/providers/google/cloud/operators/cloud_storage_transfer_service.py +.. literalinclude:: /../../providers/src/airflow/providers/google/cloud/operators/cloud_storage_transfer_service.py :language: python :dedent: 4 :start-after: [START gcp_transfer_operation_resume_template_fields] @@ -390,7 +390,7 @@ For parameter definition, take a look at Using the operator """""""""""""""""" -.. exampleinclude:: /../../tests/system/providers/google/cloud/storage_transfer/example_cloud_storage_transfer_service_aws.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/storage_transfer/example_cloud_storage_transfer_service_aws.py :language: python :dedent: 4 :start-after: [START howto_operator_gcp_transfer_wait_operation] @@ -399,7 +399,7 @@ Using the operator Templating """""""""" -.. literalinclude:: /../../airflow/providers/google/cloud/sensors/cloud_storage_transfer_service.py +.. literalinclude:: /../../providers/src/airflow/providers/google/cloud/sensors/cloud_storage_transfer_service.py :language: python :dedent: 4 :start-after: [START gcp_transfer_job_sensor_template_fields] @@ -418,7 +418,7 @@ For parameter definition, take a look at Using the operator """""""""""""""""" -.. exampleinclude:: /../../tests/system/providers/google/cloud/storage_transfer/example_cloud_storage_transfer_service_gcs_to_gcs.py +.. 
exampleinclude:: /../../providers/tests/system/google/cloud/storage_transfer/example_cloud_storage_transfer_service_gcs_to_gcs.py :language: python :dedent: 4 :start-after: [START howto_operator_transfer_gcs_to_gcs] @@ -427,7 +427,7 @@ Using the operator Templating """""""""" -.. literalinclude:: /../../airflow/providers/google/cloud/operators/cloud_storage_transfer_service.py +.. literalinclude:: /../../providers/src/airflow/providers/google/cloud/operators/cloud_storage_transfer_service.py :language: python :dedent: 4 :start-after: [START gcp_transfer_gcs_to_gcs_template_fields] diff --git a/docs/apache-airflow-providers-google/operators/cloud/compute.rst b/docs/apache-airflow-providers-google/operators/cloud/compute.rst index 1628db48e136..df10dbc479c5 100644 --- a/docs/apache-airflow-providers-google/operators/cloud/compute.rst +++ b/docs/apache-airflow-providers-google/operators/cloud/compute.rst @@ -39,7 +39,7 @@ Using the operator The code to create the operator: -.. exampleinclude:: /../../tests/system/providers/google/cloud/compute/example_compute.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/compute/example_compute.py :language: python :dedent: 4 :start-after: [START howto_operator_gce_insert] @@ -48,7 +48,7 @@ The code to create the operator: You can also create the operator without project id - project id will be retrieved from the Google Cloud connection id used: -.. exampleinclude:: /../../tests/system/providers/google/cloud/compute/example_compute.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/compute/example_compute.py :language: python :dedent: 4 :start-after: [START howto_operator_gce_insert_no_project_id] @@ -58,7 +58,7 @@ from the Google Cloud connection id used: Templating """""""""" -.. literalinclude:: /../../airflow/providers/google/cloud/operators/compute.py +.. literalinclude:: /../../providers/src/airflow/providers/google/cloud/operators/compute.py :language: python :dedent: 4 :start-after: [START gce_instance_insert_fields] @@ -84,7 +84,7 @@ Using the operator The code to create the operator: -.. exampleinclude:: /../../tests/system/providers/google/cloud/compute/example_compute.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/compute/example_compute.py :language: python :dedent: 4 :start-after: [START howto_operator_gce_insert_from_template] @@ -93,7 +93,7 @@ The code to create the operator: You can also create the operator without project id - project id will be retrieved from the Google Cloud connection id used: -.. exampleinclude:: /../../tests/system/providers/google/cloud/compute/example_compute.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/compute/example_compute.py :language: python :dedent: 4 :start-after: [START howto_operator_gce_insert_from_template_no_project_id] @@ -103,7 +103,7 @@ from the Google Cloud connection id used: Templating """""""""" -.. literalinclude:: /../../airflow/providers/google/cloud/operators/compute.py +.. literalinclude:: /../../providers/src/airflow/providers/google/cloud/operators/compute.py :language: python :dedent: 4 :start-after: [START gce_instance_insert_from_template_fields] @@ -130,7 +130,7 @@ Using the operator You can create the operator without project id - project id will be retrieved from the Google Cloud connection id used. The code to create the operator: -.. exampleinclude:: /../../tests/system/providers/google/cloud/compute/example_compute.py +.. 
exampleinclude:: /../../providers/tests/system/google/cloud/compute/example_compute.py :language: python :dedent: 4 :start-after: [START howto_operator_gce_delete_no_project_id] @@ -140,7 +140,7 @@ from the Google Cloud connection id used. The code to create the operator: Templating """""""""" -.. literalinclude:: /../../airflow/providers/google/cloud/operators/compute.py +.. literalinclude:: /../../providers/src/airflow/providers/google/cloud/operators/compute.py :language: python :dedent: 4 :start-after: [START gce_instance_delete_template_fields] @@ -166,7 +166,7 @@ Using the operator The code to create the operator: -.. exampleinclude:: /../../tests/system/providers/google/cloud/compute/example_compute.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/compute/example_compute.py :language: python :dedent: 4 :start-after: [START howto_operator_gce_start] @@ -175,7 +175,7 @@ The code to create the operator: You can also create the operator without project id - project id will be retrieved from the Google Cloud connection id used: -.. exampleinclude:: /../../tests/system/providers/google/cloud/compute/example_compute.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/compute/example_compute.py :language: python :dedent: 4 :start-after: [START howto_operator_gce_start_no_project_id] @@ -185,7 +185,7 @@ from the Google Cloud connection id used: Templating """""""""" -.. literalinclude:: /../../airflow/providers/google/cloud/operators/compute.py +.. literalinclude:: /../../providers/src/airflow/providers/google/cloud/operators/compute.py :language: python :dedent: 4 :start-after: [START gce_instance_start_template_fields] @@ -212,7 +212,7 @@ Using the operator The code to create the operator: -.. exampleinclude:: /../../tests/system/providers/google/cloud/compute/example_compute.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/compute/example_compute.py :language: python :dedent: 4 :start-after: [START howto_operator_gce_stop] @@ -221,7 +221,7 @@ The code to create the operator: You can also create the operator without project id - project id will be retrieved from the Google Cloud connection used: -.. exampleinclude:: /../../tests/system/providers/google/cloud/compute/example_compute.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/compute/example_compute.py :language: python :dedent: 4 :start-after: [START howto_operator_gce_stop_no_project_id] @@ -230,7 +230,7 @@ from the Google Cloud connection used: Templating """""""""" -.. literalinclude:: /../../airflow/providers/google/cloud/operators/compute.py +.. literalinclude:: /../../providers/src/airflow/providers/google/cloud/operators/compute.py :language: python :dedent: 4 :start-after: [START gce_instance_stop_template_fields] @@ -262,7 +262,7 @@ Using the operator The code to create the operator: -.. exampleinclude:: /../../tests/system/providers/google/cloud/compute/example_compute.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/compute/example_compute.py :language: python :dedent: 4 :start-after: [START howto_operator_gce_set_machine_type] @@ -271,7 +271,7 @@ The code to create the operator: You can also create the operator without project id - project id will be retrieved from the Google Cloud connection used: -.. exampleinclude:: /../../tests/system/providers/google/cloud/compute/example_compute.py +.. 
exampleinclude:: /../../providers/tests/system/google/cloud/compute/example_compute.py :language: python :dedent: 4 :start-after: [START howto_operator_gce_set_machine_type_no_project_id] @@ -280,7 +280,7 @@ from the Google Cloud connection used: Templating """""""""" -.. literalinclude:: /../../airflow/providers/google/cloud/operators/compute.py +.. literalinclude:: /../../providers/src/airflow/providers/google/cloud/operators/compute.py :language: python :dedent: 4 :start-after: [START gce_instance_set_machine_type_template_fields] @@ -310,7 +310,7 @@ The code to create the operator: You can create the operator without project id - project id will be retrieved from the Google Cloud connection used. The code to create the operator: -.. exampleinclude:: /../../tests/system/providers/google/cloud/compute/example_compute_igm.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/compute/example_compute_igm.py :language: python :dedent: 4 :start-after: [START howto_operator_gce_delete_old_template_no_project_id] @@ -319,7 +319,7 @@ from the Google Cloud connection used. The code to create the operator: Templating """""""""" -.. literalinclude:: /../../airflow/providers/google/cloud/operators/compute.py +.. literalinclude:: /../../providers/src/airflow/providers/google/cloud/operators/compute.py :language: python :dedent: 4 :start-after: [START gce_instance_template_delete_fields] @@ -346,7 +346,7 @@ Using the operator The code to create the operator: -.. exampleinclude:: /../../tests/system/providers/google/cloud/compute/example_compute_igm.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/compute/example_compute_igm.py :language: python :dedent: 4 :start-after: [START howto_operator_gce_igm_insert_template] @@ -355,7 +355,7 @@ The code to create the operator: You can also create the operator without project id - project id will be retrieved from the Google Cloud connection used: -.. exampleinclude:: /../../tests/system/providers/google/cloud/compute/example_compute_igm.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/compute/example_compute_igm.py :language: python :dedent: 4 :start-after: [START howto_operator_gce_igm_insert_template_no_project_id] @@ -364,7 +364,7 @@ from the Google Cloud connection used: Templating """""""""" -.. literalinclude:: /../../airflow/providers/google/cloud/operators/compute.py +.. literalinclude:: /../../providers/src/airflow/providers/google/cloud/operators/compute.py :language: python :dedent: 4 :start-after: [START gce_instance_template_insert_fields] @@ -392,7 +392,7 @@ Using the operator The code to create the operator: -.. exampleinclude:: /../../tests/system/providers/google/cloud/compute/example_compute_igm.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/compute/example_compute_igm.py :language: python :dedent: 4 :start-after: [START howto_operator_gce_igm_copy_template] @@ -401,7 +401,7 @@ The code to create the operator: You can also create the operator without project id - project id will be retrieved from the Google Cloud connection used: -.. exampleinclude:: /../../tests/system/providers/google/cloud/compute/example_compute_igm.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/compute/example_compute_igm.py :language: python :dedent: 4 :start-after: [START howto_operator_gce_igm_copy_template_no_project_id] @@ -410,7 +410,7 @@ from the Google Cloud connection used: Templating """""""""" -.. 
literalinclude:: /../../airflow/providers/google/cloud/operators/compute.py +.. literalinclude:: /../../providers/src/airflow/providers/google/cloud/operators/compute.py :language: python :dedent: 4 :start-after: [START gce_instance_template_copy_operator_template_fields] @@ -441,7 +441,7 @@ Using the operator The code to create the operator: -.. exampleinclude:: /../../tests/system/providers/google/cloud/compute/example_compute_igm.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/compute/example_compute_igm.py :language: python :start-after: [START howto_operator_gce_insert_igm] :end-before: [END howto_operator_gce_insert_igm] @@ -449,7 +449,7 @@ The code to create the operator: You can also create the operator without project id - project id will be retrieved from the Google Cloud connection used: -.. exampleinclude:: /../../tests/system/providers/google/cloud/compute/example_compute_igm.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/compute/example_compute_igm.py :language: python :dedent: 4 :start-after: [START howto_operator_gce_insert_igm_no_project_id] @@ -459,7 +459,7 @@ from the Google Cloud connection used: Templating """""""""" -.. literalinclude:: /../../airflow/providers/google/cloud/operators/compute.py +.. literalinclude:: /../../providers/src/airflow/providers/google/cloud/operators/compute.py :language: python :dedent: 4 :start-after: [START gce_igm_insert_fields] @@ -491,7 +491,7 @@ Using the operator You can create the operator without project id - project id will be retrieved from the Google Cloud connection used. The code to create the operator: -.. exampleinclude:: /../../tests/system/providers/google/cloud/compute/example_compute_igm.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/compute/example_compute_igm.py :language: python :dedent: 4 :start-after: [START howto_operator_gce_delete_igm_no_project_id] @@ -501,7 +501,7 @@ from the Google Cloud connection used. The code to create the operator: Templating """""""""" -.. literalinclude:: /../../airflow/providers/google/cloud/operators/compute.py +.. literalinclude:: /../../providers/src/airflow/providers/google/cloud/operators/compute.py :language: python :dedent: 4 :start-after: [START gce_igm_delete_fields] @@ -532,7 +532,7 @@ Using the operator The code to create the operator: -.. exampleinclude:: /../../tests/system/providers/google/cloud/compute/example_compute_igm.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/compute/example_compute_igm.py :language: python :dedent: 4 :start-after: [START howto_operator_gce_igm_update_template] @@ -541,7 +541,7 @@ The code to create the operator: You can also create the operator without project id - project id will be retrieved from the Google Cloud connection used: -.. exampleinclude:: /../../tests/system/providers/google/cloud/compute/example_compute_igm.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/compute/example_compute_igm.py :language: python :dedent: 4 :start-after: [START howto_operator_gce_igm_update_template_no_project_id] @@ -551,7 +551,7 @@ from the Google Cloud connection used: Templating """""""""" -.. literalinclude:: /../../airflow/providers/google/cloud/operators/compute.py +.. 
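As a rough illustration of the instance-group-manager creation that ``example_compute_igm.py`` covers (the group name, template path, and target size below are hypothetical):

.. code-block:: python

    from airflow.providers.google.cloud.operators.compute import (
        ComputeEngineInsertInstanceGroupManagerOperator,
    )

    PROJECT_ID = "my-project"
    ZONE = "europe-west1-b"

    gce_igm_insert = ComputeEngineInsertInstanceGroupManagerOperator(
        task_id="gcp_compute_create_group_task",
        project_id=PROJECT_ID,
        zone=ZONE,
        body={
            "name": "instance-group-1",
            "base_instance_name": "instance-1",
            "instance_template": f"projects/{PROJECT_ID}/global/instanceTemplates/instance-template-1",
            "target_size": 1,
        },
    )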
literalinclude:: /../../providers/src/airflow/providers/google/cloud/operators/compute.py :language: python :dedent: 4 :start-after: [START gce_igm_update_template_operator_template_fields] diff --git a/docs/apache-airflow-providers-google/operators/cloud/compute_ssh.rst b/docs/apache-airflow-providers-google/operators/cloud/compute_ssh.rst index 8e50f01566af..b4e5c10bbf18 100644 --- a/docs/apache-airflow-providers-google/operators/cloud/compute_ssh.rst +++ b/docs/apache-airflow-providers-google/operators/cloud/compute_ssh.rst @@ -45,7 +45,7 @@ Please note that the target instance must allow tcp traffic on port 22. Below is the code to create the operator: -.. exampleinclude:: /../../tests/system/providers/google/cloud/compute/example_compute_ssh.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/compute/example_compute_ssh.py :language: python :dedent: 4 :start-after: [START howto_execute_command_on_remote_1] @@ -54,7 +54,7 @@ Below is the code to create the operator: You can also create the hook without project id - project id will be retrieved from the Google credentials used: -.. exampleinclude:: /../../tests/system/providers/google/cloud/compute/example_compute_ssh.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/compute/example_compute_ssh.py :language: python :dedent: 4 :start-after: [START howto_execute_command_on_remote_2] diff --git a/docs/apache-airflow-providers-google/operators/cloud/data_loss_prevention.rst b/docs/apache-airflow-providers-google/operators/cloud/data_loss_prevention.rst index 76ac1abbd1d0..2147e9f1ab9a 100644 --- a/docs/apache-airflow-providers-google/operators/cloud/data_loss_prevention.rst +++ b/docs/apache-airflow-providers-google/operators/cloud/data_loss_prevention.rst @@ -37,7 +37,7 @@ Create Stored Info-Type To create a custom info-type you can use :class:`~airflow.providers.google.cloud.operators.cloud.dlp.CloudDLPCreateStoredInfoTypeOperator`. -.. exampleinclude:: /../../tests/system/providers/google/cloud/data_loss_prevention/example_dlp_info_types.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/data_loss_prevention/example_dlp_info_types.py :language: python :dedent: 4 :start-after: [START howto_operator_dlp_create_info_type] @@ -70,7 +70,7 @@ Update Stored Info-Type To update a info-type you can use :class:`~airflow.providers.google.cloud.operators.cloud.dlp.CloudDLPUpdateStoredInfoTypeOperator`. -.. exampleinclude:: /../../tests/system/providers/google/cloud/data_loss_prevention/example_dlp_info_types.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/data_loss_prevention/example_dlp_info_types.py :language: python :dedent: 4 :start-after: [START howto_operator_dlp_update_info_type] @@ -85,7 +85,7 @@ Deleting Stored Info-Type To delete a info-type you can use :class:`~airflow.providers.google.cloud.operators.cloud.dlp.CloudDLPDeleteStoredInfoTypeOperator`. -.. exampleinclude:: /../../tests/system/providers/google/cloud/data_loss_prevention/example_dlp_info_types.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/data_loss_prevention/example_dlp_info_types.py :language: python :dedent: 4 :start-after: [START howto_operator_dlp_delete_info_type] @@ -112,7 +112,7 @@ Creating Template To create a inspection template you can use :class:`~airflow.providers.google.cloud.operators.cloud.dlp.CloudDLPCreateInspectTemplateOperator`. -.. exampleinclude:: /../../tests/system/providers/google/cloud/data_loss_prevention/example_dlp_inspect_template.py +.. 
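The ``compute_ssh`` examples pair ``SSHOperator`` with ``ComputeEngineSSHHook``; a minimal sketch of that wiring, with a placeholder instance name and command:

.. code-block:: python

    from airflow.providers.google.cloud.hooks.compute_ssh import ComputeEngineSSHHook
    from airflow.providers.ssh.operators.ssh import SSHOperator

    PROJECT_ID = "my-project"  # optional; falls back to the credentials' project
    ZONE = "europe-west1-b"

    run_remote_command = SSHOperator(
        task_id="execute_command_on_remote",
        ssh_hook=ComputeEngineSSHHook(
            instance_name="instance-1",  # placeholder; must allow TCP traffic on port 22
            zone=ZONE,
            project_id=PROJECT_ID,
            use_oslogin=True,
            use_iap_tunnel=False,
        ),
        command="echo hello from $(hostname)",
    )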
exampleinclude:: /../../providers/tests/system/google/cloud/data_loss_prevention/example_dlp_inspect_template.py :language: python :dedent: 4 :start-after: [START howto_operator_dlp_create_inspect_template] @@ -139,7 +139,7 @@ Using Template To find potentially sensitive info using the inspection template we just created, we can use :class:`~airflow.providers.google.cloud.operators.cloud.dlp.CloudDLPInspectContentOperator` -.. exampleinclude:: /../../tests/system/providers/google/cloud/data_loss_prevention/example_dlp_inspect_template.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/data_loss_prevention/example_dlp_inspect_template.py :language: python :dedent: 4 :start-after: [START howto_operator_dlp_use_inspect_template] @@ -161,7 +161,7 @@ Deleting Template To delete the template you can use :class:`~airflow.providers.google.cloud.operators.cloud.dlp.CloudDLPDeleteInspectTemplateOperator`. -.. exampleinclude:: /../../tests/system/providers/google/cloud/data_loss_prevention/example_dlp_inspect_template.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/data_loss_prevention/example_dlp_inspect_template.py :language: python :dedent: 4 :start-after: [START howto_operator_dlp_delete_inspect_template] @@ -236,7 +236,7 @@ Creating Job Trigger To create a job trigger you can use :class:`~airflow.providers.google.cloud.operators.cloud.dlp.CloudDLPCreateJobTriggerOperator`. -.. exampleinclude:: /../../tests/system/providers/google/cloud/data_loss_prevention/example_dlp_job_trigger.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/data_loss_prevention/example_dlp_job_trigger.py :language: python :dedent: 4 :start-after: [START howto_operator_dlp_create_job_trigger] @@ -261,7 +261,7 @@ Updating Job Trigger To update a job trigger you can use :class:`~airflow.providers.google.cloud.operators.cloud.dlp.CloudDLPUpdateJobTriggerOperator`. -.. exampleinclude:: /../../tests/system/providers/google/cloud/data_loss_prevention/example_dlp_job_trigger.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/data_loss_prevention/example_dlp_job_trigger.py :language: python :dedent: 4 :start-after: [START howto_operator_dlp_update_job_trigger] @@ -275,7 +275,7 @@ Deleting Job Trigger To delete a job trigger you can use :class:`~airflow.providers.google.cloud.operators.cloud.dlp.CloudDLPDeleteJobTriggerOperator`. -.. exampleinclude:: /../../tests/system/providers/google/cloud/data_loss_prevention/example_dlp_job_trigger.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/data_loss_prevention/example_dlp_job_trigger.py :language: python :dedent: 4 :start-after: [START howto_operator_dlp_delete_job_trigger] @@ -297,7 +297,7 @@ Configuration information defines how you want the sensitive data de-identified. This config can either be saved and persisted in de-identification templates or defined in a :class:`~google.cloud.dlp_v2.types.DeidentifyConfig` object: -.. exampleinclude:: /../../tests/system/providers/google/cloud/data_loss_prevention/example_dlp_deidentify_content.py +.. 
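A compressed sketch of the inspect-template flow shown in ``example_dlp_inspect_template.py``: create a template, then reference it when inspecting content. The template ID, info types, sample text, and the XCom hand-off are illustrative assumptions:

.. code-block:: python

    from airflow.providers.google.cloud.operators.dlp import (
        CloudDLPCreateInspectTemplateOperator,
        CloudDLPInspectContentOperator,
    )

    PROJECT_ID = "my-project"

    create_template = CloudDLPCreateInspectTemplateOperator(
        task_id="create_inspect_template",
        project_id=PROJECT_ID,
        template_id="dlp-inspect-1",  # placeholder
        inspect_template={
            "inspect_config": {"info_types": [{"name": "PHONE_NUMBER"}]},
        },
    )

    inspect_content = CloudDLPInspectContentOperator(
        task_id="use_inspect_template",
        project_id=PROJECT_ID,
        item={"value": "My phone number is (206) 555-0123."},
        # illustrative XCom hand-off of the template name created above
        inspect_template_name="{{ task_instance.xcom_pull('create_inspect_template')['name'] }}",
    )

    create_template >> inspect_content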
exampleinclude:: /../../providers/tests/system/google/cloud/data_loss_prevention/example_dlp_deidentify_content.py :language: python :start-after: [START dlp_deidentify_config_example] :end-before: [END dlp_deidentify_config_example] @@ -305,7 +305,7 @@ This config can either be saved and persisted in de-identification templates or To de-identify potentially sensitive information from a content item, you can use :class:`~airflow.providers.google.cloud.operators.cloud.dlp.CloudDLPDeidentifyContentOperator`. -.. exampleinclude:: /../../tests/system/providers/google/cloud/data_loss_prevention/example_dlp_deidentify_content.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/data_loss_prevention/example_dlp_deidentify_content.py :language: python :dedent: 4 :start-after: [START _howto_operator_dlp_deidentify_content] diff --git a/docs/apache-airflow-providers-google/operators/cloud/datacatalog.rst b/docs/apache-airflow-providers-google/operators/cloud/datacatalog.rst index eaad69735e45..10383b62ba25 100644 --- a/docs/apache-airflow-providers-google/operators/cloud/datacatalog.rst +++ b/docs/apache-airflow-providers-google/operators/cloud/datacatalog.rst @@ -59,7 +59,7 @@ operators. The ``CloudDataCatalogGetEntryOperator`` use Project ID, Entry Group ID, Entry ID to get the entry. -.. exampleinclude:: /../../tests/system/providers/google/datacatalog/example_datacatalog_entries.py +.. exampleinclude:: /../../providers/tests/system/google/datacatalog/example_datacatalog_entries.py :language: python :dedent: 4 :start-after: [START howto_operator_gcp_datacatalog_get_entry] @@ -71,7 +71,7 @@ parameters which allows you to dynamically determine values. The result is saved to :ref:`XCom `, which allows it to be used by other operators. -.. exampleinclude:: /../../tests/system/providers/google/datacatalog/example_datacatalog_entries.py +.. exampleinclude:: /../../providers/tests/system/google/datacatalog/example_datacatalog_entries.py :language: python :dedent: 4 :start-after: [START howto_operator_gcp_datacatalog_get_entry_result] @@ -79,7 +79,7 @@ The result is saved to :ref:`XCom `, which allows it to be used b The ``CloudDataCatalogLookupEntryOperator`` use the resource name to get the entry. -.. exampleinclude:: /../../tests/system/providers/google/datacatalog/example_datacatalog_entries.py +.. exampleinclude:: /../../providers/tests/system/google/datacatalog/example_datacatalog_entries.py :language: python :dedent: 4 :start-after: [START howto_operator_gcp_datacatalog_lookup_entry_linked_resource] @@ -91,7 +91,7 @@ parameters which allows you to dynamically determine values. The result is saved to :ref:`XCom `, which allows it to be used by other operators. -.. exampleinclude:: /../../tests/system/providers/google/datacatalog/example_datacatalog_entries.py +.. exampleinclude:: /../../providers/tests/system/google/datacatalog/example_datacatalog_entries.py :language: python :dedent: 4 :start-after: [START howto_operator_gcp_datacatalog_lookup_entry_result] @@ -105,7 +105,7 @@ Creating an entry The :class:`~airflow.providers.google.cloud.operators.datacatalog.CloudDataCatalogCreateEntryOperator` operator create the entry. -.. exampleinclude:: /../../tests/system/providers/google/datacatalog/example_datacatalog_entries.py +.. 
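The de-identification config described above can be sketched as a plain dict handed to ``CloudDLPDeidentifyContentOperator``; the transformation and sample text are illustrative only:

.. code-block:: python

    from airflow.providers.google.cloud.operators.dlp import (
        CloudDLPDeidentifyContentOperator,
    )

    PROJECT_ID = "my-project"

    # Replace any finding matched by the inspect config with a fixed string.
    DEIDENTIFY_CONFIG = {
        "info_type_transformations": {
            "transformations": [
                {
                    "primitive_transformation": {
                        "replace_config": {"new_value": {"string_value": "[deidentified]"}}
                    }
                }
            ]
        }
    }

    deidentify_content = CloudDLPDeidentifyContentOperator(
        task_id="deidentify_content",
        project_id=PROJECT_ID,
        item={"value": "My phone number is (206) 555-0123."},
        deidentify_config=DEIDENTIFY_CONFIG,
        inspect_config={"info_types": [{"name": "PHONE_NUMBER"}]},
    )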
exampleinclude:: /../../providers/tests/system/google/datacatalog/example_datacatalog_entries.py :language: python :dedent: 4 :start-after: [START howto_operator_gcp_datacatalog_create_entry_gcs] @@ -119,7 +119,7 @@ The result is saved to :ref:`XCom `, which allows it to be used b The newly created entry ID can be read with the ``entry_id`` key. -.. exampleinclude:: /../../tests/system/providers/google/datacatalog/example_datacatalog_entries.py +.. exampleinclude:: /../../providers/tests/system/google/datacatalog/example_datacatalog_entries.py :language: python :dedent: 4 :start-after: [START howto_operator_gcp_datacatalog_create_entry_gcs_result] @@ -133,7 +133,7 @@ Updating an entry The :class:`~airflow.providers.google.cloud.operators.datacatalog.CloudDataCatalogUpdateEntryOperator` operator update the entry. -.. exampleinclude:: /../../tests/system/providers/google/datacatalog/example_datacatalog_entries.py +.. exampleinclude:: /../../providers/tests/system/google/datacatalog/example_datacatalog_entries.py :language: python :dedent: 4 :start-after: [START howto_operator_gcp_datacatalog_update_entry] @@ -151,7 +151,7 @@ Deleting a entry The :class:`~airflow.providers.google.cloud.operators.datacatalog.CloudDataCatalogDeleteEntryOperator` operator delete the entry. -.. exampleinclude:: /../../tests/system/providers/google/datacatalog/example_datacatalog_entries.py +.. exampleinclude:: /../../providers/tests/system/google/datacatalog/example_datacatalog_entries.py :language: python :dedent: 4 :start-after: [START howto_operator_gcp_datacatalog_delete_entry] @@ -180,7 +180,7 @@ Creating an entry group The :class:`~airflow.providers.google.cloud.operators.datacatalog.CloudDataCatalogCreateEntryGroupOperator` operator create the entry group. -.. exampleinclude:: /../../tests/system/providers/google/datacatalog/example_datacatalog_entries.py +.. exampleinclude:: /../../providers/tests/system/google/datacatalog/example_datacatalog_entries.py :language: python :dedent: 4 :start-after: [START howto_operator_gcp_datacatalog_create_entry_group] @@ -194,7 +194,7 @@ The result is saved to :ref:`XCom `, which allows it to be used b The newly created entry group ID can be read with the ``entry_group_id`` key. -.. exampleinclude:: /../../tests/system/providers/google/datacatalog/example_datacatalog_entries.py +.. exampleinclude:: /../../providers/tests/system/google/datacatalog/example_datacatalog_entries.py :language: python :dedent: 4 :start-after: [START howto_operator_gcp_datacatalog_create_entry_group_result] @@ -208,7 +208,7 @@ Getting an entry group The :class:`~airflow.providers.google.cloud.operators.datacatalog.CloudDataCatalogGetEntryGroupOperator` operator get the entry group. -.. exampleinclude:: /../../tests/system/providers/google/datacatalog/example_datacatalog_entries.py +.. exampleinclude:: /../../providers/tests/system/google/datacatalog/example_datacatalog_entries.py :language: python :dedent: 4 :start-after: [START howto_operator_gcp_datacatalog_get_entry_group] @@ -220,7 +220,7 @@ parameters which allows you to dynamically determine values. The result is saved to :ref:`XCom `, which allows it to be used by other operators. -.. exampleinclude:: /../../tests/system/providers/google/datacatalog/example_datacatalog_entries.py +.. 
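The entry operations above all follow one pattern: address an entry by location, entry group, and entry ID. A hypothetical GCS-fileset creation, with placeholder names:

.. code-block:: python

    from airflow.providers.google.cloud.operators.datacatalog import (
        CloudDataCatalogCreateEntryOperator,
    )

    create_entry_gcs = CloudDataCatalogCreateEntryOperator(
        task_id="create_entry_gcs",
        location="us-central1",  # placeholder
        entry_group="my-entry-group",  # placeholder
        entry_id="my-entry",  # placeholder
        entry={
            "display_name": "Wizard",
            "type_": "FILESET",  # proto-plus field name for the reserved word "type"
            "gcs_fileset_spec": {"file_patterns": ["gs://my-bucket/**"]},
        },
    )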
exampleinclude:: /../../providers/tests/system/google/datacatalog/example_datacatalog_entries.py :language: python :dedent: 4 :start-after: [START howto_operator_gcp_datacatalog_get_entry_group_result] @@ -234,7 +234,7 @@ Deleting an entry group The :class:`~airflow.providers.google.cloud.operators.datacatalog.CloudDataCatalogDeleteEntryGroupOperator` operator delete the entry group. -.. exampleinclude:: /../../tests/system/providers/google/datacatalog/example_datacatalog_entries.py +.. exampleinclude:: /../../providers/tests/system/google/datacatalog/example_datacatalog_entries.py :language: python :dedent: 4 :start-after: [START howto_operator_gcp_datacatalog_delete_entry_group] @@ -263,7 +263,7 @@ Creating a tag template The :class:`~airflow.providers.google.cloud.operators.datacatalog.CloudDataCatalogCreateTagTemplateOperator` operator get the tag template. -.. exampleinclude:: /../../tests/system/providers/google/datacatalog/example_datacatalog_tag_templates.py +.. exampleinclude:: /../../providers/tests/system/google/datacatalog/example_datacatalog_tag_templates.py :language: python :dedent: 4 :start-after: [START howto_operator_gcp_datacatalog_create_tag_template] @@ -277,7 +277,7 @@ The result is saved to :ref:`XCom `, which allows it to be used b The newly created tag template ID can be read with the ``tag_template_id`` key. -.. exampleinclude:: /../../tests/system/providers/google/datacatalog/example_datacatalog_tag_templates.py +.. exampleinclude:: /../../providers/tests/system/google/datacatalog/example_datacatalog_tag_templates.py :language: python :dedent: 4 :start-after: [START howto_operator_gcp_datacatalog_create_tag_template_result] @@ -291,7 +291,7 @@ Deleting a tag template The :class:`~airflow.providers.google.cloud.operators.datacatalog.CloudDataCatalogDeleteTagTemplateOperator` operator delete the tag template. -.. exampleinclude:: /../../tests/system/providers/google/datacatalog/example_datacatalog_tag_templates.py +.. exampleinclude:: /../../providers/tests/system/google/datacatalog/example_datacatalog_tag_templates.py :language: python :dedent: 4 :start-after: [START howto_operator_gcp_datacatalog_delete_tag_template] @@ -310,7 +310,7 @@ Getting a tag template The :class:`~airflow.providers.google.cloud.operators.datacatalog.CloudDataCatalogGetTagTemplateOperator` operator get the tag template. -.. exampleinclude:: /../../tests/system/providers/google/datacatalog/example_datacatalog_tag_templates.py +.. exampleinclude:: /../../providers/tests/system/google/datacatalog/example_datacatalog_tag_templates.py :language: python :dedent: 4 :start-after: [START howto_operator_gcp_datacatalog_get_tag_template] @@ -322,7 +322,7 @@ parameters which allows you to dynamically determine values. The result is saved to :ref:`XCom `, which allows it to be used by other operators. -.. exampleinclude:: /../../tests/system/providers/google/datacatalog/example_datacatalog_tag_templates.py +.. exampleinclude:: /../../providers/tests/system/google/datacatalog/example_datacatalog_tag_templates.py :language: python :dedent: 4 :start-after: [START howto_operator_gcp_datacatalog_get_tag_template_result] @@ -336,7 +336,7 @@ Updating a tag template The :class:`~airflow.providers.google.cloud.operators.datacatalog.CloudDataCatalogUpdateTagTemplateOperator` operator update the tag template. -.. exampleinclude:: /../../tests/system/providers/google/datacatalog/example_datacatalog_tag_templates.py +.. 
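A tag template is addressed the same way, by location plus template ID; the single-field layout below is a hypothetical sketch and its exact dict shape should be checked against the Data Catalog types:

.. code-block:: python

    from airflow.providers.google.cloud.operators.datacatalog import (
        CloudDataCatalogCreateTagTemplateOperator,
    )

    create_tag_template = CloudDataCatalogCreateTagTemplateOperator(
        task_id="create_tag_template",
        location="us-central1",  # placeholder
        tag_template_id="my-tag-template",  # placeholder
        tag_template={
            "display_name": "Sample tag template",
            "fields": {
                "name": {
                    "display_name": "Name",
                    "type_": {"primitive_type": "STRING"},
                }
            },
        },
    )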
exampleinclude:: /../../providers/tests/system/google/datacatalog/example_datacatalog_tag_templates.py :language: python :dedent: 4 :start-after: [START howto_operator_gcp_datacatalog_update_tag_template] @@ -365,7 +365,7 @@ Creating a tag on an entry The :class:`~airflow.providers.google.cloud.operators.datacatalog.CloudDataCatalogCreateTagOperator` operator get the tag template. -.. exampleinclude:: /../../tests/system/providers/google/datacatalog/example_datacatalog_tags.py +.. exampleinclude:: /../../providers/tests/system/google/datacatalog/example_datacatalog_tags.py :language: python :dedent: 4 :start-after: [START howto_operator_gcp_datacatalog_create_tag] @@ -379,7 +379,7 @@ The result is saved to :ref:`XCom `, which allows it to be used b The newly created tag ID can be read with the ``tag_id`` key. -.. exampleinclude:: /../../tests/system/providers/google/datacatalog/example_datacatalog_tags.py +.. exampleinclude:: /../../providers/tests/system/google/datacatalog/example_datacatalog_tags.py :language: python :dedent: 4 :start-after: [START howto_operator_gcp_datacatalog_create_tag_result] @@ -393,7 +393,7 @@ Updating a tag The :class:`~airflow.providers.google.cloud.operators.datacatalog.CloudDataCatalogUpdateTagOperator` operator update the tag template. -.. exampleinclude:: /../../tests/system/providers/google/datacatalog/example_datacatalog_tags.py +.. exampleinclude:: /../../providers/tests/system/google/datacatalog/example_datacatalog_tags.py :language: python :dedent: 4 :start-after: [START howto_operator_gcp_datacatalog_update_tag] @@ -411,7 +411,7 @@ Deleting a tag The :class:`~airflow.providers.google.cloud.operators.datacatalog.CloudDataCatalogDeleteTagOperator` operator delete the tag template. -.. exampleinclude:: /../../tests/system/providers/google/datacatalog/example_datacatalog_tags.py +.. exampleinclude:: /../../providers/tests/system/google/datacatalog/example_datacatalog_tags.py :language: python :dedent: 4 :start-after: [START howto_operator_gcp_datacatalog_delete_tag] @@ -429,7 +429,7 @@ Listing tags on an entry The :class:`~airflow.providers.google.cloud.operators.datacatalog.CloudDataCatalogListTagsOperator` operator get list of the tags on the entry. -.. exampleinclude:: /../../tests/system/providers/google/datacatalog/example_datacatalog_tags.py +.. exampleinclude:: /../../providers/tests/system/google/datacatalog/example_datacatalog_tags.py :language: python :dedent: 4 :start-after: [START howto_operator_gcp_datacatalog_list_tags] @@ -441,7 +441,7 @@ parameters which allows you to dynamically determine values. The result is saved to :ref:`XCom `, which allows it to be used by other operators. -.. exampleinclude:: /../../tests/system/providers/google/datacatalog/example_datacatalog_tags.py +.. exampleinclude:: /../../providers/tests/system/google/datacatalog/example_datacatalog_tags.py :language: python :dedent: 4 :start-after: [START howto_operator_gcp_datacatalog_list_tags_result] @@ -467,7 +467,7 @@ Creating a field The :class:`~airflow.providers.google.cloud.operators.datacatalog.CloudDataCatalogCreateTagTemplateFieldOperator` operator get the tag template field. -.. exampleinclude:: /../../tests/system/providers/google/datacatalog/example_datacatalog_tag_templates.py +.. 
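Attaching a tag to an entry combines the entry coordinates with the template ID and the field values; all names below are placeholders:

.. code-block:: python

    from airflow.providers.google.cloud.operators.datacatalog import (
        CloudDataCatalogCreateTagOperator,
    )

    create_tag = CloudDataCatalogCreateTagOperator(
        task_id="create_tag",
        location="us-central1",  # placeholder
        entry_group="my-entry-group",  # placeholder
        entry="my-entry",  # placeholder
        template_id="my-tag-template",  # placeholder
        tag={"fields": {"name": {"string_value": "Data Catalog"}}},
    )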
exampleinclude:: /../../providers/tests/system/google/datacatalog/example_datacatalog_tag_templates.py :language: python :dedent: 4 :start-after: [START howto_operator_gcp_datacatalog_create_tag_template_field] @@ -481,7 +481,7 @@ The result is saved to :ref:`XCom `, which allows it to be used b The newly created field ID can be read with the ``tag_template_field_id`` key. -.. exampleinclude:: /../../tests/system/providers/google/datacatalog/example_datacatalog_tag_templates.py +.. exampleinclude:: /../../providers/tests/system/google/datacatalog/example_datacatalog_tag_templates.py :language: python :dedent: 4 :start-after: [START howto_operator_gcp_datacatalog_create_tag_template_field_result] @@ -495,7 +495,7 @@ Renaming a field The :class:`~airflow.providers.google.cloud.operators.datacatalog.CloudDataCatalogRenameTagTemplateFieldOperator` operator rename the tag template field. -.. exampleinclude:: /../../tests/system/providers/google/datacatalog/example_datacatalog_tag_templates.py +.. exampleinclude:: /../../providers/tests/system/google/datacatalog/example_datacatalog_tag_templates.py :language: python :dedent: 4 :start-after: [START howto_operator_gcp_datacatalog_rename_tag_template_field] @@ -513,7 +513,7 @@ Updating a field The :class:`~airflow.providers.google.cloud.operators.datacatalog.CloudDataCatalogUpdateTagTemplateFieldOperator` operator get the tag template field. -.. exampleinclude:: /../../tests/system/providers/google/datacatalog/example_datacatalog_tag_templates.py +.. exampleinclude:: /../../providers/tests/system/google/datacatalog/example_datacatalog_tag_templates.py :language: python :dedent: 4 :start-after: [START howto_operator_gcp_datacatalog_update_tag_template_field] @@ -532,7 +532,7 @@ Deleting a field The :class:`~airflow.providers.google.cloud.operators.datacatalog.CloudDataCatalogDeleteTagTemplateFieldOperator` operator delete the tag template field. -.. exampleinclude:: /../../tests/system/providers/google/datacatalog/example_datacatalog_tag_templates.py +.. exampleinclude:: /../../providers/tests/system/google/datacatalog/example_datacatalog_tag_templates.py :language: python :dedent: 4 :start-after: [START howto_operator_gcp_datacatalog_delete_tag_template_field] @@ -553,7 +553,7 @@ operator searches Data Catalog for multiple resources like entries, tags that ma The ``query`` parameters should defined using `search syntax `__. -.. exampleinclude:: /../../tests/system/providers/google/datacatalog/example_datacatalog_search_catalog.py +.. exampleinclude:: /../../providers/tests/system/google/datacatalog/example_datacatalog_search_catalog.py :language: python :dedent: 4 :start-after: [START howto_operator_gcp_datacatalog_search_catalog] @@ -565,7 +565,7 @@ parameters which allows you to dynamically determine values. The result is saved to :ref:`XCom `, which allows it to be used by other operators. -.. exampleinclude:: /../../tests/system/providers/google/datacatalog/example_datacatalog_search_catalog.py +.. 
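A search call reduces to a scope plus a query string in the search syntax linked above; the scope and query here are illustrative:

.. code-block:: python

    from airflow.providers.google.cloud.operators.datacatalog import (
        CloudDataCatalogSearchCatalogOperator,
    )

    PROJECT_ID = "my-project"

    search_catalog = CloudDataCatalogSearchCatalogOperator(
        task_id="search_catalog",
        scope={"include_project_ids": [PROJECT_ID]},
        query="type=fileset",  # any expression in the Data Catalog search syntax
    )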
exampleinclude:: /../../providers/tests/system/google/datacatalog/example_datacatalog_search_catalog.py :language: python :dedent: 4 :start-after: [START howto_operator_gcp_datacatalog_search_catalog_result] diff --git a/docs/apache-airflow-providers-google/operators/cloud/dataflow.rst b/docs/apache-airflow-providers-google/operators/cloud/dataflow.rst index a9eb98ea9a50..21895e4e6e67 100644 --- a/docs/apache-airflow-providers-google/operators/cloud/dataflow.rst +++ b/docs/apache-airflow-providers-google/operators/cloud/dataflow.rst @@ -78,7 +78,7 @@ This will create a new pipeline that will be visible on Dataflow Pipelines UI. Here is an example of how you can create a Dataflow Pipeline by running DataflowCreatePipelineOperator: -.. exampleinclude:: /../../tests/system/providers/google/cloud/dataflow/example_dataflow_pipeline.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/dataflow/example_dataflow_pipeline.py :language: python :dedent: 4 :start-after: [START howto_operator_create_dataflow_pipeline] @@ -87,7 +87,7 @@ Here is an example of how you can create a Dataflow Pipeline by running Dataflow To run a newly created pipeline you can use :class:`~airflow.providers.google.cloud.operators.dataflow.DataflowRunPipelineOperator` -.. exampleinclude:: /../../tests/system/providers/google/cloud/dataflow/example_dataflow_pipeline.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/dataflow/example_dataflow_pipeline.py :language: python :dedent: 4 :start-after: [START howto_operator_run_dataflow_pipeline] @@ -119,7 +119,7 @@ has the ability to download or available on the local filesystem (provide the ab Here is an example of creating and running a pipeline in Java with jar stored on GCS: -.. exampleinclude:: /../../tests/system/providers/google/cloud/dataflow/example_dataflow_native_java.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/dataflow/example_dataflow_native_java.py :language: python :dedent: 4 :start-after: [START howto_operator_start_java_job_jar_on_gcs] @@ -127,7 +127,7 @@ Here is an example of creating and running a pipeline in Java with jar stored on Here is an example of creating and running a pipeline in Java with jar stored on GCS in deferrable mode: -.. exampleinclude:: /../../tests/system/providers/google/cloud/dataflow/example_dataflow_native_java.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/dataflow/example_dataflow_native_java.py :language: python :dedent: 4 :start-after: [START howto_operator_start_java_job_jar_on_gcs_deferrable] @@ -135,7 +135,7 @@ Here is an example of creating and running a pipeline in Java with jar stored on Here is an example of creating and running a pipeline in Java with jar stored on local file system: -.. exampleinclude:: /../../tests/system/providers/google/cloud/dataflow/example_dataflow_native_java.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/dataflow/example_dataflow_native_java.py :language: python :dedent: 4 :start-after: [START howto_operator_start_java_job_local_jar] @@ -162,7 +162,7 @@ The ``py_system_site_packages`` argument specifies whether or not all the Python will be accessible within virtual environment (if ``py_requirements`` argument is specified), recommend avoiding unless the Dataflow job requires it. -.. exampleinclude:: /../../tests/system/providers/google/cloud/dataflow/example_dataflow_native_python.py +.. 
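A rough sketch of the create-then-run pipeline pair; the ``body`` follows the Data Pipelines REST resource and its exact field names should be verified against the API reference, so treat the whole payload as an assumption:

.. code-block:: python

    from airflow.providers.google.cloud.operators.dataflow import (
        DataflowCreatePipelineOperator,
        DataflowRunPipelineOperator,
    )

    PROJECT_ID = "my-project"
    LOCATION = "europe-west3"
    PIPELINE_NAME = "my-pipeline"  # placeholder

    create_pipeline = DataflowCreatePipelineOperator(
        task_id="create_dataflow_pipeline",
        project_id=PROJECT_ID,
        location=LOCATION,
        body={
            "name": f"projects/{PROJECT_ID}/locations/{LOCATION}/pipelines/{PIPELINE_NAME}",
            "type": "PIPELINE_TYPE_BATCH",
            "workload": {
                # assumed REST field names for launching a classic template
                "dataflowLaunchTemplateRequest": {
                    "projectId": PROJECT_ID,
                    "gcsPath": "gs://dataflow-templates/latest/Word_Count",
                    "location": LOCATION,
                    "launchParameters": {
                        "jobName": "word-count-job",
                        "parameters": {
                            "inputFile": "gs://dataflow-samples/shakespeare/kinglear.txt",
                            "output": "gs://my-bucket/results/output",  # placeholder bucket
                        },
                    },
                }
            },
        },
    )

    run_pipeline = DataflowRunPipelineOperator(
        task_id="run_dataflow_pipeline",
        pipeline_name=PIPELINE_NAME,
        project_id=PROJECT_ID,
        location=LOCATION,
    )

    create_pipeline >> run_pipeline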
exampleinclude:: /../../providers/tests/system/google/cloud/dataflow/example_dataflow_native_python.py :language: python :dedent: 4 :start-after: [START howto_operator_start_python_job] @@ -197,7 +197,7 @@ or Python file) and how it is written. In order for the Dataflow job to execute pipeline objects are not being waited upon (not calling ``waitUntilFinish`` or ``wait_until_finish`` on the ``PipelineResult`` in your application code). -.. exampleinclude:: /../../tests/system/providers/google/cloud/dataflow/example_dataflow_native_python_async.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/dataflow/example_dataflow_native_python_async.py :language: python :dedent: 4 :start-after: [START howto_operator_start_python_job_async] @@ -221,7 +221,7 @@ Streaming execution To execute a streaming Dataflow job, ensure the streaming option is set (for Python) or read from an unbounded data source, such as Pub/Sub, in your pipeline (for Java). -.. exampleinclude:: /../../tests/system/providers/google/cloud/dataflow/example_dataflow_streaming_python.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/dataflow/example_dataflow_streaming_python.py :language: python :dedent: 4 :start-after: [START howto_operator_start_streaming_python_job] @@ -249,7 +249,7 @@ See the `official documentation for Dataflow templates Here is an example of running a Dataflow job using a Classic Template with :class:`~airflow.providers.google.cloud.operators.dataflow.DataflowTemplatedJobStartOperator`: -.. exampleinclude:: /../../tests/system/providers/google/cloud/dataflow/example_dataflow_template.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/dataflow/example_dataflow_template.py :language: python :dedent: 4 :start-after: [START howto_operator_start_template_job] @@ -257,7 +257,7 @@ Here is an example of running a Dataflow job using a Classic Template with Also for this action you can use the operator in the deferrable mode: -.. exampleinclude:: /../../tests/system/providers/google/cloud/dataflow/example_dataflow_template.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/dataflow/example_dataflow_template.py :language: python :dedent: 4 :start-after: [START howto_operator_start_template_job_deferrable] @@ -269,7 +269,7 @@ See the `list of Google-provided templates that can be used with this operator Here is an example of running a Dataflow job using a Flex Template with :class:`~airflow.providers.google.cloud.operators.dataflow.DataflowStartFlexTemplateOperator`: -.. exampleinclude:: /../../tests/system/providers/google/cloud/dataflow/example_dataflow_template.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/dataflow/example_dataflow_template.py :language: python :dedent: 4 :start-after: [START howto_operator_start_flex_template_job] @@ -277,7 +277,7 @@ Here is an example of running a Dataflow job using a Flex Template with Also for this action you can use the operator in the deferrable mode: -.. exampleinclude:: /../../tests/system/providers/google/cloud/dataflow/example_dataflow_template.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/dataflow/example_dataflow_template.py :language: python :dedent: 4 :start-after: [START howto_operator_start_flex_template_job_deferrable] @@ -293,7 +293,7 @@ extensions for running Dataflow streaming jobs. Here is an example of running Dataflow SQL job with :class:`~airflow.providers.google.cloud.operators.dataflow.DataflowStartSqlJobOperator`: -.. 
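Running a Google-provided classic template, as described above, needs only the template path and its parameters; the project and bucket names below are placeholders:

.. code-block:: python

    from airflow.providers.google.cloud.operators.dataflow import (
        DataflowTemplatedJobStartOperator,
    )

    start_template_job = DataflowTemplatedJobStartOperator(
        task_id="start_template_job",
        project_id="my-project",  # placeholder
        location="europe-west3",
        template="gs://dataflow-templates/latest/Word_Count",
        parameters={
            "inputFile": "gs://dataflow-samples/shakespeare/kinglear.txt",
            "output": "gs://my-bucket/wordcount/output",  # placeholder bucket
        },
        # deferrable=True,  # hand the wait over to the triggerer, as noted above
    )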
exampleinclude:: /../../tests/system/providers/google/cloud/dataflow/example_dataflow_sql.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/dataflow/example_dataflow_sql.py :language: python :dedent: 4 :start-after: [START howto_operator_start_sql_job] @@ -317,7 +317,7 @@ This API can be used to define both streaming and batch pipelines. Here is an example of running Dataflow YAML job with :class:`~airflow.providers.google.cloud.operators.dataflow.DataflowStartYamlJobOperator`: -.. exampleinclude:: /../../tests/system/providers/google/cloud/dataflow/example_dataflow_yaml.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/dataflow/example_dataflow_yaml.py :language: python :dedent: 4 :start-after: [START howto_operator_dataflow_start_yaml_job] @@ -325,7 +325,7 @@ Here is an example of running Dataflow YAML job with This operator can be run in deferrable mode by passing ``deferrable=True`` as a parameter. -.. exampleinclude:: /../../tests/system/providers/google/cloud/dataflow/example_dataflow_yaml.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/dataflow/example_dataflow_yaml.py :language: python :dedent: 4 :start-after: [START howto_operator_dataflow_start_yaml_job_def] @@ -347,7 +347,7 @@ To stop one or more Dataflow pipelines you can use Streaming pipelines are drained by default, setting ``drain_pipeline`` to ``False`` will cancel them instead. Provide ``job_id`` to stop a specific job, or ``job_name_prefix`` to stop all jobs with provided name prefix. -.. exampleinclude:: /../../tests/system/providers/google/cloud/dataflow/example_dataflow_native_python.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/dataflow/example_dataflow_native_python.py :language: python :dedent: 4 :start-after: [START howto_operator_stop_dataflow_job] @@ -364,7 +364,7 @@ To delete a Dataflow pipeline you can use :class:`~airflow.providers.google.cloud.operators.dataflow.DataflowDeletePipelineOperator`. Here is an example how you can use this operator: -.. exampleinclude:: /../../tests/system/providers/google/cloud/dataflow/example_dataflow_pipeline.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/dataflow/example_dataflow_pipeline.py :language: python :dedent: 4 :start-after: [START howto_operator_delete_dataflow_pipeline] @@ -382,7 +382,7 @@ When job is triggered asynchronously sensors may be used to run checks for speci :class:`~airflow.providers.google.cloud.sensors.dataflow.DataflowJobStatusSensor`. -.. exampleinclude:: /../../tests/system/providers/google/cloud/dataflow/example_dataflow_native_python_async.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/dataflow/example_dataflow_native_python_async.py :language: python :dedent: 4 :start-after: [START howto_sensor_wait_for_job_status] @@ -390,7 +390,7 @@ When job is triggered asynchronously sensors may be used to run checks for speci This operator can be run in deferrable mode by passing ``deferrable=True`` as a parameter. -.. exampleinclude:: /../../tests/system/providers/google/cloud/dataflow/example_dataflow_sensors_deferrable.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/dataflow/example_dataflow_sensors_deferrable.py :language: python :dedent: 4 :start-after: [START howto_sensor_wait_for_job_status_deferrable] @@ -398,7 +398,7 @@ This operator can be run in deferrable mode by passing ``deferrable=True`` as a :class:`~airflow.providers.google.cloud.sensors.dataflow.DataflowJobMetricsSensor`. -.. 
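The stop operator and status sensor described above compose naturally with an asynchronously started job; the job-ID hand-off via XCom below is an illustrative assumption:

.. code-block:: python

    from airflow.providers.google.cloud.hooks.dataflow import DataflowJobStatus
    from airflow.providers.google.cloud.operators.dataflow import DataflowStopJobOperator
    from airflow.providers.google.cloud.sensors.dataflow import DataflowJobStatusSensor

    PROJECT_ID = "my-project"
    LOCATION = "europe-west3"

    wait_for_job_done = DataflowJobStatusSensor(
        task_id="wait_for_job_done",
        # illustrative: pull the job ID pushed by an earlier start task
        job_id="{{ task_instance.xcom_pull('start_template_job')['id'] }}",
        expected_statuses={DataflowJobStatus.JOB_STATE_DONE},
        project_id=PROJECT_ID,
        location=LOCATION,
    )

    stop_job = DataflowStopJobOperator(
        task_id="stop_dataflow_job",
        job_name_prefix="start-template-job",  # stops every job with this name prefix
        project_id=PROJECT_ID,
        location=LOCATION,
    )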
exampleinclude:: /../../tests/system/providers/google/cloud/dataflow/example_dataflow_native_python_async.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/dataflow/example_dataflow_native_python_async.py :language: python :dedent: 4 :start-after: [START howto_sensor_wait_for_job_metric] @@ -406,7 +406,7 @@ This operator can be run in deferrable mode by passing ``deferrable=True`` as a This operator can be run in deferrable mode by passing ``deferrable=True`` as a parameter. -.. exampleinclude:: /../../tests/system/providers/google/cloud/dataflow/example_dataflow_sensors_deferrable.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/dataflow/example_dataflow_sensors_deferrable.py :language: python :dedent: 4 :start-after: [START howto_sensor_wait_for_job_metric_deferrable] @@ -414,7 +414,7 @@ This operator can be run in deferrable mode by passing ``deferrable=True`` as a :class:`~airflow.providers.google.cloud.sensors.dataflow.DataflowJobMessagesSensor`. -.. exampleinclude:: /../../tests/system/providers/google/cloud/dataflow/example_dataflow_native_python_async.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/dataflow/example_dataflow_native_python_async.py :language: python :dedent: 4 :start-after: [START howto_sensor_wait_for_job_message] @@ -422,7 +422,7 @@ This operator can be run in deferrable mode by passing ``deferrable=True`` as a This operator can be run in deferrable mode by passing ``deferrable=True`` as a parameter. -.. exampleinclude:: /../../tests/system/providers/google/cloud/dataflow/example_dataflow_sensors_deferrable.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/dataflow/example_dataflow_sensors_deferrable.py :language: python :dedent: 4 :start-after: [START howto_sensor_wait_for_job_message_deferrable] @@ -430,7 +430,7 @@ This operator can be run in deferrable mode by passing ``deferrable=True`` as a :class:`~airflow.providers.google.cloud.sensors.dataflow.DataflowJobAutoScalingEventsSensor`. -.. exampleinclude:: /../../tests/system/providers/google/cloud/dataflow/example_dataflow_native_python_async.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/dataflow/example_dataflow_native_python_async.py :language: python :dedent: 4 :start-after: [START howto_sensor_wait_for_job_autoscaling_event] @@ -438,7 +438,7 @@ This operator can be run in deferrable mode by passing ``deferrable=True`` as a This operator can be run in deferrable mode by passing ``deferrable=True`` as a parameter. -.. exampleinclude:: /../../tests/system/providers/google/cloud/dataflow/example_dataflow_sensors_deferrable.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/dataflow/example_dataflow_sensors_deferrable.py :language: python :dedent: 4 :start-after: [START howto_sensor_wait_for_job_autoscaling_event_deferrable] diff --git a/docs/apache-airflow-providers-google/operators/cloud/dataform.rst b/docs/apache-airflow-providers-google/operators/cloud/dataform.rst index 97d9d28e223e..09d8a6e6b8f9 100644 --- a/docs/apache-airflow-providers-google/operators/cloud/dataform.rst +++ b/docs/apache-airflow-providers-google/operators/cloud/dataform.rst @@ -40,7 +40,7 @@ Create repository for tracking your code in Dataform service. Example of usage c :class:`~airflow.providers.google.cloud.operators.dataform.DataformCreateRepositoryOperator` -.. exampleinclude:: /../../tests/system/providers/google/cloud/dataform/example_dataform.py +.. 
exampleinclude:: /../../providers/tests/system/google/cloud/dataform/example_dataform.py :language: python :dedent: 0 :start-after: [START howto_operator_create_repository] @@ -52,7 +52,7 @@ Create workspace for storing your code in Dataform service. Example of usage can :class:`~airflow.providers.google.cloud.operators.dataform.DataformCreateWorkspaceOperator` -.. exampleinclude:: /../../tests/system/providers/google/cloud/dataform/example_dataform.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/dataform/example_dataform.py :language: python :dedent: 0 :start-after: [START howto_operator_create_workspace] @@ -65,7 +65,7 @@ A simple configuration to create Compilation Result can look as followed: :class:`~airflow.providers.google.cloud.operators.dataform.DataformCreateCompilationResultOperator` -.. exampleinclude:: /../../tests/system/providers/google/cloud/dataform/example_dataform.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/dataform/example_dataform.py :language: python :dedent: 0 :start-after: [START howto_operator_create_compilation_result] @@ -78,7 +78,7 @@ To get a Compilation Result you can use: :class:`~airflow.providers.google.cloud.operators.dataform.DataformGetCompilationResultOperator` -.. exampleinclude:: /../../tests/system/providers/google/cloud/dataform/example_dataform.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/dataform/example_dataform.py :language: python :dedent: 4 :start-after: [START howto_operator_get_compilation_result] @@ -95,13 +95,13 @@ We have possibility to run this operation in the sync mode and async, for async a sensor: :class:`~airflow.providers.google.cloud.operators.dataform.DataformWorkflowInvocationStateSensor` -.. exampleinclude:: /../../tests/system/providers/google/cloud/dataform/example_dataform.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/dataform/example_dataform.py :language: python :dedent: 4 :start-after: [START howto_operator_create_workflow_invocation] :end-before: [END howto_operator_create_workflow_invocation] -.. exampleinclude:: /../../tests/system/providers/google/cloud/dataform/example_dataform.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/dataform/example_dataform.py :language: python :dedent: 4 :start-after: [START howto_operator_create_workflow_invocation_async] @@ -114,7 +114,7 @@ To get a Workflow Invocation you can use: :class:`~airflow.providers.google.cloud.operators.dataform.DataformGetWorkflowInvocationOperator` -.. exampleinclude:: /../../tests/system/providers/google/cloud/dataform/example_dataform.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/dataform/example_dataform.py :language: python :dedent: 4 :start-after: [START howto_operator_get_workflow_invocation] @@ -127,7 +127,7 @@ To query Workflow Invocation Actions you can use: :class:`~airflow.providers.google.cloud.operators.dataform.DataformQueryWorkflowInvocationActionsOperator` -.. exampleinclude:: /../../tests/system/providers/google/cloud/dataform/example_dataform.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/dataform/example_dataform.py :language: python :dedent: 4 :start-after: [START howto_operator_query_workflow_invocation_actions] @@ -140,7 +140,7 @@ To cancel a Workflow Invocation you can use: :class:`~airflow.providers.google.cloud.sensors.dataform.DataformCancelWorkflowInvocationOperator` -.. exampleinclude:: /../../tests/system/providers/google/cloud/dataform/example_dataform.py +.. 
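The compilation-result and workflow-invocation steps above chain together; the repository, workspace, and region names are placeholders, and the XCom hand-off is illustrative:

.. code-block:: python

    from airflow.providers.google.cloud.operators.dataform import (
        DataformCreateCompilationResultOperator,
        DataformCreateWorkflowInvocationOperator,
    )

    PROJECT_ID = "my-project"
    REGION = "europe-west4"
    REPOSITORY_ID = "my-repository"  # placeholder

    create_compilation_result = DataformCreateCompilationResultOperator(
        task_id="create_compilation_result",
        project_id=PROJECT_ID,
        region=REGION,
        repository_id=REPOSITORY_ID,
        compilation_result={
            "git_commitish": "main",
            "workspace": (
                f"projects/{PROJECT_ID}/locations/{REGION}/"
                f"repositories/{REPOSITORY_ID}/workspaces/my-workspace"
            ),
        },
    )

    create_workflow_invocation = DataformCreateWorkflowInvocationOperator(
        task_id="create_workflow_invocation",
        project_id=PROJECT_ID,
        region=REGION,
        repository_id=REPOSITORY_ID,
        workflow_invocation={
            # illustrative: reference the compilation result created above
            "compilation_result": "{{ task_instance.xcom_pull('create_compilation_result')['name'] }}"
        },
    )

    create_compilation_result >> create_workflow_invocation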
exampleinclude:: /../../providers/tests/system/google/cloud/dataform/example_dataform.py :language: python :dedent: 4 :start-after: [START howto_operator_cancel_workflow_invocation] @@ -152,7 +152,7 @@ Deletes repository. Example of usage can be seen below: :class:`~airflow.providers.google.cloud.operators.dataform.DataformDeleteRepositoryOperator` -.. exampleinclude:: /../../tests/system/providers/google/cloud/dataform/example_dataform.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/dataform/example_dataform.py :language: python :dedent: 0 :start-after: [START howto_operator_delete_workspace] @@ -164,7 +164,7 @@ Deletes workspace. Example of usage can be seen below: :class:`~airflow.providers.google.cloud.operators.dataform.DataformDeleteRepositoryOperator` -.. exampleinclude:: /../../tests/system/providers/google/cloud/dataform/example_dataform.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/dataform/example_dataform.py :language: python :dedent: 0 :start-after: [START howto_operator_delete_repository] @@ -176,7 +176,7 @@ Removes file. Example of usage can be seen below: :class:`~airflow.providers.google.cloud.operators.dataform.DataformRemoveFileOperator` -.. exampleinclude:: /../../tests/system/providers/google/cloud/dataform/example_dataform.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/dataform/example_dataform.py :language: python :dedent: 0 :start-after: [START howto_operator_remove_file] @@ -188,7 +188,7 @@ Removes directory. Example of usage can be seen below: :class:`~airflow.providers.google.cloud.operators.dataform.DataformRemoveDirectoryOperator` -.. exampleinclude:: /../../tests/system/providers/google/cloud/dataform/example_dataform.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/dataform/example_dataform.py :language: python :dedent: 0 :start-after: [START howto_operator_remove_directory] @@ -200,7 +200,7 @@ Creates default projects structure for provided workspace. Before it can be done :class:`~airflow.providers.google.cloud.utils.dataform.make_initialization_workspace_flow` -.. exampleinclude:: /../../tests/system/providers/google/cloud/dataform/example_dataform.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/dataform/example_dataform.py :language: python :dedent: 0 :start-after: [START howto_initialize_workspace] @@ -212,7 +212,7 @@ Writes file with given content to specified workspace. :class:`~airflow.providers.google.cloud.operators.dataform.DataformWriteFileOperator` -.. exampleinclude:: /../../tests/system/providers/google/cloud/dataform/example_dataform.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/dataform/example_dataform.py :language: python :dedent: 4 :start-after: [START howto_operator_write_file] @@ -224,7 +224,7 @@ Make directory with given path in specified workspace. :class:`~airflow.providers.google.cloud.operators.dataform.DataformMakeDirectoryOperator` -.. exampleinclude:: /../../tests/system/providers/google/cloud/dataform/example_dataform.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/dataform/example_dataform.py :language: python :dedent: 4 :start-after: [START howto_operator_make_directory] @@ -236,7 +236,7 @@ Installs npm packages for specified workspace :class:`~airflow.providers.google.cloud.operators.dataform.DataformInstallNpmPackagesOperator` -.. exampleinclude:: /../../tests/system/providers/google/cloud/dataform/example_dataform.py +.. 
exampleinclude:: /../../providers/tests/system/google/cloud/dataform/example_dataform.py :language: python :dedent: 4 :start-after: [START howto_operator_install_npm_packages] diff --git a/docs/apache-airflow-providers-google/operators/cloud/datafusion.rst b/docs/apache-airflow-providers-google/operators/cloud/datafusion.rst index 0a728cdb10f5..207479d5de96 100644 --- a/docs/apache-airflow-providers-google/operators/cloud/datafusion.rst +++ b/docs/apache-airflow-providers-google/operators/cloud/datafusion.rst @@ -40,7 +40,7 @@ Restart DataFusion Instance To restart Data Fusion instance use: :class:`~airflow.providers.google.cloud.operators.datafusion.CloudDataFusionRestartInstanceOperator`. -.. exampleinclude:: /../../tests/system/providers/google/cloud/datafusion/example_datafusion.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/datafusion/example_datafusion.py :language: python :dedent: 4 :start-after: [START howto_cloud_data_fusion_restart_instance_operator] @@ -59,7 +59,7 @@ Delete DataFusion Instance To delete Data Fusion instance use: :class:`~airflow.providers.google.cloud.operators.datafusion.CloudDataFusionDeleteInstanceOperator`. -.. exampleinclude:: /../../tests/system/providers/google/cloud/datafusion/example_datafusion.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/datafusion/example_datafusion.py :language: python :dedent: 4 :start-after: [START howto_cloud_data_fusion_delete_instance_operator] @@ -79,7 +79,7 @@ Create DataFusion Instance To create Data Fusion instance use: :class:`~airflow.providers.google.cloud.operators.datafusion.CloudDataFusionCreateInstanceOperator`. -.. exampleinclude:: /../../tests/system/providers/google/cloud/datafusion/example_datafusion.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/datafusion/example_datafusion.py :language: python :dedent: 4 :start-after: [START howto_cloud_data_fusion_create_instance_operator] @@ -99,7 +99,7 @@ Update DataFusion Instance To update Data Fusion instance use: :class:`~airflow.providers.google.cloud.operators.datafusion.CloudDataFusionUpdateInstanceOperator`. -.. exampleinclude:: /../../tests/system/providers/google/cloud/datafusion/example_datafusion.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/datafusion/example_datafusion.py :language: python :dedent: 4 :start-after: [START howto_cloud_data_fusion_update_instance_operator] @@ -118,7 +118,7 @@ Get DataFusion Instance To retrieve Data Fusion instance use: :class:`~airflow.providers.google.cloud.operators.datafusion.CloudDataFusionGetInstanceOperator`. -.. exampleinclude:: /../../tests/system/providers/google/cloud/datafusion/example_datafusion.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/datafusion/example_datafusion.py :language: python :dedent: 4 :start-after: [START howto_cloud_data_fusion_get_instance_operator] @@ -138,7 +138,7 @@ Create a DataFusion pipeline To create Data Fusion pipeline use: :class:`~airflow.providers.google.cloud.operators.datafusion.CloudDataFusionCreatePipelineOperator`. -.. exampleinclude:: /../../tests/system/providers/google/cloud/datafusion/example_datafusion.py +.. 
exampleinclude:: /../../providers/tests/system/google/cloud/datafusion/example_datafusion.py :language: python :dedent: 4 :start-after: [START howto_cloud_data_fusion_create_pipeline] @@ -157,7 +157,7 @@ Start a DataFusion pipeline To start Data Fusion pipeline using synchronous mode: :class:`~airflow.providers.google.cloud.operators.datafusion.CloudDataFusionStartPipelineOperator`. -.. exampleinclude:: /../../tests/system/providers/google/cloud/datafusion/example_datafusion.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/datafusion/example_datafusion.py :language: python :dedent: 4 :start-after: [START howto_cloud_data_fusion_start_pipeline] @@ -166,7 +166,7 @@ To start Data Fusion pipeline using synchronous mode: To start Data Fusion pipeline using asynchronous mode: :class:`~airflow.providers.google.cloud.operators.datafusion.CloudDataFusionStartPipelineOperator`. -.. exampleinclude:: /../../tests/system/providers/google/cloud/datafusion/example_datafusion.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/datafusion/example_datafusion.py :language: python :dedent: 4 :start-after: [START howto_cloud_data_fusion_start_pipeline_async] @@ -179,7 +179,7 @@ It is not possible to use both asynchronous and deferrable parameters at the sam Please, check the example of using deferrable mode: :class:`~airflow.providers.google.cloud.operators.datafusion.CloudDataFusionStartPipelineOperator`. -.. exampleinclude:: /../../tests/system/providers/google/cloud/datafusion/example_datafusion.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/datafusion/example_datafusion.py :language: python :dedent: 4 :start-after: [START howto_cloud_data_fusion_start_pipeline_def] @@ -198,7 +198,7 @@ Stop a DataFusion pipeline To stop Data Fusion pipeline use: :class:`~airflow.providers.google.cloud.operators.datafusion.CloudDataFusionStopPipelineOperator`. -.. exampleinclude:: /../../tests/system/providers/google/cloud/datafusion/example_datafusion.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/datafusion/example_datafusion.py :language: python :dedent: 4 :start-after: [START howto_cloud_data_fusion_stop_pipeline] @@ -217,7 +217,7 @@ Delete a DataFusion pipeline To delete Data Fusion pipeline use: :class:`~airflow.providers.google.cloud.operators.datafusion.CloudDataFusionDeletePipelineOperator`. -.. exampleinclude:: /../../tests/system/providers/google/cloud/datafusion/example_datafusion.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/datafusion/example_datafusion.py :language: python :dedent: 4 :start-after: [START howto_cloud_data_fusion_delete_pipeline] @@ -237,7 +237,7 @@ List DataFusion pipelines To list Data Fusion pipelines use: :class:`~airflow.providers.google.cloud.operators.datafusion.CloudDataFusionListPipelinesOperator`. -.. exampleinclude:: /../../tests/system/providers/google/cloud/datafusion/example_datafusion.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/datafusion/example_datafusion.py :language: python :dedent: 4 :start-after: [START howto_cloud_data_fusion_list_pipelines] @@ -255,7 +255,7 @@ When start pipeline is triggered asynchronously sensors may be used to run check :class:`~airflow.providers.google.cloud.sensors.datafusion.CloudDataFusionPipelineStateSensor`. -.. exampleinclude:: /../../tests/system/providers/google/cloud/datafusion/example_datafusion.py +.. 
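Starting a pipeline needs the instance, the pipeline name, and optionally one of the two waiting strategies discussed above (``asynchronous`` or ``deferrable``, never both); all names below are placeholders:

.. code-block:: python

    from airflow.providers.google.cloud.operators.datafusion import (
        CloudDataFusionStartPipelineOperator,
    )

    start_pipeline = CloudDataFusionStartPipelineOperator(
        task_id="start_datafusion_pipeline",
        location="europe-north1",  # placeholder
        instance_name="airflow-test-instance",  # placeholder
        pipeline_name="test_pipeline",  # placeholder
        # asynchronous=True,  # return at once; pair with CloudDataFusionPipelineStateSensor
        # deferrable=True,    # or release the worker slot while waiting
    )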
exampleinclude:: /../../providers/tests/system/google/cloud/datafusion/example_datafusion.py :language: python :dedent: 4 :start-after: [START howto_cloud_data_fusion_start_pipeline_sensor] diff --git a/docs/apache-airflow-providers-google/operators/cloud/datapipeline.rst b/docs/apache-airflow-providers-google/operators/cloud/datapipeline.rst index 10186138edb2..1123c48d6d79 100644 --- a/docs/apache-airflow-providers-google/operators/cloud/datapipeline.rst +++ b/docs/apache-airflow-providers-google/operators/cloud/datapipeline.rst @@ -51,7 +51,7 @@ The project id and location will be used to build the parent name needed to crea Here is an example of how you can create a Data Pipelines instance by running the above parameters with CreateDataPipelineOperator: -.. exampleinclude:: /../../tests/system/providers/google/cloud/datapipelines/example_datapipeline.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/datapipelines/example_datapipeline.py :language: python :dedent: 4 :start-after: [START howto_operator_create_data_pipeline] @@ -79,7 +79,7 @@ The Project ID and Location will be used to build the parent name, which is wher You can run a Data Pipelines instance by running the above parameters with RunDataPipelineOperator: -.. exampleinclude:: /../../tests/system/providers/google/cloud/datapipelines/example_datapipeline.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/datapipelines/example_datapipeline.py :language: python :dedent: 4 :start-after: [START howto_operator_run_data_pipeline] diff --git a/docs/apache-airflow-providers-google/operators/cloud/dataplex.rst b/docs/apache-airflow-providers-google/operators/cloud/dataplex.rst index 1b8cc56dcf51..788024ce0bbe 100644 --- a/docs/apache-airflow-providers-google/operators/cloud/dataplex.rst +++ b/docs/apache-airflow-providers-google/operators/cloud/dataplex.rst @@ -31,7 +31,7 @@ For more information about the available fields to pass when creating a task, vi A simple task configuration can look as followed: -.. exampleinclude:: /../../tests/system/providers/google/cloud/dataplex/example_dataplex.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/dataplex/example_dataplex.py :language: python :dedent: 0 :start-after: [START howto_dataplex_configuration] @@ -40,13 +40,13 @@ A simple task configuration can look as followed: With this configuration we can create the task both synchronously & asynchronously: :class:`~airflow.providers.google.cloud.operators.dataplex.DataplexCreateTaskOperator` -.. exampleinclude:: /../../tests/system/providers/google/cloud/dataplex/example_dataplex.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/dataplex/example_dataplex.py :language: python :dedent: 4 :start-after: [START howto_dataplex_create_task_operator] :end-before: [END howto_dataplex_create_task_operator] -.. exampleinclude:: /../../tests/system/providers/google/cloud/dataplex/example_dataplex.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/dataplex/example_dataplex.py :language: python :dedent: 4 :start-after: [START howto_dataplex_async_create_task_operator] @@ -59,7 +59,7 @@ To delete a task you can use: :class:`~airflow.providers.google.cloud.operators.dataplex.DataplexDeleteTaskOperator` -.. exampleinclude:: /../../tests/system/providers/google/cloud/dataplex/example_dataplex.py +.. 
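For the two Data Pipelines operators, a rough sketch under the assumption that the ``body`` follows the Data Pipelines REST resource shape; every identifier below is hypothetical:

.. code-block:: python

    from airflow.providers.google.cloud.operators.datapipeline import (
        CreateDataPipelineOperator,
        RunDataPipelineOperator,
    )

    create_pipeline = CreateDataPipelineOperator(
        task_id="create_data_pipeline",
        project_id="my-project",  # hypothetical
        location="us-central1",  # hypothetical
        body={  # assumed minimal Data Pipelines resource body
            "name": "projects/my-project/locations/us-central1/pipelines/example",
            "type": "PIPELINE_TYPE_BATCH",
        },
    )

    run_pipeline = RunDataPipelineOperator(
        task_id="run_data_pipeline",
        data_pipeline_name="example",  # hypothetical
        project_id="my-project",
        location="us-central1",
    )

    create_pipeline >> run_pipeline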
exampleinclude:: /../../providers/tests/system/google/cloud/dataplex/example_dataplex.py :language: python :dedent: 4 :start-after: [START howto_dataplex_delete_task_operator] @@ -72,7 +72,7 @@ To list tasks you can use: :class:`~airflow.providers.google.cloud.operators.dataplex.DataplexListTasksOperator` -.. exampleinclude:: /../../tests/system/providers/google/cloud/dataplex/example_dataplex.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/dataplex/example_dataplex.py :language: python :dedent: 4 :start-after: [START howto_dataplex_list_tasks_operator] @@ -85,7 +85,7 @@ To get a task you can use: :class:`~airflow.providers.google.cloud.operators.dataplex.DataplexGetTaskOperator` -.. exampleinclude:: /../../tests/system/providers/google/cloud/dataplex/example_dataplex.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/dataplex/example_dataplex.py :language: python :dedent: 4 :start-after: [START howto_dataplex_get_task_operator] @@ -98,7 +98,7 @@ To wait for a task created asynchronously you can use: :class:`~airflow.providers.google.cloud.sensors.dataplex.DataplexTaskStateSensor` -.. exampleinclude:: /../../tests/system/providers/google/cloud/dataplex/example_dataplex.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/dataplex/example_dataplex.py :language: python :dedent: 4 :start-after: [START howto_dataplex_task_state_sensor] @@ -113,7 +113,7 @@ For more information about the available fields to pass when creating a lake, vi A simple task configuration can look as followed: -.. exampleinclude:: /../../tests/system/providers/google/cloud/dataplex/example_dataplex.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/dataplex/example_dataplex.py :language: python :dedent: 0 :start-after: [START howto_dataplex_lake_configuration] @@ -123,7 +123,7 @@ With this configuration we can create the lake: :class:`~airflow.providers.google.cloud.operators.dataplex.DataplexCreateLakeOperator` -.. exampleinclude:: /../../tests/system/providers/google/cloud/dataplex/example_dataplex.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/dataplex/example_dataplex.py :language: python :dedent: 4 :start-after: [START howto_dataplex_create_lake_operator] @@ -136,7 +136,7 @@ To delete a lake you can use: :class:`~airflow.providers.google.cloud.operators.dataplex.DataplexDeleteLakeOperator` -.. exampleinclude:: /../../tests/system/providers/google/cloud/dataplex/example_dataplex.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/dataplex/example_dataplex.py :language: python :dedent: 4 :start-after: [START howto_dataplex_delete_lake_operator] @@ -150,7 +150,7 @@ For more information about the available fields to pass when creating a Data Qua A simple Data Quality scan configuration can look as followed: -.. exampleinclude:: /../../tests/system/providers/google/cloud/dataplex/example_dataplex_dq.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/dataplex/example_dataplex_dq.py :language: python :dedent: 0 :start-after: [START howto_dataplex_data_quality_configuration] @@ -160,7 +160,7 @@ With this configuration we can create or update the Data Quality scan: :class:`~airflow.providers.google.cloud.operators.dataplex.DataplexCreateOrUpdateDataQualityScanOperator` -.. exampleinclude:: /../../tests/system/providers/google/cloud/dataplex/example_dataplex_dq.py +.. 
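Tying the Dataplex task body, the asynchronous create operator, and the state sensor together gives a pattern like the following sketch. The service account, script path, and all IDs are placeholders; the upstream example DAGs spell the protobuf-backed trigger type field as ``type_``:

.. code-block:: python

    from airflow.providers.google.cloud.operators.dataplex import DataplexCreateTaskOperator
    from airflow.providers.google.cloud.sensors.dataplex import DataplexTaskStateSensor

    TASK_BODY = {  # assumed minimal task body
        "trigger_spec": {"type_": "ON_DEMAND"},
        "execution_spec": {"service_account": "sa@my-project.iam.gserviceaccount.com"},
        "spark": {"python_script_file": "gs://my-bucket/job.py"},
    }

    create_task = DataplexCreateTaskOperator(
        task_id="create_dataplex_task",
        project_id="my-project",
        region="us-central1",
        lake_id="example-lake",
        dataplex_task_id="example-task",
        body=TASK_BODY,
        asynchronous=True,  # return immediately, let the sensor watch the task
    )

    wait_for_task = DataplexTaskStateSensor(
        task_id="wait_for_dataplex_task",
        project_id="my-project",
        region="us-central1",
        lake_id="example-lake",
        dataplex_task_id="example-task",
    )

    create_task >> wait_for_task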
exampleinclude:: /../../providers/tests/system/google/cloud/dataplex/example_dataplex_dq.py :language: python :dedent: 4 :start-after: [START howto_dataplex_create_data_quality_operator] @@ -173,7 +173,7 @@ To get a Data Quality scan you can use: :class:`~airflow.providers.google.cloud.operators.dataplex.DataplexGetDataQualityScanOperator` -.. exampleinclude:: /../../tests/system/providers/google/cloud/dataplex/example_dataplex_dq.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/dataplex/example_dataplex_dq.py :language: python :dedent: 4 :start-after: [START howto_dataplex_get_data_quality_operator] @@ -188,7 +188,7 @@ To delete a Data Quality scan you can use: :class:`~airflow.providers.google.cloud.operators.dataplex.DataplexDeleteDataQualityScanOperator` -.. exampleinclude:: /../../tests/system/providers/google/cloud/dataplex/example_dataplex_dq.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/dataplex/example_dataplex_dq.py :language: python :dedent: 4 :start-after: [START howto_dataplex_delete_data_quality_operator] @@ -201,7 +201,7 @@ You can run Dataplex Data Quality scan in asynchronous modes to later check its :class:`~airflow.providers.google.cloud.operators.dataplex.DataplexRunDataQualityScanOperator` -.. exampleinclude:: /../../tests/system/providers/google/cloud/dataplex/example_dataplex_dq.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/dataplex/example_dataplex_dq.py :language: python :dedent: 4 :start-after: [START howto_dataplex_run_data_quality_operator] @@ -211,7 +211,7 @@ To check that running Dataplex Data Quality scan succeeded you can use: :class:`~airflow.providers.google.cloud.sensors.dataplex.DataplexDataQualityJobStatusSensor`. -.. exampleinclude:: /../../tests/system/providers/google/cloud/dataplex/example_dataplex_dq.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/dataplex/example_dataplex_dq.py :language: python :dedent: 4 :start-after: [START howto_dataplex_data_scan_job_state_sensor] @@ -219,7 +219,7 @@ To check that running Dataplex Data Quality scan succeeded you can use: Also for this action you can use operator in the deferrable mode: -.. exampleinclude:: /../../tests/system/providers/google/cloud/dataplex/example_dataplex_dq.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/dataplex/example_dataplex_dq.py :language: python :dedent: 4 :start-after: [START howto_dataplex_run_data_quality_def_operator] @@ -232,7 +232,7 @@ To get a Data Quality scan job you can use: :class:`~airflow.providers.google.cloud.operators.dataplex.DataplexGetDataQualityScanResultOperator` -.. exampleinclude:: /../../tests/system/providers/google/cloud/dataplex/example_dataplex_dq.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/dataplex/example_dataplex_dq.py :language: python :dedent: 4 :start-after: [START howto_dataplex_get_data_quality_job_operator] @@ -240,7 +240,7 @@ To get a Data Quality scan job you can use: Also for this action you can use operator in the deferrable mode: -.. exampleinclude:: /../../tests/system/providers/google/cloud/dataplex/example_dataplex_dq.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/dataplex/example_dataplex_dq.py :language: python :dedent: 4 :start-after: [START howto_dataplex_get_data_quality_job_def_operator] @@ -255,7 +255,7 @@ For more information about the available fields to pass when creating a zone, vi A simple zone configuration can look as followed: -.. 
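Combining the asynchronous Data Quality run with the job status sensor might look like this sketch; the scan ID is hypothetical, and the XCom pull assumes the run operator pushes its job ID as its return value:

.. code-block:: python

    from airflow.providers.google.cloud.operators.dataplex import (
        DataplexRunDataQualityScanOperator,
    )
    from airflow.providers.google.cloud.sensors.dataplex import (
        DataplexDataQualityJobStatusSensor,
    )

    run_scan_async = DataplexRunDataQualityScanOperator(
        task_id="run_data_scan_async",
        project_id="my-project",
        region="us-central1",
        data_scan_id="example-dq-scan",
        asynchronous=True,
    )

    wait_for_scan = DataplexDataQualityJobStatusSensor(
        task_id="wait_for_data_scan",
        project_id="my-project",
        region="us-central1",
        data_scan_id="example-dq-scan",
        # assumed: the run operator's return value is the job id
        job_id="{{ task_instance.xcom_pull('run_data_scan_async') }}",
    )

    run_scan_async >> wait_for_scan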
exampleinclude:: /../../tests/system/providers/google/cloud/dataplex/example_dataplex_dq.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/dataplex/example_dataplex_dq.py :language: python :dedent: 0 :start-after: [START howto_dataplex_zone_configuration] @@ -265,7 +265,7 @@ With this configuration we can create a zone: :class:`~airflow.providers.google.cloud.operators.dataplex.DataplexCreateZoneOperator` -.. exampleinclude:: /../../tests/system/providers/google/cloud/dataplex/example_dataplex_dq.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/dataplex/example_dataplex_dq.py :language: python :dedent: 4 :start-after: [START howto_dataplex_create_zone_operator] @@ -278,7 +278,7 @@ To delete a zone you can use: :class:`~airflow.providers.google.cloud.operators.dataplex.DataplexDeleteZoneOperator` -.. exampleinclude:: /../../tests/system/providers/google/cloud/dataplex/example_dataplex_dq.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/dataplex/example_dataplex_dq.py :language: python :dedent: 4 :start-after: [START howto_dataplex_delete_zone_operator] @@ -293,7 +293,7 @@ For more information about the available fields to pass when creating a asset, v A simple asset configuration can look as followed: -.. exampleinclude:: /../../tests/system/providers/google/cloud/dataplex/example_dataplex_dq.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/dataplex/example_dataplex_dq.py :language: python :dedent: 0 :start-after: [START howto_dataplex_asset_configuration] @@ -303,7 +303,7 @@ With this configuration we can create the asset: :class:`~airflow.providers.google.cloud.operators.dataplex.DataplexCreateAssetOperator` -.. exampleinclude:: /../../tests/system/providers/google/cloud/dataplex/example_dataplex_dq.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/dataplex/example_dataplex_dq.py :language: python :dedent: 4 :start-after: [START howto_dataplex_create_asset_operator] @@ -316,7 +316,7 @@ To delete a asset you can use: :class:`~airflow.providers.google.cloud.operators.dataplex.DataplexDeleteAssetOperator` -.. exampleinclude:: /../../tests/system/providers/google/cloud/dataplex/example_dataplex_dq.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/dataplex/example_dataplex_dq.py :language: python :dedent: 4 :start-after: [START howto_dataplex_delete_asset_operator] @@ -330,7 +330,7 @@ For more information about the available fields to pass when creating a Data Pro A simple Data Profile scan configuration can look as followed: -.. exampleinclude:: /../../tests/system/providers/google/cloud/dataplex/example_dataplex_dp.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/dataplex/example_dataplex_dp.py :language: python :dedent: 0 :start-after: [START howto_dataplex_data_profile_configuration] @@ -340,7 +340,7 @@ With this configuration we can create or update the Data Profile scan: :class:`~airflow.providers.google.cloud.operators.dataplex.DataplexCreateOrUpdateDataProfileScanOperator` -.. exampleinclude:: /../../tests/system/providers/google/cloud/dataplex/example_dataplex_dp.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/dataplex/example_dataplex_dp.py :language: python :dedent: 4 :start-after: [START howto_dataplex_create_data_profile_operator] @@ -353,7 +353,7 @@ To get a Data Profile scan you can use: :class:`~airflow.providers.google.cloud.operators.dataplex.DataplexGetDataProfileScanOperator` -.. 
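A zone and an asset attached to it can be sketched as below. The bodies are assumptions modeled on the upstream example DAGs (which use the mangled ``type_`` field name); the bucket and IDs are placeholders:

.. code-block:: python

    from airflow.providers.google.cloud.operators.dataplex import (
        DataplexCreateAssetOperator,
        DataplexCreateZoneOperator,
    )

    ZONE_BODY = {  # assumed minimal zone body
        "type_": "RAW",
        "resource_spec": {"location_type": "SINGLE_REGION"},
    }

    create_zone = DataplexCreateZoneOperator(
        task_id="create_zone",
        project_id="my-project",
        region="us-central1",
        lake_id="example-lake",
        zone_id="example-zone",
        body=ZONE_BODY,
    )

    create_asset = DataplexCreateAssetOperator(
        task_id="create_asset",
        project_id="my-project",
        region="us-central1",
        lake_id="example-lake",
        zone_id="example-zone",
        asset_id="example-asset",
        body={  # assumed: attaching a Cloud Storage bucket to the zone
            "resource_spec": {
                "name": "projects/my-project/buckets/my-bucket",
                "type_": "STORAGE_BUCKET",
            },
        },
    )

    create_zone >> create_asset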
exampleinclude:: /../../tests/system/providers/google/cloud/dataplex/example_dataplex_dp.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/dataplex/example_dataplex_dp.py :language: python :dedent: 4 :start-after: [START howto_dataplex_get_data_profile_operator] @@ -368,7 +368,7 @@ To delete a Data Profile scan you can use: :class:`~airflow.providers.google.cloud.operators.dataplex.DataplexDeleteDataProfileScanOperator` -.. exampleinclude:: /../../tests/system/providers/google/cloud/dataplex/example_dataplex_dp.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/dataplex/example_dataplex_dp.py :language: python :dedent: 4 :start-after: [START howto_dataplex_delete_data_profile_operator] @@ -381,7 +381,7 @@ You can run Dataplex Data Profile scan in asynchronous modes to later check its :class:`~airflow.providers.google.cloud.operators.dataplex.DataplexRunDataProfileScanOperator` -.. exampleinclude:: /../../tests/system/providers/google/cloud/dataplex/example_dataplex_dp.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/dataplex/example_dataplex_dp.py :language: python :dedent: 4 :start-after: [START howto_dataplex_run_data_profile_operator] @@ -391,7 +391,7 @@ To check that running Dataplex Data Profile scan succeeded you can use: :class:`~airflow.providers.google.cloud.sensors.dataplex.DataplexDataProfileJobStatusSensor`. -.. exampleinclude:: /../../tests/system/providers/google/cloud/dataplex/example_dataplex_dp.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/dataplex/example_dataplex_dp.py :language: python :dedent: 4 :start-after: [START howto_dataplex_data_scan_job_state_sensor] @@ -399,7 +399,7 @@ To check that running Dataplex Data Profile scan succeeded you can use: Also for this action you can use operator in the deferrable mode: -.. exampleinclude:: /../../tests/system/providers/google/cloud/dataplex/example_dataplex_dp.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/dataplex/example_dataplex_dp.py :language: python :dedent: 4 :start-after: [START howto_dataplex_run_data_profile_def_operator] @@ -412,7 +412,7 @@ To get a Data Profile scan job you can use: :class:`~airflow.providers.google.cloud.operators.dataplex.DataplexGetDataProfileScanResultOperator` -.. exampleinclude:: /../../tests/system/providers/google/cloud/dataplex/example_dataplex_dp.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/dataplex/example_dataplex_dp.py :language: python :dedent: 4 :start-after: [START howto_dataplex_get_data_profile_job_operator] diff --git a/docs/apache-airflow-providers-google/operators/cloud/dataprep.rst b/docs/apache-airflow-providers-google/operators/cloud/dataprep.rst index 661443f26470..8e7ad129fa0a 100644 --- a/docs/apache-airflow-providers-google/operators/cloud/dataprep.rst +++ b/docs/apache-airflow-providers-google/operators/cloud/dataprep.rst @@ -59,7 +59,7 @@ To get information about jobs within a Cloud Dataprep job use: Example usage: -.. exampleinclude:: /../../tests/system/providers/google/cloud/dataprep/example_dataprep.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/dataprep/example_dataprep.py :language: python :dedent: 4 :start-after: [START how_to_dataprep_run_job_group_operator] @@ -77,7 +77,7 @@ To get information about jobs within a Cloud Dataprep job use: Example usage: -.. exampleinclude:: /../../tests/system/providers/google/cloud/dataprep/example_dataprep.py +.. 
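The deferrable Data Profile run mirrors the Data Quality pattern above; a minimal sketch with a hypothetical scan ID:

.. code-block:: python

    from airflow.providers.google.cloud.operators.dataplex import (
        DataplexRunDataProfileScanOperator,
    )

    run_profile_scan = DataplexRunDataProfileScanOperator(
        task_id="run_data_profile_scan",
        project_id="my-project",
        region="us-central1",
        data_scan_id="example-profile-scan",  # hypothetical
        deferrable=True,  # wait in the triggerer instead of blocking a worker
    )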
exampleinclude:: /../../providers/tests/system/google/cloud/dataprep/example_dataprep.py :language: python :dedent: 4 :start-after: [START how_to_dataprep_get_jobs_for_job_group_operator] @@ -96,7 +96,7 @@ To get information about jobs within a Cloud Dataprep job use: Example usage: -.. exampleinclude:: /../../tests/system/providers/google/cloud/dataprep/example_dataprep.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/dataprep/example_dataprep.py :language: python :dedent: 4 :start-after: [START how_to_dataprep_get_job_group_operator] @@ -112,7 +112,7 @@ To get information about jobs within a Cloud Dataprep job use: Example usage: -.. exampleinclude:: /../../tests/system/providers/google/cloud/dataprep/example_dataprep.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/dataprep/example_dataprep.py :language: python :dedent: 4 :start-after: [START how_to_dataprep_copy_flow_operator] @@ -130,7 +130,7 @@ To get information about jobs within a Cloud Dataprep job use: Example usage: -.. exampleinclude:: /../../tests/system/providers/google/cloud/dataprep/example_dataprep.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/dataprep/example_dataprep.py :language: python :dedent: 4 :start-after: [START how_to_dataprep_dataprep_run_flow_operator] @@ -148,7 +148,7 @@ To get information about jobs within a Cloud Dataprep job use: Example usage: -.. exampleinclude:: /../../tests/system/providers/google/cloud/dataprep/example_dataprep.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/dataprep/example_dataprep.py :language: python :dedent: 4 :start-after: [START how_to_dataprep_delete_flow_operator] @@ -167,7 +167,7 @@ To get information about jobs within a Cloud Dataprep job use: Example usage: -.. exampleinclude:: /../../tests/system/providers/google/cloud/dataprep/example_dataprep.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/dataprep/example_dataprep.py :language: python :dedent: 4 :start-after: [START how_to_dataprep_job_group_finished_sensor] diff --git a/docs/apache-airflow-providers-google/operators/cloud/dataproc.rst b/docs/apache-airflow-providers-google/operators/cloud/dataproc.rst index 1f7bac8566ca..f8489dd6795e 100644 --- a/docs/apache-airflow-providers-google/operators/cloud/dataproc.rst +++ b/docs/apache-airflow-providers-google/operators/cloud/dataproc.rst @@ -56,7 +56,7 @@ For more information about the available fields to pass when creating a cluster, A cluster configuration can look as followed: -.. exampleinclude:: /../../tests/system/providers/google/cloud/dataproc/example_dataproc_hive.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/dataproc/example_dataproc_hive.py :language: python :dedent: 0 :start-after: [START how_to_cloud_dataproc_create_cluster] @@ -65,7 +65,7 @@ A cluster configuration can look as followed: With this configuration we can create the cluster: :class:`~airflow.providers.google.cloud.operators.dataproc.DataprocCreateClusterOperator` -.. exampleinclude:: /../../tests/system/providers/google/cloud/dataproc/example_dataproc_hive.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/dataproc/example_dataproc_hive.py :language: python :dedent: 4 :start-after: [START how_to_cloud_dataproc_create_cluster_operator] @@ -84,7 +84,7 @@ This can be beneficial for running Dataproc workloads on GKE while optimizing co To create Dataproc cluster in Google Kubernetes Engine you could pass cluster configuration: -.. 
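A hedged sketch of the Dataprep run-and-wait pattern; the ``body_request`` shape and the XCom key are assumptions, and the wrangled dataset ID would come from your own Dataprep flow:

.. code-block:: python

    from airflow.providers.google.cloud.operators.dataprep import DataprepRunJobGroupOperator
    from airflow.providers.google.cloud.sensors.dataprep import DataprepJobGroupIsFinishedSensor

    run_job_group = DataprepRunJobGroupOperator(
        task_id="run_job_group",
        body_request={"wrangledDataset": {"id": 12345}},  # assumed: id of your wrangled dataset
    )

    wait_for_job_group = DataprepJobGroupIsFinishedSensor(
        task_id="wait_for_job_group",
        # assumed: the run operator returns the created job group as a dict
        job_group_id="{{ task_instance.xcom_pull('run_job_group')['id'] }}",
    )

    run_job_group >> wait_for_job_group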
exampleinclude:: /../../tests/system/providers/google/cloud/dataproc/example_dataproc_gke.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/dataproc/example_dataproc_gke.py :language: python :dedent: 0 :start-after: [START how_to_cloud_dataproc_create_cluster_in_gke_config] @@ -93,7 +93,7 @@ To create Dataproc cluster in Google Kubernetes Engine you could pass cluster co With this configuration we can create the cluster: :class:`~airflow.providers.google.cloud.operators.dataproc.DataprocCreateClusterOperator` -.. exampleinclude:: /../../tests/system/providers/google/cloud/dataproc/example_dataproc_gke.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/dataproc/example_dataproc_gke.py :language: python :dedent: 4 :start-after: [START how_to_cloud_dataproc_create_cluster_operator_in_gke] @@ -105,7 +105,7 @@ Note that default image might not support the chosen optional component. If this is your case, please specify correct ``image_version`` that you can find in the `documentation. `__ -.. exampleinclude:: /../../tests/system/providers/google/cloud/dataproc/example_dataproc_presto.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/dataproc/example_dataproc_presto.py :language: python :dedent: 0 :start-after: [START how_to_cloud_dataproc_create_cluster] @@ -118,7 +118,7 @@ If this is your case, please specify correct ``image_version`` that you can find `documentation. `__ -.. exampleinclude:: /../../tests/system/providers/google/cloud/dataproc/example_dataproc_trino.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/dataproc/example_dataproc_trino.py :language: python :dedent: 0 :start-after: [START how_to_cloud_dataproc_create_cluster] @@ -126,7 +126,7 @@ If this is your case, please specify correct ``image_version`` that you can find You can use deferrable mode for this action in order to run the operator asynchronously: -.. exampleinclude:: /../../tests/system/providers/google/cloud/dataproc/example_dataproc_cluster_deferrable.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/dataproc/example_dataproc_cluster_deferrable.py :language: python :dedent: 4 :start-after: [START how_to_cloud_dataproc_create_cluster_operator_async] @@ -139,7 +139,7 @@ this could be easily done using **make()** of :class:`~airflow.providers.google.cloud.operators.dataproc.ClusterGenerator` You can generate and use config as followed: -.. exampleinclude:: /../../tests/system/providers/google/cloud/dataproc/example_dataproc_cluster_generator.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/dataproc/example_dataproc_cluster_generator.py :language: python :dedent: 0 :start-after: [START how_to_cloud_dataproc_create_cluster_generate_cluster_config] @@ -156,7 +156,7 @@ For more information about the available fields to pass when diagnosing a cluste To diagnose a Dataproc cluster use: :class:`~airflow.providers.google.cloud.operators.dataproc.DataprocDiagnoseClusterOperator`. -.. exampleinclude:: /../../tests/system/providers/google/cloud/dataproc/example_dataproc_cluster_diagnose.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/dataproc/example_dataproc_cluster_diagnose.py :language: python :dedent: 0 :start-after: [START how_to_cloud_dataproc_diagnose_cluster] @@ -164,7 +164,7 @@ To diagnose a Dataproc cluster use: You can also use deferrable mode in order to run the operator asynchronously: -.. exampleinclude:: /../../tests/system/providers/google/cloud/dataproc/example_dataproc_cluster_diagnose.py +.. 
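An explicit ``cluster_config`` for ``DataprocCreateClusterOperator`` can be sketched as below (``ClusterGenerator`` is the alternative when you prefer keyword arguments over a raw dict). Machine types and sizes here are arbitrary illustrative choices:

.. code-block:: python

    from airflow.providers.google.cloud.operators.dataproc import DataprocCreateClusterOperator

    CLUSTER_CONFIG = {
        "master_config": {
            "num_instances": 1,
            "machine_type_uri": "n1-standard-4",
            "disk_config": {"boot_disk_type": "pd-standard", "boot_disk_size_gb": 32},
        },
        "worker_config": {
            "num_instances": 2,
            "machine_type_uri": "n1-standard-4",
            "disk_config": {"boot_disk_type": "pd-standard", "boot_disk_size_gb": 32},
        },
    }

    create_cluster = DataprocCreateClusterOperator(
        task_id="create_cluster",
        project_id="my-project",  # hypothetical
        region="europe-west1",  # hypothetical
        cluster_name="example-cluster",  # hypothetical
        cluster_config=CLUSTER_CONFIG,
        deferrable=True,  # optional: wait for cluster creation in the triggerer
    )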
exampleinclude:: /../../providers/tests/system/google/cloud/dataproc/example_dataproc_cluster_diagnose.py :language: python :dedent: 0 :start-after: [START how_to_cloud_dataproc_diagnose_cluster_deferrable] @@ -178,7 +178,7 @@ For more information on updateMask and other parameters take a look at `Dataproc An example of a new cluster config and the updateMask: -.. exampleinclude:: /../../tests/system/providers/google/cloud/dataproc/example_dataproc_cluster_update.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/dataproc/example_dataproc_cluster_update.py :language: python :dedent: 0 :start-after: [START how_to_cloud_dataproc_updatemask_cluster_operator] @@ -187,7 +187,7 @@ An example of a new cluster config and the updateMask: To update a cluster you can use: :class:`~airflow.providers.google.cloud.operators.dataproc.DataprocUpdateClusterOperator` -.. exampleinclude:: /../../tests/system/providers/google/cloud/dataproc/example_dataproc_cluster_update.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/dataproc/example_dataproc_cluster_update.py :language: python :dedent: 4 :start-after: [START how_to_cloud_dataproc_update_cluster_operator] @@ -195,7 +195,7 @@ To update a cluster you can use: You can use deferrable mode for this action in order to run the operator asynchronously: -.. exampleinclude:: /../../tests/system/providers/google/cloud/dataproc/example_dataproc_cluster_deferrable.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/dataproc/example_dataproc_cluster_deferrable.py :language: python :dedent: 4 :start-after: [START how_to_cloud_dataproc_update_cluster_operator_async] @@ -207,7 +207,7 @@ Starting a cluster To start a cluster you can use the :class:`~airflow.providers.google.cloud.operators.dataproc.DataprocStartClusterOperator`: -.. exampleinclude:: /../../tests/system/providers/google/cloud/dataproc/example_dataproc_cluster_start_stop.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/dataproc/example_dataproc_cluster_start_stop.py :language: python :dedent: 4 :start-after: [START how_to_cloud_dataproc_start_cluster_operator] @@ -219,7 +219,7 @@ Stopping a cluster To stop a cluster you can use the :class:`~airflow.providers.google.cloud.operators.dataproc.DataprocStopClusterOperator`: -.. exampleinclude:: /../../tests/system/providers/google/cloud/dataproc/example_dataproc_cluster_start_stop.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/dataproc/example_dataproc_cluster_start_stop.py :language: python :dedent: 4 :start-after: [START how_to_cloud_dataproc_stop_cluster_operator] @@ -231,7 +231,7 @@ Deleting a cluster To delete a cluster you can use: :class:`~airflow.providers.google.cloud.operators.dataproc.DataprocDeleteClusterOperator`. -.. exampleinclude:: /../../tests/system/providers/google/cloud/dataproc/example_dataproc_hive.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/dataproc/example_dataproc_hive.py :language: python :dedent: 4 :start-after: [START how_to_cloud_dataproc_delete_cluster_operator] @@ -239,7 +239,7 @@ To delete a cluster you can use: You can use deferrable mode for this action in order to run the operator asynchronously: -.. exampleinclude:: /../../tests/system/providers/google/cloud/dataproc/example_dataproc_cluster_deferrable.py +.. 
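Stopping and later restarting the same cluster chains the two lifecycle operators; a minimal sketch with placeholder identifiers:

.. code-block:: python

    from airflow.providers.google.cloud.operators.dataproc import (
        DataprocStartClusterOperator,
        DataprocStopClusterOperator,
    )

    stop_cluster = DataprocStopClusterOperator(
        task_id="stop_cluster",
        project_id="my-project",
        region="europe-west1",
        cluster_name="example-cluster",
    )

    start_cluster = DataprocStartClusterOperator(
        task_id="start_cluster",
        project_id="my-project",
        region="europe-west1",
        cluster_name="example-cluster",
    )

    stop_cluster >> start_cluster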
exampleinclude:: /../../providers/tests/system/google/cloud/dataproc/example_dataproc_cluster_deferrable.py :language: python :dedent: 4 :start-after: [START how_to_cloud_dataproc_delete_cluster_operator_async] @@ -258,7 +258,7 @@ file system. You can specify a file:/// path to refer to a local file on a clust The job configuration can be submitted by using: :class:`~airflow.providers.google.cloud.operators.dataproc.DataprocSubmitJobOperator`. -.. exampleinclude:: /../../tests/system/providers/google/cloud/dataproc/example_dataproc_pyspark.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/dataproc/example_dataproc_pyspark.py :language: python :dedent: 4 :start-after: [START how_to_cloud_dataproc_submit_job_to_cluster_operator] @@ -273,7 +273,7 @@ There are more arguments to provide in the jobs than the examples show. For the Example of the configuration for a PySpark Job: -.. exampleinclude:: /../../tests/system/providers/google/cloud/dataproc/example_dataproc_pyspark.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/dataproc/example_dataproc_pyspark.py :language: python :dedent: 0 :start-after: [START how_to_cloud_dataproc_pyspark_config] @@ -281,7 +281,7 @@ Example of the configuration for a PySpark Job: Example of the configuration for a SparkSQl Job: -.. exampleinclude:: /../../tests/system/providers/google/cloud/dataproc/example_dataproc_spark_sql.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/dataproc/example_dataproc_spark_sql.py :language: python :dedent: 0 :start-after: [START how_to_cloud_dataproc_sparksql_config] @@ -289,7 +289,7 @@ Example of the configuration for a SparkSQl Job: Example of the configuration for a Spark Job: -.. exampleinclude:: /../../tests/system/providers/google/cloud/dataproc/example_dataproc_spark.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/dataproc/example_dataproc_spark.py :language: python :dedent: 0 :start-after: [START how_to_cloud_dataproc_spark_config] @@ -297,7 +297,7 @@ Example of the configuration for a Spark Job: Example of the configuration for a Spark Job running in `deferrable mode `__: -.. exampleinclude:: /../../tests/system/providers/google/cloud/dataproc/example_dataproc_spark_deferrable.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/dataproc/example_dataproc_spark_deferrable.py :language: python :dedent: 0 :start-after: [START how_to_cloud_dataproc_spark_deferrable_config] @@ -305,7 +305,7 @@ Example of the configuration for a Spark Job running in `deferrable mode `__: -.. exampleinclude:: /../../tests/system/providers/google/cloud/dataproc/example_dataproc_batch_persistent.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/dataproc/example_dataproc_batch_persistent.py :language: python :dedent: 4 :start-after: [START how_to_cloud_dataproc_create_cluster_for_persistent_history_server] @@ -434,7 +434,7 @@ with specific parameters. Documentation how create cluster you can find After Cluster was created you should add it to the Batch configuration. -.. exampleinclude:: /../../tests/system/providers/google/cloud/dataproc/example_dataproc_batch_persistent.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/dataproc/example_dataproc_batch_persistent.py :language: python :dedent: 4 :start-after: [START how_to_cloud_dataproc_create_batch_operator_with_persistent_history_server] @@ -443,7 +443,7 @@ After Cluster was created you should add it to the Batch configuration. 
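The job dict passed to ``DataprocSubmitJobOperator`` carries the project reference, the cluster placement, and one job-type section. A PySpark sketch (the bucket path and names are placeholders):

.. code-block:: python

    from airflow.providers.google.cloud.operators.dataproc import DataprocSubmitJobOperator

    PYSPARK_JOB = {
        "reference": {"project_id": "my-project"},
        "placement": {"cluster_name": "example-cluster"},
        "pyspark_job": {"main_python_file_uri": "gs://my-bucket/job.py"},
    }

    submit_job = DataprocSubmitJobOperator(
        task_id="submit_pyspark_job",
        project_id="my-project",
        region="europe-west1",
        job=PYSPARK_JOB,
    )

Swapping ``pyspark_job`` for a ``spark_job``, ``spark_sql_job``, or ``hive_job`` section selects the other job types covered in this section.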
To check if operation succeeded you can use :class:`~airflow.providers.google.cloud.sensors.dataproc.DataprocBatchSensor`. -.. exampleinclude:: /../../tests/system/providers/google/cloud/dataproc/example_dataproc_batch.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/dataproc/example_dataproc_batch.py :language: python :dedent: 4 :start-after: [START how_to_cloud_dataproc_batch_async_sensor] @@ -451,7 +451,7 @@ To check if operation succeeded you can use Also for all this action you can use operator in the deferrable mode: -.. exampleinclude:: /../../tests/system/providers/google/cloud/dataproc/example_dataproc_batch_deferrable.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/dataproc/example_dataproc_batch_deferrable.py :language: python :dedent: 4 :start-after: [START how_to_cloud_dataproc_create_batch_operator_async] @@ -463,7 +463,7 @@ Get a Batch To get a batch you can use: :class:`~airflow.providers.google.cloud.operators.dataproc.DataprocGetBatchOperator`. -.. exampleinclude:: /../../tests/system/providers/google/cloud/dataproc/example_dataproc_batch.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/dataproc/example_dataproc_batch.py :language: python :dedent: 4 :start-after: [START how_to_cloud_dataproc_get_batch_operator] @@ -475,7 +475,7 @@ List a Batch To get a list of exists batches you can use: :class:`~airflow.providers.google.cloud.operators.dataproc.DataprocListBatchesOperator`. -.. exampleinclude:: /../../tests/system/providers/google/cloud/dataproc/example_dataproc_batch.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/dataproc/example_dataproc_batch.py :language: python :dedent: 4 :start-after: [START how_to_cloud_dataproc_list_batches_operator] @@ -487,7 +487,7 @@ Delete a Batch To delete a batch you can use: :class:`~airflow.providers.google.cloud.operators.dataproc.DataprocDeleteBatchOperator`. -.. exampleinclude:: /../../tests/system/providers/google/cloud/dataproc/example_dataproc_batch.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/dataproc/example_dataproc_batch.py :language: python :dedent: 4 :start-after: [START how_to_cloud_dataproc_delete_batch_operator] @@ -499,7 +499,7 @@ Cancel a Batch Operation To cancel a operation you can use: :class:`~airflow.providers.google.cloud.operators.dataproc.DataprocCancelOperationOperator`. -.. exampleinclude:: /../../tests/system/providers/google/cloud/dataproc/example_dataproc_batch.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/dataproc/example_dataproc_batch.py :language: python :dedent: 4 :start-after: [START how_to_cloud_dataproc_cancel_operation_operator] diff --git a/docs/apache-airflow-providers-google/operators/cloud/dataproc_metastore.rst b/docs/apache-airflow-providers-google/operators/cloud/dataproc_metastore.rst index 6bf5bc9cf540..7f04f9db81a1 100644 --- a/docs/apache-airflow-providers-google/operators/cloud/dataproc_metastore.rst +++ b/docs/apache-airflow-providers-google/operators/cloud/dataproc_metastore.rst @@ -33,7 +33,7 @@ For more information about the available fields to pass when creating a service, A simple service configuration can look as followed: -.. exampleinclude:: /../../tests/system/providers/google/cloud/dataproc_metastore/example_dataproc_metastore.py +.. 
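The asynchronous-create-then-sense-then-delete batch lifecycle can be sketched as follows; all IDs are hypothetical, and the batch body follows the same shape as the job dict above:

.. code-block:: python

    from airflow.providers.google.cloud.operators.dataproc import (
        DataprocCreateBatchOperator,
        DataprocDeleteBatchOperator,
    )
    from airflow.providers.google.cloud.sensors.dataproc import DataprocBatchSensor

    create_batch = DataprocCreateBatchOperator(
        task_id="create_batch",
        project_id="my-project",
        region="europe-west1",
        batch_id="example-batch",
        batch={"pyspark_batch": {"main_python_file_uri": "gs://my-bucket/job.py"}},
        asynchronous=True,  # return immediately, let the sensor do the waiting
    )

    wait_for_batch = DataprocBatchSensor(
        task_id="wait_for_batch",
        project_id="my-project",
        region="europe-west1",
        batch_id="example-batch",
    )

    delete_batch = DataprocDeleteBatchOperator(
        task_id="delete_batch",
        project_id="my-project",
        region="europe-west1",
        batch_id="example-batch",
    )

    create_batch >> wait_for_batch >> delete_batch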
exampleinclude:: /../../providers/tests/system/google/cloud/dataproc_metastore/example_dataproc_metastore.py :language: python :dedent: 0 :start-after: [START how_to_cloud_dataproc_metastore_create_service] @@ -42,7 +42,7 @@ A simple service configuration can look as followed: With this configuration we can create the service: :class:`~airflow.providers.google.cloud.operators.dataproc_metastore.DataprocMetastoreCreateServiceOperator` -.. exampleinclude:: /../../tests/system/providers/google/cloud/dataproc_metastore/example_dataproc_metastore.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/dataproc_metastore/example_dataproc_metastore.py :language: python :dedent: 4 :start-after: [START how_to_cloud_dataproc_metastore_create_service_operator] @@ -55,7 +55,7 @@ To get a service you can use: :class:`~airflow.providers.google.cloud.operators.dataproc_metastore.DataprocMetastoreGetServiceOperator` -.. exampleinclude:: /../../tests/system/providers/google/cloud/dataproc_metastore/example_dataproc_metastore.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/dataproc_metastore/example_dataproc_metastore.py :language: python :dedent: 4 :start-after: [START how_to_cloud_dataproc_metastore_get_service_operator] @@ -69,7 +69,7 @@ For more information on updateMask and other parameters take a look at `Dataproc An example of a new service config and the updateMask: -.. exampleinclude:: /../../tests/system/providers/google/cloud/dataproc_metastore/example_dataproc_metastore.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/dataproc_metastore/example_dataproc_metastore.py :language: python :dedent: 0 :start-after: [START how_to_cloud_dataproc_metastore_update_service] @@ -78,7 +78,7 @@ An example of a new service config and the updateMask: To update a service you can use: :class:`~airflow.providers.google.cloud.operators.dataproc_metastore.DataprocMetastoreUpdateServiceOperator` -.. exampleinclude:: /../../tests/system/providers/google/cloud/dataproc_metastore/example_dataproc_metastore.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/dataproc_metastore/example_dataproc_metastore.py :language: python :dedent: 4 :start-after: [START how_to_cloud_dataproc_metastore_update_service_operator] @@ -91,7 +91,7 @@ To delete a service you can use: :class:`~airflow.providers.google.cloud.operators.dataproc_metastore.DataprocMetastoreDeleteServiceOperator` -.. exampleinclude:: /../../tests/system/providers/google/cloud/dataproc_metastore/example_dataproc_metastore.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/dataproc_metastore/example_dataproc_metastore.py :language: python :dedent: 4 :start-after: [START how_to_cloud_dataproc_metastore_delete_service_operator] @@ -104,7 +104,7 @@ To export metadata you can use: :class:`~airflow.providers.google.cloud.operators.dataproc_metastore.DataprocMetastoreExportMetadataOperator` -.. exampleinclude:: /../../tests/system/providers/google/cloud/dataproc_metastore/example_dataproc_metastore.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/dataproc_metastore/example_dataproc_metastore.py :language: python :dedent: 4 :start-after: [START how_to_cloud_dataproc_metastore_export_metadata_operator] @@ -117,7 +117,7 @@ To restore a service you can use: :class:`~airflow.providers.google.cloud.operators.dataproc_metastore.DataprocMetastoreRestoreServiceOperator` -.. 
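Creating a Metastore service can be sketched like this; the service body is an assumed minimal Hive configuration, and the IDs are placeholders:

.. code-block:: python

    from airflow.providers.google.cloud.operators.dataproc_metastore import (
        DataprocMetastoreCreateServiceOperator,
    )

    create_service = DataprocMetastoreCreateServiceOperator(
        task_id="create_metastore_service",
        project_id="my-project",
        region="europe-west1",
        service_id="example-service",
        service={"hive_metastore_config": {"version": "3.1.2"}},  # assumed minimal body
    )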
exampleinclude:: /../../tests/system/providers/google/cloud/dataproc_metastore/example_dataproc_metastore_backup.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/dataproc_metastore/example_dataproc_metastore_backup.py :language: python :dedent: 4 :start-after: [START how_to_cloud_dataproc_metastore_restore_service_operator] @@ -131,7 +131,7 @@ For more information about the available fields to pass when creating a metadata A simple metadata import configuration can look as followed: -.. exampleinclude:: /../../tests/system/providers/google/cloud/dataproc_metastore/example_dataproc_metastore.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/dataproc_metastore/example_dataproc_metastore.py :language: python :dedent: 0 :start-after: [START how_to_cloud_dataproc_metastore_create_metadata_import] @@ -140,7 +140,7 @@ A simple metadata import configuration can look as followed: To create a metadata import you can use: :class:`~airflow.providers.google.cloud.operators.dataproc_metastore.DataprocMetastoreCreateMetadataImportOperator` -.. exampleinclude:: /../../tests/system/providers/google/cloud/dataproc_metastore/example_dataproc_metastore.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/dataproc_metastore/example_dataproc_metastore.py :language: python :dedent: 4 :start-after: [START how_to_cloud_dataproc_metastore_create_metadata_import_operator] @@ -154,7 +154,7 @@ For more information about the available fields to pass when creating a backup, A simple backup configuration can look as followed: -.. exampleinclude:: /../../tests/system/providers/google/cloud/dataproc_metastore/example_dataproc_metastore_backup.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/dataproc_metastore/example_dataproc_metastore_backup.py :language: python :dedent: 0 :start-after: [START how_to_cloud_dataproc_metastore_create_backup] @@ -163,7 +163,7 @@ A simple backup configuration can look as followed: With this configuration we can create the backup: :class:`~airflow.providers.google.cloud.operators.dataproc_metastore.DataprocMetastoreCreateBackupOperator` -.. exampleinclude:: /../../tests/system/providers/google/cloud/dataproc_metastore/example_dataproc_metastore_backup.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/dataproc_metastore/example_dataproc_metastore_backup.py :language: python :dedent: 4 :start-after: [START how_to_cloud_dataproc_metastore_create_backup_operator] @@ -176,7 +176,7 @@ To delete a backup you can use: :class:`~airflow.providers.google.cloud.operators.dataproc_metastore.DataprocMetastoreDeleteBackupOperator` -.. exampleinclude:: /../../tests/system/providers/google/cloud/dataproc_metastore/example_dataproc_metastore_backup.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/dataproc_metastore/example_dataproc_metastore_backup.py :language: python :dedent: 4 :start-after: [START how_to_cloud_dataproc_metastore_delete_backup_operator] @@ -189,7 +189,7 @@ To list backups you can use: :class:`~airflow.providers.google.cloud.operators.dataproc_metastore.DataprocMetastoreListBackupsOperator` -.. exampleinclude:: /../../tests/system/providers/google/cloud/dataproc_metastore/example_dataproc_metastore_backup.py +.. 
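A backup followed by a listing of all backups for the same service could look like the sketch below; the empty backup body is an assumption, and all identifiers are hypothetical:

.. code-block:: python

    from airflow.providers.google.cloud.operators.dataproc_metastore import (
        DataprocMetastoreCreateBackupOperator,
        DataprocMetastoreListBackupsOperator,
    )

    create_backup = DataprocMetastoreCreateBackupOperator(
        task_id="create_backup",
        project_id="my-project",
        region="europe-west1",
        service_id="example-service",
        backup_id="example-backup",
        backup={},  # assumed: an empty body suffices for a plain backup
    )

    list_backups = DataprocMetastoreListBackupsOperator(
        task_id="list_backups",
        project_id="my-project",
        region="europe-west1",
        service_id="example-service",
    )

    create_backup >> list_backups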
exampleinclude:: /../../providers/tests/system/google/cloud/dataproc_metastore/example_dataproc_metastore_backup.py :language: python :dedent: 4 :start-after: [START how_to_cloud_dataproc_metastore_list_backups_operator] @@ -201,7 +201,7 @@ Check Hive partitions existence To check that Hive partitions have been created in the Metastore for a given table you can use: :class:`~airflow.providers.google.cloud.sensors.dataproc_metastore.MetastoreHivePartitionSensor` -.. exampleinclude:: /../../tests/system/providers/google/cloud/dataproc_metastore/example_dataproc_metastore_hive_partition_sensor.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/dataproc_metastore/example_dataproc_metastore_hive_partition_sensor.py :language: python :dedent: 4 :start-after: [START how_to_cloud_dataproc_metastore_hive_partition_sensor] diff --git a/docs/apache-airflow-providers-google/operators/cloud/datastore.rst b/docs/apache-airflow-providers-google/operators/cloud/datastore.rst index 8218e526a192..05b441543fbc 100644 --- a/docs/apache-airflow-providers-google/operators/cloud/datastore.rst +++ b/docs/apache-airflow-providers-google/operators/cloud/datastore.rst @@ -38,7 +38,7 @@ Export Entities To export entities from Google Cloud Datastore to Cloud Storage use :class:`~airflow.providers.google.cloud.operators.datastore.CloudDatastoreExportEntitiesOperator` -.. exampleinclude:: /../../tests/system/providers/google/cloud/datastore/example_datastore_commit.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/datastore/example_datastore_commit.py :language: python :dedent: 4 :start-after: [START how_to_export_task] @@ -52,7 +52,7 @@ Import Entities To import entities from Cloud Storage to Google Cloud Datastore use :class:`~airflow.providers.google.cloud.operators.datastore.CloudDatastoreImportEntitiesOperator` -.. exampleinclude:: /../../tests/system/providers/google/cloud/datastore/example_datastore_commit.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/datastore/example_datastore_commit.py :language: python :dedent: 4 :start-after: [START how_to_import_task] @@ -66,7 +66,7 @@ Allocate Ids To allocate IDs for incomplete keys use :class:`~airflow.providers.google.cloud.operators.datastore.CloudDatastoreAllocateIdsOperator` -.. exampleinclude:: /../../tests/system/providers/google/cloud/datastore/example_datastore_commit.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/datastore/example_datastore_commit.py :language: python :dedent: 4 :start-after: [START how_to_allocate_ids] @@ -74,7 +74,7 @@ To allocate IDs for incomplete keys use An example of a partial keys required by the operator: -.. exampleinclude:: /../../tests/system/providers/google/cloud/datastore/example_datastore_commit.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/datastore/example_datastore_commit.py :language: python :dedent: 0 :start-after: [START how_to_keys_def] @@ -88,7 +88,7 @@ Begin transaction To begin a new transaction use :class:`~airflow.providers.google.cloud.operators.datastore.CloudDatastoreBeginTransactionOperator` -.. exampleinclude:: /../../tests/system/providers/google/cloud/datastore/example_datastore_commit.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/datastore/example_datastore_commit.py :language: python :dedent: 4 :start-after: [START how_to_begin_transaction] @@ -96,7 +96,7 @@ To begin a new transaction use An example of a transaction options required by the operator: -.. 
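An export-then-import round trip through Cloud Storage can be sketched as below; the bucket and the metadata file path are placeholders (in practice the file path comes from the export task's output):

.. code-block:: python

    from airflow.providers.google.cloud.operators.datastore import (
        CloudDatastoreExportEntitiesOperator,
        CloudDatastoreImportEntitiesOperator,
    )

    export_entities = CloudDatastoreExportEntitiesOperator(
        task_id="export_entities",
        project_id="my-project",
        bucket="example-bucket",  # hypothetical
        overwrite_existing=True,
    )

    import_entities = CloudDatastoreImportEntitiesOperator(
        task_id="import_entities",
        project_id="my-project",
        bucket="example-bucket",
        file="export/export.overall_export_metadata",  # assumed path produced by the export
    )

    export_entities >> import_entities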
exampleinclude:: /../../tests/system/providers/google/cloud/datastore/example_datastore_commit.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/datastore/example_datastore_commit.py :language: python :dedent: 0 :start-after: [START how_to_transaction_def] @@ -110,7 +110,7 @@ Commit transaction To commit a transaction, optionally creating, deleting or modifying some entities use :class:`~airflow.providers.google.cloud.operators.datastore.CloudDatastoreCommitOperator` -.. exampleinclude:: /../../tests/system/providers/google/cloud/datastore/example_datastore_commit.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/datastore/example_datastore_commit.py :language: python :dedent: 4 :start-after: [START how_to_commit_task] @@ -118,7 +118,7 @@ use :class:`~airflow.providers.google.cloud.operators.datastore.CloudDatastoreCo An example of a commit information required by the operator: -.. exampleinclude:: /../../tests/system/providers/google/cloud/datastore/example_datastore_commit.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/datastore/example_datastore_commit.py :language: python :dedent: 0 :start-after: [START how_to_commit_def] @@ -132,7 +132,7 @@ Run query To run a query for entities use :class:`~airflow.providers.google.cloud.operators.datastore.CloudDatastoreRunQueryOperator` -.. exampleinclude:: /../../tests/system/providers/google/cloud/datastore/example_datastore_query.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/datastore/example_datastore_query.py :language: python :dedent: 4 :start-after: [START how_to_run_query] @@ -140,7 +140,7 @@ To run a query for entities use An example of a query required by the operator: -.. exampleinclude:: /../../tests/system/providers/google/cloud/datastore/example_datastore_query.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/datastore/example_datastore_query.py :language: python :dedent: 0 :start-after: [START how_to_query_def] @@ -154,7 +154,7 @@ Roll back transaction To roll back a transaction use :class:`~airflow.providers.google.cloud.operators.datastore.CloudDatastoreRollbackOperator` -.. exampleinclude:: /../../tests/system/providers/google/cloud/datastore/example_datastore_rollback.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/datastore/example_datastore_rollback.py :language: python :dedent: 4 :start-after: [START how_to_rollback_transaction] @@ -168,7 +168,7 @@ Get operation state To get the current state of a long-running operation use :class:`~airflow.providers.google.cloud.operators.datastore.CloudDatastoreGetOperationOperator` -.. exampleinclude:: /../../tests/system/providers/google/cloud/datastore/example_datastore_commit.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/datastore/example_datastore_commit.py :language: python :dedent: 4 :start-after: [START get_operation_state] @@ -182,7 +182,7 @@ Delete operation To delete an operation use :class:`~airflow.providers.google.cloud.operators.datastore.CloudDatastoreDeleteOperationOperator` -.. exampleinclude:: /../../tests/system/providers/google/cloud/datastore/example_datastore_commit.py +.. 
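Running a query boils down to handing the operator a Datastore ``runQuery`` request body; a minimal sketch with an assumed body shape and a hypothetical kind:

.. code-block:: python

    from airflow.providers.google.cloud.operators.datastore import (
        CloudDatastoreRunQueryOperator,
    )

    run_query = CloudDatastoreRunQueryOperator(
        task_id="run_query",
        body={  # assumed minimal runQuery request body
            "partitionId": {"projectId": "my-project"},
            "query": {"kind": [{"name": "ExampleKind"}]},
        },
    )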
exampleinclude:: /../../providers/tests/system/google/cloud/datastore/example_datastore_commit.py :language: python :dedent: 4 :start-after: [START delete_operation] diff --git a/docs/apache-airflow-providers-google/operators/cloud/functions.rst b/docs/apache-airflow-providers-google/operators/cloud/functions.rst index d8ac6bbdd6d0..3124e8e356f7 100644 --- a/docs/apache-airflow-providers-google/operators/cloud/functions.rst +++ b/docs/apache-airflow-providers-google/operators/cloud/functions.rst @@ -38,7 +38,7 @@ For parameter definition, take a look at Using the operator """""""""""""""""" -.. exampleinclude:: /../../tests/system/providers/google/cloud/cloud_functions/example_functions.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/cloud_functions/example_functions.py :language: python :dedent: 4 :start-after: [START howto_operator_gcf_delete] @@ -47,7 +47,7 @@ Using the operator Templating """""""""" -.. literalinclude:: /../../airflow/providers/google/cloud/operators/functions.py +.. literalinclude:: /../../providers/src/airflow/providers/google/cloud/operators/functions.py :language: python :dedent: 4 :start-after: [START gcf_function_delete_template_fields] @@ -77,7 +77,7 @@ Arguments When a DAG is created, the default_args dictionary can be used to pass arguments common with other tasks: -.. exampleinclude:: /../../tests/system/providers/google/cloud/cloud_functions/example_functions.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/cloud_functions/example_functions.py :language: python :start-after: [START howto_operator_gcf_default_args] :end-before: [END howto_operator_gcf_default_args] @@ -101,19 +101,19 @@ Using the operator Depending on the combination of parameters, the Function's source code can be obtained from different sources: -.. exampleinclude:: /../../tests/system/providers/google/cloud/cloud_functions/example_functions.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/cloud_functions/example_functions.py :language: python :start-after: [START howto_operator_gcf_deploy_body] :end-before: [END howto_operator_gcf_deploy_body] -.. exampleinclude:: /../../tests/system/providers/google/cloud/cloud_functions/example_functions.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/cloud_functions/example_functions.py :language: python :start-after: [START howto_operator_gcf_deploy_variants] :end-before: [END howto_operator_gcf_deploy_variants] The code to create the operator: -.. exampleinclude:: /../../tests/system/providers/google/cloud/cloud_functions/example_functions.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/cloud_functions/example_functions.py :language: python :dedent: 4 :start-after: [START howto_operator_gcf_deploy] @@ -122,7 +122,7 @@ The code to create the operator: You can also create the operator without project id - project id will be retrieved from the Google Cloud connection used: -.. exampleinclude:: /../../tests/system/providers/google/cloud/cloud_functions/example_functions.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/cloud_functions/example_functions.py :language: python :dedent: 4 :start-after: [START howto_operator_gcf_deploy_no_project_id] @@ -131,7 +131,7 @@ from the Google Cloud connection used: Templating """""""""" -.. literalinclude:: /../../airflow/providers/google/cloud/operators/functions.py +.. 
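Deploying and later deleting an HTTP-triggered function might be sketched like this; the body follows the Cloud Functions v1 resource shape, and the function name, source archive, and location are all placeholders:

.. code-block:: python

    from airflow.providers.google.cloud.operators.functions import (
        CloudFunctionDeleteFunctionOperator,
        CloudFunctionDeployFunctionOperator,
    )

    FUNCTION_NAME = "projects/my-project/locations/europe-west1/functions/hello"  # hypothetical

    deploy_function = CloudFunctionDeployFunctionOperator(
        task_id="deploy_function",
        location="europe-west1",
        body={  # assumed minimal HTTP-triggered function body
            "name": FUNCTION_NAME,
            "entryPoint": "hello",
            "runtime": "python311",
            "httpsTrigger": {},
            "sourceArchiveUrl": "gs://my-bucket/function.zip",
        },
    )

    delete_function = CloudFunctionDeleteFunctionOperator(
        task_id="delete_function",
        name=FUNCTION_NAME,
    )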
literalinclude:: /../../providers/src/airflow/providers/google/cloud/operators/functions.py :language: python :dedent: 4 :start-after: [START gcf_function_deploy_template_fields] diff --git a/docs/apache-airflow-providers-google/operators/cloud/gcs.rst b/docs/apache-airflow-providers-google/operators/cloud/gcs.rst index c4be59533a0a..82d38fda87b8 100644 --- a/docs/apache-airflow-providers-google/operators/cloud/gcs.rst +++ b/docs/apache-airflow-providers-google/operators/cloud/gcs.rst @@ -46,7 +46,7 @@ The time span is defined by the time span's start and end timestamps. If a DAG does not have a *next* DAG instance scheduled, the time span end infinite, meaning the operator processes all files older than ``data_interval_start``. -.. exampleinclude:: /../../tests/system/providers/google/cloud/gcs/example_gcs_transform_timespan.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/gcs/example_gcs_transform_timespan.py :language: python :dedent: 4 :start-after: [START howto_operator_gcs_timespan_file_transform_operator_Task] @@ -66,7 +66,7 @@ For parameter definition, take a look at Using the operator """""""""""""""""" -.. exampleinclude:: /../../tests/system/providers/google/cloud/gcs/example_gcs_acl.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/gcs/example_gcs_acl.py :language: python :dedent: 4 :start-after: [START howto_operator_gcs_bucket_create_acl_entry_task] @@ -75,7 +75,7 @@ Using the operator Templating """""""""" -.. literalinclude:: /../../airflow/providers/google/cloud/operators/gcs.py +.. literalinclude:: /../../providers/src/airflow/providers/google/cloud/operators/gcs.py :language: python :dedent: 4 :start-after: [START gcs_bucket_create_acl_template_fields] @@ -100,7 +100,7 @@ For parameter definition, take a look at Using the operator """""""""""""""""" -.. exampleinclude:: /../../tests/system/providers/google/cloud/gcs/example_gcs_acl.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/gcs/example_gcs_acl.py :language: python :dedent: 4 :start-after: [START howto_operator_gcs_object_create_acl_entry_task] @@ -109,7 +109,7 @@ Using the operator Templating """""""""" -.. literalinclude:: /../../airflow/providers/google/cloud/operators/gcs.py +.. literalinclude:: /../../providers/src/airflow/providers/google/cloud/operators/gcs.py :language: python :dedent: 4 :start-after: [START gcs_object_create_acl_template_fields] @@ -131,7 +131,7 @@ Deleting Bucket allows you to remove bucket object from the Google Cloud Storage It is performed through the :class:`~airflow.providers.google.cloud.operators.gcs.GCSDeleteBucketOperator` operator. -.. exampleinclude:: /../../tests/system/providers/google/cloud/gcs/example_gcs_upload_download.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/gcs/example_gcs_upload_download.py :language: python :dedent: 4 :start-after: [START howto_operator_gcs_delete_bucket] @@ -160,7 +160,7 @@ GCSObjectExistenceSensor Use the :class:`~airflow.providers.google.cloud.sensors.gcs.GCSObjectExistenceSensor` to wait (poll) for the existence of a file in Google Cloud Storage. -.. exampleinclude:: /../../tests/system/providers/google/cloud/gcs/example_gcs_sensor.py +.. 
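Bucket creation and deletion bracket most of the GCS examples in this section; a minimal sketch with hypothetical names:

.. code-block:: python

    from airflow.providers.google.cloud.operators.gcs import (
        GCSCreateBucketOperator,
        GCSDeleteBucketOperator,
    )

    create_bucket = GCSCreateBucketOperator(
        task_id="create_bucket",
        bucket_name="example-bucket",  # hypothetical
        project_id="my-project",  # hypothetical
    )

    delete_bucket = GCSDeleteBucketOperator(
        task_id="delete_bucket",
        bucket_name="example-bucket",
    )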
exampleinclude:: /../../providers/tests/system/google/cloud/gcs/example_gcs_sensor.py :language: python :dedent: 4 :start-after: [START howto_sensor_object_exists_task] @@ -168,7 +168,7 @@ Use the :class:`~airflow.providers.google.cloud.sensors.gcs.GCSObjectExistenceSe Also you can use deferrable mode in this operator if you would like to free up the worker slots while the sensor is running. -.. exampleinclude:: /../../tests/system/providers/google/cloud/gcs/example_gcs_sensor.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/gcs/example_gcs_sensor.py :language: python :dedent: 4 :start-after: [START howto_sensor_object_exists_task_defered] @@ -181,7 +181,7 @@ GCSObjectExistenceAsyncSensor :class:`~airflow.providers.google.cloud.sensors.gcs.GCSObjectExistenceAsyncSensor` is deprecated and will be removed in a future release. Please use :class:`~airflow.providers.google.cloud.sensors.gcs.GCSObjectExistenceSensor` and use the deferrable mode in that operator. -.. exampleinclude:: /../../tests/system/providers/google/cloud/gcs/example_gcs_sensor.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/gcs/example_gcs_sensor.py :language: python :dedent: 4 :start-after: [START howto_sensor_object_exists_task_async] @@ -195,7 +195,7 @@ GCSObjectsWithPrefixExistenceSensor Use the :class:`~airflow.providers.google.cloud.sensors.gcs.GCSObjectsWithPrefixExistenceSensor` to wait (poll) for the existence of a file with a specified prefix in Google Cloud Storage. -.. exampleinclude:: /../../tests/system/providers/google/cloud/gcs/example_gcs_sensor.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/gcs/example_gcs_sensor.py :language: python :dedent: 4 :start-after: [START howto_sensor_object_with_prefix_exists_task] @@ -205,7 +205,7 @@ You can set the ``deferrable`` param to True if you want this sensor to run asyn efficient utilization of resources in your Airflow deployment. However the triggerer component needs to be enabled for this functionality to work. -.. exampleinclude:: /../../tests/system/providers/google/cloud/gcs/example_gcs_sensor.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/gcs/example_gcs_sensor.py :language: python :dedent: 4 :start-after: [START howto_sensor_object_with_prefix_exists_task_async] @@ -220,7 +220,7 @@ GCSUploadSessionCompleteSensor Use the :class:`~airflow.providers.google.cloud.sensors.gcs.GCSUploadSessionCompleteSensor` to check for a change in the number of files with a specified prefix in Google Cloud Storage. -.. exampleinclude:: /../../tests/system/providers/google/cloud/gcs/example_gcs_sensor.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/gcs/example_gcs_sensor.py :language: python :dedent: 4 :start-after: [START howto_sensor_gcs_upload_session_complete_task] @@ -229,7 +229,7 @@ Use the :class:`~airflow.providers.google.cloud.sensors.gcs.GCSUploadSessionComp You can set the parameter ``deferrable`` to True if you want the worker slots to be freed up while sensor is running. -.. exampleinclude:: /../../tests/system/providers/google/cloud/gcs/example_gcs_sensor.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/gcs/example_gcs_sensor.py :language: python :dedent: 4 :start-after: [START howto_sensor_gcs_upload_session_async_task] @@ -242,7 +242,7 @@ GCSObjectUpdateSensor Use the :class:`~airflow.providers.google.cloud.sensors.gcs.GCSObjectUpdateSensor` to check if an object is updated in Google Cloud Storage. -.. 
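The two existence sensors share the same deferrable pattern; a sketch with placeholder bucket, object, and prefix values:

.. code-block:: python

    from airflow.providers.google.cloud.sensors.gcs import (
        GCSObjectExistenceSensor,
        GCSObjectsWithPrefixExistenceSensor,
    )

    wait_for_file = GCSObjectExistenceSensor(
        task_id="wait_for_file",
        bucket="example-bucket",  # hypothetical
        object="data/input.csv",  # hypothetical
        deferrable=True,  # frees the worker slot; needs a running triggerer
    )

    wait_for_prefix = GCSObjectsWithPrefixExistenceSensor(
        task_id="wait_for_prefix",
        bucket="example-bucket",
        prefix="data/",
        deferrable=True,
    )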
exampleinclude:: /../../tests/system/providers/google/cloud/gcs/example_gcs_sensor.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/gcs/example_gcs_sensor.py :language: python :dedent: 4 :start-after: [START howto_sensor_object_update_exists_task] @@ -252,7 +252,7 @@ You can set the ``deferrable`` param to True if you want this sensor to run asyn utilization of resources in your Airflow deployment. However the triggerer component needs to be enabled for this functionality to work. -.. exampleinclude:: /../../tests/system/providers/google/cloud/gcs/example_gcs_sensor.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/gcs/example_gcs_sensor.py :language: python :dedent: 4 :start-after: [START howto_sensor_object_update_exists_task_async] diff --git a/docs/apache-airflow-providers-google/operators/cloud/index.rst b/docs/apache-airflow-providers-google/operators/cloud/index.rst index f974c0eb81a4..3daed7a11e9b 100644 --- a/docs/apache-airflow-providers-google/operators/cloud/index.rst +++ b/docs/apache-airflow-providers-google/operators/cloud/index.rst @@ -29,4 +29,4 @@ Google Cloud Operators .. note:: You can learn how to use Google Cloud integrations by analyzing the - `source code `_ of the particular example DAGs. + `source code `_ of the particular example DAGs. diff --git a/docs/apache-airflow-providers-google/operators/cloud/kubernetes_engine.rst b/docs/apache-airflow-providers-google/operators/cloud/kubernetes_engine.rst index d71ebf87e929..9eaf26882497 100644 --- a/docs/apache-airflow-providers-google/operators/cloud/kubernetes_engine.rst +++ b/docs/apache-airflow-providers-google/operators/cloud/kubernetes_engine.rst @@ -43,7 +43,7 @@ Create GKE cluster Here is an example of a cluster definition: -.. exampleinclude:: /../../tests/system/providers/google/cloud/kubernetes_engine/example_kubernetes_engine.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/kubernetes_engine/example_kubernetes_engine.py :language: python :start-after: [START howto_operator_gcp_gke_create_cluster_definition] :end-before: [END howto_operator_gcp_gke_create_cluster_definition] @@ -53,7 +53,7 @@ A dict object like this, or a definition, is required when creating a cluster with :class:`~airflow.providers.google.cloud.operators.kubernetes_engine.GKECreateClusterOperator`. -.. exampleinclude:: /../../tests/system/providers/google/cloud/kubernetes_engine/example_kubernetes_engine.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/kubernetes_engine/example_kubernetes_engine.py :language: python :dedent: 4 :start-after: [START howto_operator_gke_create_cluster] @@ -64,7 +64,7 @@ possibility to free up the worker when it knows it has to wait, and hand off the As a result, while it is suspended (deferred), it is not taking up a worker slot and your cluster will have a lot less resources wasted on idle Operators or Sensors: -.. exampleinclude:: /../../tests/system/providers/google/cloud/kubernetes_engine/example_kubernetes_engine_async.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/kubernetes_engine/example_kubernetes_engine_async.py :language: python :dedent: 4 :start-after: [START howto_operator_gke_create_cluster_async] @@ -84,7 +84,7 @@ To install and use Kueue on your cluster with the help of :class:`~airflow.providers.google.cloud.operators.kubernetes_engine.GKEStartKueueInsideClusterOperator` as shown in this example: -.. 
exampleinclude:: /../../tests/system/providers/google/cloud/kubernetes_engine/example_kubernetes_engine_kueue.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/kubernetes_engine/example_kubernetes_engine_kueue.py :language: python :start-after: [START howto_operator_gke_install_kueue] :end-before: [END howto_operator_gke_install_kueue] @@ -99,7 +99,7 @@ To delete a cluster, use :class:`~airflow.providers.google.cloud.operators.kubernetes_engine.GKEDeleteClusterOperator`. This would also delete all the nodes allocated to the cluster. -.. exampleinclude:: /../../tests/system/providers/google/cloud/kubernetes_engine/example_kubernetes_engine.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/kubernetes_engine/example_kubernetes_engine.py :language: python :dedent: 4 :start-after: [START howto_operator_gke_delete_cluster] @@ -110,7 +110,7 @@ possibility to free up the worker when it knows it has to wait, and hand off the As a result, while it is suspended (deferred), it is not taking up a worker slot and your cluster will have a lot less resources wasted on idle Operators or Sensors: -.. exampleinclude:: /../../tests/system/providers/google/cloud/kubernetes_engine/example_kubernetes_engine_async.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/kubernetes_engine/example_kubernetes_engine_async.py :language: python :dedent: 4 :start-after: [START howto_operator_gke_delete_cluster_async] @@ -167,7 +167,7 @@ is the path ``/airflow/xcom``. To provide values to the XCom, ensure your Pod wr ``return.json`` in the sidecar. The contents of this can then be used downstream in your DAG. Here is an example of it being used: -.. exampleinclude:: /../../tests/system/providers/google/cloud/kubernetes_engine/example_kubernetes_engine.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/kubernetes_engine/example_kubernetes_engine.py :language: python :dedent: 4 :start-after: [START howto_operator_gke_start_pod_xcom] @@ -175,7 +175,7 @@ Here is an example of it being used: And then use it in other operators: -.. exampleinclude:: /../../tests/system/providers/google/cloud/kubernetes_engine/example_kubernetes_engine.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/kubernetes_engine/example_kubernetes_engine.py :language: python :dedent: 4 :start-after: [START howto_operator_gke_xcom_result] @@ -186,7 +186,7 @@ possibility to free up the worker when it knows it has to wait, and hand off the As a result, while it is suspended (deferred), it is not taking up a worker slot and your cluster will have a lot less resources wasted on idle Operators or Sensors: -.. exampleinclude:: /../../tests/system/providers/google/cloud/kubernetes_engine/example_kubernetes_engine_async.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/kubernetes_engine/example_kubernetes_engine_async.py :language: python :dedent: 4 :start-after: [START howto_operator_gke_start_pod_xcom_async] @@ -207,7 +207,7 @@ There are two operators available in order to run a job on a GKE cluster: There is no need to manage the ``kube_config`` file, as it will be generated automatically. All Kubernetes parameters (except ``config_file``) are also valid for the ``GKEStartJobOperator``. -.. exampleinclude:: /../../tests/system/providers/google/cloud/kubernetes_engine/example_kubernetes_engine_job.py +.. 
exampleinclude:: /../../providers/tests/system/google/cloud/kubernetes_engine/example_kubernetes_engine_job.py :language: python :dedent: 4 :start-after: [START howto_operator_gke_start_job] @@ -216,7 +216,7 @@ ``GKEStartJobOperator`` also supports deferrable mode. Note that it makes sense only if the ``wait_until_job_complete`` parameter is set to ``True``. -.. exampleinclude:: /../../tests/system/providers/google/cloud/kubernetes_engine/example_kubernetes_engine_job.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/kubernetes_engine/example_kubernetes_engine_job.py :language: python :dedent: 4 :start-after: [START howto_operator_gke_start_job_def] @@ -224,7 +224,7 @@ parameter is set ``True``. To run a Job on a GKE cluster with Kueue enabled, use ``GKEStartKueueJobOperator``. -.. exampleinclude:: /../../tests/system/providers/google/cloud/kubernetes_engine/example_kubernetes_engine_kueue.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/kubernetes_engine/example_kubernetes_engine_kueue.py :language: python :dedent: 4 :start-after: [START howto_operator_kueue_start_job] @@ -245,7 +245,7 @@ There are two operators available in order to delete a job on a GKE cluster: There is no need to manage the ``kube_config`` file, as it will be generated automatically. All Kubernetes parameters (except ``config_file``) are also valid for the ``GKEDeleteJobOperator``. -.. exampleinclude:: /../../tests/system/providers/google/cloud/kubernetes_engine/example_kubernetes_engine_job.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/kubernetes_engine/example_kubernetes_engine_job.py :language: python :dedent: 4 :start-after: [START howto_operator_gke_delete_job] @@ -260,7 +260,7 @@ Retrieve information about Job by given name You can use :class:`~airflow.providers.google.cloud.operators.kubernetes_engine.GKEDescribeJobOperator` to retrieve a detailed description of an existing Job by providing its name and namespace. -.. exampleinclude:: /../../tests/system/providers/google/cloud/kubernetes_engine/example_kubernetes_engine_job.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/kubernetes_engine/example_kubernetes_engine_job.py :language: python :dedent: 4 :start-after: [START howto_operator_gke_describe_job] @@ -276,7 +276,7 @@ You can use :class:`~airflow.providers.google.cloud.operators.kubernetes_engine. list of existing Jobs. If the ``namespace`` parameter is provided, the output will include Jobs from the given namespace. If the ``namespace`` parameter is not specified, information across all namespaces will be output. -.. exampleinclude:: /../../tests/system/providers/google/cloud/kubernetes_engine/example_kubernetes_engine_job.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/kubernetes_engine/example_kubernetes_engine_job.py :language: python :dedent: 4 :start-after: [START howto_operator_gke_list_jobs] @@ -291,7 +291,7 @@ Create a resource in a GKE cluster You can use :class:`~airflow.providers.google.cloud.operators.kubernetes_engine.GKECreateCustomResourceOperator` to create a resource in the specified Google Kubernetes Engine cluster. -.. 
exampleinclude:: /../../providers/tests/system/google/cloud/kubernetes_engine/example_kubernetes_engine_resource.py :language: python :dedent: 4 :start-after: [START howto_operator_gke_create_resource] @@ -306,7 +306,7 @@ Delete a resource in a GKE cluster You can use :class:`~airflow.providers.google.cloud.operators.kubernetes_engine.GKEDeleteCustomResourceOperator` to delete a resource in the specified Google Kubernetes Engine cluster. -.. exampleinclude:: /../../tests/system/providers/google/cloud/kubernetes_engine/example_kubernetes_engine_resource.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/kubernetes_engine/example_kubernetes_engine_resource.py :language: python :dedent: 4 :start-after: [START howto_operator_gke_delete_resource] @@ -321,7 +321,7 @@ Suspend a Job on a GKE cluster You can use :class:`~airflow.providers.google.cloud.operators.kubernetes_engine.GKESuspendJobOperator` to suspend a Job in the specified Google Kubernetes Engine cluster. -.. exampleinclude:: /../../tests/system/providers/google/cloud/kubernetes_engine/example_kubernetes_engine_job.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/kubernetes_engine/example_kubernetes_engine_job.py :language: python :dedent: 4 :start-after: [START howto_operator_gke_suspend_job] @@ -336,7 +336,7 @@ Resume a Job on a GKE cluster You can use :class:`~airflow.providers.google.cloud.operators.kubernetes_engine.GKEResumeJobOperator` to resume a Job in the specified Google Kubernetes Engine cluster. -.. exampleinclude:: /../../tests/system/providers/google/cloud/kubernetes_engine/example_kubernetes_engine_job.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/kubernetes_engine/example_kubernetes_engine_job.py :language: python :dedent: 4 :start-after: [START howto_operator_gke_resume_job] diff --git a/docs/apache-airflow-providers-google/operators/cloud/life_sciences.rst b/docs/apache-airflow-providers-google/operators/cloud/life_sciences.rst index 49f9b6c4ea9c..6e7676e91050 100644 --- a/docs/apache-airflow-providers-google/operators/cloud/life_sciences.rst +++ b/docs/apache-airflow-providers-google/operators/cloud/life_sciences.rst @@ -34,7 +34,7 @@ Pipeline Configuration In order to run the pipeline, it is necessary to configure the request body. Here is an example of the pipeline configuration with a single action. -.. exampleinclude:: /../../tests/system/providers/google/cloud/life_sciences/example_life_sciences.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/life_sciences/example_life_sciences.py :language: python :dedent: 0 :start-after: [START howto_configure_simple_action_pipeline] @@ -42,7 +42,7 @@ Here is an example of the pipeline configuration with a single action. The pipeline can also be configured with multiple actions. -.. exampleinclude:: /../../tests/system/providers/google/cloud/life_sciences/example_life_sciences.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/life_sciences/example_life_sciences.py :language: python :dedent: 0 :start-after: [START howto_configure_multiple_action_pipeline] @@ -59,7 +59,7 @@ Use the :class:`~airflow.providers.google.cloud.operators.life_sciences.LifeSciencesRunPipelineOperator` to execute pipelines.
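A hypothetical request body and operator call could look like this (the action, project and region are invented for illustration):

.. code-block:: python

    from airflow.providers.google.cloud.operators.life_sciences import (
        LifeSciencesRunPipelineOperator,
    )

    # A single-action pipeline: run one container that echoes a message.
    SIMPLE_PIPELINE = {
        "pipeline": {
            "actions": [{"imageUri": "bash", "commands": ["-c", "echo Hello"]}],
            "resources": {"regions": ["us-central1"]},
        },
    }

    run_pipeline = LifeSciencesRunPipelineOperator(
        task_id="run_pipeline",
        body=SIMPLE_PIPELINE,
        project_id="my-project",
        location="us-central1",
    )

The system test referenced below contains the complete pipeline configuration:

-.. 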
exampleinclude:: /../../providers/tests/system/google/cloud/life_sciences/example_life_sciences.py :language: python :dedent: 0 :start-after: [START howto_run_pipeline] diff --git a/docs/apache-airflow-providers-google/operators/cloud/looker.rst b/docs/apache-airflow-providers-google/operators/cloud/looker.rst index f89d23c1c4c2..c8d3c2ad069a 100644 --- a/docs/apache-airflow-providers-google/operators/cloud/looker.rst +++ b/docs/apache-airflow-providers-google/operators/cloud/looker.rst @@ -49,7 +49,7 @@ To submit a PDT materialization job to Looker you need to provide a model and vi The job configuration can be submitted in synchronous (blocking) mode by using: :class:`~airflow.providers.google.cloud.operators.looker.LookerStartPdtBuildOperator`. -.. exampleinclude:: /../../airflow/providers/google/cloud/example_dags/example_looker.py +.. exampleinclude:: /../../providers/src/airflow/providers/google/cloud/example_dags/example_looker.py :language: python :dedent: 4 :start-after: [START how_to_cloud_looker_start_pdt_build_operator] @@ -60,7 +60,7 @@ Alternatively, the job configuration can be submitted in asynchronous mode by us :class:`~airflow.providers.google.cloud.operators.looker.LookerStartPdtBuildOperator` and :class:`~airflow.providers.google.cloud.sensors.looker.LookerCheckPdtBuildSensor`. -.. exampleinclude:: /../../airflow/providers/google/cloud/example_dags/example_looker.py +.. exampleinclude:: /../../providers/src/airflow/providers/google/cloud/example_dags/example_looker.py :language: python :dedent: 4 :start-after: [START cloud_looker_async_start_pdt_sensor] diff --git a/docs/apache-airflow-providers-google/operators/cloud/mlengine.rst b/docs/apache-airflow-providers-google/operators/cloud/mlengine.rst index f64705e1c267..0848e1a28741 100644 --- a/docs/apache-airflow-providers-google/operators/cloud/mlengine.rst +++ b/docs/apache-airflow-providers-google/operators/cloud/mlengine.rst @@ -49,7 +49,7 @@ This operator is deprecated. Please, use :class:`~airflow.providers.google.cloud.operators.vertex_ai.custom_job.CreateCustomPythonPackageTrainingJobOperator` instead. -.. exampleinclude:: /../../tests/system/providers/google/cloud/ml_engine/example_mlengine.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/ml_engine/example_mlengine.py :language: python :dedent: 4 :start-after: [START howto_operator_create_custom_python_training_job_v1] @@ -69,7 +69,7 @@ of any types. For example, you can use :class:`~airflow.providers.google.cloud.operators.vertex_ai.custom_job.CreateCustomPythonPackageTrainingJobOperator`. The result of running this operator will be a ready-to-use model saved in the Model Registry. -.. exampleinclude:: /../../tests/system/providers/google/cloud/ml_engine/example_mlengine.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/ml_engine/example_mlengine.py :language: python :dedent: 4 :start-after: [START howto_operator_create_custom_python_training_job_v1] @@ -87,7 +87,7 @@ This operator is deprecated. Please, use :class:`~airflow.providers.google.cloud.operators.vertex_ai.model_service.GetModelOperator` instead. -.. exampleinclude:: /../../tests/system/providers/google/cloud/ml_engine/example_mlengine.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/ml_engine/example_mlengine.py :language: python :dedent: 4 :start-after: [START howto_operator_gcp_mlengine_get_model] @@ -98,7 +98,7 @@ fields to dynamically determine their values. The results are saved to :ref:`XCom allowing them to be used by other operators.
In this case, the :class:`~airflow.providers.standard.operators.bash.BashOperator` is used to print the model information. -.. exampleinclude:: /../../tests/system/providers/google/cloud/ml_engine/example_mlengine.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/ml_engine/example_mlengine.py :language: python :dedent: 4 :start-after: [START howto_operator_gcp_mlengine_print_model] @@ -120,7 +120,7 @@ instead. In this case, the new version of specific model could be created by spe ``parent_model`` parameter when running Training Job. This ensures that a new version of the model is trained instead of creating a new model. -.. exampleinclude:: /../../tests/system/providers/google/cloud/ml_engine/example_mlengine.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/ml_engine/example_mlengine.py :language: python :dedent: 4 :start-after: [START howto_operator_create_custom_python_training_job_v1] @@ -129,7 +129,7 @@ of creating new model. The :class:`~airflow.providers.google.cloud.operators.vertex_ai.custom_job.CreateCustomPythonPackageTrainingJobOperator` can also be used to create more versions with varying parameters. -.. exampleinclude:: /../../tests/system/providers/google/cloud/ml_engine/example_mlengine.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/ml_engine/example_mlengine.py :language: python :dedent: 4 :start-after: [START howto_operator_create_custom_python_training_job_v2] @@ -151,7 +151,7 @@ in format ``projects/{project}/locations/{location}/models/{model_id}@{version_i ``projects/{project}/locations/{location}/models/{model_id}@{version_alias}``. By default, the first model version created will be marked as default. -.. exampleinclude:: /../../tests/system/providers/google/cloud/ml_engine/example_mlengine.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/ml_engine/example_mlengine.py :language: python :dedent: 4 :start-after: [START howto_operator_gcp_mlengine_default_version] @@ -166,7 +166,7 @@ This operator is deprecated. Please, use instead. You can pass the name of the desired model in the ``model_id`` parameter. If the model ID is passed with version aliases, the operator will output all the versions available for this model. -.. exampleinclude:: /../../tests/system/providers/google/cloud/ml_engine/example_mlengine.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/ml_engine/example_mlengine.py :language: python :dedent: 4 :start-after: [START howto_operator_gcp_mlengine_list_versions] @@ -185,7 +185,7 @@ This operator is deprecated. Please, use :class:`~airflow.providers.google.cloud.operators.vertex_ai.batch_prediction_job.CreateBatchPredictionJobOperator` instead. -.. exampleinclude:: /../../tests/system/providers/google/cloud/ml_engine/example_mlengine.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/ml_engine/example_mlengine.py :language: python :dedent: 4 :start-after: [START howto_operator_start_batch_prediction] @@ -204,7 +204,7 @@ This operator is deprecated. Please, use :class:`~airflow.providers.google.cloud.operators.vertex_ai.model_service.DeleteModelVersionOperator` instead. The default version cannot be deleted from the model. -.. exampleinclude:: /../../tests/system/providers/google/cloud/ml_engine/example_mlengine.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/ml_engine/example_mlengine.py :language: python :dedent: 4 :start-after: [START howto_operator_gcp_mlengine_delete_version] @@ -218,7 +218,7 @@ This operator is deprecated. 
Please, use :class:`~airflow.providers.google.cloud.operators.vertex_ai.model_service.DeleteModelOperator` instead. -.. exampleinclude:: /../../tests/system/providers/google/cloud/ml_engine/example_mlengine.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/ml_engine/example_mlengine.py :language: python :dedent: 4 :start-after: [START howto_operator_gcp_mlengine_delete_model] diff --git a/docs/apache-airflow-providers-google/operators/cloud/natural_language.rst b/docs/apache-airflow-providers-google/operators/cloud/natural_language.rst index 7b54e812c2dd..0ba46ccf76b0 100644 --- a/docs/apache-airflow-providers-google/operators/cloud/natural_language.rst +++ b/docs/apache-airflow-providers-google/operators/cloud/natural_language.rst @@ -44,14 +44,14 @@ representing text. Here is an example of a document with text provided as a string: -.. exampleinclude:: /../../tests/system/providers/google/cloud/natural_language/example_natural_language.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/natural_language/example_natural_language.py :language: python :start-after: [START howto_operator_gcp_natural_language_document_text] :end-before: [END howto_operator_gcp_natural_language_document_text] In addition to supplying a string, a document can refer to content stored in Google Cloud Storage. -.. exampleinclude:: /../../tests/system/providers/google/cloud/natural_language/example_natural_language.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/natural_language/example_natural_language.py :language: python :start-after: [START howto_operator_gcp_natural_language_document_gcs] :end-before: [END howto_operator_gcp_natural_language_document_gcs] @@ -66,7 +66,7 @@ public figures, landmarks, etc.), and returns information about those entities. Entity analysis is performed with the :class:`~airflow.providers.google.cloud.operators.natural_language.CloudNaturalLanguageAnalyzeEntitiesOperator` operator. -.. exampleinclude:: /../../tests/system/providers/google/cloud/natural_language/example_natural_language.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/natural_language/example_natural_language.py :language: python :dedent: 4 :start-after: [START howto_operator_gcp_natural_language_analyze_entities] @@ -77,7 +77,7 @@ You can use :ref:`Jinja templating ` with parameters which allows you to dynamically determine values. The result is saved to :ref:`XCom `, which allows it to be used by other operators. -.. exampleinclude:: /../../tests/system/providers/google/cloud/natural_language/example_natural_language.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/natural_language/example_natural_language.py :language: python :dedent: 4 :start-after: [START howto_operator_gcp_natural_language_analyze_entities_result] @@ -94,7 +94,7 @@ as positive, negative, or neutral. Sentiment analysis is performed through the :class:`~airflow.providers.google.cloud.operators.natural_language.CloudNaturalLanguageAnalyzeEntitySentimentOperator` operator. -.. exampleinclude:: /../../tests/system/providers/google/cloud/natural_language/example_natural_language.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/natural_language/example_natural_language.py :language: python :dedent: 4 :start-after: [START howto_operator_gcp_natural_language_analyze_entity_sentiment] @@ -105,7 +105,7 @@ You can use :ref:`Jinja templating ` with parameters which allows you to dynamically determine values.
The result is saved to :ref:`XCom `, which allows it to be used by other operators. -.. exampleinclude:: /../../tests/system/providers/google/cloud/natural_language/example_natural_language.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/natural_language/example_natural_language.py :language: python :dedent: 4 :start-after: [START howto_operator_gcp_natural_language_analyze_entity_sentiment_result] @@ -123,7 +123,7 @@ through the :class:`~airflow.providers.google.cloud.operators.natural_language.CloudNaturalLanguageAnalyzeSentimentOperator` operator. -.. exampleinclude:: /../../tests/system/providers/google/cloud/natural_language/example_natural_language.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/natural_language/example_natural_language.py :language: python :dedent: 4 :start-after: [START howto_operator_gcp_natural_language_analyze_sentiment] @@ -134,7 +134,7 @@ You can use :ref:`Jinja templating ` with parameters which allows you to dynamically determine values. The result is saved to :ref:`XCom `, which allows it to be used by other operators. -.. exampleinclude:: /../../tests/system/providers/google/cloud/natural_language/example_natural_language.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/natural_language/example_natural_language.py :language: python :dedent: 4 :start-after: [START howto_operator_gcp_natural_language_analyze_sentiment_result] @@ -151,7 +151,7 @@ content in a document, use the :class:`~airflow.providers.google.cloud.operators.natural_language.CloudNaturalLanguageClassifyTextOperator` operator. -.. exampleinclude:: /../../tests/system/providers/google/cloud/natural_language/example_natural_language.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/natural_language/example_natural_language.py :language: python :dedent: 4 :start-after: [START howto_operator_gcp_natural_language_analyze_classify_text] @@ -162,7 +162,7 @@ You can use :ref:`Jinja templating ` with parameters which allows you to dynamically determine values. The result is saved to :ref:`XCom `, which allows it to be used by other operators. -.. exampleinclude:: /../../tests/system/providers/google/cloud/natural_language/example_natural_language.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/natural_language/example_natural_language.py :language: python :dedent: 4 :start-after: [START howto_operator_gcp_natural_language_analyze_classify_text_result] diff --git a/docs/apache-airflow-providers-google/operators/cloud/pubsub.rst b/docs/apache-airflow-providers-google/operators/cloud/pubsub.rst index 8fb497a14f01..74091ccc1eec 100644 --- a/docs/apache-airflow-providers-google/operators/cloud/pubsub.rst +++ b/docs/apache-airflow-providers-google/operators/cloud/pubsub.rst @@ -41,7 +41,7 @@ Creating a PubSub topic The PubSub topic is a named resource to which messages are sent by publishers. The :class:`~airflow.providers.google.cloud.operators.pubsub.PubSubCreateTopicOperator` operator creates a topic. -.. exampleinclude:: /../../tests/system/providers/google/cloud/pubsub/example_pubsub.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/pubsub/example_pubsub.py :language: python :start-after: [START howto_operator_gcp_pubsub_create_topic] :end-before: [END howto_operator_gcp_pubsub_create_topic] @@ -56,7 +56,7 @@ A ``Subscription`` is a named resource representing the stream of messages from to be delivered to the subscribing application. 
The :class:`~airflow.providers.google.cloud.operators.pubsub.PubSubCreateSubscriptionOperator` operator creates the subscription. -.. exampleinclude:: /../../tests/system/providers/google/cloud/pubsub/example_pubsub.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/pubsub/example_pubsub.py :language: python :start-after: [START howto_operator_gcp_pubsub_create_subscription] :end-before: [END howto_operator_gcp_pubsub_create_subscription] @@ -70,7 +70,7 @@ Publishing PubSub messages A ``Message`` is a combination of data and (optional) attributes that a publisher sends to a topic and is eventually delivered to subscribers. The :class:`~airflow.providers.google.cloud.operators.pubsub.PubSubPublishMessageOperator` operator publishes messages. -.. exampleinclude:: /../../tests/system/providers/google/cloud/pubsub/example_pubsub.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/pubsub/example_pubsub.py :language: python :start-after: [START howto_operator_gcp_pubsub_publish] :end-before: [END howto_operator_gcp_pubsub_publish] @@ -83,32 +83,32 @@ Pulling messages from a PubSub subscription The :class:`~airflow.providers.google.cloud.sensors.pubsub.PubSubPullSensor` sensor pulls messages from a PubSub subscription and passes them through XCom. -.. exampleinclude:: /../../tests/system/providers/google/cloud/pubsub/example_pubsub.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/pubsub/example_pubsub.py :language: python :start-after: [START howto_operator_gcp_pubsub_pull_message_with_sensor] :end-before: [END howto_operator_gcp_pubsub_pull_message_with_sensor] You can also use the sensor in deferrable mode for this action: -.. exampleinclude:: /../../tests/system/providers/google/cloud/pubsub/example_pubsub_deferrable.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/pubsub/example_pubsub_deferrable.py :language: python :dedent: 4 :start-after: [START howto_operator_gcp_pubsub_pull_message_with_async_sensor] :end-before: [END howto_operator_gcp_pubsub_pull_message_with_async_sensor] -.. exampleinclude:: /../../tests/system/providers/google/cloud/pubsub/example_pubsub.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/pubsub/example_pubsub.py :language: python :start-after: [START howto_operator_gcp_pubsub_pull_message_with_operator] :end-before: [END howto_operator_gcp_pubsub_pull_message_with_operator] To pull messages from XCom use the :class:`~airflow.providers.standard.operators.bash.BashOperator`. -.. exampleinclude:: /../../tests/system/providers/google/cloud/pubsub/example_pubsub.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/pubsub/example_pubsub.py :language: python :start-after: [START howto_operator_gcp_pubsub_pull_messages_result_cmd] :end-before: [END howto_operator_gcp_pubsub_pull_messages_result_cmd] -.. exampleinclude:: /../../tests/system/providers/google/cloud/pubsub/example_pubsub.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/pubsub/example_pubsub.py :language: python :start-after: [START howto_operator_gcp_pubsub_pull_messages_result] :end-before: [END howto_operator_gcp_pubsub_pull_messages_result] @@ -121,7 +121,7 @@ Deleting a PubSub subscription The :class:`~airflow.providers.google.cloud.operators.pubsub.PubSubDeleteSubscriptionOperator` operator deletes the subscription.
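Putting the pieces together, a minimal end-to-end sketch; the project, topic and subscription names are placeholders:

.. code-block:: python

    from airflow.providers.google.cloud.operators.pubsub import (
        PubSubCreateTopicOperator,
        PubSubDeleteSubscriptionOperator,
        PubSubPublishMessageOperator,
    )
    from airflow.providers.google.cloud.sensors.pubsub import PubSubPullSensor

    create_topic = PubSubCreateTopicOperator(
        task_id="create_topic", project_id="my-project", topic="my-topic"
    )
    publish = PubSubPublishMessageOperator(
        task_id="publish",
        project_id="my-project",
        topic="my-topic",
        messages=[{"data": b"hello"}],  # message data must be bytes
    )
    pull = PubSubPullSensor(
        task_id="pull",
        project_id="my-project",
        subscription="my-subscription",
        ack_messages=True,  # acknowledge what was pulled
    )
    delete_subscription = PubSubDeleteSubscriptionOperator(
        task_id="delete_subscription",
        project_id="my-project",
        subscription="my-subscription",
    )

    create_topic >> publish >> pull >> delete_subscription

The referenced snippet shows the delete step from the system test:

-.. 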
exampleinclude:: /../../providers/tests/system/google/cloud/pubsub/example_pubsub.py :language: python :start-after: [START howto_operator_gcp_pubsub_unsubscribe] :end-before: [END howto_operator_gcp_pubsub_unsubscribe] @@ -134,7 +134,7 @@ Deleting a PubSub topic The :class:`~airflow.providers.google.cloud.operators.pubsub.PubSubDeleteTopicOperator` operator deletes the topic. -.. exampleinclude:: /../../tests/system/providers/google/cloud/pubsub/example_pubsub.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/pubsub/example_pubsub.py :language: python :start-after: [START howto_operator_gcp_pubsub_delete_topic] :end-before: [END howto_operator_gcp_pubsub_delete_topic] diff --git a/docs/apache-airflow-providers-google/operators/cloud/spanner.rst b/docs/apache-airflow-providers-google/operators/cloud/spanner.rst index b505fbe187f6..fbb79bbf68eb 100644 --- a/docs/apache-airflow-providers-google/operators/cloud/spanner.rst +++ b/docs/apache-airflow-providers-google/operators/cloud/spanner.rst @@ -41,7 +41,7 @@ Using the operator You can create the operator with or without project id. If project id is missing it will be retrieved from the Google Cloud connection used. Both variants are shown: -.. exampleinclude:: /../../tests/system/providers/google/cloud/spanner/example_spanner.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/spanner/example_spanner.py :language: python :dedent: 4 :start-after: [START howto_operator_spanner_deploy] @@ -50,7 +50,7 @@ Templating """""""""" -.. literalinclude:: /../../airflow/providers/google/cloud/operators/spanner.py +.. literalinclude:: /../../providers/src/airflow/providers/google/cloud/operators/spanner.py :language: python :dedent: 4 :start-after: [START gcp_spanner_deploy_template_fields] @@ -80,7 +80,7 @@ Using the operator You can create the operator with or without project id. If project id is missing it will be retrieved from the Google Cloud connection used. Both variants are shown: -.. exampleinclude:: /../../tests/system/providers/google/cloud/spanner/example_spanner.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/spanner/example_spanner.py :language: python :dedent: 4 :start-after: [START howto_operator_spanner_database_delete] @@ -89,7 +89,7 @@ it will be retrieved from the Google Cloud connection used. Both variants are sh Templating """""""""" -.. literalinclude:: /../../airflow/providers/google/cloud/operators/spanner.py +.. literalinclude:: /../../providers/src/airflow/providers/google/cloud/operators/spanner.py :language: python :dedent: 4 :start-after: [START gcp_spanner_delete_template_fields] @@ -120,7 +120,7 @@ Using the operator You can create the operator with or without project id. If project id is missing it will be retrieved from the Google Cloud connection used. Both variants are shown: -.. exampleinclude:: /../../tests/system/providers/google/cloud/spanner/example_spanner.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/spanner/example_spanner.py :language: python :dedent: 4 :start-after: [START howto_operator_spanner_database_deploy] @@ -129,7 +129,7 @@ it will be retrieved from the Google Cloud connection used. Both variants are sh Templating """""""""" -.. 
literalinclude:: /../../providers/src/airflow/providers/google/cloud/operators/spanner.py :language: python :dedent: 4 :start-after: [START gcp_spanner_database_deploy_template_fields] @@ -164,13 +164,13 @@ Using the operator You can create the operator with or without project id. If project id is missing it will be retrieved from the Google Cloud connection used. Both variants are shown: -.. exampleinclude:: /../../tests/system/providers/google/cloud/spanner/example_spanner.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/spanner/example_spanner.py :language: python :dedent: 4 :start-after: [START howto_operator_spanner_database_update] :end-before: [END howto_operator_spanner_database_update] -.. exampleinclude:: /../../tests/system/providers/google/cloud/spanner/example_spanner.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/spanner/example_spanner.py :language: python :dedent: 4 :start-after: [START howto_operator_spanner_database_update_idempotent] @@ -179,7 +179,7 @@ it will be retrieved from the Google Cloud connection used. Both variants are sh Templating """""""""" -.. literalinclude:: /../../airflow/providers/google/cloud/operators/spanner.py +.. literalinclude:: /../../providers/src/airflow/providers/google/cloud/operators/spanner.py :language: python :dedent: 4 :start-after: [START gcp_spanner_database_update_template_fields] @@ -207,7 +207,7 @@ Using the operator You can create the operator with or without project id. If project id is missing it will be retrieved from the Google Cloud connection used. Both variants are shown: -.. exampleinclude:: /../../tests/system/providers/google/cloud/spanner/example_spanner.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/spanner/example_spanner.py :language: python :dedent: 4 :start-after: [START howto_operator_spanner_query] @@ -216,7 +216,7 @@ it will be retrieved from the Google Cloud connection used. Both variants are sh Templating """""""""" -.. literalinclude:: /../../airflow/providers/google/cloud/operators/spanner.py +.. literalinclude:: /../../providers/src/airflow/providers/google/cloud/operators/spanner.py :language: python :dedent: 4 :start-after: [START gcp_spanner_query_template_fields] @@ -246,7 +246,7 @@ Using the operator You can create the operator with or without project id. If project id is missing it will be retrieved from the Google Cloud connection used. Both variants are shown: -.. exampleinclude:: /../../tests/system/providers/google/cloud/spanner/example_spanner.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/spanner/example_spanner.py :language: python :dedent: 4 :start-after: [START howto_operator_spanner_delete] @@ -255,7 +255,7 @@ it will be retrieved from the Google Cloud connection used. Both variants are sh Templating """""""""" -.. literalinclude:: /../../airflow/providers/google/cloud/operators/spanner.py +.. 
literalinclude:: /../../providers/src/airflow/providers/google/cloud/operators/spanner.py :language: python :dedent: 4 :start-after: [START gcp_spanner_delete_template_fields] diff --git a/docs/apache-airflow-providers-google/operators/cloud/speech_to_text.rst b/docs/apache-airflow-providers-google/operators/cloud/speech_to_text.rst index 006c21657c09..3114a359de96 100644 --- a/docs/apache-airflow-providers-google/operators/cloud/speech_to_text.rst +++ b/docs/apache-airflow-providers-google/operators/cloud/speech_to_text.rst @@ -42,14 +42,14 @@ google.cloud.speech_v1.types module for more information, see: https://googleapis.github.io/google-cloud-python/latest/speech/gapic/v1/api.html#google.cloud.speech_v1.SpeechClient.recognize -.. exampleinclude:: /../../tests/system/providers/google/cloud/speech_to_text/example_speech_to_text.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/speech_to_text/example_speech_to_text.py :language: python :start-after: [START howto_operator_text_to_speech_api_arguments] :end-before: [END howto_operator_text_to_speech_api_arguments] ``filename`` is a simple string argument: -.. exampleinclude:: /../../tests/system/providers/google/cloud/speech_to_text/example_speech_to_text.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/speech_to_text/example_speech_to_text.py :language: python :start-after: [START howto_operator_speech_to_text_api_arguments] :end-before: [END howto_operator_speech_to_text_api_arguments] @@ -57,7 +57,7 @@ filename is a simple string argument: Using the operator """""""""""""""""" -.. exampleinclude:: /../../tests/system/providers/google/cloud/speech_to_text/example_speech_to_text.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/speech_to_text/example_speech_to_text.py :language: python :dedent: 4 :start-after: [START howto_operator_speech_to_text_recognize] @@ -66,7 +66,7 @@ Using the operator Templating """""""""" -.. literalinclude:: /../../airflow/providers/google/cloud/operators/speech_to_text.py +.. literalinclude:: /../../providers/src/airflow/providers/google/cloud/operators/speech_to_text.py :language: python :dedent: 4 :start-after: [START gcp_speech_to_text_synthesize_template_fields] diff --git a/docs/apache-airflow-providers-google/operators/cloud/stackdriver.rst b/docs/apache-airflow-providers-google/operators/cloud/stackdriver.rst index ecad902d4e52..04b732f6ce9c 100644 --- a/docs/apache-airflow-providers-google/operators/cloud/stackdriver.rst +++ b/docs/apache-airflow-providers-google/operators/cloud/stackdriver.rst @@ -40,7 +40,7 @@ Using the operator You can use this operator with or without project id to fetch all the alert policies. If project id is missing it will be retrieved from the Google Cloud connection used. -.. exampleinclude:: /../../tests/system/providers/google/cloud/stackdriver/example_stackdriver.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/stackdriver/example_stackdriver.py :language: python :dedent: 4 :start-after: [START howto_operator_gcp_stackdriver_list_alert_policy] @@ -60,7 +60,7 @@ Using the operator You can use this operator with or without project id to fetch all the alert policies. If project id is missing it will be retrieved from the Google Cloud connection used.
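A brief sketch, assuming the ``filter_`` argument takes a Cloud Monitoring filter expression; the display name is hypothetical:

.. code-block:: python

    from airflow.providers.google.cloud.operators.stackdriver import (
        StackdriverEnableAlertPoliciesOperator,
    )

    # Enables every alert policy whose display name matches the filter.
    enable_alert_policies = StackdriverEnableAlertPoliciesOperator(
        task_id="enable_alert_policies",
        filter_='(displayName="my-alert-policy")',
    )

The same call appears in the system test below:

-.. 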
exampleinclude:: /../../providers/tests/system/google/cloud/stackdriver/example_stackdriver.py :language: python :dedent: 4 :start-after: [START howto_operator_gcp_stackdriver_enable_alert_policy] @@ -80,7 +80,7 @@ Using the operator You can use this operator with or without project id to fetch all the alert policies. If project id is missing it will be retrieved from the Google Cloud connection used. -.. exampleinclude:: /../../tests/system/providers/google/cloud/stackdriver/example_stackdriver.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/stackdriver/example_stackdriver.py :language: python :dedent: 4 :start-after: [START howto_operator_gcp_stackdriver_disable_alert_policy] @@ -101,7 +101,7 @@ Using the operator You can use this operator with or without project id to fetch all the alert policies. If project id is missing it will be retrieved from the Google Cloud connection used. -.. exampleinclude:: /../../tests/system/providers/google/cloud/stackdriver/example_stackdriver.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/stackdriver/example_stackdriver.py :language: python :dedent: 4 :start-after: [START howto_operator_gcp_stackdriver_upsert_alert_policy] @@ -120,7 +120,7 @@ Using the operator The name of the alert to be deleted should be given in the format projects/<PROJECT_NAME>/alertPolicies/<ALERT_NAME> -.. exampleinclude:: /../../tests/system/providers/google/cloud/stackdriver/example_stackdriver.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/stackdriver/example_stackdriver.py :language: python :dedent: 4 :start-after: [START howto_operator_gcp_stackdriver_delete_alert_policy] @@ -140,7 +140,7 @@ Using the operator You can use this operator with or without project id to fetch all the alert policies. If project id is missing it will be retrieved from the Google Cloud connection used. -.. exampleinclude:: /../../tests/system/providers/google/cloud/stackdriver/example_stackdriver.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/stackdriver/example_stackdriver.py :language: python :dedent: 4 :start-after: [START howto_operator_gcp_stackdriver_list_notification_channel] @@ -160,7 +160,7 @@ Using the operator You can use this operator with or without project id to fetch all the alert policies. If project id is missing it will be retrieved from the Google Cloud connection used. -.. exampleinclude:: /../../tests/system/providers/google/cloud/stackdriver/example_stackdriver.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/stackdriver/example_stackdriver.py :language: python :dedent: 4 :start-after: [START howto_operator_gcp_stackdriver_enable_notification_channel] @@ -180,7 +180,7 @@ Using the operator You can use this operator with or without project id to fetch all the alert policies. If project id is missing it will be retrieved from the Google Cloud connection used. -.. exampleinclude:: /../../tests/system/providers/google/cloud/stackdriver/example_stackdriver.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/stackdriver/example_stackdriver.py :language: python :dedent: 4 :start-after: [START howto_operator_gcp_stackdriver_disable_notification_channel] @@ -201,7 +201,7 @@ Using the operator You can use this operator with or without project id to fetch all the alert policies. If project id is missing it will be retrieved from the Google Cloud connection used. -.. 
exampleinclude:: /../../providers/tests/system/google/cloud/stackdriver/example_stackdriver.py :language: python :dedent: 4 :start-after: [START howto_operator_gcp_stackdriver_disable_notification_channel] @@ -220,7 +220,7 @@ Using the operator You can use this operator with or without project id to fetch all the alert policies. If project id is missing it will be retrieved from the Google Cloud connection used. -.. exampleinclude:: /../../tests/system/providers/google/cloud/stackdriver/example_stackdriver.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/stackdriver/example_stackdriver.py :language: python :dedent: 4 :start-after: [START howto_operator_gcp_stackdriver_delete_notification_channel] diff --git a/docs/apache-airflow-providers-google/operators/cloud/tasks.rst b/docs/apache-airflow-providers-google/operators/cloud/tasks.rst index af9f03bc33b7..c67a0b77d971 100644 --- a/docs/apache-airflow-providers-google/operators/cloud/tasks.rst +++ b/docs/apache-airflow-providers-google/operators/cloud/tasks.rst @@ -41,7 +41,7 @@ Create queue To create a new Queue, use :class:`~airflow.providers.google.cloud.operators.tasks.CloudTasksQueueCreateOperator` -.. exampleinclude:: /../../tests/system/providers/google/cloud/tasks/example_queue.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/tasks/example_queue.py :language: python :dedent: 4 :start-after: [START create_queue] @@ -55,7 +55,7 @@ Delete queue To delete a Queue, use :class:`~airflow.providers.google.cloud.operators.tasks.CloudTasksQueueDeleteOperator` -.. exampleinclude:: /../../tests/system/providers/google/cloud/tasks/example_queue.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/tasks/example_queue.py :language: python :dedent: 4 :start-after: [START delete_queue] @@ -70,7 +70,7 @@ Resume queue To resume a Queue, use :class:`~airflow.providers.google.cloud.operators.tasks.CloudTasksQueueResumeOperator` -.. exampleinclude:: /../../tests/system/providers/google/cloud/tasks/example_queue.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/tasks/example_queue.py :language: python :dedent: 4 :start-after: [START resume_queue] @@ -84,7 +84,7 @@ Pause queue To pause a Queue, use :class:`~airflow.providers.google.cloud.operators.tasks.CloudTasksQueuePauseOperator` -.. exampleinclude:: /../../tests/system/providers/google/cloud/tasks/example_queue.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/tasks/example_queue.py :language: python :dedent: 4 :start-after: [START pause_queue] @@ -98,7 +98,7 @@ Purge queue To purge a Queue, use :class:`~airflow.providers.google.cloud.operators.tasks.CloudTasksQueuePurgeOperator` -.. exampleinclude:: /../../tests/system/providers/google/cloud/tasks/example_queue.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/tasks/example_queue.py :language: python :dedent: 4 :start-after: [START purge_queue] @@ -112,7 +112,7 @@ Get queue To get a Queue, use :class:`~airflow.providers.google.cloud.operators.tasks.CloudTasksQueueGetOperator` -.. exampleinclude:: /../../tests/system/providers/google/cloud/tasks/example_queue.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/tasks/example_queue.py :language: python :dedent: 4 :start-after: [START get_queue] @@ -126,7 +126,7 @@ Update queue To update a Queue, use :class:`~airflow.providers.google.cloud.operators.tasks.CloudTasksQueueUpdateOperator`
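A sketch of creating and then updating a queue; the location, queue name and logging setting are placeholders, and the update is scoped with a field mask:

.. code-block:: python

    from google.cloud.tasks_v2.types import Queue
    from google.protobuf.field_mask_pb2 import FieldMask

    from airflow.providers.google.cloud.operators.tasks import (
        CloudTasksQueueCreateOperator,
        CloudTasksQueueUpdateOperator,
    )

    create_queue = CloudTasksQueueCreateOperator(
        task_id="create_queue",
        location="europe-west1",
        queue_name="my-queue",
        task_queue=Queue(),
    )
    update_queue = CloudTasksQueueUpdateOperator(
        task_id="update_queue",
        location="europe-west1",
        queue_name="my-queue",
        # Only the fields named in the mask are changed on the queue.
        task_queue=Queue(stackdriver_logging_config=dict(sampling_ratio=0.5)),
        update_mask=FieldMask(paths=["stackdriver_logging_config.sampling_ratio"]),
    )

    create_queue >> update_queue

The referenced snippet shows the update operation from the system test:

-.. 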
exampleinclude:: /../../providers/tests/system/google/cloud/tasks/example_queue.py :language: python :dedent: 4 :start-after: [START update_queue] @@ -140,7 +140,7 @@ List queues To list all Queues, use :class:`~airflow.providers.google.cloud.operators.tasks.CloudTasksQueuesListOperator` -.. exampleinclude:: /../../tests/system/providers/google/cloud/tasks/example_queue.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/tasks/example_queue.py :language: python :dedent: 4 :start-after: [START list_queue] @@ -158,7 +158,7 @@ Create task To create a new Task in a particular queue, use :class:`~airflow.providers.google.cloud.operators.tasks.CloudTasksTaskCreateOperator` -.. exampleinclude:: /../../tests/system/providers/google/cloud/tasks/example_tasks.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/tasks/example_tasks.py :language: python :dedent: 4 :start-after: [START create_task] @@ -172,7 +172,7 @@ Get task To get the Tasks in a particular queue, use :class:`~airflow.providers.google.cloud.operators.tasks.CloudTasksTaskGetOperator` -.. exampleinclude:: /../../tests/system/providers/google/cloud/tasks/example_tasks.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/tasks/example_tasks.py :language: python :dedent: 4 :start-after: [START tasks_get] @@ -186,7 +186,7 @@ Run task To run a Task in a particular queue, use :class:`~airflow.providers.google.cloud.operators.tasks.CloudTasksTaskRunOperator` -.. exampleinclude:: /../../tests/system/providers/google/cloud/tasks/example_tasks.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/tasks/example_tasks.py :language: python :dedent: 4 :start-after: [START run_task] @@ -200,7 +200,7 @@ List tasks To list all Tasks in a particular queue, use :class:`~airflow.providers.google.cloud.operators.tasks.CloudTasksTasksListOperator` -.. exampleinclude:: /../../tests/system/providers/google/cloud/tasks/example_tasks.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/tasks/example_tasks.py :language: python :dedent: 4 :start-after: [START list_tasks] @@ -214,7 +214,7 @@ Delete task To delete a Task from a particular queue, use :class:`~airflow.providers.google.cloud.operators.tasks.CloudTasksTaskDeleteOperator` -.. exampleinclude:: /../../tests/system/providers/google/cloud/tasks/example_tasks.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/tasks/example_tasks.py :language: python :dedent: 4 :start-after: [START create_task] diff --git a/docs/apache-airflow-providers-google/operators/cloud/text_to_speech.rst b/docs/apache-airflow-providers-google/operators/cloud/text_to_speech.rst index d200e902b3f9..78bf9e5eaed6 100644 --- a/docs/apache-airflow-providers-google/operators/cloud/text_to_speech.rst +++ b/docs/apache-airflow-providers-google/operators/cloud/text_to_speech.rst @@ -42,14 +42,14 @@ The ``input``, ``voice`` and ``audio_config`` arguments need to be dicts or obje for more information, see: https://googleapis.github.io/google-cloud-python/latest/texttospeech/gapic/v1/api.html#google.cloud.texttospeech_v1.TextToSpeechClient.synthesize_speech -.. exampleinclude:: /../../tests/system/providers/google/cloud/text_to_speech/example_text_to_speech.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/text_to_speech/example_text_to_speech.py :language: python :start-after: [START howto_operator_text_to_speech_api_arguments] :end-before: [END howto_operator_text_to_speech_api_arguments] The ``filename`` argument is a simple string: -.. 
exampleinclude:: /../../tests/system/providers/google/cloud/text_to_speech/example_text_to_speech.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/text_to_speech/example_text_to_speech.py :language: python :start-after: [START howto_operator_text_to_speech_gcp_filename] :end-before: [END howto_operator_text_to_speech_gcp_filename] @@ -57,7 +57,7 @@ The ``filename`` argument is a simple string argument: Using the operator """""""""""""""""" -.. exampleinclude:: /../../tests/system/providers/google/cloud/text_to_speech/example_text_to_speech.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/text_to_speech/example_text_to_speech.py :language: python :dedent: 4 :start-after: [START howto_operator_text_to_speech_synthesize] @@ -66,7 +66,7 @@ Using the operator Templating """""""""" -.. literalinclude:: /../../airflow/providers/google/cloud/operators/text_to_speech.py +.. literalinclude:: /../../providers/src/airflow/providers/google/cloud/operators/text_to_speech.py :language: python :dedent: 4 :start-after: [START gcp_text_to_speech_synthesize_template_fields] diff --git a/docs/apache-airflow-providers-google/operators/cloud/translate.rst b/docs/apache-airflow-providers-google/operators/cloud/translate.rst index 518d7d28f72b..579236cb0883 100644 --- a/docs/apache-airflow-providers-google/operators/cloud/translate.rst +++ b/docs/apache-airflow-providers-google/operators/cloud/translate.rst @@ -40,7 +40,7 @@ Using the operator Basic usage of the operator: -.. exampleinclude:: /../../tests/system/providers/google/cloud/translate/example_translate.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/translate/example_translate.py :language: python :dedent: 4 :start-after: [START howto_operator_translate_text] @@ -49,7 +49,7 @@ Basic usage of the operator: The result of translation is available as dictionary or array of dictionaries accessible via the usual XCom mechanisms of Airflow: -.. exampleinclude:: /../../tests/system/providers/google/cloud/translate/example_translate.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/translate/example_translate.py :language: python :dedent: 4 :start-after: [START howto_operator_translate_access] @@ -59,7 +59,7 @@ XCom mechanisms of Airflow: Templating """""""""" -.. literalinclude:: /../../airflow/providers/google/cloud/operators/translate.py +.. literalinclude:: /../../providers/src/airflow/providers/google/cloud/operators/translate.py :language: python :dedent: 4 :start-after: [START translate_template_fields] diff --git a/docs/apache-airflow-providers-google/operators/cloud/translate_speech.rst b/docs/apache-airflow-providers-google/operators/cloud/translate_speech.rst index c02f45c25216..d1043d0e51cf 100644 --- a/docs/apache-airflow-providers-google/operators/cloud/translate_speech.rst +++ b/docs/apache-airflow-providers-google/operators/cloud/translate_speech.rst @@ -43,7 +43,7 @@ for more information, see: https://googleapis.github.io/google-cloud-python/late Arguments for translation need to be specified. -.. exampleinclude:: /../../tests/system/providers/google/cloud/translate_speech/example_translate_speech.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/translate_speech/example_translate_speech.py :language: python :start-after: [START howto_operator_translate_speech_arguments] :end-before: [END howto_operator_translate_speech_arguments] @@ -52,7 +52,7 @@ Arguments for translation need to be specified. Using the operator """""""""""""""""" -.. 
exampleinclude:: /../../tests/system/providers/google/cloud/translate_speech/example_translate_speech.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/translate_speech/example_translate_speech.py :language: python :dedent: 4 :start-after: [START howto_operator_translate_speech] @@ -61,7 +61,7 @@ Using the operator Templating """""""""" -.. literalinclude:: /../../airflow/providers/google/cloud/operators/translate_speech.py +.. literalinclude:: /../../providers/src/airflow/providers/google/cloud/operators/translate_speech.py :language: python :dedent: 4 :start-after: [START translate_speech_template_fields] diff --git a/docs/apache-airflow-providers-google/operators/cloud/vertex_ai.rst b/docs/apache-airflow-providers-google/operators/cloud/vertex_ai.rst index 8fb76cd80fde..b6bc94620ff2 100644 --- a/docs/apache-airflow-providers-google/operators/cloud/vertex_ai.rst +++ b/docs/apache-airflow-providers-google/operators/cloud/vertex_ai.rst @@ -33,7 +33,7 @@ To create a Google VertexAI dataset you can use :class:`~airflow.providers.google.cloud.operators.vertex_ai.dataset.CreateDatasetOperator`. The operator returns the dataset id in :ref:`XCom ` under the ``dataset_id`` key. -.. exampleinclude:: /../../tests/system/providers/google/cloud/vertex_ai/example_vertex_ai_dataset.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/vertex_ai/example_vertex_ai_dataset.py :language: python :dedent: 4 :start-after: [START how_to_cloud_vertex_ai_create_dataset_operator] @@ -42,7 +42,7 @@ After creating a dataset you can use it to import some data using :class:`~airflow.providers.google.cloud.operators.vertex_ai.dataset.ImportDataOperator`. -.. exampleinclude:: /../../tests/system/providers/google/cloud/vertex_ai/example_vertex_ai_dataset.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/vertex_ai/example_vertex_ai_dataset.py :language: python :dedent: 4 :start-after: [START how_to_cloud_vertex_ai_import_data_operator] @@ -51,7 +51,7 @@ To export a dataset you can use :class:`~airflow.providers.google.cloud.operators.vertex_ai.dataset.ExportDataOperator`. -.. exampleinclude:: /../../tests/system/providers/google/cloud/vertex_ai/example_vertex_ai_dataset.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/vertex_ai/example_vertex_ai_dataset.py :language: python :dedent: 4 :start-after: [START how_to_cloud_vertex_ai_export_data_operator] @@ -60,7 +60,7 @@ To delete a dataset you can use :class:`~airflow.providers.google.cloud.operators.vertex_ai.dataset.DeleteDatasetOperator`. -.. exampleinclude:: /../../tests/system/providers/google/cloud/vertex_ai/example_vertex_ai_dataset.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/vertex_ai/example_vertex_ai_dataset.py :language: python :dedent: 4 :start-after: [START how_to_cloud_vertex_ai_delete_dataset_operator] @@ -69,7 +69,7 @@ To get a dataset you can use :class:`~airflow.providers.google.cloud.operators.vertex_ai.dataset.GetDatasetOperator`.
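For orientation, a sketch of creating an image dataset and reading it back; the project, region and display name are invented, and the created id is pulled from XCom assuming the ``dataset_id`` key described above:

.. code-block:: python

    from google.cloud.aiplatform import schema
    from google.protobuf.struct_pb2 import Value

    from airflow.providers.google.cloud.operators.vertex_ai.dataset import (
        CreateDatasetOperator,
        GetDatasetOperator,
    )

    create_dataset = CreateDatasetOperator(
        task_id="create_dataset",
        project_id="my-project",
        region="us-central1",
        dataset={
            "display_name": "my-image-dataset",
            "metadata_schema_uri": schema.dataset.metadata.image,
            "metadata": Value(string_value="image-dataset"),
        },
    )
    get_dataset = GetDatasetOperator(
        task_id="get_dataset",
        project_id="my-project",
        region="us-central1",
        # The XComArg reference also wires the task dependency.
        dataset_id=create_dataset.output["dataset_id"],
    )

The referenced snippet shows the get operation from the system test:

-.. 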
@@ -69,7 +69,7 @@ To delete dataset you can use
To get dataset you can use
:class:`~airflow.providers.google.cloud.operators.vertex_ai.dataset.GetDatasetOperator`.

-.. exampleinclude:: /../../tests/system/providers/google/cloud/vertex_ai/example_vertex_ai_dataset.py
+.. exampleinclude:: /../../providers/tests/system/google/cloud/vertex_ai/example_vertex_ai_dataset.py
    :language: python
    :dedent: 4
    :start-after: [START how_to_cloud_vertex_ai_get_dataset_operator]
@@ -78,7 +78,7 @@ To get dataset you can use
To get a dataset list you can use
:class:`~airflow.providers.google.cloud.operators.vertex_ai.dataset.ListDatasetsOperator`.

-.. exampleinclude:: /../../tests/system/providers/google/cloud/vertex_ai/example_vertex_ai_dataset.py
+.. exampleinclude:: /../../providers/tests/system/google/cloud/vertex_ai/example_vertex_ai_dataset.py
    :language: python
    :dedent: 4
    :start-after: [START how_to_cloud_vertex_ai_list_dataset_operator]
@@ -87,7 +87,7 @@ To get a dataset list you can use
To update dataset you can use
:class:`~airflow.providers.google.cloud.operators.vertex_ai.dataset.UpdateDatasetOperator`.

-.. exampleinclude:: /../../tests/system/providers/google/cloud/vertex_ai/example_vertex_ai_dataset.py
+.. exampleinclude:: /../../providers/tests/system/google/cloud/vertex_ai/example_vertex_ai_dataset.py
    :language: python
    :dedent: 4
    :start-after: [START how_to_cloud_vertex_ai_update_dataset_operator]
@@ -115,7 +115,7 @@ create image you can find by this link: https://cloud.google.com/vertex-ai/docs/
After that you should put link to the image in ``container_uri`` parameter. Also you can type executing command
for container which will be created from this image in ``command`` parameter.

-.. exampleinclude:: /../../tests/system/providers/google/cloud/vertex_ai/example_vertex_ai_custom_container.py
+.. exampleinclude:: /../../providers/tests/system/google/cloud/vertex_ai/example_vertex_ai_custom_container.py
    :language: python
    :dedent: 4
    :start-after: [START how_to_cloud_vertex_ai_create_custom_container_training_job_operator]
@@ -124,7 +124,7 @@ for container which will be created from this image in ``command`` parameter.
The :class:`~airflow.providers.google.cloud.operators.vertex_ai.custom_job.CreateCustomContainerTrainingJobOperator`
also provides the deferrable mode:

-.. exampleinclude:: /../../tests/system/providers/google/cloud/vertex_ai/example_vertex_ai_custom_container.py
+.. exampleinclude:: /../../providers/tests/system/google/cloud/vertex_ai/example_vertex_ai_custom_container.py
    :language: python
    :dedent: 4
    :start-after: [START how_to_cloud_vertex_ai_create_custom_container_training_job_operator_deferrable]
@@ -138,7 +138,7 @@ create you can find by this link: https://cloud.google.com/vertex-ai/docs/traini
Next you should put link to the package in ``python_package_gcs_uri`` parameter, also ``python_module_name``
parameter should has the name of script which will run your training task.

-.. exampleinclude:: /../../tests/system/providers/google/cloud/vertex_ai/example_vertex_ai_custom_job_python_package.py
+.. exampleinclude:: /../../providers/tests/system/google/cloud/vertex_ai/example_vertex_ai_custom_job_python_package.py
    :language: python
    :dedent: 4
    :start-after: [START how_to_cloud_vertex_ai_create_custom_python_package_training_job_operator]
@@ -147,7 +147,7 @@ parameter should has the name of script which will run your training task.
The :class:`~airflow.providers.google.cloud.operators.vertex_ai.custom_job.CreateCustomPythonPackageTrainingJobOperator`
also provides the deferrable mode:

-.. exampleinclude:: /../../tests/system/providers/google/cloud/vertex_ai/example_vertex_ai_custom_job_python_package.py
+.. exampleinclude:: /../../providers/tests/system/google/cloud/vertex_ai/example_vertex_ai_custom_job_python_package.py
    :language: python
    :dedent: 4
    :start-after: [START how_to_cloud_vertex_ai_create_custom_python_package_training_job_operator_deferrable]
@@ -158,7 +158,7 @@ How to run a Custom Training Job
To create and run a Custom Training Job you should put the path to your local training script inside
the ``script_path`` parameter.

-.. exampleinclude:: /../../tests/system/providers/google/cloud/vertex_ai/example_vertex_ai_custom_job.py
+.. exampleinclude:: /../../providers/tests/system/google/cloud/vertex_ai/example_vertex_ai_custom_job.py
    :language: python
    :dedent: 4
    :start-after: [START how_to_cloud_vertex_ai_create_custom_training_job_operator]
@@ -166,7 +166,7 @@ To create and run a Custom Training Job you should put the path to your local tr
The same operation can be performed in the deferrable mode:

-.. exampleinclude:: /../../tests/system/providers/google/cloud/vertex_ai/example_vertex_ai_custom_job.py
+.. exampleinclude:: /../../providers/tests/system/google/cloud/vertex_ai/example_vertex_ai_custom_job.py
    :language: python
    :dedent: 4
    :start-after: [START how_to_cloud_vertex_ai_create_custom_training_job_operator_deferrable]
@@ -176,7 +176,7 @@ Additionally, you can create a new version of an existing Custom Training Job. I
Model with another version, instead of creating a new Model in the Model Registry.
This can be done by specifying the ``parent_model`` parameter when running a Custom Training Job.

-.. exampleinclude:: /../../tests/system/providers/google/cloud/vertex_ai/example_vertex_ai_custom_job.py
+.. exampleinclude:: /../../providers/tests/system/google/cloud/vertex_ai/example_vertex_ai_custom_job.py
    :language: python
    :dedent: 4
    :start-after: [START how_to_cloud_vertex_ai_create_custom_training_job_v2_operator]
@@ -184,7 +184,7 @@ This can be done by specifying the ``parent_model`` parameter when running a Cus
The same operation can be performed in the deferrable mode:

-.. exampleinclude:: /../../tests/system/providers/google/cloud/vertex_ai/example_vertex_ai_custom_job.py
+.. exampleinclude:: /../../providers/tests/system/google/cloud/vertex_ai/example_vertex_ai_custom_job.py
    :language: python
    :dedent: 4
    :start-after: [START how_to_cloud_vertex_ai_create_custom_training_job_v2_deferrable_operator]
@@ -194,7 +194,7 @@ The same operation can be performed in the deferrable mode:
You can get a list of Training Jobs using
:class:`~airflow.providers.google.cloud.operators.vertex_ai.custom_job.ListCustomTrainingJobOperator`.

-.. exampleinclude:: /../../tests/system/providers/google/cloud/vertex_ai/example_vertex_ai_list_custom_jobs.py
+.. exampleinclude:: /../../providers/tests/system/google/cloud/vertex_ai/example_vertex_ai_list_custom_jobs.py
    :language: python
    :dedent: 4
    :start-after: [START how_to_cloud_vertex_ai_list_custom_training_job_operator]
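For the Custom Training Job hunks above, a minimal sketch of ``CreateCustomTrainingJobOperator`` in deferrable mode; the bucket, container images, and script path are placeholders, and the keyword arguments follow the referenced example but may vary across provider versions:

.. code-block:: python

    from __future__ import annotations

    from datetime import datetime

    from airflow import DAG
    from airflow.providers.google.cloud.operators.vertex_ai.custom_job import (
        CreateCustomTrainingJobOperator,
    )

    with DAG(dag_id="example_vertex_custom_job_sketch", start_date=datetime(2024, 1, 1), schedule=None) as dag:
        create_custom_training_job = CreateCustomTrainingJobOperator(
            task_id="custom_task",
            staging_bucket="gs://my-staging-bucket",  # placeholder
            display_name="train-housing-model",
            script_path="dags/scripts/training_script.py",  # local training script, per the docs above
            container_uri="us-docker.pkg.dev/vertex-ai/training/tf-cpu.2-9:latest",  # placeholder image
            requirements=["scikit-learn"],
            model_serving_container_image_uri="us-docker.pkg.dev/vertex-ai/prediction/tf2-cpu.2-9:latest",
            replica_count=1,
            region="us-central1",  # placeholder
            project_id="my-project",  # placeholder
            deferrable=True,  # hand the wait over to the triggerer instead of blocking a worker slot
        )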
@@ -203,7 +203,7 @@ You can get a list of Training Jobs using
If you wish to delete a Custom Training Job you can use
:class:`~airflow.providers.google.cloud.operators.vertex_ai.custom_job.DeleteCustomTrainingJobOperator`.

-.. exampleinclude:: /../../tests/system/providers/google/cloud/vertex_ai/example_vertex_ai_custom_job.py
+.. exampleinclude:: /../../providers/tests/system/google/cloud/vertex_ai/example_vertex_ai_custom_job.py
    :language: python
    :dedent: 4
    :start-after: [START how_to_cloud_vertex_ai_delete_custom_training_job_operator]
@@ -227,7 +227,7 @@ How to run AutoML Forecasting Training Job
Before start running this Job you must prepare and create ``TimeSeries`` dataset. After that you should
put dataset id to ``dataset_id`` parameter in operator.

-.. exampleinclude:: /../../tests/system/providers/google/cloud/vertex_ai/example_vertex_ai_auto_ml_forecasting_training.py
+.. exampleinclude:: /../../providers/tests/system/google/cloud/vertex_ai/example_vertex_ai_auto_ml_forecasting_training.py
    :language: python
    :dedent: 4
    :start-after: [START how_to_cloud_vertex_ai_create_auto_ml_forecasting_training_job_operator]
@@ -239,7 +239,7 @@ How to run AutoML Image Training Job
Before start running this Job you must prepare and create ``Image`` dataset. After that you should
put dataset id to ``dataset_id`` parameter in operator.

-.. exampleinclude:: /../../tests/system/providers/google/cloud/vertex_ai/example_vertex_ai_auto_ml_image_training.py
+.. exampleinclude:: /../../providers/tests/system/google/cloud/vertex_ai/example_vertex_ai_auto_ml_image_training.py
    :language: python
    :dedent: 4
    :start-after: [START how_to_cloud_vertex_ai_create_auto_ml_image_training_job_operator]
@@ -251,7 +251,7 @@ How to run AutoML Tabular Training Job
Before start running this Job you must prepare and create ``Tabular`` dataset. After that you should
put dataset id to ``dataset_id`` parameter in operator.

-.. exampleinclude:: /../../tests/system/providers/google/cloud/vertex_ai/example_vertex_ai_auto_ml_tabular_training.py
+.. exampleinclude:: /../../providers/tests/system/google/cloud/vertex_ai/example_vertex_ai_auto_ml_tabular_training.py
    :language: python
    :dedent: 4
    :start-after: [START how_to_cloud_vertex_ai_create_auto_ml_tabular_training_job_operator]
@@ -274,7 +274,7 @@ How to run AutoML Video Training Job
Before start running this Job you must prepare and create ``Video`` dataset. After that you should
put dataset id to ``dataset_id`` parameter in operator.

-.. exampleinclude:: /../../tests/system/providers/google/cloud/vertex_ai/example_vertex_ai_auto_ml_video_training.py
+.. exampleinclude:: /../../providers/tests/system/google/cloud/vertex_ai/example_vertex_ai_auto_ml_video_training.py
    :language: python
    :dedent: 4
    :start-after: [START how_to_cloud_vertex_ai_create_auto_ml_video_training_job_operator]
@@ -284,7 +284,7 @@ Additionally, you can create new version of existing AutoML Video Training Job.
version of existing Model instead of new Model created in Model Registry. This can be done by specifying
``parent_model`` parameter when running AutoML Video Training Job.

-.. exampleinclude:: /../../tests/system/providers/google/cloud/vertex_ai/example_vertex_ai_auto_ml_video_training.py
+.. exampleinclude:: /../../providers/tests/system/google/cloud/vertex_ai/example_vertex_ai_auto_ml_video_training.py
    :language: python
    :dedent: 4
    :start-after: [START how_to_cloud_vertex_ai_create_auto_ml_video_training_job_v2_operator]
@@ -293,7 +293,7 @@ version of existing Model instead of new Model created in Model Registry. This c
You can get a list of AutoML Training Jobs using
:class:`~airflow.providers.google.cloud.operators.vertex_ai.auto_ml.ListAutoMLTrainingJobOperator`.

-.. exampleinclude:: /../../tests/system/providers/google/cloud/vertex_ai/example_vertex_ai_auto_ml_list_training.py
+.. exampleinclude:: /../../providers/tests/system/google/cloud/vertex_ai/example_vertex_ai_auto_ml_list_training.py
    :language: python
    :dedent: 4
    :start-after: [START how_to_cloud_vertex_ai_list_auto_ml_training_job_operator]
@@ -302,7 +302,7 @@ You can get a list of AutoML Training Jobs using
If you wish to delete a Auto ML Training Job you can use
:class:`~airflow.providers.google.cloud.operators.vertex_ai.auto_ml.DeleteAutoMLTrainingJobOperator`.

-.. exampleinclude:: /../../tests/system/providers/google/cloud/vertex_ai/example_vertex_ai_auto_ml_forecasting_training.py
+.. exampleinclude:: /../../providers/tests/system/google/cloud/vertex_ai/example_vertex_ai_auto_ml_forecasting_training.py
    :language: python
    :dedent: 4
    :start-after: [START how_to_cloud_vertex_ai_delete_auto_ml_training_job_operator]
@@ -315,7 +315,7 @@ To create a Google VertexAI Batch Prediction Job you can use
:class:`~airflow.providers.google.cloud.operators.vertex_ai.batch_prediction_job.CreateBatchPredictionJobOperator`.
The operator returns batch prediction job id in :ref:`XCom <concepts:xcom>` under ``batch_prediction_job_id`` key.

-.. exampleinclude:: /../../tests/system/providers/google/cloud/vertex_ai/example_vertex_ai_batch_prediction_job.py
+.. exampleinclude:: /../../providers/tests/system/google/cloud/vertex_ai/example_vertex_ai_batch_prediction_job.py
    :language: python
    :dedent: 4
    :start-after: [START how_to_cloud_vertex_ai_create_batch_prediction_job_operator]
@@ -324,7 +324,7 @@ The operator returns batch prediction job id in :ref:`XCom ` unde
The :class:`~airflow.providers.google.cloud.operators.vertex_ai.batch_prediction_job.CreateBatchPredictionJobOperator`
also provides deferrable mode:

-.. exampleinclude:: /../../tests/system/providers/google/cloud/vertex_ai/example_vertex_ai_batch_prediction_job.py
+.. exampleinclude:: /../../providers/tests/system/google/cloud/vertex_ai/example_vertex_ai_batch_prediction_job.py
    :language: python
    :dedent: 4
    :start-after: [START how_to_cloud_vertex_ai_create_batch_prediction_job_operator_def]
@@ -334,7 +334,7 @@ also provides deferrable mode:
To delete batch prediction job you can use
:class:`~airflow.providers.google.cloud.operators.vertex_ai.batch_prediction_job.DeleteBatchPredictionJobOperator`.

-.. exampleinclude:: /../../tests/system/providers/google/cloud/vertex_ai/example_vertex_ai_batch_prediction_job.py
+.. exampleinclude:: /../../providers/tests/system/google/cloud/vertex_ai/example_vertex_ai_batch_prediction_job.py
    :language: python
    :dedent: 4
    :start-after: [START how_to_cloud_vertex_ai_delete_batch_prediction_job_operator]
@@ -343,7 +343,7 @@ To delete batch prediction job you can use
To get a batch prediction job list you can use
:class:`~airflow.providers.google.cloud.operators.vertex_ai.batch_prediction_job.ListBatchPredictionJobsOperator`.

-.. exampleinclude:: /../../tests/system/providers/google/cloud/vertex_ai/example_vertex_ai_batch_prediction_job.py
+.. exampleinclude:: /../../providers/tests/system/google/cloud/vertex_ai/example_vertex_ai_batch_prediction_job.py
    :language: python
    :dedent: 4
    :start-after: [START how_to_cloud_vertex_ai_list_batch_prediction_job_operator]
@@ -356,7 +356,7 @@ To create a Google VertexAI endpoint you can use
:class:`~airflow.providers.google.cloud.operators.vertex_ai.endpoint_service.CreateEndpointOperator`.
The operator returns endpoint id in :ref:`XCom <concepts:xcom>` under ``endpoint_id`` key.

-.. exampleinclude:: /../../tests/system/providers/google/cloud/vertex_ai/example_vertex_ai_endpoint.py
+.. exampleinclude:: /../../providers/tests/system/google/cloud/vertex_ai/example_vertex_ai_endpoint.py
    :language: python
    :dedent: 4
    :start-after: [START how_to_cloud_vertex_ai_create_endpoint_operator]
@@ -365,7 +365,7 @@ The operator returns endpoint id in :ref:`XCom ` under ``endpoint
After creating an endpoint you can use it to deploy some model using
:class:`~airflow.providers.google.cloud.operators.vertex_ai.endpoint_service.DeployModelOperator`.

-.. exampleinclude:: /../../tests/system/providers/google/cloud/vertex_ai/example_vertex_ai_endpoint.py
+.. exampleinclude:: /../../providers/tests/system/google/cloud/vertex_ai/example_vertex_ai_endpoint.py
    :language: python
    :dedent: 4
    :start-after: [START how_to_cloud_vertex_ai_deploy_model_operator]
@@ -374,7 +374,7 @@ After creating an endpoint you can use it to deploy some model using
To un deploy model you can use
:class:`~airflow.providers.google.cloud.operators.vertex_ai.endpoint_service.UndeployModelOperator`.

-.. exampleinclude:: /../../tests/system/providers/google/cloud/vertex_ai/example_vertex_ai_endpoint.py
+.. exampleinclude:: /../../providers/tests/system/google/cloud/vertex_ai/example_vertex_ai_endpoint.py
    :language: python
    :dedent: 4
    :start-after: [START how_to_cloud_vertex_ai_undeploy_model_operator]
@@ -383,7 +383,7 @@ To un deploy model you can use
To delete endpoint you can use
:class:`~airflow.providers.google.cloud.operators.vertex_ai.endpoint_service.DeleteEndpointOperator`.

-.. exampleinclude:: /../../tests/system/providers/google/cloud/vertex_ai/example_vertex_ai_endpoint.py
+.. exampleinclude:: /../../providers/tests/system/google/cloud/vertex_ai/example_vertex_ai_endpoint.py
    :language: python
    :dedent: 4
    :start-after: [START how_to_cloud_vertex_ai_delete_endpoint_operator]
@@ -392,7 +392,7 @@ To delete endpoint you can use
To get an endpoint list you can use
:class:`~airflow.providers.google.cloud.operators.vertex_ai.endpoint_service.ListEndpointsOperator`.

-.. exampleinclude:: /../../tests/system/providers/google/cloud/vertex_ai/example_vertex_ai_endpoint.py
+.. exampleinclude:: /../../providers/tests/system/google/cloud/vertex_ai/example_vertex_ai_endpoint.py
    :language: python
    :dedent: 4
    :start-after: [START how_to_cloud_vertex_ai_list_endpoints_operator]
@@ -405,7 +405,7 @@ To create a Google VertexAI hyperparameter tuning job you can use
:class:`~airflow.providers.google.cloud.operators.vertex_ai.hyperparameter_tuning_job.CreateHyperparameterTuningJobOperator`.
The operator returns hyperparameter tuning job id in :ref:`XCom <concepts:xcom>` under ``hyperparameter_tuning_job_id`` key.

-.. exampleinclude:: /../../tests/system/providers/google/cloud/vertex_ai/example_vertex_ai_hyperparameter_tuning_job.py
+.. exampleinclude:: /../../providers/tests/system/google/cloud/vertex_ai/example_vertex_ai_hyperparameter_tuning_job.py
    :language: python
    :dedent: 4
    :start-after: [START how_to_cloud_vertex_ai_create_hyperparameter_tuning_job_operator]
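For the endpoint-service hunks above, a minimal sketch wiring ``CreateEndpointOperator`` into ``DeployModelOperator``; the shape of the ``deployed_model`` dict follows the Vertex AI ``DeployedModel`` message as an assumption, and ids, regions, and machine types are placeholders:

.. code-block:: python

    from __future__ import annotations

    from datetime import datetime

    from airflow import DAG
    from airflow.providers.google.cloud.operators.vertex_ai.endpoint_service import (
        CreateEndpointOperator,
        DeployModelOperator,
    )

    PROJECT_ID = "my-project"  # placeholder
    REGION = "us-central1"  # placeholder
    MODEL_ID = "1234567890"  # placeholder

    with DAG(dag_id="example_vertex_endpoint_sketch", start_date=datetime(2024, 1, 1), schedule=None) as dag:
        create_endpoint = CreateEndpointOperator(
            task_id="create_endpoint",
            endpoint={"display_name": "my-endpoint"},
            region=REGION,
            project_id=PROJECT_ID,
        )
        # "endpoint_id" is the XCom key documented for CreateEndpointOperator above.
        deploy_model = DeployModelOperator(
            task_id="deploy_model",
            endpoint_id="{{ ti.xcom_pull(task_ids='create_endpoint', key='endpoint_id') }}",
            deployed_model={
                "model": f"projects/{PROJECT_ID}/locations/{REGION}/models/{MODEL_ID}",
                "display_name": "my-deployed-model",
                "dedicated_resources": {
                    "machine_spec": {"machine_type": "n1-standard-2"},
                    "min_replica_count": 1,
                },
            },
            traffic_split={"0": 100},  # route all traffic to this deployment
            region=REGION,
            project_id=PROJECT_ID,
        )
        create_endpoint >> deploy_model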
@@ -414,7 +414,7 @@ The operator returns hyperparameter tuning job id in :ref:`XCom `
:class:`~airflow.providers.google.cloud.operators.vertex_ai.hyperparameter_tuning_job.CreateHyperparameterTuningJobOperator`
also supports deferrable mode:

-.. exampleinclude:: /../../tests/system/providers/google/cloud/vertex_ai/example_vertex_ai_hyperparameter_tuning_job.py
+.. exampleinclude:: /../../providers/tests/system/google/cloud/vertex_ai/example_vertex_ai_hyperparameter_tuning_job.py
    :language: python
    :dedent: 4
    :start-after: [START how_to_cloud_vertex_ai_create_hyperparameter_tuning_job_operator_deferrable]
@@ -423,7 +423,7 @@ also supports deferrable mode:
To delete hyperparameter tuning job you can use
:class:`~airflow.providers.google.cloud.operators.vertex_ai.hyperparameter_tuning_job.DeleteHyperparameterTuningJobOperator`.

-.. exampleinclude:: /../../tests/system/providers/google/cloud/vertex_ai/example_vertex_ai_hyperparameter_tuning_job.py
+.. exampleinclude:: /../../providers/tests/system/google/cloud/vertex_ai/example_vertex_ai_hyperparameter_tuning_job.py
    :language: python
    :dedent: 4
    :start-after: [START how_to_cloud_vertex_ai_delete_hyperparameter_tuning_job_operator]
@@ -432,7 +432,7 @@ To delete hyperparameter tuning job you can use
To get hyperparameter tuning job you can use
:class:`~airflow.providers.google.cloud.operators.vertex_ai.hyperparameter_tuning_job.GetHyperparameterTuningJobOperator`.

-.. exampleinclude:: /../../tests/system/providers/google/cloud/vertex_ai/example_vertex_ai_hyperparameter_tuning_job.py
+.. exampleinclude:: /../../providers/tests/system/google/cloud/vertex_ai/example_vertex_ai_hyperparameter_tuning_job.py
    :language: python
    :dedent: 4
    :start-after: [START how_to_cloud_vertex_ai_get_hyperparameter_tuning_job_operator]
@@ -441,7 +441,7 @@ To get hyperparameter tuning job you can use
To get a hyperparameter tuning job list you can use
:class:`~airflow.providers.google.cloud.operators.vertex_ai.hyperparameter_tuning_job.ListHyperparameterTuningJobOperator`.

-.. exampleinclude:: /../../tests/system/providers/google/cloud/vertex_ai/example_vertex_ai_hyperparameter_tuning_job.py
+.. exampleinclude:: /../../providers/tests/system/google/cloud/vertex_ai/example_vertex_ai_hyperparameter_tuning_job.py
    :language: python
    :dedent: 4
    :start-after: [START how_to_cloud_vertex_ai_list_hyperparameter_tuning_job_operator]
@@ -454,7 +454,7 @@ To upload a Google VertexAI model you can use
:class:`~airflow.providers.google.cloud.operators.vertex_ai.model_service.UploadModelOperator`.
The operator returns model id in :ref:`XCom <concepts:xcom>` under ``model_id`` key.

-.. exampleinclude:: /../../tests/system/providers/google/cloud/vertex_ai/example_vertex_ai_model_service.py
+.. exampleinclude:: /../../providers/tests/system/google/cloud/vertex_ai/example_vertex_ai_model_service.py
    :language: python
    :dedent: 4
    :start-after: [START how_to_cloud_vertex_ai_upload_model_operator]
@@ -463,7 +463,7 @@ The operator returns model id in :ref:`XCom ` under ``model_id``
To export model you can use
:class:`~airflow.providers.google.cloud.operators.vertex_ai.model_service.ExportModelOperator`.

-.. exampleinclude:: /../../tests/system/providers/google/cloud/vertex_ai/example_vertex_ai_model_service.py
+.. exampleinclude:: /../../providers/tests/system/google/cloud/vertex_ai/example_vertex_ai_model_service.py
    :language: python
    :dedent: 4
    :start-after: [START how_to_cloud_vertex_ai_export_model_operator]
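For the hyperparameter-tuning hunks above, a minimal sketch of fetching and then deleting a tuning job by the id the create operator publishes to XCom; the upstream task id, project, and region are placeholders:

.. code-block:: python

    from __future__ import annotations

    from datetime import datetime

    from airflow import DAG
    from airflow.providers.google.cloud.operators.vertex_ai.hyperparameter_tuning_job import (
        DeleteHyperparameterTuningJobOperator,
        GetHyperparameterTuningJobOperator,
    )

    PROJECT_ID = "my-project"  # placeholder
    REGION = "us-central1"  # placeholder
    # Id published by CreateHyperparameterTuningJobOperator under the documented
    # "hyperparameter_tuning_job_id" XCom key; the task id is hypothetical.
    JOB_ID = "{{ ti.xcom_pull(task_ids='create_hp_tuning_job', key='hyperparameter_tuning_job_id') }}"

    with DAG(dag_id="example_vertex_hp_tuning_sketch", start_date=datetime(2024, 1, 1), schedule=None) as dag:
        get_job = GetHyperparameterTuningJobOperator(
            task_id="get_hp_tuning_job",
            project_id=PROJECT_ID,
            region=REGION,
            hyperparameter_tuning_job_id=JOB_ID,
        )
        delete_job = DeleteHyperparameterTuningJobOperator(
            task_id="delete_hp_tuning_job",
            project_id=PROJECT_ID,
            region=REGION,
            hyperparameter_tuning_job_id=JOB_ID,
        )
        get_job >> delete_job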
@@ -472,7 +472,7 @@ To export model you can use
To delete model you can use
:class:`~airflow.providers.google.cloud.operators.vertex_ai.model_service.DeleteModelOperator`.

-.. exampleinclude:: /../../tests/system/providers/google/cloud/vertex_ai/example_vertex_ai_model_service.py
+.. exampleinclude:: /../../providers/tests/system/google/cloud/vertex_ai/example_vertex_ai_model_service.py
    :language: python
    :dedent: 4
    :start-after: [START how_to_cloud_vertex_ai_delete_model_operator]
@@ -481,7 +481,7 @@ To delete model you can use
To get a model list you can use
:class:`~airflow.providers.google.cloud.operators.vertex_ai.model_service.ListModelsOperator`.

-.. exampleinclude:: /../../tests/system/providers/google/cloud/vertex_ai/example_vertex_ai_model_service.py
+.. exampleinclude:: /../../providers/tests/system/google/cloud/vertex_ai/example_vertex_ai_model_service.py
    :language: python
    :dedent: 4
    :start-after: [START how_to_cloud_vertex_ai_list_models_operator]
@@ -490,7 +490,7 @@ To get a model list you can use
To retrieve model by its ID you can use
:class:`~airflow.providers.google.cloud.operators.vertex_ai.model_service.GetModelOperator`.

-.. exampleinclude:: /../../tests/system/providers/google/cloud/vertex_ai/example_vertex_ai_model_service.py
+.. exampleinclude:: /../../providers/tests/system/google/cloud/vertex_ai/example_vertex_ai_model_service.py
    :language: python
    :dedent: 4
    :start-after: [START how_to_cloud_vertex_ai_get_model_operator]
@@ -499,7 +499,7 @@ To retrieve model by its ID you can use
To list all model versions you can use
:class:`~airflow.providers.google.cloud.operators.vertex_ai.model_service.ListModelVersionsOperator`.

-.. exampleinclude:: /../../tests/system/providers/google/cloud/vertex_ai/example_vertex_ai_model_service.py
+.. exampleinclude:: /../../providers/tests/system/google/cloud/vertex_ai/example_vertex_ai_model_service.py
    :language: python
    :dedent: 4
    :start-after: [START how_to_cloud_vertex_ai_list_model_versions_operator]
@@ -508,7 +508,7 @@ To list all model versions you can use
To set a specific version of model as a default one you can use
:class:`~airflow.providers.google.cloud.operators.vertex_ai.model_service.SetDefaultVersionOnModelOperator`.

-.. exampleinclude:: /../../tests/system/providers/google/cloud/vertex_ai/example_vertex_ai_model_service.py
+.. exampleinclude:: /../../providers/tests/system/google/cloud/vertex_ai/example_vertex_ai_model_service.py
    :language: python
    :dedent: 4
    :start-after: [START how_to_cloud_vertex_ai_set_version_as_default_operator]
@@ -517,7 +517,7 @@ To set a specific version of model as a default one you can use
To add aliases to specific version of model you can use
:class:`~airflow.providers.google.cloud.operators.vertex_ai.model_service.AddVersionAliasesOnModelOperator`.

-.. exampleinclude:: /../../tests/system/providers/google/cloud/vertex_ai/example_vertex_ai_model_service.py
+.. exampleinclude:: /../../providers/tests/system/google/cloud/vertex_ai/example_vertex_ai_model_service.py
    :language: python
    :dedent: 4
    :start-after: [START how_to_cloud_vertex_ai_add_version_aliases_operator]
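For the model-service hunks above, a minimal sketch of retrieving a model and listing its versions; the model id, project, and region are placeholders, and argument names follow the referenced example but may differ across provider versions:

.. code-block:: python

    from __future__ import annotations

    from datetime import datetime

    from airflow import DAG
    from airflow.providers.google.cloud.operators.vertex_ai.model_service import (
        GetModelOperator,
        ListModelVersionsOperator,
    )

    PROJECT_ID = "my-project"  # placeholder
    REGION = "us-central1"  # placeholder
    MODEL_ID = "1234567890"  # placeholder

    with DAG(dag_id="example_vertex_model_service_sketch", start_date=datetime(2024, 1, 1), schedule=None) as dag:
        get_model = GetModelOperator(
            task_id="get_model",
            project_id=PROJECT_ID,
            region=REGION,
            model_id=MODEL_ID,
        )
        list_versions = ListModelVersionsOperator(
            task_id="list_model_versions",
            project_id=PROJECT_ID,
            region=REGION,
            model_id=MODEL_ID,
        )
        get_model >> list_versions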
@@ -526,7 +526,7 @@ To add aliases to specific version of model you can use
To delete aliases from specific version of model you can use
:class:`~airflow.providers.google.cloud.operators.vertex_ai.model_service.DeleteVersionAliasesOnModelOperator`.

-.. exampleinclude:: /../../tests/system/providers/google/cloud/vertex_ai/example_vertex_ai_model_service.py
+.. exampleinclude:: /../../providers/tests/system/google/cloud/vertex_ai/example_vertex_ai_model_service.py
    :language: python
    :dedent: 4
    :start-after: [START how_to_cloud_vertex_ai_delete_version_aliases_operator]
@@ -535,7 +535,7 @@ To delete aliases from specific version of model you can use
To delete specific version of model you can use
:class:`~airflow.providers.google.cloud.operators.vertex_ai.model_service.DeleteModelVersionOperator`.

-.. exampleinclude:: /../../tests/system/providers/google/cloud/vertex_ai/example_vertex_ai_model_service.py
+.. exampleinclude:: /../../providers/tests/system/google/cloud/vertex_ai/example_vertex_ai_model_service.py
    :language: python
    :dedent: 4
    :start-after: [START how_to_cloud_vertex_ai_delete_version_operator]
@@ -548,7 +548,7 @@ To run a Google VertexAI Pipeline Job you can use
:class:`~airflow.providers.google.cloud.operators.vertex_ai.pipeline_job.RunPipelineJobOperator`.
The operator returns pipeline job id in :ref:`XCom <concepts:xcom>` under ``pipeline_job_id`` key.

-.. exampleinclude:: /../../tests/system/providers/google/cloud/vertex_ai/example_vertex_ai_pipeline_job.py
+.. exampleinclude:: /../../providers/tests/system/google/cloud/vertex_ai/example_vertex_ai_pipeline_job.py
    :language: python
    :dedent: 4
    :start-after: [START how_to_cloud_vertex_ai_run_pipeline_job_operator]
@@ -557,7 +557,7 @@ The operator returns pipeline job id in :ref:`XCom ` under ``pipe
To delete pipeline job you can use
:class:`~airflow.providers.google.cloud.operators.vertex_ai.pipeline_job.DeletePipelineJobOperator`.

-.. exampleinclude:: /../../tests/system/providers/google/cloud/vertex_ai/example_vertex_ai_pipeline_job.py
+.. exampleinclude:: /../../providers/tests/system/google/cloud/vertex_ai/example_vertex_ai_pipeline_job.py
    :language: python
    :dedent: 4
    :start-after: [START how_to_cloud_vertex_ai_delete_pipeline_job_operator]
@@ -566,7 +566,7 @@ To delete pipeline job you can use
To get pipeline job you can use
:class:`~airflow.providers.google.cloud.operators.vertex_ai.pipeline_job.GetPipelineJobOperator`.

-.. exampleinclude:: /../../tests/system/providers/google/cloud/vertex_ai/example_vertex_ai_pipeline_job.py
+.. exampleinclude:: /../../providers/tests/system/google/cloud/vertex_ai/example_vertex_ai_pipeline_job.py
    :language: python
    :dedent: 4
    :start-after: [START how_to_cloud_vertex_ai_get_pipeline_job_operator]
@@ -575,7 +575,7 @@ To get pipeline job you can use
To get a pipeline job list you can use
:class:`~airflow.providers.google.cloud.operators.vertex_ai.pipeline_job.ListPipelineJobOperator`.

-.. exampleinclude:: /../../tests/system/providers/google/cloud/vertex_ai/example_vertex_ai_pipeline_job.py
+.. exampleinclude:: /../../providers/tests/system/google/cloud/vertex_ai/example_vertex_ai_pipeline_job.py
    :language: python
    :dedent: 4
    :start-after: [START how_to_cloud_vertex_ai_list_pipeline_job_operator]
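For the pipeline-job hunks above, a minimal sketch of ``RunPipelineJobOperator``; the template path and parameter values are placeholders standing in for a compiled pipeline definition, and parameter names follow the referenced example but may vary across provider versions:

.. code-block:: python

    from __future__ import annotations

    from datetime import datetime

    from airflow import DAG
    from airflow.providers.google.cloud.operators.vertex_ai.pipeline_job import RunPipelineJobOperator

    with DAG(dag_id="example_vertex_pipeline_job_sketch", start_date=datetime(2024, 1, 1), schedule=None) as dag:
        run_pipeline_job = RunPipelineJobOperator(
            task_id="run_pipeline_job",
            display_name="my-pipeline-job",
            # Compiled KFP/TFX pipeline definition; the path is a placeholder.
            template_path="gs://my-bucket/pipelines/my_pipeline.json",
            parameter_values={"input_table": "bq://my-project.my_dataset.my_table"},
            region="us-central1",  # placeholder
            project_id="my-project",  # placeholder
        )
        # The pipeline job id is published to XCom under the "pipeline_job_id" key.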
@@ -588,7 +588,7 @@ To generate a prediction via language model you can use
:class:`~airflow.providers.google.cloud.operators.vertex_ai.generative_model.TextGenerationModelPredictOperator`.
The operator returns the model's response in :ref:`XCom <concepts:xcom>` under ``model_response`` key.

-.. exampleinclude:: /../../tests/system/providers/google/cloud/vertex_ai/example_vertex_ai_generative_model.py
+.. exampleinclude:: /../../providers/tests/system/google/cloud/vertex_ai/example_vertex_ai_generative_model.py
    :language: python
    :dedent: 4
    :start-after: [START how_to_cloud_vertex_ai_text_generation_model_predict_operator]
@@ -598,7 +598,7 @@ To generate text embeddings you can use
:class:`~airflow.providers.google.cloud.operators.vertex_ai.generative_model.TextEmbeddingModelGetEmbeddingsOperator`.
The operator returns the model's response in :ref:`XCom <concepts:xcom>` under ``model_response`` key.

-.. exampleinclude:: /../../tests/system/providers/google/cloud/vertex_ai/example_vertex_ai_generative_model.py
+.. exampleinclude:: /../../providers/tests/system/google/cloud/vertex_ai/example_vertex_ai_generative_model.py
    :language: python
    :dedent: 4
    :start-after: [START how_to_cloud_vertex_ai_text_embedding_model_get_embeddings_operator]
@@ -608,7 +608,7 @@ To generate content with a generative model you can use
:class:`~airflow.providers.google.cloud.operators.vertex_ai.generative_model.GenerativeModelGenerateContentOperator`.
The operator returns the model's response in :ref:`XCom <concepts:xcom>` under ``model_response`` key.

-.. exampleinclude:: /../../tests/system/providers/google/cloud/vertex_ai/example_vertex_ai_generative_model.py
+.. exampleinclude:: /../../providers/tests/system/google/cloud/vertex_ai/example_vertex_ai_generative_model.py
    :language: python
    :dedent: 4
    :start-after: [START how_to_cloud_vertex_ai_generative_model_generate_content_operator]
@@ -618,7 +618,7 @@ To run a supervised fine tuning job you can use
:class:`~airflow.providers.google.cloud.operators.vertex_ai.generative_model.SupervisedFineTuningTrainOperator`.
The operator returns the tuned model's endpoint name in :ref:`XCom <concepts:xcom>` under ``tuned_model_endpoint_name`` key.

-.. exampleinclude:: /../../tests/system/providers/google/cloud/vertex_ai/example_vertex_ai_generative_model_tuning.py
+.. exampleinclude:: /../../providers/tests/system/google/cloud/vertex_ai/example_vertex_ai_generative_model_tuning.py
    :language: python
    :dedent: 4
    :start-after: [START how_to_cloud_vertex_ai_supervised_fine_tuning_train_operator]
@@ -629,7 +629,7 @@ To calculates the number of input tokens before sending a request to the Gemini
:class:`~airflow.providers.google.cloud.operators.vertex_ai.generative_model.CountTokensOperator`.
The operator returns the total tokens in :ref:`XCom <concepts:xcom>` under ``total_tokens`` key.

-.. exampleinclude:: /../../tests/system/providers/google/cloud/vertex_ai/example_vertex_ai_generative_model.py
+.. exampleinclude:: /../../providers/tests/system/google/cloud/vertex_ai/example_vertex_ai_generative_model.py
    :language: python
    :dedent: 4
    :start-after: [START how_to_cloud_vertex_ai_count_tokens_operator]
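For the generative-model hunks above, a minimal sketch of ``GenerativeModelGenerateContentOperator``; the model name, prompt, project, and location are assumptions/placeholders, and parameter names may vary with the installed provider version:

.. code-block:: python

    from __future__ import annotations

    from datetime import datetime

    from airflow import DAG
    from airflow.providers.google.cloud.operators.vertex_ai.generative_model import (
        GenerativeModelGenerateContentOperator,
    )

    with DAG(dag_id="example_generative_model_sketch", start_date=datetime(2024, 1, 1), schedule=None) as dag:
        generate_content = GenerativeModelGenerateContentOperator(
            task_id="generate_content",
            project_id="my-project",  # placeholder
            location="us-central1",  # placeholder
            pretrained_model="gemini-pro",  # assumption; use a model available in your project
            contents=["Summarize this release note in two sentences."],  # illustrative prompt
        )
        # The response is published to XCom under the documented "model_response" key.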
@@ -639,7 +639,7 @@ To evaluate a model you can use
:class:`~airflow.providers.google.cloud.operators.vertex_ai.generative_model.RunEvaluationOperator`.
The operator returns the evaluation summary metrics in :ref:`XCom <concepts:xcom>` under ``summary_metrics`` key.

-.. exampleinclude:: /../../tests/system/providers/google/cloud/vertex_ai/example_vertex_ai_generative_model.py
+.. exampleinclude:: /../../providers/tests/system/google/cloud/vertex_ai/example_vertex_ai_generative_model.py
    :language: python
    :dedent: 4
    :start-after: [START how_to_cloud_vertex_ai_run_evaluation_operator]
diff --git a/docs/apache-airflow-providers-google/operators/cloud/video_intelligence.rst b/docs/apache-airflow-providers-google/operators/cloud/video_intelligence.rst
index 6b7bd2412e6e..ea9ca48bd04a 100644
--- a/docs/apache-airflow-providers-google/operators/cloud/video_intelligence.rst
+++ b/docs/apache-airflow-providers-google/operators/cloud/video_intelligence.rst
@@ -38,12 +38,12 @@ Using the operator
Input uri is an uri to a file in Google Cloud Storage

-.. exampleinclude:: /../../tests/system/providers/google/cloud/video_intelligence/example_video_intelligence.py
+.. exampleinclude:: /../../providers/tests/system/google/cloud/video_intelligence/example_video_intelligence.py
    :language: python
    :start-after: [START howto_operator_video_intelligence_other_args]
    :end-before: [END howto_operator_video_intelligence_other_args]

-.. exampleinclude:: /../../tests/system/providers/google/cloud/video_intelligence/example_video_intelligence.py
+.. exampleinclude:: /../../providers/tests/system/google/cloud/video_intelligence/example_video_intelligence.py
    :language: python
    :dedent: 4
    :start-after: [START howto_operator_video_intelligence_detect_labels]
@@ -51,7 +51,7 @@ Input uri is an uri to a file in Google Cloud Storage
You can use the annotation output via Xcom:

-.. exampleinclude:: /../../tests/system/providers/google/cloud/video_intelligence/example_video_intelligence.py
+.. exampleinclude:: /../../providers/tests/system/google/cloud/video_intelligence/example_video_intelligence.py
    :language: python
    :dedent: 4
    :start-after: [START howto_operator_video_intelligence_detect_labels_result]
@@ -60,7 +60,7 @@ You can use the annotation output via Xcom:
Templating
""""""""""

-.. literalinclude:: /../../airflow/providers/google/cloud/operators/video_intelligence.py
+.. literalinclude:: /../../providers/src/airflow/providers/google/cloud/operators/video_intelligence.py
    :language: python
    :dedent: 4
    :start-after: [START gcp_video_intelligence_detect_labels_template_fields]
@@ -87,7 +87,7 @@ Arguments
Input uri is an uri to a file in Google Cloud Storage

-.. exampleinclude:: /../../tests/system/providers/google/cloud/video_intelligence/example_video_intelligence.py
+.. exampleinclude:: /../../providers/tests/system/google/cloud/video_intelligence/example_video_intelligence.py
    :language: python
    :start-after: [START howto_operator_video_intelligence_other_args]
    :end-before: [END howto_operator_video_intelligence_other_args]
@@ -95,7 +95,7 @@ Input uri is an uri to a file in Google Cloud Storage
Using the operator
""""""""""""""""""

-.. exampleinclude:: /../../tests/system/providers/google/cloud/video_intelligence/example_video_intelligence.py
+.. exampleinclude:: /../../providers/tests/system/google/cloud/video_intelligence/example_video_intelligence.py
    :language: python
    :dedent: 4
    :start-after: [START howto_operator_video_intelligence_detect_explicit_content]
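For the video-intelligence hunks above, a minimal sketch of label detection with the annotation read back from XCom; the GCS uri is a placeholder, and the shape of the pulled response (``annotationResults``) is an assumption based on the API's JSON form:

.. code-block:: python

    from __future__ import annotations

    from datetime import datetime

    from airflow import DAG
    from airflow.operators.bash import BashOperator
    from airflow.providers.google.cloud.operators.video_intelligence import (
        CloudVideoIntelligenceDetectVideoLabelsOperator,
    )

    INPUT_URI = "gs://my-bucket/my-video.mp4"  # placeholder

    with DAG(dag_id="example_video_intelligence_sketch", start_date=datetime(2024, 1, 1), schedule=None) as dag:
        detect_video_labels = CloudVideoIntelligenceDetectVideoLabelsOperator(
            task_id="detect_video_labels",
            input_uri=INPUT_URI,
            output_uri=None,  # return the annotation inline instead of writing it to GCS
            video_context=None,
            timeout=5,
        )
        # The raw annotation response is available through XCom on the detect task.
        show_result = BashOperator(
            task_id="show_result",
            bash_command="echo \"{{ task_instance.xcom_pull('detect_video_labels')['annotationResults'][0] }}\"",
        )
        detect_video_labels >> show_result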
@@ -103,7 +103,7 @@ Using the operator
You can use the annotation output via Xcom:

-.. exampleinclude:: /../../tests/system/providers/google/cloud/video_intelligence/example_video_intelligence.py
+.. exampleinclude:: /../../providers/tests/system/google/cloud/video_intelligence/example_video_intelligence.py
    :language: python
    :dedent: 4
    :start-after: [START howto_operator_video_intelligence_detect_explicit_content_result]
@@ -112,7 +112,7 @@ You can use the annotation output via Xcom:
Templating
""""""""""

-.. literalinclude:: /../../airflow/providers/google/cloud/operators/video_intelligence.py
+.. literalinclude:: /../../providers/src/airflow/providers/google/cloud/operators/video_intelligence.py
    :language: python
    :dedent: 4
    :start-after: [START gcp_video_intelligence_detect_explicit_content_template_fields]
@@ -139,7 +139,7 @@ Arguments
Input uri is an uri to a file in Google Cloud Storage

-.. exampleinclude:: /../../tests/system/providers/google/cloud/video_intelligence/example_video_intelligence.py
+.. exampleinclude:: /../../providers/tests/system/google/cloud/video_intelligence/example_video_intelligence.py
    :language: python
    :start-after: [START howto_operator_video_intelligence_other_args]
    :end-before: [END howto_operator_video_intelligence_other_args]
@@ -147,7 +147,7 @@ Input uri is an uri to a file in Google Cloud Storage
Using the operator
""""""""""""""""""

-.. exampleinclude:: /../../tests/system/providers/google/cloud/video_intelligence/example_video_intelligence.py
+.. exampleinclude:: /../../providers/tests/system/google/cloud/video_intelligence/example_video_intelligence.py
    :language: python
    :dedent: 4
    :start-after: [START howto_operator_video_intelligence_detect_video_shots]
@@ -155,7 +155,7 @@ Using the operator
You can use the annotation output via Xcom:

-.. exampleinclude:: /../../tests/system/providers/google/cloud/video_intelligence/example_video_intelligence.py
+.. exampleinclude:: /../../providers/tests/system/google/cloud/video_intelligence/example_video_intelligence.py
    :language: python
    :dedent: 4
    :start-after: [START howto_operator_video_intelligence_detect_video_shots_result]
@@ -164,7 +164,7 @@ You can use the annotation output via Xcom:
Templating
""""""""""

-.. literalinclude:: /../../airflow/providers/google/cloud/operators/video_intelligence.py
+.. literalinclude:: /../../providers/src/airflow/providers/google/cloud/operators/video_intelligence.py
    :language: python
    :dedent: 4
    :start-after: [START gcp_video_intelligence_detect_video_shots_template_fields]
diff --git a/docs/apache-airflow-providers-google/operators/cloud/vision.rst b/docs/apache-airflow-providers-google/operators/cloud/vision.rst
index 393f637dc3c5..897b4ead4f16 100644
--- a/docs/apache-airflow-providers-google/operators/cloud/vision.rst
+++ b/docs/apache-airflow-providers-google/operators/cloud/vision.rst
@@ -42,17 +42,17 @@ We are using the :class:`~google.cloud.vision_v1.types.Product`,
:class:`~google.cloud.vision_v1.types.ProductSet` and :class:`~google.api_core.retry.Retry` objects from
Google libraries:

-.. exampleinclude:: /../../tests/system/providers/google/cloud/vision/example_vision_autogenerated.py
+.. exampleinclude:: /../../providers/tests/system/google/cloud/vision/example_vision_autogenerated.py
    :language: python
    :start-after: [START howto_operator_vision_retry_import]
    :end-before: [END howto_operator_vision_retry_import]

-.. exampleinclude:: /../../tests/system/providers/google/cloud/vision/example_vision_autogenerated.py
+.. exampleinclude:: /../../providers/tests/system/google/cloud/vision/example_vision_autogenerated.py
    :language: python
    :start-after: [START howto_operator_vision_product_set_import]
    :end-before: [END howto_operator_vision_product_set_import]
-.. exampleinclude:: /../../tests/system/providers/google/cloud/vision/example_vision_autogenerated.py
+.. exampleinclude:: /../../providers/tests/system/google/cloud/vision/example_vision_autogenerated.py
    :language: python
    :start-after: [START howto_operator_vision_product_import]
    :end-before: [END howto_operator_vision_product_import]
@@ -60,7 +60,7 @@ Google libraries:
If ``product_set_id`` and ``product_id`` was generated by the API it can be extracted from XCOM:

-.. exampleinclude:: /../../tests/system/providers/google/cloud/vision/example_vision_autogenerated.py
+.. exampleinclude:: /../../providers/tests/system/google/cloud/vision/example_vision_autogenerated.py
    :language: python
    :dedent: 4
    :start-after: [START howto_operator_vision_add_product_to_product_set]
@@ -68,7 +68,7 @@ If ``product_set_id`` and ``product_id`` was generated by the API it can be extr
Otherwise it can be specified explicitly:

-.. exampleinclude:: /../../tests/system/providers/google/cloud/vision/example_vision_explicit.py
+.. exampleinclude:: /../../providers/tests/system/google/cloud/vision/example_vision_explicit.py
    :language: python
    :dedent: 4
    :start-after: [START howto_operator_vision_add_product_to_product_set_2]
@@ -78,7 +78,7 @@ Otherwise it can be specified explicitly:
Templating
""""""""""

-.. literalinclude:: /../../airflow/providers/google/cloud/operators/vision.py
+.. literalinclude:: /../../providers/src/airflow/providers/google/cloud/operators/vision.py
    :language: python
    :dedent: 4
    :start-after: [START vision_add_product_to_product_set_template_fields]
@@ -107,18 +107,18 @@ Using the operator
We are using the :class:`~google.cloud.vision.enums` and :class:`~google.api_core.retry.Retry` objects from
Google libraries:

-.. exampleinclude:: /../../tests/system/providers/google/cloud/vision/example_vision_annotate_image.py
+.. exampleinclude:: /../../providers/tests/system/google/cloud/vision/example_vision_annotate_image.py
    :language: python
    :start-after: [START howto_operator_vision_retry_import]
    :end-before: [END howto_operator_vision_retry_import]

-.. exampleinclude:: /../../tests/system/providers/google/cloud/vision/example_vision_annotate_image.py
+.. exampleinclude:: /../../providers/tests/system/google/cloud/vision/example_vision_annotate_image.py
    :language: python
    :start-after: [START howto_operator_vision_enums_import]
    :end-before: [END howto_operator_vision_enums_import]

-.. exampleinclude:: /../../tests/system/providers/google/cloud/vision/example_vision_annotate_image.py
+.. exampleinclude:: /../../providers/tests/system/google/cloud/vision/example_vision_annotate_image.py
    :language: python
    :dedent: 4
    :start-after: [START howto_operator_vision_annotate_image]
@@ -126,7 +126,7 @@ Google libraries:
The result can be extracted from XCOM:

-.. exampleinclude:: /../../tests/system/providers/google/cloud/vision/example_vision_annotate_image.py
+.. exampleinclude:: /../../providers/tests/system/google/cloud/vision/example_vision_annotate_image.py
    :language: python
    :dedent: 4
    :start-after: [START howto_operator_vision_annotate_image_result]
@@ -136,7 +136,7 @@ The result can be extracted from XCOM:
Templating
""""""""""

-.. literalinclude:: /../../airflow/providers/google/cloud/operators/vision.py
+.. literalinclude:: /../../providers/src/airflow/providers/google/cloud/operators/vision.py
    :language: python
    :dedent: 4
    :start-after: [START vision_annotate_image_template_fields]
@@ -169,24 +169,24 @@ Using the operator
We are using the ``Product`` and :class:`~google.api_core.retry.Retry` objects from
Google libraries:

-.. exampleinclude:: /../../tests/system/providers/google/cloud/vision/example_vision_autogenerated.py
+.. exampleinclude:: /../../providers/tests/system/google/cloud/vision/example_vision_autogenerated.py
    :language: python
    :start-after: [START howto_operator_vision_product_import]
    :end-before: [END howto_operator_vision_product_import]

-.. exampleinclude:: /../../tests/system/providers/google/cloud/vision/example_vision_autogenerated.py
+.. exampleinclude:: /../../providers/tests/system/google/cloud/vision/example_vision_autogenerated.py
    :language: python
    :start-after: [START howto_operator_vision_retry_import]
    :end-before: [END howto_operator_vision_retry_import]

-.. exampleinclude:: /../../tests/system/providers/google/cloud/vision/example_vision_autogenerated.py
+.. exampleinclude:: /../../providers/tests/system/google/cloud/vision/example_vision_autogenerated.py
    :language: python
    :start-after: [START howto_operator_vision_product]
    :end-before: [END howto_operator_vision_product]

The ``product_id`` argument can be omitted (it will be generated by the API):

-.. exampleinclude:: /../../tests/system/providers/google/cloud/vision/example_vision_autogenerated.py
+.. exampleinclude:: /../../providers/tests/system/google/cloud/vision/example_vision_autogenerated.py
    :language: python
    :dedent: 4
    :start-after: [START howto_operator_vision_product_create]
@@ -194,7 +194,7 @@ The ``product_id`` argument can be omitted (it will be generated by the API):
Or it can be specified explicitly:

-.. exampleinclude:: /../../tests/system/providers/google/cloud/vision/example_vision_explicit.py
+.. exampleinclude:: /../../providers/tests/system/google/cloud/vision/example_vision_explicit.py
    :language: python
    :dedent: 4
    :start-after: [START howto_operator_vision_product_create_2]
@@ -204,7 +204,7 @@ Or it can be specified explicitly:
Templating
""""""""""

-.. literalinclude:: /../../airflow/providers/google/cloud/operators/vision.py
+.. literalinclude:: /../../providers/src/airflow/providers/google/cloud/operators/vision.py
    :language: python
    :dedent: 4
    :start-after: [START vision_product_create_template_fields]
@@ -239,7 +239,7 @@ Using the operator
If ``product_id`` was generated by the API it can be extracted from XCOM:

-.. exampleinclude:: /../../tests/system/providers/google/cloud/vision/example_vision_autogenerated.py
+.. exampleinclude:: /../../providers/tests/system/google/cloud/vision/example_vision_autogenerated.py
    :language: python
    :dedent: 4
    :start-after: [START howto_operator_vision_product_delete]
@@ -247,7 +247,7 @@ If ``product_id`` was generated by the API it can be extracted from XCOM:
Otherwise it can be specified explicitly:

-.. exampleinclude:: /../../tests/system/providers/google/cloud/vision/example_vision_explicit.py
+.. exampleinclude:: /../../providers/tests/system/google/cloud/vision/example_vision_explicit.py
    :language: python
    :dedent: 4
    :start-after: [START howto_operator_vision_product_delete_2]
@@ -256,7 +256,7 @@ Otherwise it can be specified explicitly:
Templating
""""""""""

-.. literalinclude:: /../../airflow/providers/google/cloud/operators/vision.py
+.. literalinclude:: /../../providers/src/airflow/providers/google/cloud/operators/vision.py
    :language: python
    :dedent: 4
    :start-after: [START vision_product_delete_template_fields]
@@ -287,7 +287,7 @@ Using the operator
If ``product_id`` was generated by the API it can be extracted from XCOM:

-.. exampleinclude:: /../../tests/system/providers/google/cloud/vision/example_vision_autogenerated.py
+.. exampleinclude:: /../../providers/tests/system/google/cloud/vision/example_vision_autogenerated.py
    :language: python
    :dedent: 4
    :start-after: [START howto_operator_vision_product_get]
@@ -295,7 +295,7 @@ If ``product_id`` was generated by the API it can be extracted from XCOM:
Otherwise it can be specified explicitly:

-.. exampleinclude:: /../../tests/system/providers/google/cloud/vision/example_vision_explicit.py
+.. exampleinclude:: /../../providers/tests/system/google/cloud/vision/example_vision_explicit.py
    :language: python
    :dedent: 4
    :start-after: [START howto_operator_vision_product_get_2]
@@ -304,7 +304,7 @@ Otherwise it can be specified explicitly:
Templating
""""""""""

-.. literalinclude:: /../../airflow/providers/google/cloud/operators/vision.py
+.. literalinclude:: /../../providers/src/airflow/providers/google/cloud/operators/vision.py
    :language: python
    :dedent: 4
    :start-after: [START vision_product_get_template_fields]
@@ -331,24 +331,24 @@ Using the operator
We are using the ``ProductSet`` and :class:`~google.api_core.retry.Retry` objects from
Google libraries:

-.. exampleinclude:: /../../tests/system/providers/google/cloud/vision/example_vision_autogenerated.py
+.. exampleinclude:: /../../providers/tests/system/google/cloud/vision/example_vision_autogenerated.py
    :language: python
    :start-after: [START howto_operator_vision_product_set_import]
    :end-before: [END howto_operator_vision_product_set_import]

-.. exampleinclude:: /../../tests/system/providers/google/cloud/vision/example_vision_autogenerated.py
+.. exampleinclude:: /../../providers/tests/system/google/cloud/vision/example_vision_autogenerated.py
    :language: python
    :start-after: [START howto_operator_vision_retry_import]
    :end-before: [END howto_operator_vision_retry_import]

-.. exampleinclude:: /../../tests/system/providers/google/cloud/vision/example_vision_autogenerated.py
+.. exampleinclude:: /../../providers/tests/system/google/cloud/vision/example_vision_autogenerated.py
    :language: python
    :start-after: [START howto_operator_vision_product_set]
    :end-before: [END howto_operator_vision_product_set]

The ``product_set_id`` argument can be omitted (it will be generated by the API):

-.. exampleinclude:: /../../tests/system/providers/google/cloud/vision/example_vision_autogenerated.py
+.. exampleinclude:: /../../providers/tests/system/google/cloud/vision/example_vision_autogenerated.py
    :language: python
    :dedent: 4
    :start-after: [START howto_operator_vision_product_set_create]
@@ -356,7 +356,7 @@ The ``product_set_id`` argument can be omitted (it will be generated by the API)
Or it can be specified explicitly:

-.. exampleinclude:: /../../tests/system/providers/google/cloud/vision/example_vision_explicit.py
+.. exampleinclude:: /../../providers/tests/system/google/cloud/vision/example_vision_explicit.py
    :language: python
    :dedent: 4
    :start-after: [START howto_operator_vision_product_set_create_2]
@@ -366,7 +366,7 @@ Or it can be specified explicitly:
Templating
""""""""""

-.. literalinclude:: /../../airflow/providers/google/cloud/operators/vision.py
+.. literalinclude:: /../../providers/src/airflow/providers/google/cloud/operators/vision.py
    :language: python
    :dedent: 4
    :start-after: [START vision_productset_create_template_fields]
@@ -395,7 +395,7 @@ Using the operator
If ``product_set_id`` was generated by the API it can be extracted from XCOM:

-.. exampleinclude:: /../../tests/system/providers/google/cloud/vision/example_vision_autogenerated.py
+.. exampleinclude:: /../../providers/tests/system/google/cloud/vision/example_vision_autogenerated.py
    :language: python
    :dedent: 4
    :start-after: [START howto_operator_vision_product_set_delete]
@@ -403,7 +403,7 @@ If ``product_set_id`` was generated by the API it can be extracted from XCOM:
Otherwise it can be specified explicitly:

-.. exampleinclude:: /../../tests/system/providers/google/cloud/vision/example_vision_explicit.py
+.. exampleinclude:: /../../providers/tests/system/google/cloud/vision/example_vision_explicit.py
    :language: python
    :dedent: 4
    :start-after: [START howto_operator_vision_product_set_delete_2]
@@ -412,7 +412,7 @@ Otherwise it can be specified explicitly:
Templating
""""""""""

-.. literalinclude:: /../../airflow/providers/google/cloud/operators/vision.py
+.. literalinclude:: /../../providers/src/airflow/providers/google/cloud/operators/vision.py
    :language: python
    :dedent: 4
    :start-after: [START vision_productset_delete_template_fields]
@@ -439,7 +439,7 @@ Using the operator
If ``product_set_id`` was generated by the API it can be extracted from XCOM:

-.. exampleinclude:: /../../tests/system/providers/google/cloud/vision/example_vision_autogenerated.py
+.. exampleinclude:: /../../providers/tests/system/google/cloud/vision/example_vision_autogenerated.py
    :language: python
    :dedent: 4
    :start-after: [START howto_operator_vision_product_set_get]
@@ -447,7 +447,7 @@ If ``product_set_id`` was generated by the API it can be extracted from XCOM:
Otherwise it can be specified explicitly:

-.. exampleinclude:: /../../tests/system/providers/google/cloud/vision/example_vision_explicit.py
+.. exampleinclude:: /../../providers/tests/system/google/cloud/vision/example_vision_explicit.py
    :language: python
    :dedent: 4
    :start-after: [START howto_operator_vision_product_set_get_2]
@@ -456,7 +456,7 @@ Otherwise it can be specified explicitly:
Templating
""""""""""

-.. literalinclude:: /../../airflow/providers/google/cloud/operators/vision.py
+.. literalinclude:: /../../providers/src/airflow/providers/google/cloud/operators/vision.py
    :language: python
    :dedent: 4
    :start-after: [START vision_productset_get_template_fields]
@@ -495,12 +495,12 @@ Using the operator
We are using the ``ProductSet`` object from the Google Cloud Vision library:

-.. exampleinclude:: /../../tests/system/providers/google/cloud/vision/example_vision_autogenerated.py
+.. exampleinclude:: /../../providers/tests/system/google/cloud/vision/example_vision_autogenerated.py
    :language: python
    :start-after: [START howto_operator_vision_product_set_import]
    :end-before: [END howto_operator_vision_product_set_import]

-.. exampleinclude:: /../../tests/system/providers/google/cloud/vision/example_vision_autogenerated.py
+.. exampleinclude:: /../../providers/tests/system/google/cloud/vision/example_vision_autogenerated.py
    :language: python
    :start-after: [START howto_operator_vision_product_set]
    :end-before: [END howto_operator_vision_product_set]
@@ -509,7 +509,7 @@ Initialization of the task:
If ``product_set_id`` was generated by the API it can be extracted from XCOM:
-.. exampleinclude:: /../../tests/system/providers/google/cloud/vision/example_vision_autogenerated.py
+.. exampleinclude:: /../../providers/tests/system/google/cloud/vision/example_vision_autogenerated.py
    :language: python
    :dedent: 4
    :start-after: [START howto_operator_vision_product_set_update]
@@ -517,7 +517,7 @@ If ``product_set_id`` was generated by the API it can be extracted from XCOM:
Otherwise it can be specified explicitly:

-.. exampleinclude:: /../../tests/system/providers/google/cloud/vision/example_vision_explicit.py
+.. exampleinclude:: /../../providers/tests/system/google/cloud/vision/example_vision_explicit.py
    :language: python
    :dedent: 4
    :start-after: [START howto_operator_vision_product_set_update_2]
@@ -526,7 +526,7 @@ Otherwise it can be specified explicitly:
Templating
""""""""""

-.. literalinclude:: /../../airflow/providers/google/cloud/operators/vision.py
+.. literalinclude:: /../../providers/src/airflow/providers/google/cloud/operators/vision.py
    :language: python
    :dedent: 4
    :start-after: [START vision_productset_update_template_fields]
@@ -576,19 +576,19 @@ Using the operator
We are using the ``Product`` object from the Google Cloud Vision library:

-.. exampleinclude:: /../../tests/system/providers/google/cloud/vision/example_vision_autogenerated.py
+.. exampleinclude:: /../../providers/tests/system/google/cloud/vision/example_vision_autogenerated.py
    :language: python
    :start-after: [START howto_operator_vision_product_import]
    :end-before: [END howto_operator_vision_product_import]

-.. exampleinclude:: /../../tests/system/providers/google/cloud/vision/example_vision_autogenerated.py
+.. exampleinclude:: /../../providers/tests/system/google/cloud/vision/example_vision_autogenerated.py
    :language: python
    :start-after: [START howto_operator_vision_product]
    :end-before: [END howto_operator_vision_product]

If ``product_id`` was generated by the API it can be extracted from XCOM:

-.. exampleinclude:: /../../tests/system/providers/google/cloud/vision/example_vision_autogenerated.py
+.. exampleinclude:: /../../providers/tests/system/google/cloud/vision/example_vision_autogenerated.py
    :language: python
    :dedent: 4
    :start-after: [START howto_operator_vision_product_update]
@@ -596,7 +596,7 @@ If ``product_id`` was generated by the API it can be extracted from XCOM:
Otherwise it can be specified explicitly:

-.. exampleinclude:: /../../tests/system/providers/google/cloud/vision/example_vision_explicit.py
+.. exampleinclude:: /../../providers/tests/system/google/cloud/vision/example_vision_explicit.py
    :language: python
    :dedent: 4
    :start-after: [START howto_operator_vision_product_update_2]
@@ -605,7 +605,7 @@ Otherwise it can be specified explicitly:
Templating
""""""""""

-.. literalinclude:: /../../airflow/providers/google/cloud/operators/vision.py
+.. literalinclude:: /../../providers/src/airflow/providers/google/cloud/operators/vision.py
    :language: python
    :dedent: 4
    :start-after: [START vision_product_update_template_fields]
@@ -632,24 +632,24 @@ Using the operator
We are using the :class:`~google.cloud.vision_v1.types.ReferenceImage` and :class:`~google.api_core.retry.Retry`
objects from Google libraries:

-.. exampleinclude:: /../../tests/system/providers/google/cloud/vision/example_vision_autogenerated.py
+.. exampleinclude:: /../../providers/tests/system/google/cloud/vision/example_vision_autogenerated.py
    :language: python
    :start-after: [START howto_operator_vision_reference_image_import]
    :end-before: [END howto_operator_vision_reference_image_import]
-.. exampleinclude:: /../../tests/system/providers/google/cloud/vision/example_vision_autogenerated.py
+.. exampleinclude:: /../../providers/tests/system/google/cloud/vision/example_vision_autogenerated.py
    :language: python
    :start-after: [START howto_operator_vision_retry_import]
    :end-before: [END howto_operator_vision_retry_import]

-.. exampleinclude:: /../../tests/system/providers/google/cloud/vision/example_vision_autogenerated.py
+.. exampleinclude:: /../../providers/tests/system/google/cloud/vision/example_vision_autogenerated.py
    :language: python
    :start-after: [START howto_operator_vision_reference_image]
    :end-before: [END howto_operator_vision_reference_image]

The ``product_set_id`` argument can be omitted (it will be generated by the API):

-.. exampleinclude:: /../../tests/system/providers/google/cloud/vision/example_vision_autogenerated.py
+.. exampleinclude:: /../../providers/tests/system/google/cloud/vision/example_vision_autogenerated.py
    :language: python
    :dedent: 4
    :start-after: [START howto_operator_vision_reference_image_create]
@@ -657,7 +657,7 @@ The ``product_set_id`` argument can be omitted (it will be generated by the API)
Or it can be specified explicitly:

-.. exampleinclude:: /../../tests/system/providers/google/cloud/vision/example_vision_explicit.py
+.. exampleinclude:: /../../providers/tests/system/google/cloud/vision/example_vision_explicit.py
    :language: python
    :dedent: 4
    :start-after: [START howto_operator_vision_reference_image_create_2]
@@ -667,7 +667,7 @@ Or it can be specified explicitly:
Templating
""""""""""

-.. literalinclude:: /../../airflow/providers/google/cloud/operators/vision.py
+.. literalinclude:: /../../providers/src/airflow/providers/google/cloud/operators/vision.py
    :language: python
    :dedent: 4
    :start-after: [START vision_reference_image_create_template_fields]
@@ -694,24 +694,24 @@ Using the operator
We are using the :class:`~google.cloud.vision_v1.types.ReferenceImage` and :class:`~google.api_core.retry.Retry`
objects from Google libraries:

-.. exampleinclude:: /../../tests/system/providers/google/cloud/vision/example_vision_autogenerated.py
+.. exampleinclude:: /../../providers/tests/system/google/cloud/vision/example_vision_autogenerated.py
    :language: python
    :start-after: [START howto_operator_vision_reference_image_import]
    :end-before: [END howto_operator_vision_reference_image_import]

-.. exampleinclude:: /../../tests/system/providers/google/cloud/vision/example_vision_autogenerated.py
+.. exampleinclude:: /../../providers/tests/system/google/cloud/vision/example_vision_autogenerated.py
    :language: python
    :start-after: [START howto_operator_vision_retry_import]
    :end-before: [END howto_operator_vision_retry_import]

-.. exampleinclude:: /../../tests/system/providers/google/cloud/vision/example_vision_autogenerated.py
+.. exampleinclude:: /../../providers/tests/system/google/cloud/vision/example_vision_autogenerated.py
    :language: python
    :start-after: [START howto_operator_vision_reference_image]
    :end-before: [END howto_operator_vision_reference_image]

The ``product_set_id`` argument can be omitted (it will be generated by the API):

-.. exampleinclude:: /../../tests/system/providers/google/cloud/vision/example_vision_autogenerated.py
exampleinclude:: /../../providers/tests/system/google/cloud/vision/example_vision_autogenerated.py :language: python :dedent: 4 :start-after: [START howto_operator_vision_reference_image_delete] @@ -719,7 +719,7 @@ The ``product_set_id`` argument can be omitted (it will be generated by the API) Or it can be specified explicitly: -.. exampleinclude:: /../../tests/system/providers/google/cloud/vision/example_vision_explicit.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/vision/example_vision_explicit.py :language: python :dedent: 4 :start-after: [START howto_operator_vision_reference_image_delete_2] @@ -729,7 +729,7 @@ Or it can be specified explicitly: Templating """""""""" -.. literalinclude:: /../../airflow/providers/google/cloud/operators/vision.py +.. literalinclude:: /../../providers/src/airflow/providers/google/cloud/operators/vision.py :language: python :dedent: 4 :start-after: [START vision_reference_image_create_template_fields] @@ -758,17 +758,17 @@ We are using the :class:`~google.cloud.vision_v1.types.Product`, :class:`~google.cloud.vision_v1.types.ProductSet` and :class:`~google.api_core.retry.Retry` objects from Google libraries: -.. exampleinclude:: /../../tests/system/providers/google/cloud/vision/example_vision_autogenerated.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/vision/example_vision_autogenerated.py :language: python :start-after: [START howto_operator_vision_retry_import] :end-before: [END howto_operator_vision_retry_import] -.. exampleinclude:: /../../tests/system/providers/google/cloud/vision/example_vision_autogenerated.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/vision/example_vision_autogenerated.py :language: python :start-after: [START howto_operator_vision_product_set_import] :end-before: [END howto_operator_vision_product_set_import] -.. exampleinclude:: /../../tests/system/providers/google/cloud/vision/example_vision_autogenerated.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/vision/example_vision_autogenerated.py :language: python :start-after: [START howto_operator_vision_product_import] :end-before: [END howto_operator_vision_product_import] @@ -776,7 +776,7 @@ If ``product_set_id`` and ``product_id`` were generated by the API they can be extracted from XCOM: -.. exampleinclude:: /../../tests/system/providers/google/cloud/vision/example_vision_autogenerated.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/vision/example_vision_autogenerated.py :language: python :dedent: 4 :start-after: [START howto_operator_vision_remove_product_from_product_set] @@ -784,7 +784,7 @@ If ``product_set_id`` and ``product_id`` was generated by the API it can be extr Otherwise it can be specified explicitly: -.. exampleinclude:: /../../tests/system/providers/google/cloud/vision/example_vision_explicit.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/vision/example_vision_explicit.py :language: python :dedent: 4 :start-after: [START howto_operator_vision_remove_product_from_product_set_2] @@ -794,7 +794,7 @@ Otherwise it can be specified explicitly: Templating """""""""" -.. literalinclude:: /../../airflow/providers/google/cloud/operators/vision.py +.. 
literalinclude:: /../../providers/src/airflow/providers/google/cloud/operators/vision.py :language: python :dedent: 4 :start-after: [START vision_remove_product_from_product_set_template_fields] @@ -824,12 +824,12 @@ Using the operator We are using the :class:`~google.api_core.retry.Retry` objects from Google libraries: -.. exampleinclude:: /../../tests/system/providers/google/cloud/vision/example_vision_annotate_image.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/vision/example_vision_annotate_image.py :language: python :start-after: [START howto_operator_vision_retry_import] :end-before: [END howto_operator_vision_retry_import] -.. exampleinclude:: /../../tests/system/providers/google/cloud/vision/example_vision_annotate_image.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/vision/example_vision_annotate_image.py :language: python :dedent: 4 :start-after: [START howto_operator_vision_detect_text] @@ -837,7 +837,7 @@ Google libraries: The result can be extracted from XCOM: -.. exampleinclude:: /../../tests/system/providers/google/cloud/vision/example_vision_annotate_image.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/vision/example_vision_annotate_image.py :language: python :dedent: 4 :start-after: [START howto_operator_vision_detect_text_result] @@ -847,7 +847,7 @@ The result can be extracted from XCOM: Templating """""""""" -.. literalinclude:: /../../airflow/providers/google/cloud/operators/vision.py +.. literalinclude:: /../../providers/src/airflow/providers/google/cloud/operators/vision.py :language: python :dedent: 4 :start-after: [START vision_detect_text_set_template_fields] @@ -876,12 +876,12 @@ Using the operator We are using the :class:`~google.api_core.retry.Retry` objects from Google libraries: -.. exampleinclude:: /../../tests/system/providers/google/cloud/vision/example_vision_annotate_image.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/vision/example_vision_annotate_image.py :language: python :start-after: [START howto_operator_vision_retry_import] :end-before: [END howto_operator_vision_retry_import] -.. exampleinclude:: /../../tests/system/providers/google/cloud/vision/example_vision_annotate_image.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/vision/example_vision_annotate_image.py :language: python :dedent: 4 :start-after: [START howto_operator_vision_document_detect_text] @@ -889,7 +889,7 @@ Google libraries: The result can be extracted from XCOM: -.. exampleinclude:: /../../tests/system/providers/google/cloud/vision/example_vision_annotate_image.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/vision/example_vision_annotate_image.py :language: python :dedent: 4 :start-after: [START howto_operator_vision_document_detect_text_result] @@ -899,7 +899,7 @@ The result can be extracted from XCOM: Templating """""""""" -.. literalinclude:: /../../airflow/providers/google/cloud/operators/vision.py +.. literalinclude:: /../../providers/src/airflow/providers/google/cloud/operators/vision.py :language: python :dedent: 4 :start-after: [START vision_document_detect_text_set_template_fields] @@ -929,12 +929,12 @@ Using the operator We are using the :class:`~google.api_core.retry.Retry` objects from Google libraries: -.. exampleinclude:: /../../tests/system/providers/google/cloud/vision/example_vision_annotate_image.py +.. 
exampleinclude:: /../../providers/tests/system/google/cloud/vision/example_vision_annotate_image.py :language: python :start-after: [START howto_operator_vision_retry_import] :end-before: [END howto_operator_vision_retry_import] -.. exampleinclude:: /../../tests/system/providers/google/cloud/vision/example_vision_annotate_image.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/vision/example_vision_annotate_image.py :language: python :dedent: 4 :start-after: [START howto_operator_vision_detect_labels] @@ -942,7 +942,7 @@ Google libraries: The result can be extracted from XCOM: -.. exampleinclude:: /../../tests/system/providers/google/cloud/vision/example_vision_annotate_image.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/vision/example_vision_annotate_image.py :language: python :dedent: 4 :start-after: [START howto_operator_vision_detect_labels_result] @@ -952,7 +952,7 @@ The result can be extracted from XCOM: Templating """""""""" -.. literalinclude:: /../../airflow/providers/google/cloud/operators/vision.py +.. literalinclude:: /../../providers/src/airflow/providers/google/cloud/operators/vision.py :language: python :dedent: 4 :start-after: [START vision_detect_labels_template_fields] @@ -981,12 +981,12 @@ Using the operator We are using the :class:`~google.api_core.retry.Retry` objects from Google libraries: -.. exampleinclude:: /../../tests/system/providers/google/cloud/vision/example_vision_annotate_image.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/vision/example_vision_annotate_image.py :language: python :start-after: [START howto_operator_vision_retry_import] :end-before: [END howto_operator_vision_retry_import] -.. exampleinclude:: /../../tests/system/providers/google/cloud/vision/example_vision_annotate_image.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/vision/example_vision_annotate_image.py :language: python :dedent: 4 :start-after: [START howto_operator_vision_detect_safe_search] @@ -994,7 +994,7 @@ Google libraries: The result can be extracted from XCOM: -.. exampleinclude:: /../../tests/system/providers/google/cloud/vision/example_vision_annotate_image.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/vision/example_vision_annotate_image.py :language: python :dedent: 4 :start-after: [START howto_operator_vision_detect_safe_search_result] @@ -1004,7 +1004,7 @@ The result can be extracted from XCOM: Templating """""""""" -.. literalinclude:: /../../airflow/providers/google/cloud/operators/vision.py +.. literalinclude:: /../../providers/src/airflow/providers/google/cloud/operators/vision.py :language: python :dedent: 4 :start-after: [START vision_detect_safe_search_template_fields] diff --git a/docs/apache-airflow-providers-google/operators/cloud/workflows.rst b/docs/apache-airflow-providers-google/operators/cloud/workflows.rst index 79c7a2072419..aa391cb0189e 100644 --- a/docs/apache-airflow-providers-google/operators/cloud/workflows.rst +++ b/docs/apache-airflow-providers-google/operators/cloud/workflows.rst @@ -39,7 +39,7 @@ Create workflow To create a workflow use :class:`~airflow.providers.google.cloud.operators.workflows.WorkflowsCreateWorkflowOperator`. -.. exampleinclude:: /../../tests/system/providers/google/cloud/workflows/example_workflows.py +.. 
exampleinclude:: /../../providers/tests/system/google/cloud/workflows/example_workflows.py :language: python :dedent: 4 :start-after: [START how_to_create_workflow] @@ -47,7 +47,7 @@ To create a workflow use The workflow should be defined in a similar way to this example: -.. exampleinclude:: /../../tests/system/providers/google/cloud/workflows/example_workflows.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/workflows/example_workflows.py :language: python :dedent: 0 :start-after: [START how_to_define_workflow] @@ -65,7 +65,7 @@ Update workflow To update a workflow use :class:`~airflow.providers.google.cloud.operators.workflows.WorkflowsUpdateWorkflowOperator`. -.. exampleinclude:: /../../tests/system/providers/google/cloud/workflows/example_workflows.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/workflows/example_workflows.py :language: python :dedent: 4 :start-after: [START how_to_update_workflow] @@ -79,7 +79,7 @@ Get workflow To get a workflow use :class:`~airflow.providers.google.cloud.operators.workflows.WorkflowsGetWorkflowOperator`. -.. exampleinclude:: /../../tests/system/providers/google/cloud/workflows/example_workflows.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/workflows/example_workflows.py :language: python :dedent: 4 :start-after: [START how_to_get_workflow] @@ -93,7 +93,7 @@ List workflows To list workflows use :class:`~airflow.providers.google.cloud.operators.workflows.WorkflowsListWorkflowsOperator`. -.. exampleinclude:: /../../tests/system/providers/google/cloud/workflows/example_workflows.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/workflows/example_workflows.py :language: python :dedent: 4 :start-after: [START how_to_list_workflows] @@ -107,7 +107,7 @@ Delete workflow To delete a workflow use :class:`~airflow.providers.google.cloud.operators.workflows.WorkflowsDeleteWorkflowOperator`. -.. exampleinclude:: /../../tests/system/providers/google/cloud/workflows/example_workflows.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/workflows/example_workflows.py :language: python :dedent: 4 :start-after: [START how_to_delete_workflow] @@ -122,7 +122,7 @@ To create an execution use :class:`~airflow.providers.google.cloud.operators.workflows.WorkflowsCreateExecutionOperator`. This operator is not idempotent due to an API limitation. -.. exampleinclude:: /../../tests/system/providers/google/cloud/workflows/example_workflows.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/workflows/example_workflows.py :language: python :dedent: 4 :start-after: [START how_to_create_execution] @@ -131,7 +131,7 @@ This operator is not idempotent due to API limitation. The create operator does not wait for execution to complete. To wait for the execution result use :class:`~airflow.providers.google.cloud.sensors.workflows.WorkflowExecutionSensor`. -.. exampleinclude:: /../../tests/system/providers/google/cloud/workflows/example_workflows.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/workflows/example_workflows.py :language: python :dedent: 4 :start-after: [START how_to_wait_for_execution] @@ -145,7 +145,7 @@ Get execution To get an execution use :class:`~airflow.providers.google.cloud.operators.workflows.WorkflowsGetExecutionOperator`.
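To see how these pieces fit together, here is a minimal sketch of a DAG that creates a workflow, starts an execution, and waits for it to finish. The project, location, workflow ID, and the one-step workflow body are hypothetical placeholders, not values taken from the example files referenced here:

.. code-block:: python

    import datetime

    from airflow import DAG
    from airflow.providers.google.cloud.operators.workflows import (
        WorkflowsCreateExecutionOperator,
        WorkflowsCreateWorkflowOperator,
    )
    from airflow.providers.google.cloud.sensors.workflows import WorkflowExecutionSensor

    PROJECT_ID = "my-project"  # placeholder
    LOCATION = "us-central1"  # placeholder
    WORKFLOW_ID = "demo-workflow"  # placeholder

    with DAG("workflows_sketch", start_date=datetime.datetime(2024, 1, 1), schedule=None, catchup=False):
        create_workflow = WorkflowsCreateWorkflowOperator(
            task_id="create_workflow",
            # trivial one-step workflow body, for illustration only
            workflow={"source_contents": "main:\n  steps:\n    - done:\n        return: 'ok'\n"},
            workflow_id=WORKFLOW_ID,
            project_id=PROJECT_ID,
            location=LOCATION,
        )
        create_execution = WorkflowsCreateExecutionOperator(
            task_id="create_execution",
            execution={},  # run with default arguments
            workflow_id=WORKFLOW_ID,
            project_id=PROJECT_ID,
            location=LOCATION,
        )
        wait_for_execution = WorkflowExecutionSensor(
            task_id="wait_for_execution",
            workflow_id=WORKFLOW_ID,
            # the create operator pushes the execution id to XCom under this key
            execution_id="{{ task_instance.xcom_pull(task_ids='create_execution', key='execution_id') }}",
            project_id=PROJECT_ID,
            location=LOCATION,
        )
        create_workflow >> create_execution >> wait_for_execution

-.. exampleinclude:: /../../tests/system/providers/google/cloud/workflows/example_workflows.py +.. 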
exampleinclude:: /../../providers/tests/system/google/cloud/workflows/example_workflows.py :language: python :dedent: 4 :start-after: [START how_to_get_execution] @@ -160,7 +160,7 @@ To list executions use :class:`~airflow.providers.google.cloud.operators.workflows.WorkflowsListExecutionsOperator`. By default this operator will return only executions for the last 60 minutes. -.. exampleinclude:: /../../tests/system/providers/google/cloud/workflows/example_workflows.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/workflows/example_workflows.py :language: python :dedent: 4 :start-after: [START how_to_list_executions] @@ -174,7 +174,7 @@ Cancel execution To cancel an execution use :class:`~airflow.providers.google.cloud.operators.workflows.WorkflowsCancelExecutionOperator`. -.. exampleinclude:: /../../tests/system/providers/google/cloud/workflows/example_workflows.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/workflows/example_workflows.py :language: python :dedent: 4 :start-after: [START how_to_cancel_execution] diff --git a/docs/apache-airflow-providers-google/operators/firebase/firestore.rst b/docs/apache-airflow-providers-google/operators/firebase/firestore.rst index ba7469d9744d..eb274491d2fd 100644 --- a/docs/apache-airflow-providers-google/operators/firebase/firestore.rst +++ b/docs/apache-airflow-providers-google/operators/firebase/firestore.rst @@ -41,7 +41,7 @@ Export database Exporting a copy of all or a subset of documents from Google Cloud Firestore to Google Cloud Storage is performed with the :class:`~airflow.providers.google.firebase.operators.firestore.CloudFirestoreExportDatabaseOperator` operator. -.. exampleinclude:: /../../tests/system/providers/google/cloud/gcs/example_firestore.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/gcs/example_firestore.py :language: python :dedent: 4 :start-after: [START howto_operator_export_database_to_gcs] diff --git a/docs/apache-airflow-providers-google/operators/leveldb/leveldb.rst b/docs/apache-airflow-providers-google/operators/leveldb/leveldb.rst index 2a4f8b41b316..9aeb5fcae80b 100644 --- a/docs/apache-airflow-providers-google/operators/leveldb/leveldb.rst +++ b/docs/apache-airflow-providers-google/operators/leveldb/leveldb.rst @@ -36,7 +36,7 @@ Put key Getting, putting, or deleting a key, writing a batch, or creating a database with a comparator or different options in LevelDB is performed with the :class:`~airflow.providers.google.leveldb.operators.leveldb.LevelDBOperator` operator. -.. exampleinclude:: /../../tests/system/providers/google/leveldb/example_leveldb.py +.. exampleinclude:: /../../providers/tests/system/google/leveldb/example_leveldb.py :language: python :dedent: 4 :start-after: [START howto_operator_leveldb_put_key] diff --git a/docs/apache-airflow-providers-google/operators/marketing_platform/analytics_admin.rst b/docs/apache-airflow-providers-google/operators/marketing_platform/analytics_admin.rst index b4aab1989765..fd1fc199b497 100644 --- a/docs/apache-airflow-providers-google/operators/marketing_platform/analytics_admin.rst +++ b/docs/apache-airflow-providers-google/operators/marketing_platform/analytics_admin.rst @@ -35,7 +35,7 @@ List the Accounts To list accounts from Analytics you can use the :class:`~airflow.providers.google.marketing_platform.operators.analytics_admin.GoogleAnalyticsAdminListAccountsOperator`.
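As a quick orientation, a minimal sketch of listing accounts might look as follows; the DAG settings are placeholders, and the returned account list is pushed to XCom as the task's return value:

.. code-block:: python

    import datetime

    from airflow import DAG
    from airflow.providers.google.marketing_platform.operators.analytics_admin import (
        GoogleAnalyticsAdminListAccountsOperator,
    )

    with DAG("ga_admin_sketch", start_date=datetime.datetime(2024, 1, 1), schedule=None, catchup=False):
        # The list of accounts is returned from execute() and therefore lands in XCom.
        list_accounts = GoogleAnalyticsAdminListAccountsOperator(task_id="list_accounts")

-.. exampleinclude:: /../../tests/system/providers/google/marketing_platform/example_analytics_admin.py +.. 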
exampleinclude:: /../../providers/tests/system/google/marketing_platform/example_analytics_admin.py :language: python :dedent: 4 :start-after: [START howto_marketing_platform_list_accounts_operator] @@ -53,7 +53,7 @@ Creates a property. To create a property you can use the :class:`~airflow.providers.google.marketing_platform.operators.analytics_admin.GoogleAnalyticsAdminCreatePropertyOperator`. -.. exampleinclude:: /../../tests/system/providers/google/marketing_platform/example_analytics_admin.py +.. exampleinclude:: /../../providers/tests/system/google/marketing_platform/example_analytics_admin.py :language: python :dedent: 4 :start-after: [START howto_marketing_platform_create_property_operator] @@ -71,7 +71,7 @@ Deletes a property. To delete a property you can use the :class:`~airflow.providers.google.marketing_platform.operators.analytics_admin.GoogleAnalyticsAdminDeletePropertyOperator`. -.. exampleinclude:: /../../tests/system/providers/google/marketing_platform/example_analytics_admin.py +.. exampleinclude:: /../../providers/tests/system/google/marketing_platform/example_analytics_admin.py :language: python :dedent: 4 :start-after: [START howto_marketing_platform_delete_property_operator] @@ -89,7 +89,7 @@ Creates a data stream. To create a data stream you can use the :class:`~airflow.providers.google.marketing_platform.operators.analytics_admin.GoogleAnalyticsAdminCreateDataStreamOperator`. -.. exampleinclude:: /../../tests/system/providers/google/marketing_platform/example_analytics_admin.py +.. exampleinclude:: /../../providers/tests/system/google/marketing_platform/example_analytics_admin.py :language: python :dedent: 4 :start-after: [START howto_marketing_platform_create_data_stream_operator] @@ -107,7 +107,7 @@ Deletes a data stream. To delete a data stream you can use the :class:`~airflow.providers.google.marketing_platform.operators.analytics_admin.GoogleAnalyticsAdminDeleteDataStreamOperator`. -.. exampleinclude:: /../../tests/system/providers/google/marketing_platform/example_analytics_admin.py +.. exampleinclude:: /../../providers/tests/system/google/marketing_platform/example_analytics_admin.py :language: python :dedent: 4 :start-after: [START howto_marketing_platform_delete_data_stream_operator] @@ -124,7 +124,7 @@ List Google Ads Links To list Google Ads links you can use the :class:`~airflow.providers.google.marketing_platform.operators.analytics_admin.GoogleAnalyticsAdminListGoogleAdsLinksOperator`. -.. exampleinclude:: /../../tests/system/providers/google/marketing_platform/example_analytics_admin.py +.. exampleinclude:: /../../providers/tests/system/google/marketing_platform/example_analytics_admin.py :language: python :dedent: 4 :start-after: [START howto_marketing_platform_list_google_ads_links] @@ -141,7 +141,7 @@ Get the Google Ads link To get a Google Ads link you can use the :class:`~airflow.providers.google.marketing_platform.operators.analytics_admin.GoogleAnalyticsAdminGetGoogleAdsLinkOperator`. -.. exampleinclude:: /../../tests/system/providers/google/marketing_platform/example_analytics_admin.py +.. 
exampleinclude:: /../../providers/tests/system/google/marketing_platform/example_analytics_admin.py :language: python :dedent: 4 :start-after: [START howto_marketing_platform_get_google_ad_link] diff --git a/docs/apache-airflow-providers-google/operators/marketing_platform/campaign_manager.rst b/docs/apache-airflow-providers-google/operators/marketing_platform/campaign_manager.rst index eb3b6b03549e..15b08b72a57b 100644 --- a/docs/apache-airflow-providers-google/operators/marketing_platform/campaign_manager.rst +++ b/docs/apache-airflow-providers-google/operators/marketing_platform/campaign_manager.rst @@ -36,7 +36,7 @@ To delete Campaign Manager report you can use the :class:`~airflow.providers.google.marketing_platform.operators.campaign_manager.GoogleCampaignManagerDeleteReportOperator`. It deletes a report by its unique ID. -.. exampleinclude:: /../../tests/system/providers/google/marketing_platform/example_campaign_manager.py +.. exampleinclude:: /../../providers/tests/system/google/marketing_platform/example_campaign_manager.py :language: python :dedent: 4 :start-after: [START howto_campaign_manager_delete_report_operator] @@ -54,7 +54,7 @@ Downloading a report The :class:`~airflow.providers.google.marketing_platform.operators.campaign_manager.GoogleCampaignManagerDownloadReportOperator` allows you to download a Campaign Manager report to a Google Cloud Storage bucket. -.. exampleinclude:: /../../tests/system/providers/google/marketing_platform/example_campaign_manager.py +.. exampleinclude:: /../../providers/tests/system/google/marketing_platform/example_campaign_manager.py :language: python :dedent: 4 :start-after: [START howto_campaign_manager_get_report_operator] @@ -72,7 +72,7 @@ Waiting for a report Reports are generated asynchronously. To wait for a report to be ready for downloading you can use :class:`~airflow.providers.google.marketing_platform.sensors.campaign_manager.GoogleCampaignManagerReportSensor`. -.. exampleinclude:: /../../tests/system/providers/google/marketing_platform/example_campaign_manager.py +.. exampleinclude:: /../../providers/tests/system/google/marketing_platform/example_campaign_manager.py :language: python :dedent: 4 :start-after: [START howto_campaign_manager_wait_for_operation] @@ -91,7 +91,7 @@ To insert a Campaign Manager report you can use the :class:`~airflow.providers.google.marketing_platform.operators.campaign_manager.GoogleCampaignManagerInsertReportOperator`. Running this operator creates a new report. -.. exampleinclude:: /../../tests/system/providers/google/marketing_platform/example_campaign_manager.py +.. exampleinclude:: /../../providers/tests/system/google/marketing_platform/example_campaign_manager.py :language: python :dedent: 4 :start-after: [START howto_campaign_manager_insert_report_operator] @@ -111,7 +111,7 @@ Running a report To run a Campaign Manager report you can use the :class:`~airflow.providers.google.marketing_platform.operators.campaign_manager.GoogleCampaignManagerRunReportOperator`. -.. exampleinclude:: /../../tests/system/providers/google/marketing_platform/example_campaign_manager.py +.. exampleinclude:: /../../providers/tests/system/google/marketing_platform/example_campaign_manager.py :language: python :dedent: 4 :start-after: [START howto_campaign_manager_run_report_operator] @@ -130,7 +130,7 @@ Inserting conversions To insert Campaign Manager conversions you can use the :class:`~airflow.providers.google.marketing_platform.operators.campaign_manager.GoogleCampaignManagerBatchInsertConversionsOperator`.
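For orientation, a hedged sketch of batch-inserting conversions is shown below. The profile ID, encryption settings, and the conversion payload are illustrative placeholders only, assuming a Floodlight-based setup:

.. code-block:: python

    import datetime

    from airflow import DAG
    from airflow.providers.google.marketing_platform.operators.campaign_manager import (
        GoogleCampaignManagerBatchInsertConversionsOperator,
    )

    CONVERSION = {  # illustrative payload; all field values are placeholders
        "kind": "dfareporting#conversion",
        "floodlightActivityId": 1234567,
        "floodlightConfigurationId": 7654321,
        "gclid": "abc123",
        "ordinal": "0",
        "quantity": 1,
        "value": 100,
    }

    with DAG("cm_conversions_sketch", start_date=datetime.datetime(2024, 1, 1), schedule=None, catchup=False):
        insert_conversions = GoogleCampaignManagerBatchInsertConversionsOperator(
            task_id="insert_conversions",
            profile_id="my-profile-id",  # placeholder
            conversions=[CONVERSION],
            encryption_entity_type="DCM_ADVERTISER",
            encryption_entity_id=1234567,  # placeholder
            encryption_source="AD_SERVING",
        )

-.. 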
exampleinclude:: /../../tests/system/providers/google/marketing_platform/example_campaign_manager.py +.. exampleinclude:: /../../providers/tests/system/google/marketing_platform/example_campaign_manager.py :language: python :dedent: 4 :start-after: [START howto_campaign_manager_insert_conversions] @@ -149,7 +149,7 @@ Updating conversions To update Campaign Manager conversions you can use the :class:`~airflow.providers.google.marketing_platform.operators.campaign_manager.GoogleCampaignManagerBatchUpdateConversionsOperator`. -.. exampleinclude:: /../../tests/system/providers/google/marketing_platform/example_campaign_manager.py +.. exampleinclude:: /../../providers/tests/system/google/marketing_platform/example_campaign_manager.py :language: python :dedent: 4 :start-after: [START howto_campaign_manager_update_conversions] diff --git a/docs/apache-airflow-providers-google/operators/marketing_platform/display_video.rst b/docs/apache-airflow-providers-google/operators/marketing_platform/display_video.rst index 1669b26cc1ce..4b1f4b81cac8 100644 --- a/docs/apache-airflow-providers-google/operators/marketing_platform/display_video.rst +++ b/docs/apache-airflow-providers-google/operators/marketing_platform/display_video.rst @@ -33,7 +33,7 @@ Creating a Query To create a Display&Video 360 query use :class:`~airflow.providers.google.marketing_platform.operators.display_video.GoogleDisplayVideo360CreateQueryOperator`. -.. exampleinclude:: /../../airflow/providers/google/marketing_platform/example_dags/example_display_video.py +.. exampleinclude:: /../../providers/src/airflow/providers/google/marketing_platform/example_dags/example_display_video.py :language: python :dedent: 4 :start-after: [START howto_google_display_video_create_query_operator] @@ -52,7 +52,7 @@ Run Query :class:`~airflow.providers.google.marketing_platform.operators.display_video.GoogleDisplayVideo360RunQueryOperator`. -.. exampleinclude:: /../../airflow/providers/google/marketing_platform/example_dags/example_display_video.py +.. exampleinclude:: /../../providers/src/airflow/providers/google/marketing_platform/example_dags/example_display_video.py :language: python :dedent: 4 :start-after: [START howto_google_display_video_run_query_report_operator] @@ -71,7 +71,7 @@ Deleting a report To delete a Display&Video 360 report use :class:`~airflow.providers.google.marketing_platform.operators.display_video.GoogleDisplayVideo360DeleteReportOperator`. -.. exampleinclude:: /../../airflow/providers/google/marketing_platform/example_dags/example_display_video.py +.. exampleinclude:: /../../providers/src/airflow/providers/google/marketing_platform/example_dags/example_display_video.py :language: python :dedent: 4 :start-after: [START howto_google_display_video_delete_query_report_operator] @@ -89,7 +89,7 @@ Waiting for query To wait for the report use :class:`~airflow.providers.google.marketing_platform.sensors.display_video.GoogleDisplayVideo360RunQuerySensor`. -.. exampleinclude:: /../../airflow/providers/google/marketing_platform/example_dags/example_display_video.py +.. exampleinclude:: /../../providers/src/airflow/providers/google/marketing_platform/example_dags/example_display_video.py :language: python :dedent: 4 :start-after: [START howto_google_display_video_wait_run_query_sensor] @@ -107,7 +107,7 @@ Downloading a report To download a report to a GCS bucket use :class:`~airflow.providers.google.marketing_platform.operators.display_video.GoogleDisplayVideo360DownloadReportV2Operator`.
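Roughly, the create/run/wait steps above chain together as in the following sketch. The report body is schematic, and the XCom wiring assumes the operators push ``query_id`` and ``report_id`` keys as described here:

.. code-block:: python

    import datetime

    from airflow import DAG
    from airflow.providers.google.marketing_platform.operators.display_video import (
        GoogleDisplayVideo360CreateQueryOperator,
        GoogleDisplayVideo360RunQueryOperator,
    )
    from airflow.providers.google.marketing_platform.sensors.display_video import (
        GoogleDisplayVideo360RunQuerySensor,
    )

    REPORT_BODY = {  # schematic report definition; adjust to your advertiser
        "metadata": {"title": "Demo report", "dataRange": {"range": "LAST_7_DAYS"}, "format": "CSV"},
        "params": {"type": "STANDARD", "groupBys": ["FILTER_DATE"], "metrics": ["METRIC_IMPRESSIONS"]},
        "schedule": {"frequency": "ONE_TIME"},
    }

    with DAG("dv360_report_sketch", start_date=datetime.datetime(2024, 1, 1), schedule=None, catchup=False):
        create_query = GoogleDisplayVideo360CreateQueryOperator(task_id="create_query", body=REPORT_BODY)
        run_query = GoogleDisplayVideo360RunQueryOperator(
            task_id="run_query",
            # the create operator pushes the query id to XCom
            query_id="{{ task_instance.xcom_pull(task_ids='create_query', key='query_id') }}",
        )
        wait_for_report = GoogleDisplayVideo360RunQuerySensor(
            task_id="wait_for_report",
            query_id="{{ task_instance.xcom_pull(task_ids='run_query', key='query_id') }}",
            report_id="{{ task_instance.xcom_pull(task_ids='run_query', key='report_id') }}",
        )
        create_query >> run_query >> wait_for_report

-.. 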
exampleinclude:: /../../airflow/providers/google/marketing_platform/example_dags/example_display_video.py +.. exampleinclude:: /../../providers/src/airflow/providers/google/marketing_platform/example_dags/example_display_video.py :language: python :dedent: 4 :start-after: [START howto_google_display_video_get_report_operator] @@ -135,7 +135,7 @@ The operator accepts body request: To download line items in CSV format use :class:`~airflow.providers.google.marketing_platform.operators.display_video.GoogleDisplayVideo360DownloadLineItemsOperator`. -.. exampleinclude:: /../../airflow/providers/google/marketing_platform/example_dags/example_display_video.py +.. exampleinclude:: /../../providers/src/airflow/providers/google/marketing_platform/example_dags/example_display_video.py :language: python :dedent: 4 :start-after: [START howto_google_display_video_download_line_items_operator] @@ -154,7 +154,7 @@ Upload line items To upload Display&Video 360 line items use :class:`~airflow.providers.google.marketing_platform.operators.display_video.GoogleDisplayVideo360UploadLineItemsOperator`. -.. exampleinclude:: /../../airflow/providers/google/marketing_platform/example_dags/example_display_video.py +.. exampleinclude:: /../../providers/src/airflow/providers/google/marketing_platform/example_dags/example_display_video.py :language: python :dedent: 4 :start-after: [START howto_google_display_video_upload_line_items_operator] @@ -172,7 +172,7 @@ Create SDF download task To create an SDF download task use :class:`~airflow.providers.google.marketing_platform.operators.display_video.GoogleDisplayVideo360CreateSDFDownloadTaskOperator`. -.. exampleinclude:: /../../airflow/providers/google/marketing_platform/example_dags/example_display_video.py +.. exampleinclude:: /../../providers/src/airflow/providers/google/marketing_platform/example_dags/example_display_video.py :language: python :dedent: 4 :start-after: [START howto_google_display_video_create_sdf_download_task_operator] @@ -191,7 +191,7 @@ Save SDF files in the Google Cloud Storage To save SDF files in Google Cloud Storage use :class:`~airflow.providers.google.marketing_platform.operators.display_video.GoogleDisplayVideo360SDFtoGCSOperator`. -.. exampleinclude:: /../../airflow/providers/google/marketing_platform/example_dags/example_display_video.py +.. exampleinclude:: /../../providers/src/airflow/providers/google/marketing_platform/example_dags/example_display_video.py :language: python :dedent: 4 :start-after: [START howto_google_display_video_save_sdf_in_gcs_operator] @@ -209,7 +209,7 @@ Waiting for SDF operation Waiting for an SDF operation is performed by: :class:`~airflow.providers.google.marketing_platform.sensors.display_video.GoogleDisplayVideo360GetSDFDownloadOperationSensor`.
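A hedged sketch of the whole SDF flow (create a download task, wait for the operation, save the result to GCS) might look like this; the request body, bucket, and object names are placeholders, and the operation name is assumed to be available from the create task's XCom:

.. code-block:: python

    import datetime

    from airflow import DAG
    from airflow.providers.google.marketing_platform.operators.display_video import (
        GoogleDisplayVideo360CreateSDFDownloadTaskOperator,
        GoogleDisplayVideo360SDFtoGCSOperator,
    )
    from airflow.providers.google.marketing_platform.sensors.display_video import (
        GoogleDisplayVideo360GetSDFDownloadOperationSensor,
    )

    BODY_REQUEST = {  # schematic request; version and advertiser id are placeholders
        "version": "SDF_VERSION_5_5",
        "advertiserId": "1234567",
        "inventorySourceFilter": {"inventorySourceIds": []},
    }
    OPERATION_NAME = "{{ task_instance.xcom_pull(task_ids='create_sdf_download_task')['name'] }}"

    with DAG("dv360_sdf_sketch", start_date=datetime.datetime(2024, 1, 1), schedule=None, catchup=False):
        create_sdf_download_task = GoogleDisplayVideo360CreateSDFDownloadTaskOperator(
            task_id="create_sdf_download_task", body_request=BODY_REQUEST
        )
        wait_for_operation = GoogleDisplayVideo360GetSDFDownloadOperationSensor(
            task_id="wait_for_operation", operation_name=OPERATION_NAME
        )
        save_sdf_in_gcs = GoogleDisplayVideo360SDFtoGCSOperator(
            task_id="save_sdf_in_gcs",
            operation_name=OPERATION_NAME,
            bucket_name="my-bucket",  # placeholder
            object_name="sdf/export.csv",  # placeholder
            gzip=False,
        )
        create_sdf_download_task >> wait_for_operation >> save_sdf_in_gcs

-.. exampleinclude:: /../../airflow/providers/google/marketing_platform/example_dags/example_display_video.py +.. 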
exampleinclude:: /../../providers/src/airflow/providers/google/marketing_platform/example_dags/example_display_video.py :language: python :dedent: 4 :start-after: [START howto_google_display_video_wait_for_operation_sensor] diff --git a/docs/apache-airflow-providers-google/operators/marketing_platform/search_ads.rst b/docs/apache-airflow-providers-google/operators/marketing_platform/search_ads.rst index 928d60f671a5..00fb9d5ab707 100644 --- a/docs/apache-airflow-providers-google/operators/marketing_platform/search_ads.rst +++ b/docs/apache-airflow-providers-google/operators/marketing_platform/search_ads.rst @@ -34,7 +34,7 @@ Querying a report To query a Search Ads report use the :class:`~airflow.providers.google.marketing_platform.operators.search_ads.GoogleSearchAdsSearchOperator`. -.. exampleinclude:: /../../tests/system/providers/google/marketing_platform/example_search_ads.py +.. exampleinclude:: /../../providers/tests/system/google/marketing_platform/example_search_ads.py :language: python :dedent: 4 :start-after: [START howto_search_ads_search_query_reports] @@ -52,7 +52,7 @@ Retrieve a field metadata To retrieve metadata of a field use :class:`~airflow.providers.google.marketing_platform.operators.search_ads.GoogleSearchAdsGetFieldOperator`. -.. exampleinclude:: /../../tests/system/providers/google/marketing_platform/example_search_ads.py +.. exampleinclude:: /../../providers/tests/system/google/marketing_platform/example_search_ads.py :language: python :dedent: 4 :start-after: [START howto_search_ads_get_field] @@ -70,7 +70,7 @@ Retrieve metadata for multiple fields To retrieve metadata of multiple fields use the :class:`~airflow.providers.google.marketing_platform.operators.search_ads.GoogleSearchAdsSearchFieldsOperator`. -.. exampleinclude:: /../../tests/system/providers/google/marketing_platform/example_search_ads.py +.. exampleinclude:: /../../providers/tests/system/google/marketing_platform/example_search_ads.py :language: python :dedent: 4 :start-after: [START howto_search_ads_search_fields] @@ -89,7 +89,7 @@ Retrieve a custom column details To retrieve details of a custom column use :class:`~airflow.providers.google.marketing_platform.operators.search_ads.GoogleSearchAdsGetCustomColumnOperator`. -.. exampleinclude:: /../../tests/system/providers/google/marketing_platform/example_search_ads.py +.. exampleinclude:: /../../providers/tests/system/google/marketing_platform/example_search_ads.py :language: python :dedent: 4 :start-after: [START howto_search_ads_get_custom_column] @@ -108,7 +108,7 @@ To retrieve the list of all custom columns use :class:`~airflow.providers.google.marketing_platform.operators.search_ads.GoogleSearchAdsListCustomColumnsOperator`. -.. exampleinclude:: /../../tests/system/providers/google/marketing_platform/example_search_ads.py +.. exampleinclude:: /../../providers/tests/system/google/marketing_platform/example_search_ads.py :language: python :dedent: 4 :start-after: [START howto_search_ads_list_custom_columns] diff --git a/docs/apache-airflow-providers-google/operators/suite/sheets.rst b/docs/apache-airflow-providers-google/operators/suite/sheets.rst index 0f64c2f0b1dd..0f891a60a3cf 100644 --- a/docs/apache-airflow-providers-google/operators/suite/sheets.rst +++ b/docs/apache-airflow-providers-google/operators/suite/sheets.rst @@ -44,7 +44,7 @@ Create spreadsheet To create a new spreadsheet you can use the :class:`~airflow.providers.google.suite.operators.sheets.GoogleSheetsCreateSpreadsheetOperator`.
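As a sketch, creating a spreadsheet and echoing its URL from XCom might look like this (the title and DAG settings are placeholders):

.. code-block:: python

    import datetime

    from airflow import DAG
    from airflow.operators.bash import BashOperator
    from airflow.providers.google.suite.operators.sheets import GoogleSheetsCreateSpreadsheetOperator

    with DAG("sheets_sketch", start_date=datetime.datetime(2024, 1, 1), schedule=None, catchup=False):
        create_spreadsheet = GoogleSheetsCreateSpreadsheetOperator(
            task_id="create_spreadsheet",
            spreadsheet={"properties": {"title": "Demo spreadsheet"}},  # placeholder title
        )
        # The operator pushes both "spreadsheet_id" and "spreadsheet_url" to XCom.
        print_spreadsheet_url = BashOperator(
            task_id="print_spreadsheet_url",
            bash_command="echo {{ task_instance.xcom_pull(task_ids='create_spreadsheet', key='spreadsheet_url') }}",
        )
        create_spreadsheet >> print_spreadsheet_url

-.. 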
exampleinclude:: /../../tests/system/providers/google/cloud/gcs/example_sheets.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/gcs/example_sheets.py :language: python :dedent: 4 :start-after: [START create_spreadsheet] @@ -55,7 +55,7 @@ You can use :ref:`Jinja templating ` with To get the URL of the newly created spreadsheet use the XCom value: -.. exampleinclude:: /../../tests/system/providers/google/cloud/gcs/example_sheets.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/gcs/example_sheets.py :language: python :dedent: 4 :start-after: [START print_spreadsheet_url] diff --git a/docs/apache-airflow-providers-google/operators/transfer/azure_blob_to_gcs.rst b/docs/apache-airflow-providers-google/operators/transfer/azure_blob_to_gcs.rst index 1a7fb4c3062b..8bd909a809c2 100644 --- a/docs/apache-airflow-providers-google/operators/transfer/azure_blob_to_gcs.rst +++ b/docs/apache-airflow-providers-google/operators/transfer/azure_blob_to_gcs.rst @@ -38,7 +38,7 @@ to transfer data from Azure Blob Storage to Google Cloud Storage. Example usage: -.. exampleinclude:: /../../tests/system/providers/google/cloud/azure/example_azure_blob_to_gcs.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/azure/example_azure_blob_to_gcs.py :language: python :start-after: [START how_to_azure_blob_to_gcs] :end-before: [END how_to_azure_blob_to_gcs] diff --git a/docs/apache-airflow-providers-google/operators/transfer/azure_fileshare_to_gcs.rst b/docs/apache-airflow-providers-google/operators/transfer/azure_fileshare_to_gcs.rst index e962e2b88520..6efc0d091cc4 100644 --- a/docs/apache-airflow-providers-google/operators/transfer/azure_fileshare_to_gcs.rst +++ b/docs/apache-airflow-providers-google/operators/transfer/azure_fileshare_to_gcs.rst @@ -36,7 +36,7 @@ All parameters are described in the reference documentation - :class:`~airflow.p An example operator call might look like this: -.. exampleinclude:: /../../tests/system/providers/google/cloud/azure/example_azure_fileshare_to_gcs.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/azure/example_azure_fileshare_to_gcs.py :language: python :dedent: 4 :start-after: [START howto_operator_azure_fileshare_to_gcs_basic] diff --git a/docs/apache-airflow-providers-google/operators/transfer/bigquery_to_bigquery.rst b/docs/apache-airflow-providers-google/operators/transfer/bigquery_to_bigquery.rst index 23fe393994eb..f9ce21df0dd0 100644 --- a/docs/apache-airflow-providers-google/operators/transfer/bigquery_to_bigquery.rst +++ b/docs/apache-airflow-providers-google/operators/transfer/bigquery_to_bigquery.rst @@ -50,7 +50,7 @@ Copying BigQuery tables The following Operator copies data from one or more BigQuery tables to another. -.. exampleinclude:: /../../tests/system/providers/google/cloud/bigquery/example_bigquery_to_bigquery.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/bigquery/example_bigquery_to_bigquery.py :language: python :dedent: 4 :start-after: [START howto_operator_bigquery_to_bigquery] diff --git a/docs/apache-airflow-providers-google/operators/transfer/bigquery_to_gcs.rst b/docs/apache-airflow-providers-google/operators/transfer/bigquery_to_gcs.rst index b0eeb0ac5e34..2e2f3ad41a28 100644 --- a/docs/apache-airflow-providers-google/operators/transfer/bigquery_to_gcs.rst +++ b/docs/apache-airflow-providers-google/operators/transfer/bigquery_to_gcs.rst @@ -53,7 +53,7 @@ Exporting tables The following Operator exports a BigQuery table into GCS.
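A minimal sketch, assuming a hypothetical project, dataset, and bucket:

.. code-block:: python

    import datetime

    from airflow import DAG
    from airflow.providers.google.cloud.transfers.bigquery_to_gcs import BigQueryToGCSOperator

    with DAG("bq_to_gcs_sketch", start_date=datetime.datetime(2024, 1, 1), schedule=None, catchup=False):
        export_table = BigQueryToGCSOperator(
            task_id="export_table",
            source_project_dataset_table="my-project.my_dataset.my_table",  # placeholder
            destination_cloud_storage_uris=["gs://my-bucket/export/my_table-*.csv"],  # placeholder
            export_format="CSV",
        )

-.. 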
exampleinclude:: /../../tests/system/providers/google/cloud/bigquery/example_bigquery_to_gcs.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/bigquery/example_bigquery_to_gcs.py :language: python :dedent: 4 :start-after: [START howto_operator_bigquery_to_gcs] diff --git a/docs/apache-airflow-providers-google/operators/transfer/bigquery_to_mssql.rst b/docs/apache-airflow-providers-google/operators/transfer/bigquery_to_mssql.rst index 5fb56ae8701d..382f6fa64588 100644 --- a/docs/apache-airflow-providers-google/operators/transfer/bigquery_to_mssql.rst +++ b/docs/apache-airflow-providers-google/operators/transfer/bigquery_to_mssql.rst @@ -53,7 +53,7 @@ Transferring data The following Operator copies data from a BigQuery table to MsSQL. -.. exampleinclude:: /../../tests/system/providers/google/cloud/bigquery/example_bigquery_to_mssql.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/bigquery/example_bigquery_to_mssql.py :language: python :dedent: 4 :start-after: [START howto_operator_bigquery_to_mssql] diff --git a/docs/apache-airflow-providers-google/operators/transfer/bigquery_to_mysql.rst b/docs/apache-airflow-providers-google/operators/transfer/bigquery_to_mysql.rst index 263db92d053a..0790cc65096f 100644 --- a/docs/apache-airflow-providers-google/operators/transfer/bigquery_to_mysql.rst +++ b/docs/apache-airflow-providers-google/operators/transfer/bigquery_to_mysql.rst @@ -51,7 +51,7 @@ Transferring data The following Operator copies data from a BigQuery table to MySQL. -.. exampleinclude:: /../../tests/system/providers/google/cloud/bigquery/example_bigquery_to_mysql.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/bigquery/example_bigquery_to_mysql.py :language: python :dedent: 4 :start-after: [START howto_operator_bigquery_to_mysql] diff --git a/docs/apache-airflow-providers-google/operators/transfer/bigquery_to_postgres.rst b/docs/apache-airflow-providers-google/operators/transfer/bigquery_to_postgres.rst index 70f0c8c56d05..d600017a05fb 100644 --- a/docs/apache-airflow-providers-google/operators/transfer/bigquery_to_postgres.rst +++ b/docs/apache-airflow-providers-google/operators/transfer/bigquery_to_postgres.rst @@ -55,7 +55,7 @@ Transferring data The following Operator copies data from a BigQuery table to PostgreSQL. -.. exampleinclude:: /../../tests/system/providers/google/cloud/bigquery/example_bigquery_to_postgres.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/bigquery/example_bigquery_to_postgres.py :language: python :dedent: 4 :start-after: [START howto_operator_bigquery_to_postgres] @@ -63,7 +63,7 @@ The following Operator copies data from a BigQuery table to PostgreSQL. The Operator can also replace data in a PostgreSQL table with matching data from a BigQuery table. -.. exampleinclude:: /../../tests/system/providers/google/cloud/bigquery/example_bigquery_to_postgres.py +.. 
exampleinclude:: /../../providers/tests/system/google/cloud/bigquery/example_bigquery_to_postgres.py :language: python :dedent: 4 :start-after: [START howto_operator_bigquery_to_postgres_upsert] diff --git a/docs/apache-airflow-providers-google/operators/transfer/calendar_to_gcs.rst b/docs/apache-airflow-providers-google/operators/transfer/calendar_to_gcs.rst index 7b420f1e6a04..37569d6c895b 100644 --- a/docs/apache-airflow-providers-google/operators/transfer/calendar_to_gcs.rst +++ b/docs/apache-airflow-providers-google/operators/transfer/calendar_to_gcs.rst @@ -37,7 +37,7 @@ Upload data from Google Calendar to GCS To upload data from Google Calendar to Google Cloud Storage you can use the :class:`~airflow.providers.google.cloud.transfers.calendar_to_gcs.GoogleCalendarToGCSOperator`. -.. exampleinclude:: /../../tests/system/providers/google/cloud/gcs/example_calendar_to_gcs.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/gcs/example_calendar_to_gcs.py :language: python :dedent: 4 :start-after: [START upload_calendar_to_gcs] diff --git a/docs/apache-airflow-providers-google/operators/transfer/facebook_ads_to_gcs.rst b/docs/apache-airflow-providers-google/operators/transfer/facebook_ads_to_gcs.rst index 7132417cbb56..003239d676f0 100644 --- a/docs/apache-airflow-providers-google/operators/transfer/facebook_ads_to_gcs.rst +++ b/docs/apache-airflow-providers-google/operators/transfer/facebook_ads_to_gcs.rst @@ -34,7 +34,7 @@ Use the :class:`~airflow.providers.google.cloud.transfers.facebook_ads_to_gcs.FacebookAdsReportToGcsOperator` to fetch a Facebook Ads report and load it to GCS. -.. exampleinclude:: /../../airflow/providers/google/cloud/example_dags/example_facebook_ads_to_gcs.py +.. exampleinclude:: /../../providers/src/airflow/providers/google/cloud/example_dags/example_facebook_ads_to_gcs.py :language: python :start-after: [START howto_operator_facebook_ads_to_gcs] :end-before: [END howto_operator_facebook_ads_to_gcs] diff --git a/docs/apache-airflow-providers-google/operators/transfer/gcs_to_bigquery.rst b/docs/apache-airflow-providers-google/operators/transfer/gcs_to_bigquery.rst index a10b3d046c36..a05ed77484a3 100644 --- a/docs/apache-airflow-providers-google/operators/transfer/gcs_to_bigquery.rst +++ b/docs/apache-airflow-providers-google/operators/transfer/gcs_to_bigquery.rst @@ -53,7 +53,7 @@ Transferring files The following Operator transfers one or more files from GCS into a BigQuery table. -.. exampleinclude:: /../../tests/system/providers/google/cloud/gcs/example_gcs_to_bigquery.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/gcs/example_gcs_to_bigquery.py :language: python :dedent: 4 :start-after: [START howto_operator_gcs_to_bigquery] @@ -61,7 +61,7 @@ The following Operator transfers one or more files from GCS into a BigQuery tabl You can also use GCSToBigQueryOperator in deferrable mode:
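A rough sketch of a deferrable load, with hypothetical bucket and table names:

.. code-block:: python

    import datetime

    from airflow import DAG
    from airflow.providers.google.cloud.transfers.gcs_to_bigquery import GCSToBigQueryOperator

    with DAG("gcs_to_bq_sketch", start_date=datetime.datetime(2024, 1, 1), schedule=None, catchup=False):
        load_csv = GCSToBigQueryOperator(
            task_id="load_csv",
            bucket="my-bucket",  # placeholder
            source_objects=["data/*.csv"],  # placeholder
            destination_project_dataset_table="my-project.my_dataset.my_table",  # placeholder
            autodetect=True,
            write_disposition="WRITE_TRUNCATE",
            deferrable=True,  # hand the wait over to the triggerer
        )

-.. exampleinclude:: /../../tests/system/providers/google/cloud/gcs/example_gcs_to_bigquery_async.py +.. 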
exampleinclude:: /../../providers/tests/system/google/cloud/gcs/example_gcs_to_bigquery_async.py :language: python :dedent: 4 :start-after: [START howto_operator_gcs_to_bigquery_async] diff --git a/docs/apache-airflow-providers-google/operators/transfer/gcs_to_gcs.rst b/docs/apache-airflow-providers-google/operators/transfer/gcs_to_gcs.rst index 6e49dc4229e2..3bb97af557d4 100644 --- a/docs/apache-airflow-providers-google/operators/transfer/gcs_to_gcs.rst +++ b/docs/apache-airflow-providers-google/operators/transfer/gcs_to_gcs.rst @@ -105,7 +105,7 @@ Note that if the flag ``exact_match=False`` then the ``source_object`` will be c in the ``BUCKET_1_SRC`` GCS bucket. That's why, if any are found, they will be copied as well. To prevent this from happening, please use ``exact_match=True``. -.. exampleinclude:: /../../tests/system/providers/google/cloud/gcs/example_gcs_to_gcs.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/gcs/example_gcs_to_gcs.py :language: python :dedent: 4 :start-after: [START howto_operator_gcs_to_gcs_single_file] @@ -121,7 +121,7 @@ are both deprecated. Thus, it is not recommended to use them - but to utilize `` The following example would copy the files that match the glob pattern in the ``data/`` folder from the ``BUCKET_1_SRC`` GCS bucket to the ``backup/`` folder in ``BUCKET_1_DST`` bucket. -.. exampleinclude:: /../../tests/system/providers/google/cloud/gcs/example_gcs_to_gcs.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/gcs/example_gcs_to_gcs.py :language: python :dedent: 4 :start-after: [START howto_operator_gcs_to_gcs_match_glob] @@ -130,7 +130,7 @@ The following example would copy the files that matches the glob pattern in ``da The following example would copy all the files in ``subdir/`` folder (i.e. subdir/a.csv, subdir/b.csv, subdir/c.csv) from the ``BUCKET_1_SRC`` GCS bucket to the ``backup/`` folder in ``BUCKET_1_DST`` bucket. (i.e. backup/a.csv, backup/b.csv, backup/c.csv) -.. exampleinclude:: /../../tests/system/providers/google/cloud/gcs/example_gcs_to_gcs.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/gcs/example_gcs_to_gcs.py :language: python :dedent: 4 :start-after: [START howto_operator_gcs_to_gcs_without_wildcard] @@ -138,7 +138,7 @@ the ``BUCKET_1_SRC`` GCS bucket to the ``backup/`` folder in ``BUCKET_1_DST`` bu -.. exampleinclude:: /../../tests/system/providers/google/cloud/gcs/example_gcs_to_gcs.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/gcs/example_gcs_to_gcs.py :language: python :dedent: 4 :start-after: [START howto_operator_gcs_to_gcs_list] @@ -156,7 +156,7 @@ Note that if the flag ``exact_match=False`` then the ``source_object`` will be c in the ``BUCKET_1_SRC`` GCS bucket. That's why, if any are found, they will be copied as well. To prevent this from happening, please use ``exact_match=True``. -.. exampleinclude:: /../../tests/system/providers/google/cloud/gcs/example_gcs_to_gcs.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/gcs/example_gcs_to_gcs.py :language: python :dedent: 4 :start-after: [START howto_operator_gcs_to_gcs_single_file_move] @@ -168,7 +168,7 @@ Move multiple files Multiple files may be moved by supplying ``True`` to the ``move_object`` argument. The same rules concerning wild cards and the ``delimiter`` argument apply to moves as well as copies.
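For example, a single-file move with ``exact_match`` enabled might be sketched as follows (bucket and object names are placeholders):

.. code-block:: python

    import datetime

    from airflow import DAG
    from airflow.providers.google.cloud.transfers.gcs_to_gcs import GCSToGCSOperator

    with DAG("gcs_move_sketch", start_date=datetime.datetime(2024, 1, 1), schedule=None, catchup=False):
        move_file = GCSToGCSOperator(
            task_id="move_file",
            source_bucket="bucket-1-src",  # placeholder
            source_object="data/file.csv",  # placeholder
            destination_bucket="bucket-1-dst",  # placeholder
            destination_object="backup/file.csv",  # placeholder
            exact_match=True,  # copy only the named object, not prefix matches
            move_object=True,  # delete the source object after copying
        )

-.. exampleinclude:: /../../tests/system/providers/google/cloud/gcs/example_gcs_to_gcs.py +.. 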
exampleinclude:: /../../providers/tests/system/google/cloud/gcs/example_gcs_to_gcs.py :language: python :dedent: 4 :start-after: [START howto_operator_gcs_to_gcs_list_move] @@ -201,7 +201,7 @@ The following example will ensure all files in ``BUCKET_1_SRC``, including any i ``BUCKET_1_DST``. It will not overwrite identically named files in ``BUCKET_1_DST`` if they already exist. It will not delete any files in ``BUCKET_1_DST`` not in ``BUCKET_1_SRC``. -.. exampleinclude:: /../../tests/system/providers/google/cloud/gcs/example_gcs_to_gcs.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/gcs/example_gcs_to_gcs.py :language: python :dedent: 4 :start-after: [START howto_synch_bucket] @@ -214,7 +214,7 @@ This example will ensure all files in ``BUCKET_1_SRC``, including any in subdire ``BUCKET_1_DST``. It will overwrite identically named files in ``BUCKET_1_DST`` if they already exist. It will delete any files in ``BUCKET_1_DST`` not in ``BUCKET_1_SRC``. -.. exampleinclude:: /../../tests/system/providers/google/cloud/gcs/example_gcs_to_gcs.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/gcs/example_gcs_to_gcs.py :language: python :dedent: 4 :start-after: [START howto_synch_full_bucket] @@ -227,7 +227,7 @@ The following example will ensure all files in ``BUCKET_1_SRC``, including any i ``subdir`` folder in ``BUCKET_1_DST``. It will not overwrite identically named files in ``BUCKET_1_DST/subdir`` if they already exist and it will not delete any files in ``BUCKET_1_DST/subdir`` not in ``BUCKET_1_SRC``. -.. exampleinclude:: /../../tests/system/providers/google/cloud/gcs/example_gcs_to_gcs.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/gcs/example_gcs_to_gcs.py :language: python :dedent: 4 :start-after: [START howto_synch_to_subdir] @@ -240,7 +240,7 @@ This example will ensure all files in ``BUCKET_1_SRC/subdir``, including any in in ``BUCKET_1_DST``. It will not overwrite identically named files in ``BUCKET_1_DST`` if they already exist and it will not delete any files in ``BUCKET_1_DST`` not in ``BUCKET_1_SRC/subdir``. -.. exampleinclude:: /../../tests/system/providers/google/cloud/gcs/example_gcs_to_gcs.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/gcs/example_gcs_to_gcs.py :language: python :dedent: 4 :start-after: [START howto_sync_from_subdir] diff --git a/docs/apache-airflow-providers-google/operators/transfer/gcs_to_gdrive.rst b/docs/apache-airflow-providers-google/operators/transfer/gcs_to_gdrive.rst index 8a8edd519af5..6b7467f96d1a 100644 --- a/docs/apache-airflow-providers-google/operators/transfer/gcs_to_gdrive.rst +++ b/docs/apache-airflow-providers-google/operators/transfer/gcs_to_gdrive.rst @@ -47,7 +47,7 @@ Copy single files The following Operator would copy a single file. -.. exampleinclude:: /../../tests/system/providers/google/cloud/gcs/example_gcs_to_gdrive.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/gcs/example_gcs_to_gdrive.py :language: python :dedent: 4 :start-after: [START howto_operator_gcs_to_gdrive_copy_single_file] @@ -58,7 +58,7 @@ Copy into an existing folder The following Operator would copy a single file into an existing folder with the specified ID. -.. exampleinclude:: /../../tests/system/providers/google/cloud/gcs/example_gcs_to_gdrive.py +.. 
exampleinclude:: /../../providers/tests/system/google/cloud/gcs/example_gcs_to_gdrive.py :language: python :dedent: 4 :start-after: [START howto_operator_gcs_to_gdrive_copy_single_file_into_folder] @@ -69,7 +69,7 @@ Copy multiple files The following Operator would copy multiple files (i.e. using a wildcard). -.. exampleinclude:: /../../tests/system/providers/google/cloud/gcs/example_gcs_to_gdrive.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/gcs/example_gcs_to_gdrive.py :language: python :dedent: 4 :start-after: [START howto_operator_gcs_to_gdrive_copy_files] @@ -81,7 +81,7 @@ Move files Using the ``move_object`` parameter allows you to move the files. After copying the file to Google Drive, the original file from the bucket is deleted. -.. exampleinclude:: /../../tests/system/providers/google/cloud/gcs/example_gcs_to_gdrive.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/gcs/example_gcs_to_gdrive.py :language: python :dedent: 4 :start-after: [START howto_operator_gcs_to_gdrive_move_files] diff --git a/docs/apache-airflow-providers-google/operators/transfer/gcs_to_local.rst b/docs/apache-airflow-providers-google/operators/transfer/gcs_to_local.rst index 9dfebb1608f7..87a29e54c498 100644 --- a/docs/apache-airflow-providers-google/operators/transfer/gcs_to_local.rst +++ b/docs/apache-airflow-providers-google/operators/transfer/gcs_to_local.rst @@ -37,7 +37,7 @@ data from GCS to local filesystem. Below is an example of using this operator to download a file from GCS. -.. exampleinclude:: /../../tests/system/providers/google/cloud/gcs/example_gcs_upload_download.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/gcs/example_gcs_upload_download.py :language: python :dedent: 0 :start-after: [START howto_operator_gcs_download_file_task] diff --git a/docs/apache-airflow-providers-google/operators/transfer/gcs_to_sftp.rst b/docs/apache-airflow-providers-google/operators/transfer/gcs_to_sftp.rst index a98f537e2bc1..f1f9ea731e88 100644 --- a/docs/apache-airflow-providers-google/operators/transfer/gcs_to_sftp.rst +++ b/docs/apache-airflow-providers-google/operators/transfer/gcs_to_sftp.rst @@ -48,7 +48,7 @@ Copying a single file The following Operator copies a single file. -.. exampleinclude:: /../../tests/system/providers/google/cloud/transfers/example_gcs_to_sftp.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/transfers/example_gcs_to_sftp.py :language: python :dedent: 4 :start-after: [START howto_operator_gcs_to_sftp_copy_single_file] @@ -61,7 +61,7 @@ To move the file use the ``move_object`` parameter. Once the file is copied to S the original file in Google Cloud Storage is deleted. The ``destination_path`` parameter defines the full path of the file on the SFTP server. -.. exampleinclude:: /../../tests/system/providers/google/cloud/transfers/example_gcs_to_sftp.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/transfers/example_gcs_to_sftp.py :language: python :dedent: 4 :start-after: [START howto_operator_gcs_to_sftp_move_single_file_destination] @@ -73,7 +73,7 @@ Copying a directory Use a wildcard in the ``source_path`` parameter to copy a directory.
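A hedged sketch of copying a directory via a wildcard, with placeholder bucket and path and the default ``ssh_default`` SFTP connection:

.. code-block:: python

    import datetime

    from airflow import DAG
    from airflow.providers.google.cloud.transfers.gcs_to_sftp import GCSToSFTPOperator

    with DAG("gcs_to_sftp_sketch", start_date=datetime.datetime(2024, 1, 1), schedule=None, catchup=False):
        copy_directory = GCSToSFTPOperator(
            task_id="copy_directory",
            sftp_conn_id="ssh_default",  # assumed connection id
            source_bucket="my-bucket",  # placeholder
            source_object="sales/january/*",  # wildcard selects the whole directory
            destination_path="/tmp/sftp",  # placeholder
        )

-.. exampleinclude:: /../../tests/system/providers/google/cloud/transfers/example_gcs_to_sftp.py +.. 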
exampleinclude:: /../../providers/tests/system/google/cloud/transfers/example_gcs_to_sftp.py :language: python :dedent: 4 :start-after: [START howto_operator_gcs_to_sftp_copy_directory] @@ -85,7 +85,7 @@ Moving specific files Use a wildcard in the ``source_path`` parameter to move specific files. The ``destination_path`` defines the path that is prefixed to all copied files. -.. exampleinclude:: /../../tests/system/providers/google/cloud/transfers/example_gcs_to_sftp.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/transfers/example_gcs_to_sftp.py :language: python :dedent: 4 :start-after: [START howto_operator_gcs_to_sftp_move_specific_files] diff --git a/docs/apache-airflow-providers-google/operators/transfer/gcs_to_sheets.rst b/docs/apache-airflow-providers-google/operators/transfer/gcs_to_sheets.rst index d8fdcfff3e6b..bd2e47fd6b26 100644 --- a/docs/apache-airflow-providers-google/operators/transfer/gcs_to_sheets.rst +++ b/docs/apache-airflow-providers-google/operators/transfer/gcs_to_sheets.rst @@ -38,7 +38,7 @@ Upload data from GCS to Google Sheets To upload data from Google Cloud Storage to a Google Spreadsheet you can use the :class:`~airflow.providers.google.suite.transfers.gcs_to_sheets.GCSToGoogleSheetsOperator`. -.. exampleinclude:: /../../tests/system/providers/google/cloud/gcs/example_gcs_to_sheets.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/gcs/example_gcs_to_sheets.py :language: python :dedent: 4 :start-after: [START upload_gcs_to_sheets] diff --git a/docs/apache-airflow-providers-google/operators/transfer/gdrive_to_gcs.rst b/docs/apache-airflow-providers-google/operators/transfer/gdrive_to_gcs.rst index c5f3a11808c8..260ec631011f 100644 --- a/docs/apache-airflow-providers-google/operators/transfer/gdrive_to_gcs.rst +++ b/docs/apache-airflow-providers-google/operators/transfer/gdrive_to_gcs.rst @@ -47,7 +47,7 @@ The following Operator copies a single file from a shared Google Drive folder to Note that you can transfer a file from the root folder of a shared drive by passing the id of the shared drive to both the ``folder_id`` and ``drive_id`` parameters. -.. exampleinclude:: /../../tests/system/providers/google/cloud/gcs/example_gdrive_to_gcs.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/gcs/example_gdrive_to_gcs.py :language: python :dedent: 4 :start-after: [START upload_gdrive_to_gcs] diff --git a/docs/apache-airflow-providers-google/operators/transfer/gdrive_to_local.rst b/docs/apache-airflow-providers-google/operators/transfer/gdrive_to_local.rst index 9cc69760a1ef..57b589199d2c 100644 --- a/docs/apache-airflow-providers-google/operators/transfer/gdrive_to_local.rst +++ b/docs/apache-airflow-providers-google/operators/transfer/gdrive_to_local.rst @@ -38,7 +38,7 @@ data from Google Drive to local filesystem. Below is an example of using this operator to download a file from Google Drive to the local filesystem.
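A minimal sketch with hypothetical folder ID, file name, and local path:

.. code-block:: python

    import datetime

    from airflow import DAG
    from airflow.providers.google.cloud.transfers.gdrive_to_local import GoogleDriveToLocalOperator

    with DAG("gdrive_to_local_sketch", start_date=datetime.datetime(2024, 1, 1), schedule=None, catchup=False):
        download_from_gdrive = GoogleDriveToLocalOperator(
            task_id="download_from_gdrive",
            folder_id="1234567890abcdef",  # placeholder Drive folder id
            file_name="example.csv",  # placeholder
            output_file="/tmp/example.csv",  # placeholder local path
        )

-.. exampleinclude:: /../../tests/system/providers/google/cloud/transfers/example_gdrive_to_local.py +.. 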
exampleinclude:: /../../providers/tests/system/google/cloud/transfers/example_gdrive_to_local.py :language: python :dedent: 4 :start-after: [START download_from_gdrive_to_local] diff --git a/docs/apache-airflow-providers-google/operators/transfer/local_to_drive.rst b/docs/apache-airflow-providers-google/operators/transfer/local_to_drive.rst index da2fefcacfe4..117a65dd1cc2 100644 --- a/docs/apache-airflow-providers-google/operators/transfer/local_to_drive.rst +++ b/docs/apache-airflow-providers-google/operators/transfer/local_to_drive.rst @@ -38,7 +38,7 @@ When you use this operator, you can upload a list of files. Below is an example of using this operator to upload data from local filesystem to Google Drive. -.. exampleinclude:: /../../tests/system/providers/google/suite/example_local_to_drive.py +.. exampleinclude:: /../../providers/tests/system/google/suite/example_local_to_drive.py :language: python :dedent: 0 :start-after: [START howto_operator_local_to_drive_upload_single_file] diff --git a/docs/apache-airflow-providers-google/operators/transfer/local_to_gcs.rst b/docs/apache-airflow-providers-google/operators/transfer/local_to_gcs.rst index e12726712beb..c7b215901c5c 100644 --- a/docs/apache-airflow-providers-google/operators/transfer/local_to_gcs.rst +++ b/docs/apache-airflow-providers-google/operators/transfer/local_to_gcs.rst @@ -38,7 +38,7 @@ When you use this operator, you can optionally compress the data being uploaded. Below is an example of using this operator to upload a file to GCS. -.. exampleinclude:: /../../tests/system/providers/google/cloud/gcs/example_gcs_upload_download.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/gcs/example_gcs_upload_download.py :language: python :dedent: 0 :start-after: [START howto_operator_local_filesystem_to_gcs] diff --git a/docs/apache-airflow-providers-google/operators/transfer/mssql_to_gcs.rst b/docs/apache-airflow-providers-google/operators/transfer/mssql_to_gcs.rst index 54ceb15ac1ea..094973306a3e 100644 --- a/docs/apache-airflow-providers-google/operators/transfer/mssql_to_gcs.rst +++ b/docs/apache-airflow-providers-google/operators/transfer/mssql_to_gcs.rst @@ -36,7 +36,7 @@ data from Microsoft SQL Server database to GCS. Below is an example of using this operator to upload data to GCS. -.. exampleinclude:: /../../tests/system/providers/google/cloud/gcs/example_mssql_to_gcs.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/gcs/example_mssql_to_gcs.py :language: python :start-after: [START howto_operator_mssql_to_gcs] :end-before: [END howto_operator_mssql_to_gcs] diff --git a/docs/apache-airflow-providers-google/operators/transfer/mysql_to_gcs.rst b/docs/apache-airflow-providers-google/operators/transfer/mysql_to_gcs.rst index 14bca637c9c5..391f9ae83722 100644 --- a/docs/apache-airflow-providers-google/operators/transfer/mysql_to_gcs.rst +++ b/docs/apache-airflow-providers-google/operators/transfer/mysql_to_gcs.rst @@ -38,7 +38,7 @@ When you use this operator, you can optionally compress the data being uploaded Below is an example of using this operator to upload data to GCS. -.. exampleinclude:: /../../tests/system/providers/google/cloud/gcs/example_mysql_to_gcs.py +.. 
exampleinclude:: /../../providers/tests/system/google/cloud/gcs/example_mysql_to_gcs.py :language: python :dedent: 0 :start-after: [START howto_operator_mysql_to_gcs] diff --git a/docs/apache-airflow-providers-google/operators/transfer/oracle_to_gcs.rst b/docs/apache-airflow-providers-google/operators/transfer/oracle_to_gcs.rst index 9760e594beef..538cbce38d65 100644 --- a/docs/apache-airflow-providers-google/operators/transfer/oracle_to_gcs.rst +++ b/docs/apache-airflow-providers-google/operators/transfer/oracle_to_gcs.rst @@ -38,7 +38,7 @@ When you use this operator, you can optionally compress the data being uploaded Below is an example of using this operator to upload data to GCS. -.. exampleinclude:: /../../tests/system/providers/google/cloud/gcs/example_oracle_to_gcs.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/gcs/example_oracle_to_gcs.py :language: python :dedent: 0 :start-after: [START howto_operator_oracle_to_gcs] diff --git a/docs/apache-airflow-providers-google/operators/transfer/postgres_to_gcs.rst b/docs/apache-airflow-providers-google/operators/transfer/postgres_to_gcs.rst index b6b01ce864a8..2a5c3fac6dda 100644 --- a/docs/apache-airflow-providers-google/operators/transfer/postgres_to_gcs.rst +++ b/docs/apache-airflow-providers-google/operators/transfer/postgres_to_gcs.rst @@ -38,7 +38,7 @@ When you use this operator, you can optionally compress the data being uploaded Below is an example of using this operator to upload data to GCS. -.. exampleinclude:: /../../tests/system/providers/google/cloud/transfers/example_postgres_to_gcs.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/transfers/example_postgres_to_gcs.py :language: python :dedent: 0 :start-after: [START howto_operator_postgres_to_gcs] diff --git a/docs/apache-airflow-providers-google/operators/transfer/presto_to_gcs.rst b/docs/apache-airflow-providers-google/operators/transfer/presto_to_gcs.rst index c462ee9abaa6..538f2dec094d 100644 --- a/docs/apache-airflow-providers-google/operators/transfer/presto_to_gcs.rst +++ b/docs/apache-airflow-providers-google/operators/transfer/presto_to_gcs.rst @@ -49,7 +49,7 @@ All parameters are described in the reference documentation - :class:`~airflow.p An example operator call might look like this: -.. exampleinclude:: /../../airflow/providers/google/cloud/example_dags/example_presto_to_gcs.py +.. exampleinclude:: /../../providers/src/airflow/providers/google/cloud/example_dags/example_presto_to_gcs.py :language: python :dedent: 4 :start-after: [START howto_operator_presto_to_gcs_basic] @@ -67,7 +67,7 @@ You can specify these options by the ``export_format`` parameter. If you want a CSV file to be created, your operator call might look like this: -.. exampleinclude:: /../../airflow/providers/google/cloud/example_dags/example_presto_to_gcs.py +.. exampleinclude:: /../../providers/src/airflow/providers/google/cloud/example_dags/example_presto_to_gcs.py :language: python :dedent: 4 :start-after: [START howto_operator_presto_to_gcs_csv] @@ -81,7 +81,7 @@ will be dumped from the database and upload to the bucket. If you want to create a schema file, then an example operator call might look like this: -.. exampleinclude:: /../../airflow/providers/google/cloud/example_dags/example_presto_to_gcs.py +.. 
exampleinclude:: /../../providers/src/airflow/providers/google/cloud/example_dags/example_presto_to_gcs.py :language: python :dedent: 4 :start-after: [START howto_operator_presto_to_gcs_multiple_types] @@ -102,7 +102,7 @@ maximum allowed file size for a single object. If you want to create 10 MB files, your code might look like this: -.. exampleinclude:: /../../airflow/providers/google/cloud/example_dags/example_presto_to_gcs.py +.. exampleinclude:: /../../providers/src/airflow/providers/google/cloud/example_dags/example_presto_to_gcs.py :language: python :dedent: 4 :start-after: [START howto_operator_read_data_from_gcs_many_chunks] @@ -123,7 +123,7 @@ For example, if you want to create an external table that allows you to create q read data directly from GCS, then you can use :class:`~airflow.providers.google.cloud.operators.bigquery.BigQueryCreateExternalTableOperator`. Using this operator looks like this: -.. exampleinclude:: /../../airflow/providers/google/cloud/example_dags/example_presto_to_gcs.py +.. exampleinclude:: /../../providers/src/airflow/providers/google/cloud/example_dags/example_presto_to_gcs.py :language: python :dedent: 4 :start-after: [START howto_operator_create_external_table_multiple_types] diff --git a/docs/apache-airflow-providers-google/operators/transfer/s3_to_gcs.rst b/docs/apache-airflow-providers-google/operators/transfer/s3_to_gcs.rst index dbd2f603bb9e..9dff4cbb6ab9 100644 --- a/docs/apache-airflow-providers-google/operators/transfer/s3_to_gcs.rst +++ b/docs/apache-airflow-providers-google/operators/transfer/s3_to_gcs.rst @@ -32,7 +32,7 @@ Prerequisite Tasks Use the :class:`~airflow.providers.google.cloud.transfers.s3_to_gcs.S3ToGCSOperator` to transfer data from Amazon S3 to Google Cloud Storage. -.. exampleinclude::/../tests/system/providers/google/cloud/gcs/example_s3_to_gcs.py +.. exampleinclude::/../providers/tests/system/google/cloud/gcs/example_s3_to_gcs.py :language: python :start-after: [START howto_transfer_s3togcs_operator] :end-before: [END howto_transfer_s3togcs_operator] @@ -42,7 +42,7 @@ There is a possibility to start S3ToGCSOperator asynchronously using deferrable Transfer Service. By changing parameter ``poll_interval=10`` you can control frequency of polling a transfer job status. -.. exampleinclude::/../tests/system/providers/google/cloud/gcs/example_s3_to_gcs.py +.. exampleinclude::/../providers/tests/system/google/cloud/gcs/example_s3_to_gcs.py :language: python :start-after: [START howto_transfer_s3togcs_operator_async] :end-before: [END howto_transfer_s3togcs_operator_async] diff --git a/docs/apache-airflow-providers-google/operators/transfer/salesforce_to_gcs.rst b/docs/apache-airflow-providers-google/operators/transfer/salesforce_to_gcs.rst index 62cb4c97c6df..94f4180f91a6 100644 --- a/docs/apache-airflow-providers-google/operators/transfer/salesforce_to_gcs.rst +++ b/docs/apache-airflow-providers-google/operators/transfer/salesforce_to_gcs.rst @@ -32,7 +32,7 @@ Use the :class:`~airflow.providers.google.cloud.transfers.salesforce_to_gcs.SalesforceToGcsOperator` to execute a Salesforce query to fetch data and load it to GCS. -.. exampleinclude:: /../../airflow/providers/google/cloud/example_dags/example_salesforce_to_gcs.py +.. 
exampleinclude:: /../../providers/src/airflow/providers/google/cloud/example_dags/example_salesforce_to_gcs.py :language: python :start-after: [START howto_operator_salesforce_to_gcs] :end-before: [END howto_operator_salesforce_to_gcs] diff --git a/docs/apache-airflow-providers-google/operators/transfer/sftp_to_gcs.rst b/docs/apache-airflow-providers-google/operators/transfer/sftp_to_gcs.rst index 0fd8016b78f4..cf4d237dd68d 100644 --- a/docs/apache-airflow-providers-google/operators/transfer/sftp_to_gcs.rst +++ b/docs/apache-airflow-providers-google/operators/transfer/sftp_to_gcs.rst @@ -46,7 +46,7 @@ Copying single files The following Operator copies a single file. -.. exampleinclude:: /../../tests/system/providers/google/cloud/gcs/example_sftp_to_gcs.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/gcs/example_sftp_to_gcs.py :language: python :dedent: 4 :start-after: [START howto_operator_sftp_to_gcs_copy_single_file] @@ -59,7 +59,7 @@ To move the file use the ``move_object`` parameter. Once the file is copied to G the original file from the SFTP is deleted. The ``destination_path`` parameter defines the full path of the file in the bucket. -.. exampleinclude:: /../../tests/system/providers/google/cloud/gcs/example_sftp_to_gcs.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/gcs/example_sftp_to_gcs.py :language: python :dedent: 4 :start-after: [START howto_operator_sftp_to_gcs_move_single_file_destination] @@ -71,7 +71,7 @@ Copying directory Use the ``wildcard`` in ``source_path`` parameter to copy the directory. -.. exampleinclude:: /../../tests/system/providers/google/cloud/gcs/example_sftp_to_gcs.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/gcs/example_sftp_to_gcs.py :language: python :dedent: 4 :start-after: [START howto_operator_sftp_to_gcs_copy_directory] @@ -87,7 +87,7 @@ e.g. ``tests_sftp_hook_dir/subdir/parent-1.bin`` is copied to ``specific_files/p and ``tests_sftp_hook_dir/subdir/parent-2.bin`` is copied to ``specific_files/parent-2.bin`` . ``tests_sftp_hook_dir/subdir/parent-3.txt`` is skipped. -.. exampleinclude:: /../../tests/system/providers/google/cloud/gcs/example_sftp_to_gcs.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/gcs/example_sftp_to_gcs.py :language: python :dedent: 4 :start-after: [START howto_operator_sftp_to_gcs_move_specific_files] diff --git a/docs/apache-airflow-providers-google/operators/transfer/sheets_to_gcs.rst b/docs/apache-airflow-providers-google/operators/transfer/sheets_to_gcs.rst index 862028c544e0..0a3389b79b50 100644 --- a/docs/apache-airflow-providers-google/operators/transfer/sheets_to_gcs.rst +++ b/docs/apache-airflow-providers-google/operators/transfer/sheets_to_gcs.rst @@ -38,7 +38,7 @@ Upload data from Google Sheets to GCS To upload data from Google Spreadsheet to Google Cloud Storage you can use the :class:`~airflow.providers.google.cloud.transfers.sheets_to_gcs.GoogleSheetsToGCSOperator`. -.. exampleinclude:: /../../tests/system/providers/google/cloud/gcs/example_sheets_to_gcs.py +.. 
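The four hunks above all point at the same system test. As a rough orientation for the copy/move semantics they document, a minimal sketch might look like the following (bucket, path and task names are placeholders, not taken from ``example_sftp_to_gcs.py``):

.. code-block:: python

    from airflow.providers.google.cloud.transfers.sftp_to_gcs import SFTPToGCSOperator

    # Copy one file; the source file is left in place on the SFTP server.
    copy_single_file = SFTPToGCSOperator(
        task_id="sftp_to_gcs_copy_single_file",
        source_path="/tmp/single-file/test.txt",
        destination_bucket="example-bucket",  # placeholder bucket name
    )

    # A wildcard in source_path transfers a whole set of files; move_object=True
    # deletes each source file after the copy, and destination_path is the
    # prefix applied to every copied file.
    move_specific_files = SFTPToGCSOperator(
        task_id="sftp_to_gcs_move_specific_files",
        source_path="/tmp/dir-with-files/*.bin",
        destination_bucket="example-bucket",
        destination_path="specific_files/",
        move_object=True,
    )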
diff --git a/docs/apache-airflow-providers-google/operators/transfer/sheets_to_gcs.rst b/docs/apache-airflow-providers-google/operators/transfer/sheets_to_gcs.rst
index 862028c544e0..0a3389b79b50 100644
--- a/docs/apache-airflow-providers-google/operators/transfer/sheets_to_gcs.rst
+++ b/docs/apache-airflow-providers-google/operators/transfer/sheets_to_gcs.rst
@@ -38,7 +38,7 @@ Upload data from Google Sheets to GCS
 To upload data from Google Spreadsheet to Google Cloud Storage you can use the
 :class:`~airflow.providers.google.cloud.transfers.sheets_to_gcs.GoogleSheetsToGCSOperator`.

-.. exampleinclude:: /../../tests/system/providers/google/cloud/gcs/example_sheets_to_gcs.py
+.. exampleinclude:: /../../providers/tests/system/google/cloud/gcs/example_sheets_to_gcs.py
     :language: python
     :dedent: 4
     :start-after: [START upload_sheet_to_gcs]
diff --git a/docs/apache-airflow-providers-google/operators/transfer/sql_to_sheets.rst b/docs/apache-airflow-providers-google/operators/transfer/sql_to_sheets.rst
index 5ce4f19f01a8..1758c0fc6a41 100644
--- a/docs/apache-airflow-providers-google/operators/transfer/sql_to_sheets.rst
+++ b/docs/apache-airflow-providers-google/operators/transfer/sql_to_sheets.rst
@@ -35,7 +35,7 @@ Upload data from SQL to Google Sheets
 To upload data from and Database using SQL to Google Spreadsheet you can use the
 :class:`~airflow.providers.google.suite.transfers.sql_to_sheets.SQLToGoogleSheetsOperator`.

-.. exampleinclude:: /../../tests/system/providers/google/cloud/sql_to_sheets/example_sql_to_sheets.py
+.. exampleinclude:: /../../providers/tests/system/google/cloud/sql_to_sheets/example_sql_to_sheets.py
     :language: python
     :dedent: 4
     :start-after: [START upload_sql_to_sheets]
diff --git a/docs/apache-airflow-providers-google/operators/transfer/trino_to_gcs.rst b/docs/apache-airflow-providers-google/operators/transfer/trino_to_gcs.rst
index a1177eaad7a5..f881dab7f77d 100644
--- a/docs/apache-airflow-providers-google/operators/transfer/trino_to_gcs.rst
+++ b/docs/apache-airflow-providers-google/operators/transfer/trino_to_gcs.rst
@@ -49,7 +49,7 @@ All parameters are described in the reference documentation - :class:`~airflow.p
 An example operator call might look like this:

-.. exampleinclude:: /../../tests/system/providers/google/cloud/gcs/example_trino_to_gcs.py
+.. exampleinclude:: /../../providers/tests/system/google/cloud/gcs/example_trino_to_gcs.py
     :language: python
     :dedent: 4
     :start-after: [START howto_operator_trino_to_gcs_basic]
@@ -67,7 +67,7 @@ You can specify these options by the ``export_format`` parameter.
 If you want a CSV file to be created, your operator call might look like this:

-.. exampleinclude:: /../../tests/system/providers/google/cloud/gcs/example_trino_to_gcs.py
+.. exampleinclude:: /../../providers/tests/system/google/cloud/gcs/example_trino_to_gcs.py
     :language: python
     :dedent: 4
     :start-after: [START howto_operator_trino_to_gcs_csv]
@@ -81,7 +81,7 @@ will be dumped from the database and upload to the bucket.
 If you want to create a schema file, then an example operator call might look like this:

-.. exampleinclude:: /../../tests/system/providers/google/cloud/gcs/example_trino_to_gcs.py
+.. exampleinclude:: /../../providers/tests/system/google/cloud/gcs/example_trino_to_gcs.py
     :language: python
     :dedent: 4
     :start-after: [START howto_operator_trino_to_gcs_multiple_types]
@@ -102,7 +102,7 @@ maximum allowed file size for a single object.
 If you want to create 10 MB files, your code might look like this:

-.. exampleinclude:: /../../tests/system/providers/google/cloud/gcs/example_trino_to_gcs.py
+.. exampleinclude:: /../../providers/tests/system/google/cloud/gcs/example_trino_to_gcs.py
     :language: python
     :dedent: 4
     :start-after: [START howto_operator_read_data_from_gcs_many_chunks]
@@ -123,7 +123,7 @@ For example, if you want to create an external table that allows you to create q
 read data directly from GCS, then you can use
 :class:`~airflow.providers.google.cloud.operators.bigquery.BigQueryCreateExternalTableOperator`.
 Using this operator looks like this:

-.. exampleinclude:: /../../tests/system/providers/google/cloud/gcs/example_trino_to_gcs.py
+.. exampleinclude:: /../../providers/tests/system/google/cloud/gcs/example_trino_to_gcs.py
     :language: python
     :dedent: 4
     :start-after: [START howto_operator_create_external_table_multiple_types]
diff --git a/docs/apache-airflow-providers-google/sensors/google-cloud-tasks.rst b/docs/apache-airflow-providers-google/sensors/google-cloud-tasks.rst
index 02dd8602e202..3e015ba06348 100644
--- a/docs/apache-airflow-providers-google/sensors/google-cloud-tasks.rst
+++ b/docs/apache-airflow-providers-google/sensors/google-cloud-tasks.rst
@@ -32,7 +32,7 @@ Google Cloud Tasks Empty Sensor
 To sense Queue being empty use
 :class:`~airflow.providers.google.cloud.sensor.tasks.TaskQueueEmptySensor`

-.. exampleinclude:: /../../airflow/providers/google/cloud/example_dags/example_cloud_task.py
+.. exampleinclude:: /../../providers/src/airflow/providers/google/cloud/example_dags/example_cloud_task.py
     :language: python
     :dedent: 4
     :start-after: [START cloud_tasks_empty_sensor]
diff --git a/docs/apache-airflow-providers-grpc/changelog.rst b/docs/apache-airflow-providers-grpc/changelog.rst
index c5b39cf9bfc9..7c93191ed4f0 100644
--- a/docs/apache-airflow-providers-grpc/changelog.rst
+++ b/docs/apache-airflow-providers-grpc/changelog.rst
@@ -22,4 +22,4 @@
 .. IF YOU WANT TO MODIFY THIS FILE, YOU SHOULD MODIFY THE TEMPLATE
    `PROVIDER_CHANGELOG_TEMPLATE.rst.jinja2` IN the `dev/breeze/src/airflow_breeze/templates` DIRECTORY

-.. include:: ../../airflow/providers/grpc/CHANGELOG.rst
+.. include:: ../../providers/src/airflow/providers/grpc/CHANGELOG.rst
diff --git a/docs/apache-airflow-providers-hashicorp/changelog.rst b/docs/apache-airflow-providers-hashicorp/changelog.rst
index 7e670c6443e3..a2ab3e789158 100644
--- a/docs/apache-airflow-providers-hashicorp/changelog.rst
+++ b/docs/apache-airflow-providers-hashicorp/changelog.rst
@@ -22,4 +22,4 @@
 .. IF YOU WANT TO MODIFY THIS FILE, YOU SHOULD MODIFY THE TEMPLATE
    `PROVIDER_CHANGELOG_TEMPLATE.rst.jinja2` IN the `dev/breeze/src/airflow_breeze/templates` DIRECTORY

-.. include:: ../../airflow/providers/hashicorp/CHANGELOG.rst
+.. include:: ../../providers/src/airflow/providers/hashicorp/CHANGELOG.rst
diff --git a/docs/apache-airflow-providers-http/changelog.rst b/docs/apache-airflow-providers-http/changelog.rst
index 31947ff163d8..631df66a668d 100644
--- a/docs/apache-airflow-providers-http/changelog.rst
+++ b/docs/apache-airflow-providers-http/changelog.rst
@@ -22,4 +22,4 @@
 .. IF YOU WANT TO MODIFY THIS FILE, YOU SHOULD MODIFY THE TEMPLATE
    `PROVIDER_CHANGELOG_TEMPLATE.rst.jinja2` IN the `dev/breeze/src/airflow_breeze/templates` DIRECTORY

-.. include:: ../../airflow/providers/http/CHANGELOG.rst
+.. include:: ../../providers/src/airflow/providers/http/CHANGELOG.rst
diff --git a/docs/apache-airflow-providers-http/index.rst b/docs/apache-airflow-providers-http/index.rst
index c99999083f0d..2e7bdf6b78d8 100644
--- a/docs/apache-airflow-providers-http/index.rst
+++ b/docs/apache-airflow-providers-http/index.rst
@@ -49,14 +49,14 @@
     :maxdepth: 1
     :caption: System tests

-    System Tests <_api/tests/system/providers/http/index>
+    System Tests <_api/tests/system/http/index>

 .. toctree::
     :hidden:
     :maxdepth: 1
     :caption: Resources

-    Example DAGs
+    Example DAGs
     PyPI Repository
     Installing from sources
diff --git a/docs/apache-airflow-providers-http/operators.rst b/docs/apache-airflow-providers-http/operators.rst
index 3f52ca0a62d5..87473dc48086 100644
--- a/docs/apache-airflow-providers-http/operators.rst
+++ b/docs/apache-airflow-providers-http/operators.rst
@@ -32,14 +32,14 @@ to ``true``.
 Here we are poking until httpbin gives us a response text containing ``httpbin``.

-.. exampleinclude:: /../../tests/system/providers/http/example_http.py
+.. exampleinclude:: /../../providers/tests/system/http/example_http.py
     :language: python
     :start-after: [START howto_operator_http_http_sensor_check]
     :end-before: [END howto_operator_http_http_sensor_check]

 This sensor can also be used in deferrable mode

-.. exampleinclude:: /../../tests/system/providers/http/example_http.py
+.. exampleinclude:: /../../providers/tests/system/http/example_http.py
     :language: python
     :start-after: [START howto_operator_http_http_sensor_check_deferrable]
     :end-before: [END howto_operator_http_http_sensor_check_deferrable]
@@ -76,14 +76,14 @@ the response text back.
 In the first example we are calling a ``POST`` with json data and succeed when we get the same json data back
 otherwise the task will fail.

-.. exampleinclude:: /../../tests/system/providers/http/example_http.py
+.. exampleinclude:: /../../providers/tests/system/http/example_http.py
     :language: python
     :start-after: [START howto_operator_http_task_post_op]
     :end-before: [END howto_operator_http_task_post_op]

 Here we are calling a ``GET`` request and pass params to it. The task will succeed regardless of the response text.

-.. exampleinclude:: /../../tests/system/providers/http/example_http.py
+.. exampleinclude:: /../../providers/tests/system/http/example_http.py
     :language: python
     :start-after: [START howto_operator_http_task_get_op]
     :end-before: [END howto_operator_http_task_get_op]
@@ -98,7 +98,7 @@ it on the next task downstream use ``response_filter``. This is useful if:
 Below is an example of retrieving data from a REST API and only returning a nested property instead of the full
 response body.

-.. exampleinclude:: /../../tests/system/providers/http/example_http.py
+.. exampleinclude:: /../../providers/tests/system/http/example_http.py
     :language: python
     :start-after: [START howto_operator_http_task_get_op_response_filter]
     :end-before: [END howto_operator_http_task_get_op_response_filter]
@@ -106,7 +106,7 @@ response body.
 In the third example we are performing a ``PUT`` operation to put / set data according to the data that is being
 provided to the request.

-.. exampleinclude:: /../../tests/system/providers/http/example_http.py
+.. exampleinclude:: /../../providers/tests/system/http/example_http.py
     :language: python
     :start-after: [START howto_operator_http_task_put_op]
     :end-before: [END howto_operator_http_task_put_op]
@@ -114,14 +114,14 @@ provided to the request.
 In this example we call a ``DELETE`` operation to the ``delete`` endpoint.
 This time we are passing form data to the request.

-.. exampleinclude:: /../../tests/system/providers/http/example_http.py
+.. exampleinclude:: /../../providers/tests/system/http/example_http.py
     :language: python
     :start-after: [START howto_operator_http_task_del_op]
     :end-before: [END howto_operator_http_task_del_op]

 Here we pass form data to a ``POST`` operation which is equal to a usual form submit.

-.. exampleinclude:: /../../tests/system/providers/http/example_http.py
+.. exampleinclude:: /../../providers/tests/system/http/example_http.py
     :language: python
     :start-after: [START howto_operator_http_task_post_op_formenc]
     :end-before: [END howto_operator_http_task_post_op_formenc]
@@ -140,7 +140,7 @@ You can write a ``pagination_function`` that will receive the raw ``request.Resp
 generate new request parameters (as ``dict``) based on this cursor.
 The HttpOperator will repeat calls to the API until the function stop returning anything.

-.. exampleinclude:: /../../tests/system/providers/http/example_http.py
+.. exampleinclude:: /../../providers/tests/system/http/example_http.py
     :language: python
     :start-after: [START howto_operator_http_pagination_function]
     :end-before: [END howto_operator_http_pagination_function]
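As a rough sketch of the contract described in that hunk — not the code in ``example_http.py`` — a ``pagination_function`` might look like this, assuming a hypothetical API that returns a ``next_cursor`` field in its JSON payload:

.. code-block:: python

    import json


    def paginate(response):
        # `response` is the raw requests.Response from the previous call.
        next_cursor = response.json().get("next_cursor")  # assumed field name
        if not next_cursor:
            return None  # a falsy result stops the operator's pagination loop
        # The returned dict overrides the parameters of the next call.
        return dict(data=json.dumps({"cursor": next_cursor}))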
diff --git a/docs/apache-airflow-providers-imap/changelog.rst b/docs/apache-airflow-providers-imap/changelog.rst
index f10bae15755c..1cfb71752efa 100644
--- a/docs/apache-airflow-providers-imap/changelog.rst
+++ b/docs/apache-airflow-providers-imap/changelog.rst
@@ -22,4 +22,4 @@
 .. IF YOU WANT TO MODIFY THIS FILE, YOU SHOULD MODIFY THE TEMPLATE
    `PROVIDER_CHANGELOG_TEMPLATE.rst.jinja2` IN the `dev/breeze/src/airflow_breeze/templates` DIRECTORY

-.. include:: ../../airflow/providers/imap/CHANGELOG.rst
+.. include:: ../../providers/src/airflow/providers/imap/CHANGELOG.rst
diff --git a/docs/apache-airflow-providers-influxdb/changelog.rst b/docs/apache-airflow-providers-influxdb/changelog.rst
index 32fe8a7a55e3..e1798cda0d09 100644
--- a/docs/apache-airflow-providers-influxdb/changelog.rst
+++ b/docs/apache-airflow-providers-influxdb/changelog.rst
@@ -22,4 +22,4 @@
 .. IF YOU WANT TO MODIFY THIS FILE, YOU SHOULD MODIFY THE TEMPLATE
    `PROVIDER_CHANGELOG_TEMPLATE.rst.jinja2` IN the `dev/breeze/src/airflow_breeze/templates` DIRECTORY

-.. include:: ../../airflow/providers/influxdb/CHANGELOG.rst
+.. include:: ../../providers/src/airflow/providers/influxdb/CHANGELOG.rst
diff --git a/docs/apache-airflow-providers-influxdb/index.rst b/docs/apache-airflow-providers-influxdb/index.rst
index 8df098b8481c..3ab429376c05 100644
--- a/docs/apache-airflow-providers-influxdb/index.rst
+++ b/docs/apache-airflow-providers-influxdb/index.rst
@@ -49,14 +49,14 @@
     :maxdepth: 1
     :caption: System tests

-    System Tests <_api/tests/system/providers/influxdb/index>
+    System Tests <_api/tests/system/influxdb/index>

 .. toctree::
     :hidden:
     :maxdepth: 1
     :caption: Resources

-    Example DAGs
+    Example DAGs
     PyPI Repository
     Installing from sources
diff --git a/docs/apache-airflow-providers-influxdb/operators/index.rst b/docs/apache-airflow-providers-influxdb/operators/index.rst
index b3d0350477dc..d05fb6d1d915 100644
--- a/docs/apache-airflow-providers-influxdb/operators/index.rst
+++ b/docs/apache-airflow-providers-influxdb/operators/index.rst
@@ -27,7 +27,7 @@ SQL commands in a `InfluxDB `__ database.
 An example of running the query using the operator:

-.. exampleinclude:: /../../tests/system/providers/influxdb/example_influxdb_query.py
+.. exampleinclude:: /../../providers/tests/system/influxdb/example_influxdb_query.py
     :language: python
     :start-after: [START howto_operator_influxdb]
     :end-before: [END howto_operator_influxdb]
diff --git a/docs/apache-airflow-providers-jdbc/changelog.rst b/docs/apache-airflow-providers-jdbc/changelog.rst
index 6697e52b7f0b..916dba68d1b1 100644
--- a/docs/apache-airflow-providers-jdbc/changelog.rst
+++ b/docs/apache-airflow-providers-jdbc/changelog.rst
@@ -22,4 +22,4 @@
 .. IF YOU WANT TO MODIFY THIS FILE, YOU SHOULD MODIFY THE TEMPLATE
    `PROVIDER_CHANGELOG_TEMPLATE.rst.jinja2` IN the `dev/breeze/src/airflow_breeze/templates` DIRECTORY

-.. include:: ../../airflow/providers/jdbc/CHANGELOG.rst
+.. include:: ../../providers/src/airflow/providers/jdbc/CHANGELOG.rst
diff --git a/docs/apache-airflow-providers-jdbc/index.rst b/docs/apache-airflow-providers-jdbc/index.rst
index e31369841cd7..3010c1e5d080 100644
--- a/docs/apache-airflow-providers-jdbc/index.rst
+++ b/docs/apache-airflow-providers-jdbc/index.rst
@@ -50,14 +50,14 @@
     :maxdepth: 1
     :caption: System tests

-    System Tests <_api/tests/system/providers/jdbc/index>
+    System Tests <_api/tests/system/jdbc/index>

 .. toctree::
     :hidden:
     :maxdepth: 1
     :caption: Resources

-    Example DAGs
+    Example DAGs
     PyPI Repository
     Installing from sources
diff --git a/docs/apache-airflow-providers-jdbc/operators.rst b/docs/apache-airflow-providers-jdbc/operators.rst
index ea7297635b91..eba54d065d0c 100644
--- a/docs/apache-airflow-providers-jdbc/operators.rst
+++ b/docs/apache-airflow-providers-jdbc/operators.rst
@@ -73,7 +73,7 @@ commands against a database (or data storage) accessible via a JDBC driver.
 The :doc:`JDBC Connection ` must be passed as ``conn_id``.

-.. exampleinclude:: /../../tests/system/providers/jdbc/example_jdbc_queries.py
+.. exampleinclude:: /../../providers/tests/system/jdbc/example_jdbc_queries.py
     :language: python
     :start-after: [START howto_operator_jdbc]
     :end-before: [END howto_operator_jdbc]
@@ -91,7 +91,7 @@ Templating
 You can use :ref:`Jinja templates ` to parameterize ``sql``.

-.. exampleinclude:: /../../tests/system/providers/jdbc/example_jdbc_queries.py
+.. exampleinclude:: /../../providers/tests/system/jdbc/example_jdbc_queries.py
     :language: python
     :start-after: [START howto_operator_jdbc_template]
     :end-before: [END howto_operator_jdbc_template]
diff --git a/docs/apache-airflow-providers-jenkins/changelog.rst b/docs/apache-airflow-providers-jenkins/changelog.rst
index 4f556ba51445..40dd4c7db5d9 100644
--- a/docs/apache-airflow-providers-jenkins/changelog.rst
+++ b/docs/apache-airflow-providers-jenkins/changelog.rst
@@ -22,4 +22,4 @@
 .. IF YOU WANT TO MODIFY THIS FILE, YOU SHOULD MODIFY THE TEMPLATE
    `PROVIDER_CHANGELOG_TEMPLATE.rst.jinja2` IN the `dev/breeze/src/airflow_breeze/templates` DIRECTORY

-.. include:: ../../airflow/providers/jenkins/CHANGELOG.rst
+.. include:: ../../providers/src/airflow/providers/jenkins/CHANGELOG.rst
diff --git a/docs/apache-airflow-providers-jenkins/index.rst b/docs/apache-airflow-providers-jenkins/index.rst
index d890406508df..1aa841a5d8f8 100644
--- a/docs/apache-airflow-providers-jenkins/index.rst
+++ b/docs/apache-airflow-providers-jenkins/index.rst
@@ -48,14 +48,14 @@
     :maxdepth: 1
     :caption: System tests

-    System Tests <_api/tests/system/providers/jenkins/index>
+    System Tests <_api/tests/system/jenkins/index>

 .. toctree::
     :hidden:
     :maxdepth: 1
     :caption: Resources

-    Example DAGs
+    Example DAGs
     PyPI Repository
     Installing from sources
diff --git a/docs/apache-airflow-providers-microsoft-azure/changelog.rst b/docs/apache-airflow-providers-microsoft-azure/changelog.rst
index 4456c720c884..8b73842cf5a9 100644
--- a/docs/apache-airflow-providers-microsoft-azure/changelog.rst
+++ b/docs/apache-airflow-providers-microsoft-azure/changelog.rst
@@ -22,4 +22,4 @@
 .. IF YOU WANT TO MODIFY THIS FILE, YOU SHOULD MODIFY THE TEMPLATE
    `PROVIDER_CHANGELOG_TEMPLATE.rst.jinja2` IN the `dev/breeze/src/airflow_breeze/templates` DIRECTORY

-.. include:: ../../airflow/providers/microsoft/azure/CHANGELOG.rst
+.. include:: ../../providers/src/airflow/providers/microsoft/azure/CHANGELOG.rst
diff --git a/docs/apache-airflow-providers-microsoft-azure/index.rst b/docs/apache-airflow-providers-microsoft-azure/index.rst
index 7a073a3b7234..5eb7c53813f3 100644
--- a/docs/apache-airflow-providers-microsoft-azure/index.rst
+++ b/docs/apache-airflow-providers-microsoft-azure/index.rst
@@ -54,14 +54,14 @@
     :maxdepth: 1
     :caption: System tests

-    System Tests <_api/tests/system/providers/microsoft/azure/index>
+    System Tests <_api/tests/system/microsoft/azure/index>

 .. toctree::
     :hidden:
     :maxdepth: 1
     :caption: Resources

-    Example DAGs
+    Example DAGs
     PyPI Repository
     Installing from sources
diff --git a/docs/apache-airflow-providers-microsoft-azure/operators/adf_run_pipeline.rst b/docs/apache-airflow-providers-microsoft-azure/operators/adf_run_pipeline.rst
index 024873d597a5..0feb5802093a 100644
--- a/docs/apache-airflow-providers-microsoft-azure/operators/adf_run_pipeline.rst
+++ b/docs/apache-airflow-providers-microsoft-azure/operators/adf_run_pipeline.rst
@@ -31,7 +31,7 @@ This functionality can be disabled for an asynchronous wait -- typically with th
 Below is an example of using this operator to execute an Azure Data Factory pipeline.

- .. exampleinclude:: /../../tests/system/providers/microsoft/azure/example_adf_run_pipeline.py
+ .. exampleinclude:: /../../providers/tests/system/microsoft/azure/example_adf_run_pipeline.py
     :language: python
     :dedent: 0
     :start-after: [START howto_operator_adf_run_pipeline]
@@ -40,7 +40,7 @@ Below is an example of using this operator to execute an Azure Data Factory pipe
 Below is an example of using this operator to execute an Azure Data Factory pipeline with a deferrable flag
 so that polling for the status of the pipeline run occurs on the Airflow Triggerer.

- .. exampleinclude:: /../../tests/system/providers/microsoft/azure/example_adf_run_pipeline.py
+ .. exampleinclude:: /../../providers/tests/system/microsoft/azure/example_adf_run_pipeline.py
     :language: python
     :dedent: 4
     :start-after: [START howto_operator_adf_run_pipeline_with_deferrable_flag]
@@ -48,7 +48,7 @@ so that polling for the status of the pipeline run occurs on the Airflow Trigger
 Here is a different example of using this operator to execute a pipeline but coupled with the
 :class:`~airflow.providers.microsoft.azure.sensors.data_factory.AzureDataFactoryPipelineRunStatusSensor` to perform an asynchronous wait.

- .. exampleinclude:: /../../tests/system/providers/microsoft/azure/example_adf_run_pipeline.py
+ .. exampleinclude:: /../../providers/tests/system/microsoft/azure/example_adf_run_pipeline.py
     :language: python
     :dedent: 0
     :start-after: [START howto_operator_adf_run_pipeline_async]
@@ -56,7 +56,7 @@ Here is a different example of using this operator to execute a pipeline but cou
 Also you can use deferrable mode in :class:`~airflow.providers.microsoft.azure.sensors.data_factory.AzureDataFactoryPipelineRunStatusSensor`
 if you would like to free up the worker slots while the sensor is running.

- .. exampleinclude:: /../../tests/system/providers/microsoft/azure/example_adf_run_pipeline.py
+ .. exampleinclude:: /../../providers/tests/system/microsoft/azure/example_adf_run_pipeline.py
     :language: python
     :dedent: 0
     :start-after: [START howto_operator_adf_run_pipeline_async]
@@ -72,7 +72,7 @@ status of a data factory pipeline run asynchronously.
 This sensor will free up the worker slots since polling for job status happens on the Airflow triggerer,
 leading to efficient utilization of resources within Airflow.

-.. exampleinclude:: /../../tests/system/providers/microsoft/azure/example_adf_run_pipeline.py
+.. exampleinclude:: /../../providers/tests/system/microsoft/azure/example_adf_run_pipeline.py
     :language: python
     :dedent: 4
     :start-after: [START howto_operator_adf_run_pipeline_async]
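For orientation, a hedged sketch of running a pipeline in deferrable mode, as described in the hunks above (pipeline, factory and resource group names are placeholders, not those used in ``example_adf_run_pipeline.py``):

.. code-block:: python

    from airflow.providers.microsoft.azure.operators.data_factory import (
        AzureDataFactoryRunPipelineOperator,
    )

    run_pipeline = AzureDataFactoryRunPipelineOperator(
        task_id="run_pipeline",
        pipeline_name="example-pipeline",      # placeholder
        resource_group_name="example-rg",      # placeholder
        factory_name="example-factory",        # placeholder
        deferrable=True,  # poll run status from the triggerer, freeing the worker slot
    )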
diff --git a/docs/apache-airflow-providers-microsoft-azure/operators/adls.rst b/docs/apache-airflow-providers-microsoft-azure/operators/adls.rst
index 24b6a3ac5603..4209bcf3096a 100644
--- a/docs/apache-airflow-providers-microsoft-azure/operators/adls.rst
+++ b/docs/apache-airflow-providers-microsoft-azure/operators/adls.rst
@@ -35,7 +35,7 @@ upload data to Azure DataLake Storage
 Below is an example of using this operator to upload data to ADL.

-.. exampleinclude:: /../../tests/system/providers/microsoft/azure/example_adls_create.py
+.. exampleinclude:: /../../providers/tests/system/microsoft/azure/example_adls_create.py
     :language: python
     :dedent: 0
     :start-after: [START howto_operator_adls_create]
@@ -52,7 +52,7 @@ file(s) from Azure DataLake Storage
 Below is an example of using this operator to delete a file from ADL.

-.. exampleinclude:: /../../tests/system/providers/microsoft/azure/example_adls_delete.py
+.. exampleinclude:: /../../providers/tests/system/microsoft/azure/example_adls_delete.py
     :language: python
     :dedent: 0
     :start-after: [START howto_operator_adls_delete]
@@ -69,7 +69,7 @@ file(s) from Azure DataLake Storage
 Below is an example of using this operator to list files from ADL.

-.. exampleinclude:: /../../tests/system/providers/microsoft/azure/example_adls_list.py
+.. exampleinclude:: /../../providers/tests/system/microsoft/azure/example_adls_list.py
     :language: python
     :dedent: 0
     :start-after: [START howto_operator_adls_list]
diff --git a/docs/apache-airflow-providers-microsoft-azure/operators/asb.rst b/docs/apache-airflow-providers-microsoft-azure/operators/asb.rst
index 9adbbddc4c92..c7cdfec9e759 100644
--- a/docs/apache-airflow-providers-microsoft-azure/operators/asb.rst
+++ b/docs/apache-airflow-providers-microsoft-azure/operators/asb.rst
@@ -41,7 +41,7 @@ To create Azure service bus queue with specific Parameter you can use
 Below is an example of using this operator to execute an Azure Service Bus Create Queue.

-.. exampleinclude:: /../../tests/system/providers/microsoft/azure/example_azure_service_bus.py
+.. exampleinclude:: /../../providers/tests/system/microsoft/azure/example_azure_service_bus.py
     :language: python
     :dedent: 4
     :start-after: [START howto_operator_create_service_bus_queue]
@@ -58,7 +58,7 @@ To Send message or list of message or batch Message to the Azure Service Bus Que
 Below is an example of using this operator to execute an Azure Service Bus Send Message to Queue.

-.. exampleinclude:: /../../tests/system/providers/microsoft/azure/example_azure_service_bus.py
+.. exampleinclude:: /../../providers/tests/system/microsoft/azure/example_azure_service_bus.py
     :language: python
     :dedent: 4
     :start-after: [START howto_operator_send_message_to_service_bus_queue]
@@ -75,7 +75,7 @@ To Receive Message or list of message or Batch message in a Queue you can use
 Below is an example of using this operator to execute an Azure Service Bus Create Queue.

-.. exampleinclude:: /../../tests/system/providers/microsoft/azure/example_azure_service_bus.py
+.. exampleinclude:: /../../providers/tests/system/microsoft/azure/example_azure_service_bus.py
     :language: python
     :dedent: 4
     :start-after: [START howto_operator_receive_message_service_bus_queue]
@@ -92,7 +92,7 @@ To Delete the Azure service bus queue you can use
 Below is an example of using this operator to execute an Azure Service Bus Delete Queue.

-.. exampleinclude:: /../../tests/system/providers/microsoft/azure/example_azure_service_bus.py
+.. exampleinclude:: /../../providers/tests/system/microsoft/azure/example_azure_service_bus.py
     :language: python
     :dedent: 4
     :start-after: [START howto_operator_delete_service_bus_queue]
@@ -113,7 +113,7 @@ To create Azure service bus topic with specific Parameter you can use
 Below is an example of using this operator to execute an Azure Service Bus Create Topic.

-.. exampleinclude:: /../../tests/system/providers/microsoft/azure/example_azure_service_bus.py
+.. exampleinclude:: /../../providers/tests/system/microsoft/azure/example_azure_service_bus.py
     :language: python
     :dedent: 4
     :start-after: [START howto_operator_create_service_bus_topic]
@@ -129,7 +129,7 @@ To Delete the Azure service bus topic you can use
 Below is an example of using this operator to execute an Azure Service Bus Delete topic.

-.. exampleinclude:: /../../tests/system/providers/microsoft/azure/example_azure_service_bus.py
+.. exampleinclude:: /../../providers/tests/system/microsoft/azure/example_azure_service_bus.py
     :language: python
     :dedent: 4
     :start-after: [START howto_operator_delete_service_bus_topic]
@@ -150,7 +150,7 @@ To create Azure service bus topic Subscription with specific Parameter you can u
 Below is an example of using this operator to execute an Azure Service Bus Create Subscription.

-.. exampleinclude:: /../../tests/system/providers/microsoft/azure/example_azure_service_bus.py
+.. exampleinclude:: /../../providers/tests/system/microsoft/azure/example_azure_service_bus.py
     :language: python
     :dedent: 4
     :start-after: [START howto_operator_create_service_bus_subscription]
@@ -166,7 +166,7 @@ To Update the Azure service bus topic Subscription which is already created, wit
 Below is an example of using this operator to execute an Azure Service Bus Update Subscription.

-.. exampleinclude:: /../../tests/system/providers/microsoft/azure/example_azure_service_bus.py
+.. exampleinclude:: /../../providers/tests/system/microsoft/azure/example_azure_service_bus.py
     :language: python
     :dedent: 4
     :start-after: [START howto_operator_update_service_bus_subscription]
@@ -182,7 +182,7 @@ To Receive a Batch messages from a Service Bus Subscription under specific Topic
 Below is an example of using this operator to execute an Azure Service Bus Receive Subscription Message.

-.. exampleinclude:: /../../tests/system/providers/microsoft/azure/example_azure_service_bus.py
+.. exampleinclude:: /../../providers/tests/system/microsoft/azure/example_azure_service_bus.py
     :language: python
     :dedent: 4
     :start-after: [START howto_operator_receive_message_service_bus_subscription]
@@ -198,7 +198,7 @@ To Delete the Azure service bus topic Subscription you can use
 Below is an example of using this operator to execute an Azure Service Bus Delete Subscription under topic.

-.. exampleinclude:: /../../tests/system/providers/microsoft/azure/example_azure_service_bus.py
+.. exampleinclude:: /../../providers/tests/system/microsoft/azure/example_azure_service_bus.py
     :language: python
     :dedent: 4
     :start-after: [START howto_operator_delete_service_bus_subscription]
diff --git a/docs/apache-airflow-providers-microsoft-azure/operators/azure_synapse.rst b/docs/apache-airflow-providers-microsoft-azure/operators/azure_synapse.rst
index 440e173d04a7..f3a2a3affd76 100644
--- a/docs/apache-airflow-providers-microsoft-azure/operators/azure_synapse.rst
+++ b/docs/apache-airflow-providers-microsoft-azure/operators/azure_synapse.rst
@@ -34,7 +34,7 @@ terminate with a "Succeeded" status.
 Below is an example of using this operator to execute a Spark application on Azure Synapse.

- .. exampleinclude:: /../../tests/system/providers/microsoft/azure/example_azure_synapse.py
+ .. exampleinclude:: /../../providers/tests/system/microsoft/azure/example_azure_synapse.py
     :language: python
     :dedent: 4
     :start-after: [START howto_operator_azure_synapse]
@@ -46,7 +46,7 @@ AzureSynapseRunPipelineOperator
 Use the: class:`~airflow.providers.microsoft.azure.operators.synapse.AzureSynapseRunPipelineOperator` to execute a pipeline application within Synapse Analytics.
 The operator will Execute a Synapse Pipeline.

-.. exampleinclude:: /../../tests/system/providers/microsoft/azure/example_synapse_run_pipeline.py
+.. exampleinclude:: /../../providers/tests/system/microsoft/azure/example_synapse_run_pipeline.py
     :language: python
     :dedent: 4
     :start-after: [START howto_operator_azure_synapse_run_pipeline]
diff --git a/docs/apache-airflow-providers-microsoft-azure/operators/batch.rst b/docs/apache-airflow-providers-microsoft-azure/operators/batch.rst
index 5fa5a3808d90..32d40ecbe5ba 100644
--- a/docs/apache-airflow-providers-microsoft-azure/operators/batch.rst
+++ b/docs/apache-airflow-providers-microsoft-azure/operators/batch.rst
@@ -26,7 +26,7 @@ Use the
 Below is an example of using this operator to trigger a task on Azure Batch

-.. exampleinclude:: /../../tests/system/providers/microsoft/azure/example_azure_batch_operator.py
+.. exampleinclude:: /../../providers/tests/system/microsoft/azure/example_azure_batch_operator.py
     :language: python
     :dedent: 0
     :start-after: [START howto_azure_batch_operator]
diff --git a/docs/apache-airflow-providers-microsoft-azure/operators/msgraph.rst b/docs/apache-airflow-providers-microsoft-azure/operators/msgraph.rst
index 56a9259f9314..a91a833855f7 100644
--- a/docs/apache-airflow-providers-microsoft-azure/operators/msgraph.rst
+++ b/docs/apache-airflow-providers-microsoft-azure/operators/msgraph.rst
@@ -34,7 +34,7 @@ Use the
 Below is an example of using this operator to get a Sharepoint site.

-.. exampleinclude:: /../../tests/system/providers/microsoft/azure/example_msgraph.py
+.. exampleinclude:: /../../providers/tests/system/microsoft/azure/example_msgraph.py
     :language: python
     :dedent: 0
     :start-after: [START howto_operator_graph_site]
@@ -42,7 +42,7 @@ Below is an example of using this operator to get a Sharepoint site.
 Below is an example of using this operator to get a Sharepoint site pages.

-.. exampleinclude:: /../../tests/system/providers/microsoft/azure/example_msgraph.py
+.. exampleinclude:: /../../providers/tests/system/microsoft/azure/example_msgraph.py
     :language: python
     :dedent: 0
     :start-after: [START howto_operator_graph_site_pages]
@@ -50,7 +50,7 @@ Below is an example of using this operator to get a Sharepoint site pages.
 Below is an example of using this operator to get PowerBI workspaces.

-.. exampleinclude:: /../../tests/system/providers/microsoft/azure/example_powerbi.py
+.. exampleinclude:: /../../providers/tests/system/microsoft/azure/example_powerbi.py
     :language: python
     :dedent: 0
     :start-after: [START howto_operator_powerbi_workspaces]
@@ -58,7 +58,7 @@ Below is an example of using this operator to get PowerBI workspaces.
 Below is an example of using this operator to get PowerBI workspaces info.

-.. exampleinclude:: /../../tests/system/providers/microsoft/azure/example_powerbi.py
+.. exampleinclude:: /../../providers/tests/system/microsoft/azure/example_powerbi.py
     :language: python
     :dedent: 0
     :start-after: [START howto_operator_powerbi_workspaces_info]
@@ -66,7 +66,7 @@ Below is an example of using this operator to get PowerBI workspaces info.
 Below is an example of using this operator to refresh PowerBI dataset.

-.. exampleinclude:: /../../tests/system/providers/microsoft/azure/example_powerbi.py
+.. exampleinclude:: /../../providers/tests/system/microsoft/azure/example_powerbi.py
     :language: python
     :dedent: 0
     :start-after: [START howto_operator_powerbi_refresh_dataset]
@@ -74,7 +74,7 @@ Below is an example of using this operator to refresh PowerBI dataset.
 Below is an example of using this operator to create an item schedule in Fabric.

-.. exampleinclude:: /../../tests/system/providers/microsoft/azure/example_msfabric.py
+.. exampleinclude:: /../../providers/tests/system/microsoft/azure/example_msfabric.py
     :language: python
     :dedent: 0
     :start-after: [START howto_operator_ms_fabric_create_item_schedule]
diff --git a/docs/apache-airflow-providers-microsoft-azure/sensors/cosmos_document_sensor.rst b/docs/apache-airflow-providers-microsoft-azure/sensors/cosmos_document_sensor.rst
index c0a7d140cc22..f15064d4852f 100644
--- a/docs/apache-airflow-providers-microsoft-azure/sensors/cosmos_document_sensor.rst
+++ b/docs/apache-airflow-providers-microsoft-azure/sensors/cosmos_document_sensor.rst
@@ -28,7 +28,7 @@ Azure Cosmos Document Sensor
 Checks for the existence of a document which matches the given query in CosmosDB.
 :class:`~airflow.providers.microsoft.azure.sensors.cosmos.AzureCosmosDocumentSensor`

-.. exampleinclude:: /../../tests/system/providers/microsoft/azure/example_azure_cosmosdb.py
+.. exampleinclude:: /../../providers/tests/system/microsoft/azure/example_azure_cosmosdb.py
     :language: python
     :dedent: 4
     :start-after: [START cosmos_document_sensor]
diff --git a/docs/apache-airflow-providers-microsoft-azure/sensors/msgraph.rst b/docs/apache-airflow-providers-microsoft-azure/sensors/msgraph.rst
index 4ddad88f19fa..f3a6e5c8c99e 100644
--- a/docs/apache-airflow-providers-microsoft-azure/sensors/msgraph.rst
+++ b/docs/apache-airflow-providers-microsoft-azure/sensors/msgraph.rst
@@ -27,7 +27,7 @@ Use the
 Below is an example of using this sensor to poll the status of a PowerBI workspace.

-.. exampleinclude:: /../../tests/system/providers/microsoft/azure/example_powerbi.py
+.. exampleinclude:: /../../providers/tests/system/microsoft/azure/example_powerbi.py
     :language: python
     :dedent: 0
     :start-after: [START howto_sensor_powerbi_scan_status]
diff --git a/docs/apache-airflow-providers-microsoft-azure/sensors/wasb_sensors.rst b/docs/apache-airflow-providers-microsoft-azure/sensors/wasb_sensors.rst
index c01adeab1a89..2f79f6df6b20 100644
--- a/docs/apache-airflow-providers-microsoft-azure/sensors/wasb_sensors.rst
+++ b/docs/apache-airflow-providers-microsoft-azure/sensors/wasb_sensors.rst
@@ -29,7 +29,7 @@ Wasb Blob Sensor
 Waits for a blob to arrive on Azure Blob Storage.
 :class:`~airflow.providers.microsoft.azure.sensors.wasb.WasbBlobSensor`

-.. exampleinclude:: /../../tests/system/providers/microsoft/azure/example_wasb_sensors.py
+.. exampleinclude:: /../../providers/tests/system/microsoft/azure/example_wasb_sensors.py
     :language: python
     :dedent: 4
     :start-after: [START wasb_blob_sensor]
@@ -41,7 +41,7 @@ Wasb Prefix Sensor
 Waits for blobs matching a prefix to arrive on Azure Blob Storage.
 :class:`~airflow.providers.microsoft.azure.sensors.wasb.WasbPrefixSensor`

-.. exampleinclude:: /../../tests/system/providers/microsoft/azure/example_wasb_sensors.py
+.. exampleinclude:: /../../providers/tests/system/microsoft/azure/example_wasb_sensors.py
     :language: python
     :dedent: 4
     :start-after: [START wasb_prefix_sensor]
diff --git a/docs/apache-airflow-providers-microsoft-azure/transfer/local_to_adls.rst b/docs/apache-airflow-providers-microsoft-azure/transfer/local_to_adls.rst
index 76a06133d21d..19f4c36d15c2 100644
--- a/docs/apache-airflow-providers-microsoft-azure/transfer/local_to_adls.rst
+++ b/docs/apache-airflow-providers-microsoft-azure/transfer/local_to_adls.rst
@@ -38,7 +38,7 @@ upload data from local filesystem to ADL.
 Below is an example of using this operator to upload a file to ADL.

-.. exampleinclude:: /../../tests/system/providers/microsoft/azure/example_local_to_adls.py
+.. exampleinclude:: /../../providers/tests/system/microsoft/azure/example_local_to_adls.py
     :language: python
     :dedent: 0
     :start-after: [START howto_operator_local_to_adls]
diff --git a/docs/apache-airflow-providers-microsoft-azure/transfer/local_to_wasb.rst b/docs/apache-airflow-providers-microsoft-azure/transfer/local_to_wasb.rst
index 970a6298342e..fed25a5797ae 100644
--- a/docs/apache-airflow-providers-microsoft-azure/transfer/local_to_wasb.rst
+++ b/docs/apache-airflow-providers-microsoft-azure/transfer/local_to_wasb.rst
@@ -36,7 +36,7 @@ upload data from local filesystem to Azure Blob Storage.
 Below is an example of using this operator to upload a file to Azure Blob Storage.

-.. exampleinclude:: /../../tests/system/providers/microsoft/azure/example_local_to_wasb.py
+.. exampleinclude:: /../../providers/tests/system/microsoft/azure/example_local_to_wasb.py
     :language: python
     :dedent: 0
     :start-after: [START howto_operator_local_to_wasb]
diff --git a/docs/apache-airflow-providers-microsoft-azure/transfer/s3_to_wasb.rst b/docs/apache-airflow-providers-microsoft-azure/transfer/s3_to_wasb.rst
index 59a394b0935f..e20661138953 100644
--- a/docs/apache-airflow-providers-microsoft-azure/transfer/s3_to_wasb.rst
+++ b/docs/apache-airflow-providers-microsoft-azure/transfer/s3_to_wasb.rst
@@ -47,7 +47,7 @@ To copy data from an Amazon AWS S3 Bucket to an Azure Blob Storage container, th
 Example usage:

-.. exampleinclude:: /../../tests/system/providers/microsoft/azure/example_s3_to_wasb.py
+.. exampleinclude:: /../../providers/tests/system/microsoft/azure/example_s3_to_wasb.py
     :language: python
     :dedent: 4
     :start-after: [START howto_transfer_s3_to_wasb]
diff --git a/docs/apache-airflow-providers-microsoft-azure/transfer/sftp_to_wasb.rst b/docs/apache-airflow-providers-microsoft-azure/transfer/sftp_to_wasb.rst
index a79500de6a3b..a4699655a3d3 100644
--- a/docs/apache-airflow-providers-microsoft-azure/transfer/sftp_to_wasb.rst
+++ b/docs/apache-airflow-providers-microsoft-azure/transfer/sftp_to_wasb.rst
@@ -54,7 +54,7 @@ To get information about jobs within a Azure Blob Storage use:
 :class:`~airflow.providers.microsoft.azure.transfers.sftp_to_wasb.SFTPToWasbOperator`
 Example usage:

-.. exampleinclude:: /../../tests/system/providers/microsoft/azure/example_sftp_to_wasb.py
+.. exampleinclude:: /../../providers/tests/system/microsoft/azure/example_sftp_to_wasb.py
     :language: python
     :dedent: 4
     :start-after: [START how_to_sftp_to_wasb]
diff --git a/docs/apache-airflow-providers-microsoft-mssql/changelog.rst b/docs/apache-airflow-providers-microsoft-mssql/changelog.rst
index d7b97646ce1a..5c5b6c990874 100644
--- a/docs/apache-airflow-providers-microsoft-mssql/changelog.rst
+++ b/docs/apache-airflow-providers-microsoft-mssql/changelog.rst
@@ -22,4 +22,4 @@
 .. IF YOU WANT TO MODIFY THIS FILE, YOU SHOULD MODIFY THE TEMPLATE
    `PROVIDER_CHANGELOG_TEMPLATE.rst.jinja2` IN the `dev/breeze/src/airflow_breeze/templates` DIRECTORY

-.. include:: ../../airflow/providers/microsoft/mssql/CHANGELOG.rst
+.. include:: ../../providers/src/airflow/providers/microsoft/mssql/CHANGELOG.rst
diff --git a/docs/apache-airflow-providers-microsoft-mssql/index.rst b/docs/apache-airflow-providers-microsoft-mssql/index.rst
index a1032303afe2..f14c3caf7f2d 100644
--- a/docs/apache-airflow-providers-microsoft-mssql/index.rst
+++ b/docs/apache-airflow-providers-microsoft-mssql/index.rst
@@ -49,14 +49,14 @@
     :maxdepth: 1
     :caption: System tests

-    System Tests <_api/tests/system/providers/microsoft/mssql/index>
+    System Tests <_api/tests/system/microsoft/mssql/index>

 .. toctree::
     :hidden:
     :maxdepth: 1
     :caption: Resources

-    Example DAGs
+    Example DAGs
     PyPI Repository
     Installing from sources
diff --git a/docs/apache-airflow-providers-microsoft-mssql/operators.rst b/docs/apache-airflow-providers-microsoft-mssql/operators.rst
index 9d2189c7719b..560e196731b5 100644
--- a/docs/apache-airflow-providers-microsoft-mssql/operators.rst
+++ b/docs/apache-airflow-providers-microsoft-mssql/operators.rst
@@ -41,7 +41,7 @@ The code snippets below are based on Airflow-2.2
 An example usage of the SQLExecuteQueryOperator to connect to MSSQL is as follows:

-.. exampleinclude:: /../../tests/system/providers/microsoft/mssql/example_mssql.py
+.. exampleinclude:: /../../providers/tests/system/microsoft/mssql/example_mssql.py
     :language: python
     :start-after: [START howto_operator_mssql]
     :end-before: [END howto_operator_mssql]
@@ -49,7 +49,7 @@ An example usage of the SQLExecuteQueryOperator to connect to MSSQL is as follow
 You can also use an external file to execute the SQL commands. Script folder must be at the same level as DAG.py file.
 This way you can easily maintain the SQL queries separated from the code.

-.. exampleinclude:: /../../tests/system/providers/microsoft/mssql/example_mssql.py
+.. exampleinclude:: /../../providers/tests/system/microsoft/mssql/example_mssql.py
     :language: python
     :start-after: [START mssql_operator_howto_guide_create_table_mssql_from_external_file]
     :end-before: [END mssql_operator_howto_guide_create_table_mssql_from_external_file]
@@ -71,7 +71,7 @@ Inserting data into a MSSQL database table
 ---------------------------------------------
 We can then create a SQLExecuteQueryOperator task that populate the ``Users`` table.

-.. exampleinclude:: /../../tests/system/providers/microsoft/mssql/example_mssql.py
+.. exampleinclude:: /../../providers/tests/system/microsoft/mssql/example_mssql.py
     :language: python
     :start-after: [START mssql_operator_howto_guide_populate_user_table]
     :end-before: [END mssql_operator_howto_guide_populate_user_table]
@@ -82,7 +82,7 @@ Fetching records from your MSSQL database table
 Fetching records from your MSSQL database table can be as simple as:

-.. exampleinclude:: /../../tests/system/providers/microsoft/mssql/example_mssql.py
+.. exampleinclude:: /../../providers/tests/system/microsoft/mssql/example_mssql.py
     :language: python
     :start-after: [START mssql_operator_howto_guide_get_all_countries]
     :end-before: [END mssql_operator_howto_guide_get_all_countries]
@@ -96,7 +96,7 @@ SQL requests during runtime.
 To find the countries in Asian continent:

-.. exampleinclude:: /../../tests/system/providers/microsoft/mssql/example_mssql.py
+.. exampleinclude:: /../../providers/tests/system/microsoft/mssql/example_mssql.py
     :language: python
     :start-after: [START mssql_operator_howto_guide_params_passing_get_query]
     :end-before: [END mssql_operator_howto_guide_params_passing_get_query]
@@ -107,7 +107,7 @@ The complete SQLExecuteQueryOperator DAG to connect to MSSQL
 When we put everything together, our DAG should look like this:

-.. exampleinclude:: /../../tests/system/providers/microsoft/mssql/example_mssql.py
+.. exampleinclude:: /../../providers/tests/system/microsoft/mssql/example_mssql.py
     :language: python
     :start-after: [START mssql_operator_howto_guide]
     :end-before: [END mssql_operator_howto_guide]
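As a rough illustration of the pattern these hunks document — inline SQL versus an external ``.sql`` script kept next to the DAG file — a minimal sketch might be (connection id, table and file names are placeholders, not those in ``example_mssql.py``):

.. code-block:: python

    from airflow.providers.common.sql.operators.sql import SQLExecuteQueryOperator

    # Inline SQL statement.
    create_table = SQLExecuteQueryOperator(
        task_id="create_table",
        conn_id="mssql_default",  # placeholder connection id
        sql="CREATE TABLE Users (username VARCHAR(50), lastname VARCHAR(50));",
    )

    # A value ending in .sql is treated as a templated script file, resolved
    # relative to the DAG file, which is why the script folder must sit at the
    # same level as the DAG.
    populate_table = SQLExecuteQueryOperator(
        task_id="populate_table",
        conn_id="mssql_default",
        sql="populate_users.sql",  # hypothetical script name
    )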
diff --git a/docs/apache-airflow-providers-microsoft-psrp/changelog.rst b/docs/apache-airflow-providers-microsoft-psrp/changelog.rst
index bba78b70734f..ed4648eded19 100644
--- a/docs/apache-airflow-providers-microsoft-psrp/changelog.rst
+++ b/docs/apache-airflow-providers-microsoft-psrp/changelog.rst
@@ -22,4 +22,4 @@
 .. IF YOU WANT TO MODIFY THIS FILE, YOU SHOULD MODIFY THE TEMPLATE
    `PROVIDER_CHANGELOG_TEMPLATE.rst.jinja2` IN the `dev/breeze/src/airflow_breeze/templates` DIRECTORY

-.. include:: ../../airflow/providers/microsoft/psrp/CHANGELOG.rst
+.. include:: ../../providers/src/airflow/providers/microsoft/psrp/CHANGELOG.rst
diff --git a/docs/apache-airflow-providers-microsoft-winrm/changelog.rst b/docs/apache-airflow-providers-microsoft-winrm/changelog.rst
index 05d79981df50..fb0faf44d10f 100644
--- a/docs/apache-airflow-providers-microsoft-winrm/changelog.rst
+++ b/docs/apache-airflow-providers-microsoft-winrm/changelog.rst
@@ -22,4 +22,4 @@
 .. IF YOU WANT TO MODIFY THIS FILE, YOU SHOULD MODIFY THE TEMPLATE
    `PROVIDER_CHANGELOG_TEMPLATE.rst.jinja2` IN the `dev/breeze/src/airflow_breeze/templates` DIRECTORY

-.. include:: ../../airflow/providers/microsoft/winrm/CHANGELOG.rst
+.. include:: ../../providers/src/airflow/providers/microsoft/winrm/CHANGELOG.rst
diff --git a/docs/apache-airflow-providers-microsoft-winrm/index.rst b/docs/apache-airflow-providers-microsoft-winrm/index.rst
index b3b7cc0afcfb..6bc6954fe11e 100644
--- a/docs/apache-airflow-providers-microsoft-winrm/index.rst
+++ b/docs/apache-airflow-providers-microsoft-winrm/index.rst
@@ -48,14 +48,14 @@
     :maxdepth: 1
     :caption: System tests

-    System Tests <_api/tests/system/providers/microsoft/winrm/index>
+    System Tests <_api/tests/system/microsoft/winrm/index>

 .. toctree::
     :hidden:
     :maxdepth: 1
     :caption: Resources

-    Example DAGs
+    Example DAGs
     PyPI Repository
     Installing from sources
diff --git a/docs/apache-airflow-providers-microsoft-winrm/operators.rst b/docs/apache-airflow-providers-microsoft-winrm/operators.rst
index e70d124f6262..2e7fdc663348 100644
--- a/docs/apache-airflow-providers-microsoft-winrm/operators.rst
+++ b/docs/apache-airflow-providers-microsoft-winrm/operators.rst
@@ -22,7 +22,7 @@ use the WinRMOperator to execute commands on a given remote host using the winrm
 create a hook

-.. exampleinclude:: /../../tests/system/providers/microsoft/winrm/example_winrm.py
+.. exampleinclude:: /../../providers/tests/system/microsoft/winrm/example_winrm.py
     :language: python
     :dedent: 4
     :start-after: [START create_hook]
@@ -30,7 +30,7 @@ create a hook
 Run the operator, pass the hook, and pass a command to do something

-.. exampleinclude:: /../../tests/system/providers/microsoft/winrm/example_winrm.py
+.. exampleinclude:: /../../providers/tests/system/microsoft/winrm/example_winrm.py
     :language: python
     :dedent: 4
     :start-after: [START run_operator]
diff --git a/docs/apache-airflow-providers-mongo/changelog.rst b/docs/apache-airflow-providers-mongo/changelog.rst
index dc2b2dead5d6..a1714d0e2b49 100644
--- a/docs/apache-airflow-providers-mongo/changelog.rst
+++ b/docs/apache-airflow-providers-mongo/changelog.rst
@@ -22,4 +22,4 @@
 .. IF YOU WANT TO MODIFY THIS FILE, YOU SHOULD MODIFY THE TEMPLATE
    `PROVIDER_CHANGELOG_TEMPLATE.rst.jinja2` IN the `dev/breeze/src/airflow_breeze/templates` DIRECTORY

-.. include:: ../../airflow/providers/mongo/CHANGELOG.rst
+.. include:: ../../providers/src/airflow/providers/mongo/CHANGELOG.rst
diff --git a/docs/apache-airflow-providers-mysql/changelog.rst b/docs/apache-airflow-providers-mysql/changelog.rst
index 40b5da4fbad9..807ca2ce0730 100644
--- a/docs/apache-airflow-providers-mysql/changelog.rst
+++ b/docs/apache-airflow-providers-mysql/changelog.rst
@@ -22,4 +22,4 @@
 .. IF YOU WANT TO MODIFY THIS FILE, YOU SHOULD MODIFY THE TEMPLATE
    `PROVIDER_CHANGELOG_TEMPLATE.rst.jinja2` IN the `dev/breeze/src/airflow_breeze/templates` DIRECTORY

-.. include:: ../../airflow/providers/mysql/CHANGELOG.rst
+.. include:: ../../providers/src/airflow/providers/mysql/CHANGELOG.rst
diff --git a/docs/apache-airflow-providers-mysql/index.rst b/docs/apache-airflow-providers-mysql/index.rst
index 622084d88957..d8d612913412 100644
--- a/docs/apache-airflow-providers-mysql/index.rst
+++ b/docs/apache-airflow-providers-mysql/index.rst
@@ -49,14 +49,14 @@
     :maxdepth: 1
     :caption: System tests

-    System Tests <_api/tests/system/providers/mysql/index>
+    System Tests <_api/tests/system/mysql/index>

 .. toctree::
     :hidden:
     :maxdepth: 1
     :caption: Resources

-    Example DAGs
+    Example DAGs
     PyPI Repository
     Installing from sources
diff --git a/docs/apache-airflow-providers-mysql/operators.rst b/docs/apache-airflow-providers-mysql/operators.rst
index 2ee7b885cd7b..55b372782297 100644
--- a/docs/apache-airflow-providers-mysql/operators.rst
+++ b/docs/apache-airflow-providers-mysql/operators.rst
@@ -53,14 +53,14 @@ the connection metadata is structured as follows:
 An example usage of the SQLExecuteQueryOperator is as follows:

-.. exampleinclude:: /../../tests/system/providers/mysql/example_mysql.py
+.. exampleinclude:: /../../providers/tests/system/mysql/example_mysql.py
     :language: python
     :start-after: [START howto_operator_mysql]
     :end-before: [END howto_operator_mysql]

 You can also use an external file to execute the SQL commands. Script folder must be at the same level as DAG.py file.

-.. exampleinclude:: /../../tests/system/providers/mysql/example_mysql.py
+.. exampleinclude:: /../../providers/tests/system/mysql/example_mysql.py
     :language: python
     :start-after: [START howto_operator_mysql_external_file]
     :end-before: [END howto_operator_mysql_external_file]
diff --git a/docs/apache-airflow-providers-neo4j/changelog.rst b/docs/apache-airflow-providers-neo4j/changelog.rst
index 124d526faa22..8b3d87f653f3 100644
--- a/docs/apache-airflow-providers-neo4j/changelog.rst
+++ b/docs/apache-airflow-providers-neo4j/changelog.rst
@@ -22,4 +22,4 @@
IF YOU WANT TO MODIFY THIS FILE, YOU SHOULD MODIFY THE TEMPLATE `PROVIDER_CHANGELOG_TEMPLATE.rst.jinja2` IN the `dev/breeze/src/airflow_breeze/templates` DIRECTORY -.. include:: ../../airflow/providers/neo4j/CHANGELOG.rst +.. include:: ../../providers/src/airflow/providers/neo4j/CHANGELOG.rst diff --git a/docs/apache-airflow-providers-neo4j/index.rst b/docs/apache-airflow-providers-neo4j/index.rst index 83b78bcd892c..1b78a2bdba8b 100644 --- a/docs/apache-airflow-providers-neo4j/index.rst +++ b/docs/apache-airflow-providers-neo4j/index.rst @@ -49,14 +49,14 @@ :maxdepth: 1 :caption: System tests - System Tests <_api/tests/system/providers/neo4j/index> + System Tests <_api/tests/system/neo4j/index> .. toctree:: :hidden: :maxdepth: 1 :caption: Resources - Example DAGs + Example DAGs PyPI Repository Installing from sources diff --git a/docs/apache-airflow-providers-neo4j/operators/neo4j.rst b/docs/apache-airflow-providers-neo4j/operators/neo4j.rst index 5bc2362e9108..14f3e3da2c93 100644 --- a/docs/apache-airflow-providers-neo4j/operators/neo4j.rst +++ b/docs/apache-airflow-providers-neo4j/operators/neo4j.rst @@ -49,7 +49,7 @@ the connection metadata is structured as follows: * - Port: int - Neo4j port -.. exampleinclude:: /../../tests/system/providers/neo4j/example_neo4j.py +.. exampleinclude:: /../../providers/tests/system/neo4j/example_neo4j.py :language: python :dedent: 4 :start-after: [START run_query_neo4j_operator] diff --git a/docs/apache-airflow-providers-odbc/changelog.rst b/docs/apache-airflow-providers-odbc/changelog.rst index 1c24fdd8fe6b..6028c91353b1 100644 --- a/docs/apache-airflow-providers-odbc/changelog.rst +++ b/docs/apache-airflow-providers-odbc/changelog.rst @@ -22,4 +22,4 @@ .. IF YOU WANT TO MODIFY THIS FILE, YOU SHOULD MODIFY THE TEMPLATE `PROVIDER_CHANGELOG_TEMPLATE.rst.jinja2` IN the `dev/breeze/src/airflow_breeze/templates` DIRECTORY -.. include:: ../../airflow/providers/odbc/CHANGELOG.rst +.. include:: ../../providers/src/airflow/providers/odbc/CHANGELOG.rst diff --git a/docs/apache-airflow-providers-openai/changelog.rst b/docs/apache-airflow-providers-openai/changelog.rst index 64206f909af4..50402f1c9929 100644 --- a/docs/apache-airflow-providers-openai/changelog.rst +++ b/docs/apache-airflow-providers-openai/changelog.rst @@ -22,4 +22,4 @@ .. IF YOU WANT TO MODIFY THIS FILE, YOU SHOULD MODIFY THE TEMPLATE `PROVIDER_CHANGELOG_TEMPLATE.rst.jinja2` IN the `dev/breeze/src/airflow_breeze/templates` DIRECTORY -.. include:: ../../airflow/providers/openai/CHANGELOG.rst +.. include:: ../../providers/src/airflow/providers/openai/CHANGELOG.rst diff --git a/docs/apache-airflow-providers-openai/index.rst b/docs/apache-airflow-providers-openai/index.rst index ac0130e9d93e..8e07d82ca6c1 100644 --- a/docs/apache-airflow-providers-openai/index.rst +++ b/docs/apache-airflow-providers-openai/index.rst @@ -52,7 +52,7 @@ :maxdepth: 1 :caption: System tests - System Tests <_api/tests/system/providers/openai/index> + System Tests <_api/tests/system/openai/index> .. THE REMAINDER OF THE FILE IS AUTOMATICALLY GENERATED. IT WILL BE OVERWRITTEN AT RELEASE TIME! diff --git a/docs/apache-airflow-providers-openai/operators/openai.rst b/docs/apache-airflow-providers-openai/operators/openai.rst index fef8521188df..d82a8058f298 100644 --- a/docs/apache-airflow-providers-openai/operators/openai.rst +++ b/docs/apache-airflow-providers-openai/operators/openai.rst @@ -32,7 +32,7 @@ connect to your account. An example using the operator is as follows: -.. exampleinclude:: /../../tests/system/providers/openai/example_openai.py +.. exampleinclude:: /../../providers/tests/system/openai/example_openai.py :language: python :start-after: [START howto_operator_openai_embedding] :end-before: [END howto_operator_openai_embedding] @@ -57,7 +57,7 @@ The OpenAITriggerBatchOperator An example using the operator is as follows: -.. exampleinclude:: /../../tests/system/providers/openai/example_trigger_batch_operator.py +.. exampleinclude:: /../../providers/tests/system/openai/example_trigger_batch_operator.py :language: python :start-after: [START howto_operator_openai_trigger_operator] :end-before: [END howto_operator_openai_trigger_operator] diff --git a/docs/apache-airflow-providers-openfaas/changelog.rst b/docs/apache-airflow-providers-openfaas/changelog.rst index 48653876678d..507f650340ba 100644 --- a/docs/apache-airflow-providers-openfaas/changelog.rst +++ b/docs/apache-airflow-providers-openfaas/changelog.rst @@ -22,4 +22,4 @@ .. IF YOU WANT TO MODIFY THIS FILE, YOU SHOULD MODIFY THE TEMPLATE `PROVIDER_CHANGELOG_TEMPLATE.rst.jinja2` IN the `dev/breeze/src/airflow_breeze/templates` DIRECTORY -.. include:: ../../airflow/providers/openfaas/CHANGELOG.rst +.. include:: ../../providers/src/airflow/providers/openfaas/CHANGELOG.rst diff --git a/docs/apache-airflow-providers-openlineage/changelog.rst b/docs/apache-airflow-providers-openlineage/changelog.rst index 9c9060dd21a8..f410ab7a2eaf 100644 --- a/docs/apache-airflow-providers-openlineage/changelog.rst +++ b/docs/apache-airflow-providers-openlineage/changelog.rst @@ -22,4 +22,4 @@ .. IF YOU WANT TO MODIFY THIS FILE, YOU SHOULD MODIFY THE TEMPLATE `PROVIDER_CHANGELOG_TEMPLATE.rst.jinja2` IN the `dev/breeze/src/airflow_breeze/templates` DIRECTORY -.. include:: ../../airflow/providers/openlineage/CHANGELOG.rst +.. include:: ../../providers/src/airflow/providers/openlineage/CHANGELOG.rst diff --git a/docs/apache-airflow-providers-openlineage/guides/developer.rst b/docs/apache-airflow-providers-openlineage/guides/developer.rst index ccab215fc184..d826ccde9a5e 100644 --- a/docs/apache-airflow-providers-openlineage/guides/developer.rst +++ b/docs/apache-airflow-providers-openlineage/guides/developer.rst @@ -290,7 +290,7 @@ To learn more about how Operators and Extractors work together under the hood, c When testing an Extractor, we first want to verify that the ``OperatorLineage`` object is being created, specifically that it is built with the correct input and output datasets and relevant facets. This is done in OpenLineage via pytest, with appropriate mocking and patching for connections and objects. -Check out `example tests `_. +Check out `example tests `_. Testing each facet is also important, as data or graphs in the UI can render incorrectly if the facets are wrong. For example, if the facet name is created incorrectly in the Extractor, then the Operator's task will not show up in the lineage graph, diff --git a/docs/apache-airflow-providers-opensearch/changelog.rst b/docs/apache-airflow-providers-opensearch/changelog.rst index 2ce00fd5fdcf..21f39a6c1f6e 100644 --- a/docs/apache-airflow-providers-opensearch/changelog.rst +++ b/docs/apache-airflow-providers-opensearch/changelog.rst @@ -22,4 +22,4 @@ .. IF YOU WANT TO MODIFY THIS FILE, YOU SHOULD MODIFY THE TEMPLATE `PROVIDER_CHANGELOG_TEMPLATE.rst.jinja2` IN the `dev/breeze/src/airflow_breeze/templates` DIRECTORY -.. include:: ../../airflow/providers/opensearch/CHANGELOG.rst +..
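As rough context for the OpenAI embedding example referenced above, a hedged sketch follows; the input texts and model name are assumptions, not values from example_openai.py:

.. code-block:: python

    from datetime import datetime

    from airflow import DAG
    from airflow.providers.openai.operators.openai import OpenAIEmbeddingOperator

    with DAG(dag_id="example_openai_sketch", start_date=datetime(2024, 1, 1), schedule=None):
        embed = OpenAIEmbeddingOperator(
            task_id="embedding_task",
            conn_id="openai_default",  # assumed default OpenAI connection id
            input_text=["airflow", "providers"],  # illustrative input
            model="text-embedding-ada-002",  # illustrative model name
        )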
include:: ../../providers/src/airflow/providers/opensearch/CHANGELOG.rst diff --git a/docs/apache-airflow-providers-opensearch/index.rst b/docs/apache-airflow-providers-opensearch/index.rst index de79657141fa..efa238c993ef 100644 --- a/docs/apache-airflow-providers-opensearch/index.rst +++ b/docs/apache-airflow-providers-opensearch/index.rst @@ -51,7 +51,7 @@ :maxdepth: 1 :caption: System tests - System Tests <_api/tests/system/providers/opensearch/index> + System Tests <_api/tests/system/opensearch/index> .. toctree:: :hidden: diff --git a/docs/apache-airflow-providers-opensearch/operators/opensearch.rst b/docs/apache-airflow-providers-opensearch/operators/opensearch.rst index cd39a59566fb..b85a014ebefe 100644 --- a/docs/apache-airflow-providers-opensearch/operators/opensearch.rst +++ b/docs/apache-airflow-providers-opensearch/operators/opensearch.rst @@ -35,7 +35,7 @@ to create a new index in an OpenSearch domain. -.. exampleinclude:: /../../tests/system/providers/opensearch/example_opensearch.py +.. exampleinclude:: /../../providers/tests/system/opensearch/example_opensearch.py :language: python :start-after: [START howto_operator_opensearch_create_index] :dedent: 4 @@ -50,7 +50,7 @@ Add a Document to an Index on OpenSearch Use :class:`~airflow.providers.opensearch.operators.opensearch.OpenSearchAddDocumentOperator` to add single documents to an OpenSearch Index -.. exampleinclude:: /../../tests/system/providers/opensearch/example_opensearch.py +.. exampleinclude:: /../../providers/tests/system/opensearch/example_opensearch.py :language: python :start-after: [START howto_operator_opensearch_add_document] :dedent: 4 @@ -65,7 +65,7 @@ Run a query against an OpenSearch Index Use :class:`~airflow.providers.opensearch.operators.opensearch.OpenSearchQueryOperator` to run a query against an OpenSearch index. -.. exampleinclude:: /../../tests/system/providers/opensearch/example_opensearch.py +.. exampleinclude:: /../../providers/tests/system/opensearch/example_opensearch.py :language: python :start-after: [START howto_operator_opensearch_query] :dedent: 4 diff --git a/docs/apache-airflow-providers-opsgenie/changelog.rst b/docs/apache-airflow-providers-opsgenie/changelog.rst index 4c9f227f1dd6..abb3ab86138c 100644 --- a/docs/apache-airflow-providers-opsgenie/changelog.rst +++ b/docs/apache-airflow-providers-opsgenie/changelog.rst @@ -22,4 +22,4 @@ .. IF YOU WANT TO MODIFY THIS FILE, YOU SHOULD MODIFY THE TEMPLATE `PROVIDER_CHANGELOG_TEMPLATE.rst.jinja2` IN the `dev/breeze/src/airflow_breeze/templates` DIRECTORY -.. include:: ../../airflow/providers/opsgenie/CHANGELOG.rst +.. include:: ../../providers/src/airflow/providers/opsgenie/CHANGELOG.rst diff --git a/docs/apache-airflow-providers-opsgenie/index.rst b/docs/apache-airflow-providers-opsgenie/index.rst index b09e294bbc85..30b35890e97c 100644 --- a/docs/apache-airflow-providers-opsgenie/index.rst +++ b/docs/apache-airflow-providers-opsgenie/index.rst @@ -50,14 +50,14 @@ :maxdepth: 1 :caption: System tests - System Tests <_api/tests/system/providers/opsgenie/index> + System Tests <_api/tests/system/opsgenie/index> .. 
toctree:: :hidden: :maxdepth: 1 :caption: Resources - Example DAGs + Example DAGs PyPI Repository Installing from sources diff --git a/docs/apache-airflow-providers-opsgenie/notifications/opsgenie_notifier.rst b/docs/apache-airflow-providers-opsgenie/notifications/opsgenie_notifier.rst index 54cff970d4a6..2172be24faa4 100644 --- a/docs/apache-airflow-providers-opsgenie/notifications/opsgenie_notifier.rst +++ b/docs/apache-airflow-providers-opsgenie/notifications/opsgenie_notifier.rst @@ -27,7 +27,7 @@ Using the Notifier ^^^^^^^^^^^^^^^^^^ Send an alert to Opsgenie with a specific message. -.. exampleinclude:: /../../tests/system/providers/opsgenie/example_opsgenie_notifier.py +.. exampleinclude:: /../../providers/tests/system/opsgenie/example_opsgenie_notifier.py :language: python :start-after: [START howto_notifier_opsgenie] :end-before: [END howto_notifier_opsgenie] diff --git a/docs/apache-airflow-providers-opsgenie/operators/opsgenie_alert.rst b/docs/apache-airflow-providers-opsgenie/operators/opsgenie_alert.rst index 5ba7cb6a8952..ebed7548c4f0 100644 --- a/docs/apache-airflow-providers-opsgenie/operators/opsgenie_alert.rst +++ b/docs/apache-airflow-providers-opsgenie/operators/opsgenie_alert.rst @@ -27,7 +27,7 @@ Using the Operator ^^^^^^^^^^^^^^^^^^ Send an alert to Opsgenie with a specific message. -.. exampleinclude:: /../../tests/system/providers/opsgenie/example_opsgenie_alert.py +.. exampleinclude:: /../../providers/tests/system/opsgenie/example_opsgenie_alert.py :language: python :start-after: [START howto_opsgenie_create_alert_operator] :end-before: [END howto_opsgenie_create_alert_operator] @@ -44,7 +44,7 @@ Using the Operator ^^^^^^^^^^^^^^^^^^ Close alert in Opsgenie. -.. exampleinclude:: /../../tests/system/providers/opsgenie/example_opsgenie_alert.py +.. exampleinclude:: /../../providers/tests/system/opsgenie/example_opsgenie_alert.py :language: python :start-after: [START howto_opsgenie_close_alert_operator] :end-before: [END howto_opsgenie_close_alert_operator] @@ -61,7 +61,7 @@ Using the Operator ^^^^^^^^^^^^^^^^^^ Delete alert in Opsgenie. -.. exampleinclude:: /../../tests/system/providers/opsgenie/example_opsgenie_alert.py +.. exampleinclude:: /../../providers/tests/system/opsgenie/example_opsgenie_alert.py :language: python :dedent: 4 :start-after: [START howto_opsgenie_delete_alert_operator] diff --git a/docs/apache-airflow-providers-oracle/changelog.rst b/docs/apache-airflow-providers-oracle/changelog.rst index 4711449e6c6c..b6cdc5037ba7 100644 --- a/docs/apache-airflow-providers-oracle/changelog.rst +++ b/docs/apache-airflow-providers-oracle/changelog.rst @@ -22,4 +22,4 @@ .. IF YOU WANT TO MODIFY THIS FILE, YOU SHOULD MODIFY THE TEMPLATE `PROVIDER_CHANGELOG_TEMPLATE.rst.jinja2` IN the `dev/breeze/src/airflow_breeze/templates` DIRECTORY -.. include:: ../../airflow/providers/oracle/CHANGELOG.rst +.. include:: ../../providers/src/airflow/providers/oracle/CHANGELOG.rst diff --git a/docs/apache-airflow-providers-oracle/operators/index.rst b/docs/apache-airflow-providers-oracle/operators/index.rst index af440c7d19b3..990cb2e6503b 100644 --- a/docs/apache-airflow-providers-oracle/operators/index.rst +++ b/docs/apache-airflow-providers-oracle/operators/index.rst @@ -30,7 +30,7 @@ To execute arbitrary SQL in an Oracle database, use the An example of executing a simple query is as follows: -.. exampleinclude:: /../../airflow/providers/oracle/example_dags/example_oracle.py +.. 
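For the create-alert call discussed above, a minimal sketch of the operator usage; the message text is a placeholder, and the operator is assumed to fall back to the default ``opsgenie_default`` connection:

.. code-block:: python

    from datetime import datetime

    from airflow import DAG
    from airflow.providers.opsgenie.operators.opsgenie import OpsgenieCreateAlertOperator

    with DAG(dag_id="example_opsgenie_sketch", start_date=datetime(2024, 1, 1), schedule=None):
        create_alert = OpsgenieCreateAlertOperator(
            task_id="opsgenie_create_alert",
            message="Example alert message",  # placeholder alert message
        )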
exampleinclude:: /../../providers/src/airflow/providers/oracle/example_dags/example_oracle.py :language: python :start-after: [START howto_oracle_operator] :end-before: [END howto_oracle_operator] @@ -58,7 +58,7 @@ a single integer argument, val_out. This can be represented with the following call using :class:`~airflow.providers.oracle.operators.oracle.OracleStoredProcedureOperator` with parameters passed positionally as a list: -.. exampleinclude:: /../../airflow/providers/oracle/example_dags/example_oracle.py +.. exampleinclude:: /../../providers/src/airflow/providers/oracle/example_dags/example_oracle.py :language: python :start-after: [START howto_oracle_stored_procedure_operator_with_list_inout] :end-before: [END howto_oracle_stored_procedure_operator_with_list_inout] @@ -67,7 +67,7 @@ with parameters passed positionally as a list: Alternatively, parameters can be passed as keyword arguments using a dictionary as well. -.. exampleinclude:: /../../airflow/providers/oracle/example_dags/example_oracle.py +.. exampleinclude:: /../../providers/src/airflow/providers/oracle/example_dags/example_oracle.py :language: python :start-after: [START howto_oracle_stored_procedure_operator_with_dict_inout] :end-before: [END howto_oracle_stored_procedure_operator_with_dict_inout] diff --git a/docs/apache-airflow-providers-pagerduty/changelog.rst b/docs/apache-airflow-providers-pagerduty/changelog.rst index ddd707b84f9f..049844772748 100644 --- a/docs/apache-airflow-providers-pagerduty/changelog.rst +++ b/docs/apache-airflow-providers-pagerduty/changelog.rst @@ -22,4 +22,4 @@ .. IF YOU WANT TO MODIFY THIS FILE, YOU SHOULD MODIFY THE TEMPLATE `PROVIDER_CHANGELOG_TEMPLATE.rst.jinja2` IN the `dev/breeze/src/airflow_breeze/templates` DIRECTORY -.. include:: ../../airflow/providers/pagerduty/CHANGELOG.rst +.. include:: ../../providers/src/airflow/providers/pagerduty/CHANGELOG.rst diff --git a/docs/apache-airflow-providers-papermill/changelog.rst b/docs/apache-airflow-providers-papermill/changelog.rst index c022939a4009..3c11af574b2f 100644 --- a/docs/apache-airflow-providers-papermill/changelog.rst +++ b/docs/apache-airflow-providers-papermill/changelog.rst @@ -22,4 +22,4 @@ .. IF YOU WANT TO MODIFY THIS FILE, YOU SHOULD MODIFY THE TEMPLATE `PROVIDER_CHANGELOG_TEMPLATE.rst.jinja2` IN the `dev/breeze/src/airflow_breeze/templates` DIRECTORY -.. include:: ../../airflow/providers/papermill/CHANGELOG.rst +.. include:: ../../providers/src/airflow/providers/papermill/CHANGELOG.rst diff --git a/docs/apache-airflow-providers-papermill/index.rst b/docs/apache-airflow-providers-papermill/index.rst index b32d63bc9c89..5217f7119e33 100644 --- a/docs/apache-airflow-providers-papermill/index.rst +++ b/docs/apache-airflow-providers-papermill/index.rst @@ -49,14 +49,14 @@ :maxdepth: 1 :caption: System tests - System Tests <_api/tests/system/providers/papermill/index> + System Tests <_api/tests/system/papermill/index> .. toctree:: :hidden: :maxdepth: 1 :caption: Resources - Example DAGs + Example DAGs PyPI Repository Installing from sources diff --git a/docs/apache-airflow-providers-papermill/operators.rst b/docs/apache-airflow-providers-papermill/operators.rst index ed1cf580c85a..fd17fba35060 100644 --- a/docs/apache-airflow-providers-papermill/operators.rst +++ b/docs/apache-airflow-providers-papermill/operators.rst @@ -50,7 +50,7 @@ Example DAG Use the :class:`~airflow.providers.papermill.operators.papermill.PapermillOperator` to execute a jupyter notebook: -.. 
exampleinclude:: /../../tests/system/providers/papermill/example_papermill.py +.. exampleinclude:: /../../providers/tests/system/papermill/example_papermill.py :language: python :dedent: 4 :start-after: [START howto_operator_papermill] @@ -58,7 +58,7 @@ to execute a jupyter notebook: Example DAG to Verify the message in the notebook: -.. exampleinclude:: /../../tests/system/providers/papermill/example_papermill_verify.py +.. exampleinclude:: /../../providers/tests/system/papermill/example_papermill_verify.py :language: python :start-after: [START howto_verify_operator_papermill] :end-before: [END howto_verify_operator_papermill] @@ -66,7 +66,7 @@ Example DAG to Verify the message in the notebook: Example DAG to Verify the message in the notebook using a remote jupyter kernel: -.. exampleinclude:: /../../tests/system/providers/papermill/example_papermill_remote_verify.py +.. exampleinclude:: /../../providers/tests/system/papermill/example_papermill_remote_verify.py :language: python :start-after: [START howto_verify_operator_papermill_remote_kernel] :end-before: [END howto_verify_operator_papermill_remote_kernel] diff --git a/docs/apache-airflow-providers-pgvector/changelog.rst b/docs/apache-airflow-providers-pgvector/changelog.rst index 9e33a9e9e606..15c377d7cfc8 100644 --- a/docs/apache-airflow-providers-pgvector/changelog.rst +++ b/docs/apache-airflow-providers-pgvector/changelog.rst @@ -22,4 +22,4 @@ .. IF YOU WANT TO MODIFY THIS FILE, YOU SHOULD MODIFY THE TEMPLATE `PROVIDER_CHANGELOG_TEMPLATE.rst.jinja2` IN the `dev/breeze/src/airflow_breeze/templates` DIRECTORY -.. include:: ../../airflow/providers/pgvector/CHANGELOG.rst +.. include:: ../../providers/src/airflow/providers/pgvector/CHANGELOG.rst diff --git a/docs/apache-airflow-providers-pgvector/index.rst b/docs/apache-airflow-providers-pgvector/index.rst index a7b280a9981e..18d69634b876 100644 --- a/docs/apache-airflow-providers-pgvector/index.rst +++ b/docs/apache-airflow-providers-pgvector/index.rst @@ -60,7 +60,7 @@ :maxdepth: 1 :caption: System tests - System Tests <_api/tests/system/providers/pgvector/index> + System Tests <_api/tests/system/pgvector/index> .. THE REMAINDER OF THE FILE IS AUTOMATICALLY GENERATED. IT WILL BE OVERWRITTEN AT RELEASE TIME! diff --git a/docs/apache-airflow-providers-pgvector/operators/pgvector.rst b/docs/apache-airflow-providers-pgvector/operators/pgvector.rst index dfee1c9fef2a..b8ef6b27fcce 100644 --- a/docs/apache-airflow-providers-pgvector/operators/pgvector.rst +++ b/docs/apache-airflow-providers-pgvector/operators/pgvector.rst @@ -36,7 +36,7 @@ See https://github.com/pgvector/pgvector#installation for installation instructi An example using the operator to ingest data is shown below: -.. exampleinclude:: /../../tests/system/providers/pgvector/example_pgvector.py +.. exampleinclude:: /../../providers/tests/system/pgvector/example_pgvector.py :language: python :start-after: [START howto_operator_pgvector_ingest] :end-before: [END howto_operator_pgvector_ingest] diff --git a/docs/apache-airflow-providers-pinecone/changelog.rst b/docs/apache-airflow-providers-pinecone/changelog.rst index 73d9f14c9a67..4d21af53ed85 100644 --- a/docs/apache-airflow-providers-pinecone/changelog.rst +++ b/docs/apache-airflow-providers-pinecone/changelog.rst @@ -22,4 +22,4 @@ .. IF YOU WANT TO MODIFY THIS FILE, YOU SHOULD MODIFY THE TEMPLATE `PROVIDER_CHANGELOG_TEMPLATE.rst.jinja2` IN the `dev/breeze/src/airflow_breeze/templates` DIRECTORY -.. include:: ../../airflow/providers/pinecone/CHANGELOG.rst +.. 
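Since the Papermill examples above are referenced only by path, here is a compact sketch of the pattern: an input notebook, a templated output path, and parameters injected into the notebook. The notebook paths are hypothetical:

.. code-block:: python

    from datetime import datetime

    from airflow import DAG
    from airflow.providers.papermill.operators.papermill import PapermillOperator

    with DAG(dag_id="example_papermill_sketch", start_date=datetime(2024, 1, 1), schedule=None):
        run_notebook = PapermillOperator(
            task_id="run_example_notebook",
            input_nb="/tmp/hello_world.ipynb",  # hypothetical source notebook
            output_nb="/tmp/out-{{ ds }}.ipynb",  # templated output path
            parameters={"msgs": "Ran from Airflow at {{ ds }}!"},  # injected into the notebook
        )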
include:: ../../providers/src/airflow/providers/pinecone/CHANGELOG.rst diff --git a/docs/apache-airflow-providers-pinecone/index.rst b/docs/apache-airflow-providers-pinecone/index.rst index ea00879975c9..8c329abde18e 100644 --- a/docs/apache-airflow-providers-pinecone/index.rst +++ b/docs/apache-airflow-providers-pinecone/index.rst @@ -52,7 +52,7 @@ :maxdepth: 1 :caption: System tests - System Tests <_api/tests/system/providers/pinecone/index> + System Tests <_api/tests/system/pinecone/index> .. THE REMAINDER OF THE FILE IS AUTOMATICALLY GENERATED. IT WILL BE OVERWRITTEN AT RELEASE TIME! diff --git a/docs/apache-airflow-providers-pinecone/operators/pinecone.rst b/docs/apache-airflow-providers-pinecone/operators/pinecone.rst index b50e5300f09a..0f8e49442fe5 100644 --- a/docs/apache-airflow-providers-pinecone/operators/pinecone.rst +++ b/docs/apache-airflow-providers-pinecone/operators/pinecone.rst @@ -36,7 +36,7 @@ the original text corresponding to the vectors that could be ingested into the d An example using the operator in this way: -.. exampleinclude:: /../../tests/system/providers/pinecone/example_dag_pinecone.py +.. exampleinclude:: /../../providers/tests/system/pinecone/example_dag_pinecone.py :language: python :dedent: 4 :start-after: [START howto_operator_pinecone_ingest] @@ -58,7 +58,7 @@ passed via arguments to the operator or via the connection. An example using the operator in this way: -.. exampleinclude:: /../../tests/system/providers/pinecone/example_create_pod_index.py +.. exampleinclude:: /../../providers/tests/system/pinecone/example_create_pod_index.py :language: python :dedent: 4 :start-after: [START howto_operator_create_pod_index] @@ -81,7 +81,7 @@ passed via arguments to the operator or via the connection. An example using the operator in this way: -.. exampleinclude:: /../../tests/system/providers/pinecone/example_create_serverless_index.py +.. exampleinclude:: /../../providers/tests/system/pinecone/example_create_serverless_index.py :language: python :dedent: 4 :start-after: [START howto_operator_create_serverless_index] diff --git a/docs/apache-airflow-providers-postgres/changelog.rst b/docs/apache-airflow-providers-postgres/changelog.rst index f82d2144e7b0..d2d34f24db82 100644 --- a/docs/apache-airflow-providers-postgres/changelog.rst +++ b/docs/apache-airflow-providers-postgres/changelog.rst @@ -22,4 +22,4 @@ .. IF YOU WANT TO MODIFY THIS FILE, YOU SHOULD MODIFY THE TEMPLATE `PROVIDER_CHANGELOG_TEMPLATE.rst.jinja2` IN the `dev/breeze/src/airflow_breeze/templates` DIRECTORY -.. include:: ../../airflow/providers/postgres/CHANGELOG.rst +.. include:: ../../providers/src/airflow/providers/postgres/CHANGELOG.rst diff --git a/docs/apache-airflow-providers-postgres/index.rst b/docs/apache-airflow-providers-postgres/index.rst index 6fbd69ba1c5a..2864276f3789 100644 --- a/docs/apache-airflow-providers-postgres/index.rst +++ b/docs/apache-airflow-providers-postgres/index.rst @@ -49,14 +49,14 @@ :maxdepth: 1 :caption: System tests - System Tests <_api/tests/system/providers/postgres/index> + System Tests <_api/tests/system/postgres/index> .. 
toctree:: :hidden: :maxdepth: 1 :caption: Resources - Example DAGs + Example DAGs PyPI Repository Installing from sources diff --git a/docs/apache-airflow-providers-postgres/operators/postgres_operator_howto_guide.rst b/docs/apache-airflow-providers-postgres/operators/postgres_operator_howto_guide.rst index 09402178aa05..d1b20dcea172 100644 --- a/docs/apache-airflow-providers-postgres/operators/postgres_operator_howto_guide.rst +++ b/docs/apache-airflow-providers-postgres/operators/postgres_operator_howto_guide.rst @@ -45,7 +45,7 @@ Creating a Postgres database table The code snippets below are based on Airflow-2.0 -.. exampleinclude:: /../../tests/system/providers/postgres/example_postgres.py +.. exampleinclude:: /../../providers/tests/system/postgres/example_postgres.py :language: python :start-after: [START postgres_sql_execute_query_operator_howto_guide] :end-before: [END postgres_sql_execute_query_operator_howto_guide_create_pet_table] @@ -181,7 +181,7 @@ SQLExecuteQueryOperator provides ``hook_params`` attribute that allows you to pa You can pass ``options`` argument this way so that you specify `command-line options `_ sent to the server at connection start. -.. exampleinclude:: /../../tests/system/providers/postgres/example_postgres.py +.. exampleinclude:: /../../providers/tests/system/postgres/example_postgres.py :language: python :start-after: [START postgres_sql_execute_query_operator_howto_guide_get_birth_date] :end-before: [END postgres_sql_execute_query_operator_howto_guide_get_birth_date] @@ -192,7 +192,7 @@ The complete Postgres Operator DAG When we put everything together, our DAG should look like this: -.. exampleinclude:: /../../tests/system/providers/postgres/example_postgres.py +.. exampleinclude:: /../../providers/tests/system/postgres/example_postgres.py :language: python :start-after: [START postgres_sql_execute_query_operator_howto_guide] :end-before: [END postgres_sql_execute_query_operator_howto_guide] diff --git a/docs/apache-airflow-providers-presto/changelog.rst b/docs/apache-airflow-providers-presto/changelog.rst index e430ef542f53..7cd5d865d7c2 100644 --- a/docs/apache-airflow-providers-presto/changelog.rst +++ b/docs/apache-airflow-providers-presto/changelog.rst @@ -22,4 +22,4 @@ .. IF YOU WANT TO MODIFY THIS FILE, YOU SHOULD MODIFY THE TEMPLATE `PROVIDER_CHANGELOG_TEMPLATE.rst.jinja2` IN the `dev/breeze/src/airflow_breeze/templates` DIRECTORY -.. include:: ../../airflow/providers/presto/CHANGELOG.rst +.. include:: ../../providers/src/airflow/providers/presto/CHANGELOG.rst diff --git a/docs/apache-airflow-providers-presto/index.rst b/docs/apache-airflow-providers-presto/index.rst index 9d814fde8d91..a67acce0f81b 100644 --- a/docs/apache-airflow-providers-presto/index.rst +++ b/docs/apache-airflow-providers-presto/index.rst @@ -49,14 +49,14 @@ :maxdepth: 1 :caption: System tests - System Tests <_api/tests/system/providers/presto/index> + System Tests <_api/tests/system/presto/index> .. 
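The Postgres guide above shows ``hook_params`` carrying libpq ``options`` into the connection; a hedged sketch of that call follows. The table, columns, and date range are illustrative, not taken from example_postgres.py:

.. code-block:: python

    from datetime import datetime

    from airflow import DAG
    from airflow.providers.common.sql.operators.sql import SQLExecuteQueryOperator

    with DAG(dag_id="example_postgres_sketch", start_date=datetime(2024, 1, 1), schedule=None):
        get_birth_date = SQLExecuteQueryOperator(
            task_id="get_birth_date",
            conn_id="postgres_default",  # assumed connection id
            sql="SELECT * FROM pet WHERE birth_date BETWEEN SYMMETRIC %(begin_date)s AND %(end_date)s",
            parameters={"begin_date": "2020-01-01", "end_date": "2020-12-31"},
            # hook_params are forwarded to the hook; ``options`` becomes
            # command-line options sent to the server at connection start.
            hook_params={"options": "-c statement_timeout=3000ms"},
        )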
toctree:: :hidden: :maxdepth: 1 :caption: Resources - Example DAGs + Example DAGs PyPI Repository Installing from sources diff --git a/docs/apache-airflow-providers-presto/operators/transfer/gcs_to_presto.rst b/docs/apache-airflow-providers-presto/operators/transfer/gcs_to_presto.rst index de717cdaee8d..d85844956039 100644 --- a/docs/apache-airflow-providers-presto/operators/transfer/gcs_to_presto.rst +++ b/docs/apache-airflow-providers-presto/operators/transfer/gcs_to_presto.rst @@ -39,7 +39,7 @@ This operator assumes that CSV does not have headers and the data is correspondi pre-existing presto table. Optionally, you can provide schema as tuple/list of strings or as a path to a JSON file in the same bucket as the CSV file. -.. exampleinclude:: /../../tests/system/providers/presto/example_gcs_to_presto.py +.. exampleinclude:: /../../providers/tests/system/presto/example_gcs_to_presto.py :language: python :dedent: 4 :start-after: [START gcs_csv_to_presto_table] diff --git a/docs/apache-airflow-providers-qdrant/changelog.rst b/docs/apache-airflow-providers-qdrant/changelog.rst index 56dd0a8b55a8..b6079158bc81 100644 --- a/docs/apache-airflow-providers-qdrant/changelog.rst +++ b/docs/apache-airflow-providers-qdrant/changelog.rst @@ -22,4 +22,4 @@ .. IF YOU WANT TO MODIFY THIS FILE, YOU SHOULD MODIFY THE TEMPLATE `PROVIDER_CHANGELOG_TEMPLATE.rst.jinja2` IN the `dev/breeze/src/airflow_breeze/templates` DIRECTORY -.. include:: ../../airflow/providers/qdrant/CHANGELOG.rst +.. include:: ../../providers/src/airflow/providers/qdrant/CHANGELOG.rst diff --git a/docs/apache-airflow-providers-qdrant/index.rst b/docs/apache-airflow-providers-qdrant/index.rst index 774dbaab014a..32ce6934c872 100644 --- a/docs/apache-airflow-providers-qdrant/index.rst +++ b/docs/apache-airflow-providers-qdrant/index.rst @@ -51,7 +51,7 @@ :maxdepth: 1 :caption: System tests - System Tests <_api/tests/system/providers/qdrant/index> + System Tests <_api/tests/system/qdrant/index> .. THE REMAINDER OF THE FILE IS AUTOMATICALLY GENERATED. IT WILL BE OVERWRITTEN AT RELEASE TIME! diff --git a/docs/apache-airflow-providers-qdrant/operators/qdrant.rst b/docs/apache-airflow-providers-qdrant/operators/qdrant.rst index 22d82d5958f1..5e7bbda10a64 100644 --- a/docs/apache-airflow-providers-qdrant/operators/qdrant.rst +++ b/docs/apache-airflow-providers-qdrant/operators/qdrant.rst @@ -33,7 +33,7 @@ the original text corresponding to the vectors that could be ingested into the d An example using the operator in this way: -.. exampleinclude:: /../../tests/system/providers/qdrant/example_dag_qdrant.py +.. exampleinclude:: /../../providers/tests/system/qdrant/example_dag_qdrant.py :language: python :dedent: 4 :start-after: [START howto_operator_qdrant_ingest] diff --git a/docs/apache-airflow-providers-redis/changelog.rst b/docs/apache-airflow-providers-redis/changelog.rst index 2fe7da2668e9..fa694a4a9b8d 100644 --- a/docs/apache-airflow-providers-redis/changelog.rst +++ b/docs/apache-airflow-providers-redis/changelog.rst @@ -22,4 +22,4 @@ .. IF YOU WANT TO MODIFY THIS FILE, YOU SHOULD MODIFY THE TEMPLATE `PROVIDER_CHANGELOG_TEMPLATE.rst.jinja2` IN the `dev/breeze/src/airflow_breeze/templates` DIRECTORY -.. include:: ../../airflow/providers/redis/CHANGELOG.rst +.. 
include:: ../../providers/src/airflow/providers/redis/CHANGELOG.rst diff --git a/docs/apache-airflow-providers-redis/index.rst b/docs/apache-airflow-providers-redis/index.rst index 1231aa2df3e9..fe9b374f6883 100644 --- a/docs/apache-airflow-providers-redis/index.rst +++ b/docs/apache-airflow-providers-redis/index.rst @@ -49,7 +49,7 @@ :maxdepth: 1 :caption: Resources - Example DAGs + Example DAGs PyPI Repository Installing from sources @@ -58,7 +58,7 @@ :maxdepth: 1 :caption: System tests - System Tests <_api/tests/system/providers/redis/index> + System Tests <_api/tests/system/redis/index> .. THE REMAINDER OF THE FILE IS AUTOMATICALLY GENERATED. IT WILL BE OVERWRITTEN AT RELEASE TIME! diff --git a/docs/apache-airflow-providers-salesforce/changelog.rst b/docs/apache-airflow-providers-salesforce/changelog.rst index 192eeeb3d5ed..ba7063ba2d81 100644 --- a/docs/apache-airflow-providers-salesforce/changelog.rst +++ b/docs/apache-airflow-providers-salesforce/changelog.rst @@ -22,4 +22,4 @@ .. IF YOU WANT TO MODIFY THIS FILE, YOU SHOULD MODIFY THE TEMPLATE `PROVIDER_CHANGELOG_TEMPLATE.rst.jinja2` IN the `dev/breeze/src/airflow_breeze/templates` DIRECTORY -.. include:: ../../airflow/providers/salesforce/CHANGELOG.rst +.. include:: ../../providers/src/airflow/providers/salesforce/CHANGELOG.rst diff --git a/docs/apache-airflow-providers-salesforce/index.rst b/docs/apache-airflow-providers-salesforce/index.rst index cf4ea7a539bb..610be66bbc27 100644 --- a/docs/apache-airflow-providers-salesforce/index.rst +++ b/docs/apache-airflow-providers-salesforce/index.rst @@ -49,14 +49,14 @@ :maxdepth: 1 :caption: System tests - System Tests <_api/tests/system/providers/salesforce/index> + System Tests <_api/tests/system/salesforce/index> .. toctree:: :hidden: :maxdepth: 1 :caption: Resources - Example DAGs + Example DAGs PyPI Repository Installing from sources diff --git a/docs/apache-airflow-providers-salesforce/operators/bulk.rst b/docs/apache-airflow-providers-salesforce/operators/bulk.rst index 037f819c3c7b..8e1bdd8415c9 100644 --- a/docs/apache-airflow-providers-salesforce/operators/bulk.rst +++ b/docs/apache-airflow-providers-salesforce/operators/bulk.rst @@ -28,7 +28,7 @@ Using the Operator You can use this operator to access Bulk Insert API: -.. exampleinclude:: /../../tests/system/providers/salesforce/example_bulk.py +.. exampleinclude:: /../../providers/tests/system/salesforce/example_bulk.py :language: python :dedent: 4 :start-after: [START howto_salesforce_bulk_insert_operation] @@ -36,7 +36,7 @@ You can use this operator to access Bulk Insert API: You can use this operator to access Bulk Update API: -.. exampleinclude:: /../../tests/system/providers/salesforce/example_bulk.py +.. exampleinclude:: /../../providers/tests/system/salesforce/example_bulk.py :language: python :dedent: 4 :start-after: [START howto_salesforce_bulk_update_operation] @@ -44,7 +44,7 @@ You can use this operator to access Bulk Update API: You can use this operator to access Bulk Upsert API: -.. exampleinclude:: /../../tests/system/providers/salesforce/example_bulk.py +.. exampleinclude:: /../../providers/tests/system/salesforce/example_bulk.py :language: python :dedent: 4 :start-after: [START howto_salesforce_bulk_upsert_operation] @@ -52,7 +52,7 @@ You can use this operator to access Bulk Upsert API: You can use this operator to access Bulk Delete API: -.. exampleinclude:: /../../tests/system/providers/salesforce/example_bulk.py +.. 
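The Salesforce bulk guide above covers insert, update, upsert, and delete; a single hedged sketch of the insert case stands in for all four (object name, payload, and operation values are assumptions):

.. code-block:: python

    from datetime import datetime

    from airflow import DAG
    from airflow.providers.salesforce.operators.bulk import SalesforceBulkOperator

    with DAG(dag_id="example_salesforce_bulk_sketch", start_date=datetime(2024, 1, 1), schedule=None):
        bulk_insert = SalesforceBulkOperator(
            task_id="bulk_insert",
            operation="insert",  # the other guides swap in update, upsert, or delete
            object_name="Account",  # illustrative Salesforce object
            payload=[{"Name": "Example Account"}],  # illustrative records
        )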
exampleinclude:: /../../providers/tests/system/salesforce/example_bulk.py :language: python :dedent: 4 :start-after: [START howto_salesforce_bulk_delete_operation] diff --git a/docs/apache-airflow-providers-salesforce/operators/salesforce_apex_rest.rst b/docs/apache-airflow-providers-salesforce/operators/salesforce_apex_rest.rst index 00b4b4c64343..372a4efff538 100644 --- a/docs/apache-airflow-providers-salesforce/operators/salesforce_apex_rest.rst +++ b/docs/apache-airflow-providers-salesforce/operators/salesforce_apex_rest.rst @@ -30,7 +30,7 @@ You can also use this library to call custom Apex methods: This would call the endpoint ``https://.salesforce.com/services/apexrest/User/Activity`` with ``payload`` as the body content encoded with ``json.dumps`` -.. exampleinclude:: /../../tests/system/providers/salesforce/example_salesforce_apex_rest.py +.. exampleinclude:: /../../providers/tests/system/salesforce/example_salesforce_apex_rest.py :language: python :start-after: [START howto_salesforce_apex_rest_operator] :end-before: [END howto_salesforce_apex_rest_operator] diff --git a/docs/apache-airflow-providers-samba/changelog.rst b/docs/apache-airflow-providers-samba/changelog.rst index e22abbadb41c..8bcfbb1b1f01 100644 --- a/docs/apache-airflow-providers-samba/changelog.rst +++ b/docs/apache-airflow-providers-samba/changelog.rst @@ -22,4 +22,4 @@ .. IF YOU WANT TO MODIFY THIS FILE, YOU SHOULD MODIFY THE TEMPLATE `PROVIDER_CHANGELOG_TEMPLATE.rst.jinja2` IN the `dev/breeze/src/airflow_breeze/templates` DIRECTORY -.. include:: ../../airflow/providers/samba/CHANGELOG.rst +.. include:: ../../providers/src/airflow/providers/samba/CHANGELOG.rst diff --git a/docs/apache-airflow-providers-samba/index.rst b/docs/apache-airflow-providers-samba/index.rst index a252f74481e8..11ac45f8c1d9 100644 --- a/docs/apache-airflow-providers-samba/index.rst +++ b/docs/apache-airflow-providers-samba/index.rst @@ -57,7 +57,7 @@ :maxdepth: 1 :caption: System tests - System Tests <_api/tests/system/providers/samba/index> + System Tests <_api/tests/system/samba/index> .. THE REMAINDER OF THE FILE IS AUTOMATICALLY GENERATED. IT WILL BE OVERWRITTEN AT RELEASE TIME! diff --git a/docs/apache-airflow-providers-samba/transfer/gcs_to_samba.rst b/docs/apache-airflow-providers-samba/transfer/gcs_to_samba.rst index c63f468bd665..86f6de1926ef 100644 --- a/docs/apache-airflow-providers-samba/transfer/gcs_to_samba.rst +++ b/docs/apache-airflow-providers-samba/transfer/gcs_to_samba.rst @@ -42,7 +42,7 @@ Copying a single file The following Operator copies a single file. -.. exampleinclude:: /../../tests/system/providers/samba/example_gcs_to_samba.py +.. exampleinclude:: /../../providers/tests/system/samba/example_gcs_to_samba.py :language: python :dedent: 4 :start-after: [START howto_operator_gcs_to_samba_copy_single_file] @@ -55,7 +55,7 @@ To move the file use the ``move_object`` parameter. Once the file is copied to S the original file from the Google Storage is deleted. The ``destination_path`` parameter defines the full path of the file on the Samba server. -.. exampleinclude:: /../../tests/system/providers/samba/example_gcs_to_samba.py +.. exampleinclude:: /../../providers/tests/system/samba/example_gcs_to_samba.py :language: python :dedent: 4 :start-after: [START howto_operator_gcs_to_samba_move_single_file_destination] @@ -67,7 +67,7 @@ Copying a directory Use the ``wildcard`` in ``source_path`` parameter to copy a directory. -.. exampleinclude:: /../../tests/system/providers/samba/example_gcs_to_samba.py +.. 
exampleinclude:: /../../providers/tests/system/samba/example_gcs_to_samba.py :language: python :dedent: 4 :start-after: [START howto_operator_gcs_to_samba_copy_directory] @@ -79,7 +79,7 @@ Moving specific files Use the ``wildcard`` in ``source_path`` parameter to move the specific files. The ``destination_path`` defines the path that is prefixed to all copied files. -.. exampleinclude:: /../../tests/system/providers/samba/example_gcs_to_samba.py +.. exampleinclude:: /../../providers/tests/system/samba/example_gcs_to_samba.py :language: python :dedent: 4 :start-after: [START howto_operator_gcs_to_samba_move_specific_files] diff --git a/docs/apache-airflow-providers-segment/changelog.rst b/docs/apache-airflow-providers-segment/changelog.rst index 9a9e27c11197..b8eafd739d1f 100644 --- a/docs/apache-airflow-providers-segment/changelog.rst +++ b/docs/apache-airflow-providers-segment/changelog.rst @@ -22,4 +22,4 @@ .. IF YOU WANT TO MODIFY THIS FILE, YOU SHOULD MODIFY THE TEMPLATE `PROVIDER_CHANGELOG_TEMPLATE.rst.jinja2` IN the `dev/breeze/src/airflow_breeze/templates` DIRECTORY -.. include:: ../../airflow/providers/segment/CHANGELOG.rst +.. include:: ../../providers/src/airflow/providers/segment/CHANGELOG.rst diff --git a/docs/apache-airflow-providers-sendgrid/changelog.rst b/docs/apache-airflow-providers-sendgrid/changelog.rst index 43ddc029119e..913b7477b13a 100644 --- a/docs/apache-airflow-providers-sendgrid/changelog.rst +++ b/docs/apache-airflow-providers-sendgrid/changelog.rst @@ -22,4 +22,4 @@ .. IF YOU WANT TO MODIFY THIS FILE, YOU SHOULD MODIFY THE TEMPLATE `PROVIDER_CHANGELOG_TEMPLATE.rst.jinja2` IN the `dev/breeze/src/airflow_breeze/templates` DIRECTORY -.. include:: ../../airflow/providers/sendgrid/CHANGELOG.rst +.. include:: ../../providers/src/airflow/providers/sendgrid/CHANGELOG.rst diff --git a/docs/apache-airflow-providers-sftp/changelog.rst b/docs/apache-airflow-providers-sftp/changelog.rst index 2ed184d07da7..3915d213a711 100644 --- a/docs/apache-airflow-providers-sftp/changelog.rst +++ b/docs/apache-airflow-providers-sftp/changelog.rst @@ -22,4 +22,4 @@ .. IF YOU WANT TO MODIFY THIS FILE, YOU SHOULD MODIFY THE TEMPLATE `PROVIDER_CHANGELOG_TEMPLATE.rst.jinja2` IN the `dev/breeze/src/airflow_breeze/templates` DIRECTORY -.. include:: ../../airflow/providers/sftp/CHANGELOG.rst +.. include:: ../../providers/src/airflow/providers/sftp/CHANGELOG.rst diff --git a/docs/apache-airflow-providers-sftp/index.rst b/docs/apache-airflow-providers-sftp/index.rst index 9acb9773fadc..a69f41a67880 100644 --- a/docs/apache-airflow-providers-sftp/index.rst +++ b/docs/apache-airflow-providers-sftp/index.rst @@ -51,7 +51,7 @@ :maxdepth: 1 :caption: System tests - System Tests <_api/tests/system/providers/sftp/index> + System Tests <_api/tests/system/sftp/index> .. THE REMAINDER OF THE FILE IS AUTOMATICALLY GENERATED. IT WILL BE OVERWRITTEN AT RELEASE TIME! diff --git a/docs/apache-airflow-providers-sftp/sensors/sftp_sensor.rst b/docs/apache-airflow-providers-sftp/sensors/sftp_sensor.rst index b99bc36672d3..32f2a20696a7 100644 --- a/docs/apache-airflow-providers-sftp/sensors/sftp_sensor.rst +++ b/docs/apache-airflow-providers-sftp/sensors/sftp_sensor.rst @@ -21,7 +21,7 @@ SFTP Sensor Looks for either a specific file or files with a specific pattern in a server using SFTP protocol. To get more information about this sensor visit :class:`~airflow.providers.sftp.sensors.sftp.SFTPSensor` -.. exampleinclude:: /../../tests/system/providers/sftp/example_sftp_sensor.py +.. 
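For the SFTP sensor just introduced, a minimal sketch of the plain (non-decorator) usage; the remote path and poke interval are placeholders, not values from example_sftp_sensor.py:

.. code-block:: python

    from datetime import datetime

    from airflow import DAG
    from airflow.providers.sftp.sensors.sftp import SFTPSensor

    with DAG(dag_id="example_sftp_sensor_sketch", start_date=datetime(2024, 1, 1), schedule=None):
        wait_for_file = SFTPSensor(
            task_id="wait_for_remote_file",
            sftp_conn_id="sftp_default",  # assumed connection id
            path="/tmp/sample.csv",  # hypothetical remote path
            poke_interval=10,  # recheck every 10 seconds
        )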
exampleinclude:: /../../providers/tests/system/sftp/example_sftp_sensor.py :language: python :dedent: 4 :start-after: [START howto_operator_sftp_sensor] @@ -39,7 +39,7 @@ op_kwargs (optional) Whatever returned by the python callable is put into XCom. -.. exampleinclude:: /../../tests/system/providers/sftp/example_sftp_sensor.py +.. exampleinclude:: /../../providers/tests/system/sftp/example_sftp_sensor.py :language: python :dedent: 4 :start-after: [START howto_operator_sftp_sensor_decorator] @@ -47,7 +47,7 @@ Whatever returned by the python callable is put into XCom. Checks for the existence of a file on an SFTP server in the deferrable mode: -.. exampleinclude:: /../../tests/system/providers/sftp/example_sftp_sensor.py +.. exampleinclude:: /../../providers/tests/system/sftp/example_sftp_sensor.py :language: python :dedent: 4 :start-after: [START howto_sensor_sftp_deferrable] diff --git a/docs/apache-airflow-providers-singularity/changelog.rst b/docs/apache-airflow-providers-singularity/changelog.rst index 1da16678d8e1..eaa3226835fd 100644 --- a/docs/apache-airflow-providers-singularity/changelog.rst +++ b/docs/apache-airflow-providers-singularity/changelog.rst @@ -22,4 +22,4 @@ .. IF YOU WANT TO MODIFY THIS FILE, YOU SHOULD MODIFY THE TEMPLATE `PROVIDER_CHANGELOG_TEMPLATE.rst.jinja2` IN the `dev/breeze/src/airflow_breeze/templates` DIRECTORY -.. include:: ../../airflow/providers/singularity/CHANGELOG.rst +.. include:: ../../providers/src/airflow/providers/singularity/CHANGELOG.rst diff --git a/docs/apache-airflow-providers-singularity/index.rst b/docs/apache-airflow-providers-singularity/index.rst index 3dd7c559bc83..656a7a64a27a 100644 --- a/docs/apache-airflow-providers-singularity/index.rst +++ b/docs/apache-airflow-providers-singularity/index.rst @@ -41,14 +41,14 @@ :maxdepth: 1 :caption: System tests - System Tests <_api/tests/system/providers/singularity/index> + System Tests <_api/tests/system/singularity/index> .. toctree:: :hidden: :maxdepth: 1 :caption: Resources - Example DAGs + Example DAGs PyPI Repository Installing from sources diff --git a/docs/apache-airflow-providers-slack/changelog.rst b/docs/apache-airflow-providers-slack/changelog.rst index ec3a2f20d657..75fb0c94dfe7 100644 --- a/docs/apache-airflow-providers-slack/changelog.rst +++ b/docs/apache-airflow-providers-slack/changelog.rst @@ -22,4 +22,4 @@ .. IF YOU WANT TO MODIFY THIS FILE, YOU SHOULD MODIFY THE TEMPLATE `PROVIDER_CHANGELOG_TEMPLATE.rst.jinja2` IN the `dev/breeze/src/airflow_breeze/templates` DIRECTORY -.. include:: ../../airflow/providers/slack/CHANGELOG.rst +.. include:: ../../providers/src/airflow/providers/slack/CHANGELOG.rst diff --git a/docs/apache-airflow-providers-slack/index.rst b/docs/apache-airflow-providers-slack/index.rst index f9e6e02e9fd1..7fca57690168 100644 --- a/docs/apache-airflow-providers-slack/index.rst +++ b/docs/apache-airflow-providers-slack/index.rst @@ -34,7 +34,7 @@ :maxdepth: 1 :caption: System tests - System Tests <_api/tests/system/providers/slack/index> + System Tests <_api/tests/system/slack/index> .. toctree:: :hidden: @@ -51,7 +51,7 @@ :caption: References Python API <_api/airflow/providers/slack/index> - Example DAGs + Example DAGs .. 
toctree:: :hidden: diff --git a/docs/apache-airflow-providers-slack/operators/slack_api.rst b/docs/apache-airflow-providers-slack/operators/slack_api.rst index 6fafa46f7941..7ab98f6b7eb8 100644 --- a/docs/apache-airflow-providers-slack/operators/slack_api.rst +++ b/docs/apache-airflow-providers-slack/operators/slack_api.rst @@ -34,7 +34,7 @@ Using the Operator You could send a simple text message -.. exampleinclude:: /../../tests/system/providers/slack/example_slack.py +.. exampleinclude:: /../../providers/tests/system/slack/example_slack.py :language: python :dedent: 4 :start-after: [START slack_api_post_operator_text_howto_guide] @@ -43,7 +43,7 @@ You could send a simple text message Or you could use `Block Kit `_ to create app layouts -.. exampleinclude:: /../../tests/system/providers/slack/example_slack.py +.. exampleinclude:: /../../providers/tests/system/slack/example_slack.py :language: python :dedent: 4 :start-after: [START slack_api_post_operator_blocks_howto_guide] @@ -81,7 +81,7 @@ Using the Operator You could send a file attachment by specifying a file path -.. exampleinclude:: /../../tests/system/providers/slack/example_slack.py +.. exampleinclude:: /../../providers/tests/system/slack/example_slack.py :language: python :start-after: [START slack_api_file_operator_howto_guide] :end-before: [END slack_api_file_operator_howto_guide] @@ -89,7 +89,7 @@ You could send a file attachment by specifying a file path Or by directly providing file contents -.. exampleinclude:: /../../tests/system/providers/slack/example_slack.py +.. exampleinclude:: /../../providers/tests/system/slack/example_slack.py :language: python :start-after: [START slack_api_file_operator_content_howto_guide] :end-before: [END slack_api_file_operator_content_howto_guide] diff --git a/docs/apache-airflow-providers-slack/operators/slack_webhook.rst b/docs/apache-airflow-providers-slack/operators/slack_webhook.rst index aded2a76bb7c..0b599312ba61 100644 --- a/docs/apache-airflow-providers-slack/operators/slack_webhook.rst +++ b/docs/apache-airflow-providers-slack/operators/slack_webhook.rst @@ -30,7 +30,7 @@ Using the Operator You could send a simple text message -.. exampleinclude:: /../../tests/system/providers/slack/example_slack_webhook.py +.. exampleinclude:: /../../providers/tests/system/slack/example_slack_webhook.py :language: python :dedent: 4 :start-after: [START slack_webhook_operator_text_howto_guide] @@ -39,7 +39,7 @@ You could send a simple text message Or you could use `Block Kit `_ to create app layouts -.. exampleinclude:: /../../tests/system/providers/slack/example_slack_webhook.py +.. exampleinclude:: /../../providers/tests/system/slack/example_slack_webhook.py :language: python :dedent: 4 :start-after: [START slack_webhook_operator_blocks_howto_guide] diff --git a/docs/apache-airflow-providers-slack/operators/sql_to_slack.rst b/docs/apache-airflow-providers-slack/operators/sql_to_slack.rst index 3182fd7e0b78..5da6a2bd593d 100644 --- a/docs/apache-airflow-providers-slack/operators/sql_to_slack.rst +++ b/docs/apache-airflow-providers-slack/operators/sql_to_slack.rst @@ -51,7 +51,7 @@ This operator will execute a custom query in the provided SQL connection and pub An example usage of the SqlToSlackApiFileOperator is as follows: -..
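The simple-text case from the Slack API guide above can be sketched as follows; the connection id, channel, and message are placeholders, not values from example_slack.py:

.. code-block:: python

    from datetime import datetime

    from airflow import DAG
    from airflow.providers.slack.operators.slack import SlackAPIPostOperator

    with DAG(dag_id="example_slack_sketch", start_date=datetime(2024, 1, 1), schedule=None):
        post_message = SlackAPIPostOperator(
            task_id="slack_post_text",
            slack_conn_id="slack_api_default",  # assumed connection id
            channel="#general",  # hypothetical channel
            text="Hello from Airflow!",  # placeholder message
        )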
exampleinclude:: /../../providers/tests/system/slack/example_sql_to_slack.py :language: python :dedent: 4 :start-after: [START howto_operator_sql_to_slack_api_file] diff --git a/docs/apache-airflow-providers-slack/operators/sql_to_slack_webhook.rst b/docs/apache-airflow-providers-slack/operators/sql_to_slack_webhook.rst index 3ad72e2a9ad7..aa46fe7b0350 100644 --- a/docs/apache-airflow-providers-slack/operators/sql_to_slack_webhook.rst +++ b/docs/apache-airflow-providers-slack/operators/sql_to_slack_webhook.rst @@ -31,7 +31,7 @@ and contain the resulting dataset (e.g. ASCII formatted dataframe). An example usage of the SqlToSlackWebhookOperator is as follows: -.. exampleinclude:: /../../tests/system/providers/slack/example_sql_to_slack_webhook.py +.. exampleinclude:: /../../providers/tests/system/slack/example_sql_to_slack_webhook.py :language: python :dedent: 4 :start-after: [START howto_operator_sql_to_slack_webhook] diff --git a/docs/apache-airflow-providers-smtp/changelog.rst b/docs/apache-airflow-providers-smtp/changelog.rst index 105ae3011563..3ada8a5cf420 100644 --- a/docs/apache-airflow-providers-smtp/changelog.rst +++ b/docs/apache-airflow-providers-smtp/changelog.rst @@ -22,4 +22,4 @@ .. IF YOU WANT TO MODIFY THIS FILE, YOU SHOULD MODIFY THE TEMPLATE `PROVIDER_CHANGELOG_TEMPLATE.rst.jinja2` IN the `dev/breeze/src/airflow_breeze/templates` DIRECTORY -.. include:: ../../airflow/providers/smtp/CHANGELOG.rst +.. include:: ../../providers/src/airflow/providers/smtp/CHANGELOG.rst diff --git a/docs/apache-airflow-providers-snowflake/changelog.rst b/docs/apache-airflow-providers-snowflake/changelog.rst index 6ccd3e61dc37..932a260df718 100644 --- a/docs/apache-airflow-providers-snowflake/changelog.rst +++ b/docs/apache-airflow-providers-snowflake/changelog.rst @@ -22,4 +22,4 @@ .. IF YOU WANT TO MODIFY THIS FILE, YOU SHOULD MODIFY THE TEMPLATE `PROVIDER_CHANGELOG_TEMPLATE.rst.jinja2` IN the `dev/breeze/src/airflow_breeze/templates` DIRECTORY -.. include:: ../../airflow/providers/snowflake/CHANGELOG.rst +.. include:: ../../providers/src/airflow/providers/snowflake/CHANGELOG.rst diff --git a/docs/apache-airflow-providers-snowflake/decorators/snowpark.rst b/docs/apache-airflow-providers-snowflake/decorators/snowpark.rst index 09be01e3efc8..947cfec52b5e 100644 --- a/docs/apache-airflow-providers-snowflake/decorators/snowpark.rst +++ b/docs/apache-airflow-providers-snowflake/decorators/snowpark.rst @@ -51,7 +51,7 @@ Use the ``snowflake_conn_id`` argument to specify connection used. If not specif An example usage of the ``@task.snowpark`` is as follows: -.. exampleinclude:: /../../tests/system/providers/snowflake/example_snowpark_decorator.py +.. exampleinclude:: /../../providers/tests/system/snowflake/example_snowpark_decorator.py :language: python :start-after: [START howto_decorator_snowpark] :end-before: [END howto_decorator_snowpark] diff --git a/docs/apache-airflow-providers-snowflake/index.rst b/docs/apache-airflow-providers-snowflake/index.rst index c9746f780694..c9f3b8f8872e 100644 --- a/docs/apache-airflow-providers-snowflake/index.rst +++ b/docs/apache-airflow-providers-snowflake/index.rst @@ -50,14 +50,14 @@ :maxdepth: 1 :caption: System tests - System Tests <_api/tests/system/providers/snowflake/index> + System Tests <_api/tests/system/snowflake/index> .. 
toctree:: :hidden: :maxdepth: 1 :caption: Resources - Example DAGs + Example DAGs PyPI Repository Installing from sources diff --git a/docs/apache-airflow-providers-snowflake/operators/copy_into_snowflake.rst b/docs/apache-airflow-providers-snowflake/operators/copy_into_snowflake.rst index 49fca623f2f9..30236121abbb 100644 --- a/docs/apache-airflow-providers-snowflake/operators/copy_into_snowflake.rst +++ b/docs/apache-airflow-providers-snowflake/operators/copy_into_snowflake.rst @@ -43,7 +43,7 @@ a file format (see `docs + System Tests <_api/tests/system/sqlite/index> .. toctree:: :hidden: :maxdepth: 1 :caption: Resources - Example DAGs + Example DAGs PyPI Repository Installing from sources diff --git a/docs/apache-airflow-providers-sqlite/operators.rst b/docs/apache-airflow-providers-sqlite/operators.rst index 472ec7145249..5b113162ae4c 100644 --- a/docs/apache-airflow-providers-sqlite/operators.rst +++ b/docs/apache-airflow-providers-sqlite/operators.rst @@ -45,14 +45,14 @@ the connection metadata is structured as follows: An example usage of the SQLExecuteQueryOperator to connect to Sqlite is as follows: -.. exampleinclude:: /../../tests/system/providers/sqlite/example_sqlite.py +.. exampleinclude:: /../../providers/tests/system/sqlite/example_sqlite.py :language: python :start-after: [START howto_operator_sqlite] :end-before: [END howto_operator_sqlite] Furthermore, you can use an external file to execute the SQL commands. Script folder must be at the same level as DAG.py file. -.. exampleinclude:: /../../tests/system/providers/sqlite/example_sqlite.py +.. exampleinclude:: /../../providers/tests/system/sqlite/example_sqlite.py :language: python :start-after: [START howto_operator_sqlite_external_file] :end-before: [END howto_operator_sqlite_external_file] diff --git a/docs/apache-airflow-providers-ssh/changelog.rst b/docs/apache-airflow-providers-ssh/changelog.rst index b2212a396cc6..33bcb1f1b408 100644 --- a/docs/apache-airflow-providers-ssh/changelog.rst +++ b/docs/apache-airflow-providers-ssh/changelog.rst @@ -22,4 +22,4 @@ .. IF YOU WANT TO MODIFY THIS FILE, YOU SHOULD MODIFY THE TEMPLATE `PROVIDER_CHANGELOG_TEMPLATE.rst.jinja2` IN the `dev/breeze/src/airflow_breeze/templates` DIRECTORY -.. include:: ../../airflow/providers/ssh/CHANGELOG.rst +.. include:: ../../providers/src/airflow/providers/ssh/CHANGELOG.rst diff --git a/docs/apache-airflow-providers-standard/changelog.rst b/docs/apache-airflow-providers-standard/changelog.rst index 3d9d5b25f554..2e1b328f5e5e 100644 --- a/docs/apache-airflow-providers-standard/changelog.rst +++ b/docs/apache-airflow-providers-standard/changelog.rst @@ -15,4 +15,4 @@ specific language governing permissions and limitations under the License. -.. include:: ../../airflow/providers/standard/CHANGELOG.rst +.. include:: ../../providers/src/airflow/providers/standard/CHANGELOG.rst diff --git a/docs/apache-airflow-providers-tableau/changelog.rst b/docs/apache-airflow-providers-tableau/changelog.rst index 527d56d2c424..7716b6ab1bf6 100644 --- a/docs/apache-airflow-providers-tableau/changelog.rst +++ b/docs/apache-airflow-providers-tableau/changelog.rst @@ -22,4 +22,4 @@ .. IF YOU WANT TO MODIFY THIS FILE, YOU SHOULD MODIFY THE TEMPLATE `PROVIDER_CHANGELOG_TEMPLATE.rst.jinja2` IN the `dev/breeze/src/airflow_breeze/templates` DIRECTORY -.. include:: ../../airflow/providers/tableau/CHANGELOG.rst +.. 
include:: ../../providers/src/airflow/providers/tableau/CHANGELOG.rst diff --git a/docs/apache-airflow-providers-tableau/index.rst b/docs/apache-airflow-providers-tableau/index.rst index 0a4789db7312..e0f140e0fae4 100644 --- a/docs/apache-airflow-providers-tableau/index.rst +++ b/docs/apache-airflow-providers-tableau/index.rst @@ -43,14 +43,14 @@ :maxdepth: 1 :caption: System tests - System Tests <_api/tests/system/providers/tableau/index> + System Tests <_api/tests/system/tableau/index> .. toctree:: :hidden: :maxdepth: 1 :caption: Resources - Example DAGs + Example DAGs PyPI Repository Installing from sources diff --git a/docs/apache-airflow-providers-tableau/operators.rst b/docs/apache-airflow-providers-tableau/operators.rst index ba250e1be41f..7452d7899f4d 100644 --- a/docs/apache-airflow-providers-tableau/operators.rst +++ b/docs/apache-airflow-providers-tableau/operators.rst @@ -68,7 +68,7 @@ Using the Operator An example usage of the TableauOperator is as follows: -.. exampleinclude:: /../../tests/system/providers/tableau/example_tableau.py +.. exampleinclude:: /../../providers/tests/system/tableau/example_tableau.py :language: python :start-after: [START howto_operator_tableau] :end-before: [END howto_operator_tableau] diff --git a/docs/apache-airflow-providers-telegram/changelog.rst b/docs/apache-airflow-providers-telegram/changelog.rst index 704e8de2f6f2..073d54c1b5e8 100644 --- a/docs/apache-airflow-providers-telegram/changelog.rst +++ b/docs/apache-airflow-providers-telegram/changelog.rst @@ -22,4 +22,4 @@ .. IF YOU WANT TO MODIFY THIS FILE, YOU SHOULD MODIFY THE TEMPLATE `PROVIDER_CHANGELOG_TEMPLATE.rst.jinja2` IN the `dev/breeze/src/airflow_breeze/templates` DIRECTORY -.. include:: ../../airflow/providers/telegram/CHANGELOG.rst +.. include:: ../../providers/src/airflow/providers/telegram/CHANGELOG.rst diff --git a/docs/apache-airflow-providers-telegram/index.rst b/docs/apache-airflow-providers-telegram/index.rst index 5c38d229986b..58efd772bb28 100644 --- a/docs/apache-airflow-providers-telegram/index.rst +++ b/docs/apache-airflow-providers-telegram/index.rst @@ -49,14 +49,14 @@ :maxdepth: 1 :caption: System tests - System Tests <_api/tests/system/providers/telegram/index> + System Tests <_api/tests/system/telegram/index> .. toctree:: :hidden: :maxdepth: 1 :caption: Resources - Example DAGs + Example DAGs PyPI Repository Installing from sources diff --git a/docs/apache-airflow-providers-telegram/operators.rst b/docs/apache-airflow-providers-telegram/operators.rst index 0f3ad86b19d0..de693f54001e 100644 --- a/docs/apache-airflow-providers-telegram/operators.rst +++ b/docs/apache-airflow-providers-telegram/operators.rst @@ -48,7 +48,7 @@ the connection metadata is structured as follows: An example usage of the TelegramOperator is as follows: -.. exampleinclude:: /../../tests/system/providers/telegram/example_telegram.py +.. exampleinclude:: /../../providers/tests/system/telegram/example_telegram.py :language: python :start-after: [START howto_operator_telegram] :end-before: [END howto_operator_telegram] diff --git a/docs/apache-airflow-providers-teradata/changelog.rst b/docs/apache-airflow-providers-teradata/changelog.rst index 90c80663b40d..577e6333e205 100644 --- a/docs/apache-airflow-providers-teradata/changelog.rst +++ b/docs/apache-airflow-providers-teradata/changelog.rst @@ -22,4 +22,4 @@ .. IF YOU WANT TO MODIFY THIS FILE, YOU SHOULD MODIFY THE TEMPLATE `PROVIDER_CHANGELOG_TEMPLATE.rst.jinja2` IN the `dev/breeze/src/airflow_breeze/templates` DIRECTORY -.. 
include:: ../../airflow/providers/teradata/CHANGELOG.rst +.. include:: ../../providers/src/airflow/providers/teradata/CHANGELOG.rst diff --git a/docs/apache-airflow-providers-teradata/index.rst b/docs/apache-airflow-providers-teradata/index.rst index 44310a10b1e7..ecf00232484d 100644 --- a/docs/apache-airflow-providers-teradata/index.rst +++ b/docs/apache-airflow-providers-teradata/index.rst @@ -49,7 +49,7 @@ :maxdepth: 1 :caption: System tests - System Tests <_api/tests/system/providers/teradata/index> + System Tests <_api/tests/system/teradata/index> System Tests Dashboard .. toctree:: diff --git a/docs/apache-airflow-providers-teradata/operators/azure_blob_to_teradata.rst b/docs/apache-airflow-providers-teradata/operators/azure_blob_to_teradata.rst index 194eabd0cd00..4cc3da46a902 100644 --- a/docs/apache-airflow-providers-teradata/operators/azure_blob_to_teradata.rst +++ b/docs/apache-airflow-providers-teradata/operators/azure_blob_to_teradata.rst @@ -60,7 +60,7 @@ Transferring data from public Azure Blob Storage to Teradata An example usage of the AzureBlobStorageToTeradataOperator to transfer CSV data format from public Azure Blob Storage to teradata table is as follows: -.. exampleinclude:: /../../tests/system/providers/teradata/example_azure_blob_to_teradata_transfer.py +.. exampleinclude:: /../../providers/tests/system/teradata/example_azure_blob_to_teradata_transfer.py :language: python :start-after: [START azure_blob__to_teradata_transfer_operator_howto_guide_transfer_data_public_blob_to_teradata_csv] :end-before: [END azure_blob__to_teradata_transfer_operator_howto_guide_transfer_data_public_blob_to_teradata_csv] @@ -71,7 +71,7 @@ Transferring data from private Azure Blob Storage to Teradata with AWS connectio An example usage of the AzureBlobStorageToTeradataOperator to transfer CSV data format from private S3 object store to teradata with AWS credentials defined as AWS connection: -.. exampleinclude:: /../../tests/system/providers/teradata/example_azure_blob_to_teradata_transfer.py +.. exampleinclude:: /../../providers/tests/system/teradata/example_azure_blob_to_teradata_transfer.py :language: python :start-after: [START azure_blob_to_teradata_transfer_operator_howto_guide_transfer_data_access_blob_to_teradata_csv] :end-before: [END azure_blob_to_teradata_transfer_operator_howto_guide_transfer_data_access_blob_to_teradata_csv] @@ -85,7 +85,7 @@ database object should exists in Teradata database to use it in transferring dat An example usage of the AzureBlobStorageToTeradataOperator to transfer CSV data format from private S3 object store to teradata with Authorization database object defined in Teradata. -.. exampleinclude:: /../../tests/system/providers/teradata/example_azure_blob_to_teradata_transfer.py +.. exampleinclude:: /../../providers/tests/system/teradata/example_azure_blob_to_teradata_transfer.py :language: python :start-after: [START azure_blob_to_teradata_transfer_operator_howto_guide_transfer_data_authorization_blob_to_teradata_csv] :end-before: [END azure_blob_to_teradata_transfer_operator_howto_guide_transfer_data_authorization_blob_to_teradata_csv] @@ -96,7 +96,7 @@ Transferring data in CSV format from Azure Blob Storage to Teradata An example usage of the AzureBlobStorageToTeradataOperator to transfer CSV data format from Azure Blob Storage to teradata table is as follows: -.. exampleinclude:: /../../tests/system/providers/teradata/example_azure_blob_to_teradata_transfer.py +.. 
exampleinclude:: /../../providers/tests/system/teradata/example_azure_blob_to_teradata_transfer.py :language: python :start-after: [START azure_blob__to_teradata_transfer_operator_howto_guide_transfer_data_public_blob_to_teradata_csv] :end-before: [END azure_blob__to_teradata_transfer_operator_howto_guide_transfer_data_public_blob_to_teradata_csv] @@ -107,7 +107,7 @@ Transferring data in JSON format from Azure Blob Storage to Teradata An example usage of the AzureBlobStorageToTeradataOperator to transfer JSON data format from Azure Blob Storage to teradata table is as follows: -.. exampleinclude:: /../../tests/system/providers/teradata/example_azure_blob_to_teradata_transfer.py +.. exampleinclude:: /../../providers/tests/system/teradata/example_azure_blob_to_teradata_transfer.py :language: python :start-after: [START azure_blob_to_teradata_transfer_operator_howto_guide_transfer_data_blob_to_teradata_json] :end-before: [END azure_blob_to_teradata_transfer_operator_howto_guide_transfer_data_blob_to_teradata_json] @@ -118,7 +118,7 @@ Transferring data in PARQUET format from Azure Blob Storage to Teradata An example usage of the AzureBlobStorageToTeradataOperator to transfer PARQUET data format from Azure Blob Storage to teradata table is as follows: -.. exampleinclude:: /../../tests/system/providers/teradata/example_azure_blob_to_teradata_transfer.py +.. exampleinclude:: /../../providers/tests/system/teradata/example_azure_blob_to_teradata_transfer.py :language: python :start-after: [START azure_blob_to_teradata_transfer_operator_howto_guide_transfer_data_blob_to_teradata_parquet] :end-before: [END azure_blob_to_teradata_transfer_operator_howto_guide_transfer_data_blob_to_teradata_parquet] @@ -128,7 +128,7 @@ The complete ``AzureBlobStorageToTeradataOperator`` Operator DAG When we put everything together, our DAG should look like this: -.. exampleinclude:: /../../tests/system/providers/teradata/example_azure_blob_to_teradata_transfer.py +.. exampleinclude:: /../../providers/tests/system/teradata/example_azure_blob_to_teradata_transfer.py :language: python :start-after: [START azure_blob_to_teradata_transfer_operator_howto_guide] :end-before: [END azure_blob_to_teradata_transfer_operator_howto_guide] diff --git a/docs/apache-airflow-providers-teradata/operators/compute_cluster.rst b/docs/apache-airflow-providers-teradata/operators/compute_cluster.rst index ceaf27ee7467..507aac5c90ac 100644 --- a/docs/apache-airflow-providers-teradata/operators/compute_cluster.rst +++ b/docs/apache-airflow-providers-teradata/operators/compute_cluster.rst @@ -33,7 +33,7 @@ to provision the new Compute Cluster in Teradata Vantage Cloud Lake. An example usage of the TeradataComputeClusterProvisionOperator to provision the new Compute Cluster in Teradata Vantage Cloud Lake is as follows: -.. exampleinclude:: /../../tests/system/providers/teradata/example_teradata_compute_cluster.py +.. exampleinclude:: /../../providers/tests/system/teradata/example_teradata_compute_cluster.py :language: python :start-after: [START teradata_vantage_lake_compute_cluster_provision_howto_guide] :end-before: [END teradata_vantage_lake_compute_cluster_provision_howto_guide] @@ -56,7 +56,7 @@ to decommission the specified Teradata Vantage Cloud Lake Compute Cluster. An example usage of the TeradataComputeClusterDecommissionOperator to decommission the specified Teradata Vantage Cloud Lake Compute Cluster is as follows: -.. exampleinclude:: /../../tests/system/providers/teradata/example_teradata_compute_cluster.py +.. 
exampleinclude:: /../../providers/tests/system/teradata/example_teradata_compute_cluster.py :language: python :start-after: [START teradata_vantage_lake_compute_cluster_decommission_howto_guide] :end-before: [END teradata_vantage_lake_compute_cluster_decommission_howto_guide] @@ -79,7 +79,7 @@ to start the specified Compute Cluster in Teradata Vantage Cloud Lake. An example usage of the TeradataComputeClusterSuspendOperator to start the specified Compute Cluster in Teradata Vantage Cloud Lake is as follows: -.. exampleinclude:: /../../tests/system/providers/teradata/example_teradata_compute_cluster.py +.. exampleinclude:: /../../providers/tests/system/teradata/example_teradata_compute_cluster.py :language: python :start-after: [START teradata_vantage_lake_compute_cluster_resume_howto_guide] :end-before: [END teradata_vantage_lake_compute_cluster_resume_howto_guide] @@ -101,7 +101,7 @@ to suspend the specified Compute Cluster in Teradata Vantage Cloud Lake. An example usage of the TeradataComputeClusterSuspendOperator to suspend the specified Compute Cluster in Teradata Vantage Cloud Lake is as follows: -.. exampleinclude:: /../../tests/system/providers/teradata/example_teradata_compute_cluster.py +.. exampleinclude:: /../../providers/tests/system/teradata/example_teradata_compute_cluster.py :language: python :start-after: [START teradata_vantage_lake_compute_cluster_suspend_howto_guide] :end-before: [END teradata_vantage_lake_compute_cluster_suspend_howto_guide] diff --git a/docs/apache-airflow-providers-teradata/operators/s3_to_teradata.rst b/docs/apache-airflow-providers-teradata/operators/s3_to_teradata.rst index da52e2841bfa..e24887126a2a 100644 --- a/docs/apache-airflow-providers-teradata/operators/s3_to_teradata.rst +++ b/docs/apache-airflow-providers-teradata/operators/s3_to_teradata.rst @@ -45,7 +45,7 @@ Transferring data in CSV format from S3 to Teradata An example usage of the S3ToTeradataOperator to transfer CSV data format from S3 to teradata table is as follows: -.. exampleinclude:: /../../tests/system/providers/teradata/example_s3_to_teradata_transfer.py +.. exampleinclude:: /../../providers/tests/system/teradata/example_s3_to_teradata_transfer.py :language: python :start-after: [START s3_to_teradata_transfer_operator_howto_guide_transfer_data_public_s3_to_teradata_csv] :end-before: [END s3_to_teradata_transfer_operator_howto_guide_transfer_data_public_s3_to_teradata_csv] @@ -55,7 +55,7 @@ Transferring data in JSON format from S3 to Teradata An example usage of the S3ToTeradataOperator to transfer JSON data format from S3 to teradata table is as follows: -.. exampleinclude:: /../../tests/system/providers/teradata/example_s3_to_teradata_transfer.py +.. exampleinclude:: /../../providers/tests/system/teradata/example_s3_to_teradata_transfer.py :language: python :start-after: [START s3_to_teradata_transfer_operator_howto_guide_transfer_data_s3_to_teradata_json] :end-before: [END s3_to_teradata_transfer_operator_howto_guide_transfer_data_s3_to_teradata_json] @@ -65,7 +65,7 @@ Transferring data in PARQUET format from S3 to Teradata An example usage of the S3ToTeradataOperator to transfer PARQUET data format from S3 to teradata table is as follows: -.. exampleinclude:: /../../tests/system/providers/teradata/example_s3_to_teradata_transfer.py +.. 
exampleinclude:: /../../providers/tests/system/teradata/example_s3_to_teradata_transfer.py :language: python :start-after: [START s3_to_teradata_transfer_operator_howto_guide_transfer_data_s3_to_teradata_parquet] :end-before: [END s3_to_teradata_transfer_operator_howto_guide_transfer_data_s3_to_teradata_parquet] @@ -75,7 +75,7 @@ The complete ``S3ToTeradataOperator`` Operator DAG When we put everything together, our DAG should look like this: -.. exampleinclude:: /../../tests/system/providers/teradata/example_s3_to_teradata_transfer.py +.. exampleinclude:: /../../providers/tests/system/teradata/example_s3_to_teradata_transfer.py :language: python :start-after: [START s3_to_teradata_transfer_operator_howto_guide] :end-before: [END s3_to_teradata_transfer_operator_howto_guide] diff --git a/docs/apache-airflow-providers-teradata/operators/teradata.rst b/docs/apache-airflow-providers-teradata/operators/teradata.rst index 6fd7d371a7b0..78e2e1b12e80 100644 --- a/docs/apache-airflow-providers-teradata/operators/teradata.rst +++ b/docs/apache-airflow-providers-teradata/operators/teradata.rst @@ -33,7 +33,7 @@ Creating a Teradata database table An example usage of the TeradataOperator is as follows: -.. exampleinclude:: /../../tests/system/providers/teradata/example_teradata.py +.. exampleinclude:: /../../providers/tests/system/teradata/example_teradata.py :language: python :dedent: 4 :start-after: [START teradata_operator_howto_guide_create_table] @@ -42,7 +42,7 @@ An example usage of the TeradataOperator is as follows: You can also use an external file to execute the SQL commands. External file must be at the same level as DAG.py file. This way you can easily maintain the SQL queries separated from the code. -.. exampleinclude:: /../../tests/system/providers/teradata/example_teradata.py +.. exampleinclude:: /../../providers/tests/system/teradata/example_teradata.py :language: python :start-after: [START teradata_operator_howto_guide_create_table_from_external_file] :end-before: [END teradata_operator_howto_guide_create_table_from_external_file] @@ -63,7 +63,7 @@ Inserting data into a Teradata database table --------------------------------------------- We can then create a TeradataOperator task that populate the ``Users`` table. -.. exampleinclude:: /../../tests/system/providers/teradata/example_teradata.py +.. exampleinclude:: /../../providers/tests/system/teradata/example_teradata.py :language: python :start-after: [START teradata_operator_howto_guide_populate_table] :end-before: [END teradata_operator_howto_guide_populate_table] @@ -74,7 +74,7 @@ Fetching records from your Teradata database table Fetching records from your Teradata database table can be as simple as: -.. exampleinclude:: /../../tests/system/providers/teradata/example_teradata.py +.. exampleinclude:: /../../providers/tests/system/teradata/example_teradata.py :language: python :start-after: [START teradata_operator_howto_guide_get_all_countries] :end-before: [END teradata_operator_howto_guide_get_all_countries] @@ -88,7 +88,7 @@ SQL requests during runtime. To find the countries in Asian continent: -.. exampleinclude:: /../../tests/system/providers/teradata/example_teradata.py +.. 
exampleinclude:: /../../providers/tests/system/teradata/example_teradata.py :language: python :start-after: [START teradata_operator_howto_guide_params_passing_get_query] :end-before: [END teradata_operator_howto_guide_params_passing_get_query] @@ -99,7 +99,7 @@ Dropping a Teradata database table We can then create a TeradataOperator task that drops the ``Users`` table. -.. exampleinclude:: /../../tests/system/providers/teradata/example_teradata.py +.. exampleinclude:: /../../providers/tests/system/teradata/example_teradata.py :language: python :start-after: [START teradata_operator_howto_guide_drop_users_table] :end-before: [END teradata_operator_howto_guide_drop_users_table] @@ -109,7 +109,7 @@ The complete Teradata Operator DAG When we put everything together, our DAG should look like this: -.. exampleinclude:: /../../tests/system/providers/teradata/example_teradata.py +.. exampleinclude:: /../../providers/tests/system/teradata/example_teradata.py :language: python :start-after: [START teradata_operator_howto_guide] :end-before: [END teradata_operator_howto_guide] @@ -152,21 +152,21 @@ This stored procedure can be invoked using One approach involves passing parameters positionally as a list, with output parameters specified as Python data types: -.. exampleinclude:: /../../tests/system/providers/teradata/example_teradata_call_sp.py +.. exampleinclude:: /../../providers/tests/system/teradata/example_teradata_call_sp.py :language: python :start-after: [START howto_call_teradata_stored_procedure_operator_with_types] :end-before: [END howto_call_teradata_stored_procedure_operator_with_types] Alternatively, parameters can be passed positionally as a list, with output parameters designated as placeholders: -.. exampleinclude:: /../../tests/system/providers/teradata/example_teradata_call_sp.py +.. exampleinclude:: /../../providers/tests/system/teradata/example_teradata_call_sp.py :language: python :start-after: [START howto_call_teradata_stored_procedure_operator_with_place_holder] :end-before: [END howto_call_teradata_stored_procedure_operator_with_place_holder] Another method entails passing parameters positionally as a dictionary: -.. exampleinclude:: /../../tests/system/providers/teradata/example_teradata_call_sp.py +.. exampleinclude:: /../../providers/tests/system/teradata/example_teradata_call_sp.py :language: python :start-after: [START howto_call_teradata_stored_procedure_operator_with_dict_input] :end-before: [END howto_call_teradata_stored_procedure_operator_with_dict_input] @@ -186,7 +186,7 @@ This stored procedure yields a singular timestamp argument, out_timestamp, and i :class:`~airflow.providers.teradata.operators.teradata.TeradataStoredProcedureOperator` with parameters passed positionally as a list: -.. exampleinclude:: /../../tests/system/providers/teradata/example_teradata_call_sp.py +.. exampleinclude:: /../../providers/tests/system/teradata/example_teradata_call_sp.py :language: python :start-after: [START howto_call_teradata_stored_procedure_operator_timestamp] :end-before: [END howto_call_teradata_stored_procedure_operator_timestamp] @@ -213,7 +213,7 @@ This stored procedure can be invoked using :class:`~airflow.providers.teradata.operators.teradata.TeradataStoredProcedureOperator` with parameters passed positionally as a list: -.. exampleinclude:: /../../tests/system/providers/teradata/example_teradata_call_sp.py +.. 
exampleinclude:: /../../providers/tests/system/teradata/example_teradata_call_sp.py :language: python :start-after: [START howto_teradata_stored_procedure_operator_with_in_out_dynamic_result] :end-before: [END howto_teradata_stored_procedure_operator_with_in_out_dynamic_result] @@ -223,7 +223,7 @@ The complete TeradataStoredProcedureOperator DAG When we put everything together, our DAG should look like this: -.. exampleinclude:: /../../tests/system/providers/teradata/example_teradata_call_sp.py +.. exampleinclude:: /../../providers/tests/system/teradata/example_teradata_call_sp.py :language: python :start-after: [START howto_teradata_operator_for_sp] :end-before: [END howto_teradata_operator_for_sp] diff --git a/docs/apache-airflow-providers-teradata/operators/teradata_to_teradata.rst b/docs/apache-airflow-providers-teradata/operators/teradata_to_teradata.rst index 6c551427f073..ec2a414ff7d0 100644 --- a/docs/apache-airflow-providers-teradata/operators/teradata_to_teradata.rst +++ b/docs/apache-airflow-providers-teradata/operators/teradata_to_teradata.rst @@ -32,7 +32,7 @@ To transfer data between two Teradata instances, use the An example usage of the TeradataToTeradataOperator is as follows: -.. exampleinclude:: /../../tests/system/providers/teradata/example_teradata_to_teradata_transfer.py +.. exampleinclude:: /../../providers/tests/system/teradata/example_teradata_to_teradata_transfer.py :language: python :start-after: [START teradata_to_teradata_transfer_operator_howto_guide_transfer_data] :end-before: [END teradata_to_teradata_transfer_operator_howto_guide_transfer_data] @@ -42,7 +42,7 @@ The complete TeradataToTeradata Transfer Operator DAG When we put everything together, our DAG should look like this: -.. exampleinclude:: /../../tests/system/providers/teradata/example_teradata.py +.. exampleinclude:: /../../providers/tests/system/teradata/example_teradata.py :language: python :start-after: [START teradata_operator_howto_guide] :end-before: [END teradata_operator_howto_guide] diff --git a/docs/apache-airflow-providers-trino/changelog.rst b/docs/apache-airflow-providers-trino/changelog.rst index e29b41a31529..c3d1995dd1ea 100644 --- a/docs/apache-airflow-providers-trino/changelog.rst +++ b/docs/apache-airflow-providers-trino/changelog.rst @@ -22,4 +22,4 @@ .. IF YOU WANT TO MODIFY THIS FILE, YOU SHOULD MODIFY THE TEMPLATE `PROVIDER_CHANGELOG_TEMPLATE.rst.jinja2` IN the `dev/breeze/src/airflow_breeze/templates` DIRECTORY -.. include:: ../../airflow/providers/trino/CHANGELOG.rst +.. include:: ../../providers/src/airflow/providers/trino/CHANGELOG.rst diff --git a/docs/apache-airflow-providers-trino/index.rst b/docs/apache-airflow-providers-trino/index.rst index 9a17b5907e5d..70a8d85c9a2e 100644 --- a/docs/apache-airflow-providers-trino/index.rst +++ b/docs/apache-airflow-providers-trino/index.rst @@ -50,14 +50,14 @@ :maxdepth: 1 :caption: System tests - System Tests <_api/tests/system/providers/trino/index> + System Tests <_api/tests/system/trino/index> .. 
toctree:: :hidden: :maxdepth: 1 :caption: Resources - Example DAGs + Example DAGs PyPI Repository Installing from sources diff --git a/docs/apache-airflow-providers-trino/operators/transfer/gcs_to_trino.rst b/docs/apache-airflow-providers-trino/operators/transfer/gcs_to_trino.rst index 39224b3ca3d0..4d1cbca9d14d 100644 --- a/docs/apache-airflow-providers-trino/operators/transfer/gcs_to_trino.rst +++ b/docs/apache-airflow-providers-trino/operators/transfer/gcs_to_trino.rst @@ -39,7 +39,7 @@ This operator assumes that CSV does not have headers and the data is correspondi pre-existing presto table. Optionally, you can provide schema as tuple/list of strings or as a path to a JSON file in the same bucket as the CSV file. -.. exampleinclude:: /../../tests/system/providers/trino/example_gcs_to_trino.py +.. exampleinclude:: /../../providers/tests/system/trino/example_gcs_to_trino.py :language: python :dedent: 4 :start-after: [START gcs_csv_to_trino_table] diff --git a/docs/apache-airflow-providers-trino/operators/trino.rst b/docs/apache-airflow-providers-trino/operators/trino.rst index d0e901ebed9f..dc076c82c7c2 100644 --- a/docs/apache-airflow-providers-trino/operators/trino.rst +++ b/docs/apache-airflow-providers-trino/operators/trino.rst @@ -34,7 +34,7 @@ Use the ``trino_conn_id`` argument to connect to your Trino instance An example usage of the SQLExecuteQueryOperator to connect to Trino is as follows: -.. exampleinclude:: /../../tests/system/providers/trino/example_trino.py +.. exampleinclude:: /../../providers/tests/system/trino/example_trino.py :language: python :start-after: [START howto_operator_trino] :end-before: [END howto_operator_trino] diff --git a/docs/apache-airflow-providers-vertica/changelog.rst b/docs/apache-airflow-providers-vertica/changelog.rst index 1f8dd4aa880e..2c77a34914de 100644 --- a/docs/apache-airflow-providers-vertica/changelog.rst +++ b/docs/apache-airflow-providers-vertica/changelog.rst @@ -22,4 +22,4 @@ .. IF YOU WANT TO MODIFY THIS FILE, YOU SHOULD MODIFY THE TEMPLATE `PROVIDER_CHANGELOG_TEMPLATE.rst.jinja2` IN the `dev/breeze/src/airflow_breeze/templates` DIRECTORY -.. include:: ../../airflow/providers/vertica/CHANGELOG.rst +.. include:: ../../providers/src/airflow/providers/vertica/CHANGELOG.rst diff --git a/docs/apache-airflow-providers-weaviate/changelog.rst b/docs/apache-airflow-providers-weaviate/changelog.rst index c2fc65bc5ceb..dd6e11ef6876 100644 --- a/docs/apache-airflow-providers-weaviate/changelog.rst +++ b/docs/apache-airflow-providers-weaviate/changelog.rst @@ -22,4 +22,4 @@ .. IF YOU WANT TO MODIFY THIS FILE, YOU SHOULD MODIFY THE TEMPLATE `PROVIDER_CHANGELOG_TEMPLATE.rst.jinja2` IN the `dev/breeze/src/airflow_breeze/templates` DIRECTORY -.. include:: ../../airflow/providers/weaviate/CHANGELOG.rst +.. include:: ../../providers/src/airflow/providers/weaviate/CHANGELOG.rst diff --git a/docs/apache-airflow-providers-weaviate/index.rst b/docs/apache-airflow-providers-weaviate/index.rst index c2a4965414fe..fd7998daac39 100644 --- a/docs/apache-airflow-providers-weaviate/index.rst +++ b/docs/apache-airflow-providers-weaviate/index.rst @@ -60,7 +60,7 @@ :maxdepth: 1 :caption: System tests - System Tests <_api/tests/system/providers/weaviate/index> + System Tests <_api/tests/system/weaviate/index> .. THE REMAINDER OF THE FILE IS AUTOMATICALLY GENERATED. IT WILL BE OVERWRITTEN AT RELEASE TIME! 
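The ``trino.rst`` hunk above only repoints the ``exampleinclude`` path; the guide itself still instructs connecting through the ``trino_conn_id`` argument of ``SQLExecuteQueryOperator``. A minimal sketch of the kind of task the referenced example demonstrates (the DAG id, connection id and SQL below are illustrative assumptions, not the contents of ``example_trino.py``):

.. code-block:: python

    import datetime

    from airflow import DAG
    from airflow.providers.common.sql.operators.sql import SQLExecuteQueryOperator

    # Illustrative sketch only; the conn_id and SQL are assumed values.
    with DAG(
        dag_id="example_trino_sketch",
        start_date=datetime.datetime(2024, 1, 1),
        schedule=None,
        catchup=False,
    ):
        trino_query = SQLExecuteQueryOperator(
            task_id="trino_query",
            conn_id="trino_default",
            sql="SELECT count(*) FROM tpch.sf1.nation",
        )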
diff --git a/docs/apache-airflow-providers-weaviate/operators/weaviate.rst b/docs/apache-airflow-providers-weaviate/operators/weaviate.rst index 5ec262ab7a2d..ae9fcdba58ee 100644 --- a/docs/apache-airflow-providers-weaviate/operators/weaviate.rst +++ b/docs/apache-airflow-providers-weaviate/operators/weaviate.rst @@ -33,28 +33,28 @@ connect to your account. An example using the operator to ingest data with custom vectors retrieved from XCOM: -.. exampleinclude:: /../../tests/system/providers/weaviate/example_weaviate_operator.py +.. exampleinclude:: /../../providers/tests/system/weaviate/example_weaviate_operator.py :language: python :start-after: [START howto_operator_weaviate_embedding_and_ingest_xcom_data_with_vectors] :end-before: [END howto_operator_weaviate_embedding_and_ingest_xcom_data_with_vectors] An example using the operator to ingest data with custom vectors retrieved from a python callable: -.. exampleinclude:: /../../tests/system/providers/weaviate/example_weaviate_operator.py +.. exampleinclude:: /../../providers/tests/system/weaviate/example_weaviate_operator.py :language: python :start-after: [START howto_operator_weaviate_embedding_and_ingest_callable_data_with_vectors] :end-before: [END howto_operator_weaviate_embedding_and_ingest_callable_data_with_vectors] An example using the operator to ingest data without vectors retrieved from XCOM for which the operator would generate embedding vectors: -.. exampleinclude:: /../../tests/system/providers/weaviate/example_weaviate_operator.py +.. exampleinclude:: /../../providers/tests/system/weaviate/example_weaviate_operator.py :language: python :start-after: [START howto_operator_weaviate_ingest_xcom_data_without_vectors] :end-before: [END howto_operator_weaviate_ingest_xcom_data_without_vectors] An example using the operator to ingest data without vectors retrieved from a python callable for which the operator would generate embedding vectors: -.. exampleinclude:: /../../tests/system/providers/weaviate/example_weaviate_operator.py +.. exampleinclude:: /../../providers/tests/system/weaviate/example_weaviate_operator.py :language: python :start-after: [START howto_operator_weaviate_ingest_callable_data_without_vectors] :end-before: [END howto_operator_weaviate_ingest_callable_data_without_vectors] diff --git a/docs/apache-airflow-providers-yandex/changelog.rst b/docs/apache-airflow-providers-yandex/changelog.rst index 066c3c5be0be..9bcad616eb83 100644 --- a/docs/apache-airflow-providers-yandex/changelog.rst +++ b/docs/apache-airflow-providers-yandex/changelog.rst @@ -22,4 +22,4 @@ .. IF YOU WANT TO MODIFY THIS FILE, YOU SHOULD MODIFY THE TEMPLATE `PROVIDER_CHANGELOG_TEMPLATE.rst.jinja2` IN the `dev/breeze/src/airflow_breeze/templates` DIRECTORY -.. include:: ../../airflow/providers/yandex/CHANGELOG.rst +.. include:: ../../providers/src/airflow/providers/yandex/CHANGELOG.rst diff --git a/docs/apache-airflow-providers-yandex/index.rst b/docs/apache-airflow-providers-yandex/index.rst index 03495bcef461..08f3953aefeb 100644 --- a/docs/apache-airflow-providers-yandex/index.rst +++ b/docs/apache-airflow-providers-yandex/index.rst @@ -51,14 +51,14 @@ :maxdepth: 1 :caption: System tests - System Tests <_api/tests/system/providers/yandex/index> + System Tests <_api/tests/system/yandex/index> .. 
toctree:: :hidden: :maxdepth: 1 :caption: Resources - Example DAGs + Example DAGs PyPI Repository Installing from sources diff --git a/docs/apache-airflow-providers-yandex/operators/dataproc.rst b/docs/apache-airflow-providers-yandex/operators/dataproc.rst index 2bb08d859de4..03dfd3acae81 100644 --- a/docs/apache-airflow-providers-yandex/operators/dataproc.rst +++ b/docs/apache-airflow-providers-yandex/operators/dataproc.rst @@ -34,4 +34,4 @@ that can be integrated with Apache Hadoop and other storage systems. Using the operators ^^^^^^^^^^^^^^^^^^^ To learn how to use Data Proc operators, -see `example DAGs `_. +see `example DAGs `_. diff --git a/docs/apache-airflow-providers-yandex/operators/yq.rst b/docs/apache-airflow-providers-yandex/operators/yq.rst index 78bdb733ee1f..23bd4ac33616 100644 --- a/docs/apache-airflow-providers-yandex/operators/yq.rst +++ b/docs/apache-airflow-providers-yandex/operators/yq.rst @@ -25,4 +25,4 @@ Yandex Query Operators Using the operators ^^^^^^^^^^^^^^^^^^^ To learn how to use Yandex Query operator, -see `example DAG `__. +see `example DAG `__. diff --git a/docs/apache-airflow-providers-ydb/changelog.rst b/docs/apache-airflow-providers-ydb/changelog.rst index 801c69978c6b..e77ade1c9372 100644 --- a/docs/apache-airflow-providers-ydb/changelog.rst +++ b/docs/apache-airflow-providers-ydb/changelog.rst @@ -22,4 +22,4 @@ .. IF YOU WANT TO MODIFY THIS FILE, YOU SHOULD MODIFY THE TEMPLATE `PROVIDER_CHANGELOG_TEMPLATE.rst.jinja2` IN the `dev/breeze/src/airflow_breeze/templates` DIRECTORY -.. include:: ../../airflow/providers/ydb/CHANGELOG.rst +.. include:: ../../providers/src/airflow/providers/ydb/CHANGELOG.rst diff --git a/docs/apache-airflow-providers-ydb/index.rst b/docs/apache-airflow-providers-ydb/index.rst index 4dff0e421f9b..30b8e90d9753 100644 --- a/docs/apache-airflow-providers-ydb/index.rst +++ b/docs/apache-airflow-providers-ydb/index.rst @@ -49,14 +49,14 @@ :maxdepth: 1 :caption: System tests - System Tests <_api/tests/system/providers/ydb/index> + System Tests <_api/tests/system/ydb/index> .. toctree:: :hidden: :maxdepth: 1 :caption: Resources - Example DAGs + Example DAGs PyPI Repository Installing from sources diff --git a/docs/apache-airflow-providers-ydb/operators/ydb_operator_howto_guide.rst b/docs/apache-airflow-providers-ydb/operators/ydb_operator_howto_guide.rst index 894be8101d5b..9416894e5332 100644 --- a/docs/apache-airflow-providers-ydb/operators/ydb_operator_howto_guide.rst +++ b/docs/apache-airflow-providers-ydb/operators/ydb_operator_howto_guide.rst @@ -50,7 +50,7 @@ Creating an YDB table The code snippets below are based on Airflow-2.0 -.. exampleinclude:: /../../tests/system/providers/ydb/example_ydb.py +.. exampleinclude:: /../../providers/tests/system/ydb/example_ydb.py :language: python :start-after: [START ydb_operator_howto_guide] :end-before: [END ydb_operator_howto_guide_create_pet_table] @@ -187,7 +187,7 @@ The complete YDB Operator DAG When we put everything together, our DAG should look like this: -.. exampleinclude:: /../../tests/system/providers/ydb/example_ydb.py +.. 
exampleinclude:: /../../providers/tests/system/ydb/example_ydb.py :language: python :start-after: [START ydb_operator_howto_guide] :end-before: [END ydb_operator_howto_guide] diff --git a/docs/apache-airflow-providers-zendesk/changelog.rst b/docs/apache-airflow-providers-zendesk/changelog.rst index 3be2afdb2306..eacd3ee51d1b 100644 --- a/docs/apache-airflow-providers-zendesk/changelog.rst +++ b/docs/apache-airflow-providers-zendesk/changelog.rst @@ -22,4 +22,4 @@ .. IF YOU WANT TO MODIFY THIS FILE, YOU SHOULD MODIFY THE TEMPLATE `PROVIDER_CHANGELOG_TEMPLATE.rst.jinja2` IN the `dev/breeze/src/airflow_breeze/templates` DIRECTORY -.. include:: ../../airflow/providers/zendesk/CHANGELOG.rst +.. include:: ../../providers/src/airflow/providers/zendesk/CHANGELOG.rst diff --git a/docs/apache-airflow-providers-zendesk/index.rst b/docs/apache-airflow-providers-zendesk/index.rst index 0b2852be2428..68d957644188 100644 --- a/docs/apache-airflow-providers-zendesk/index.rst +++ b/docs/apache-airflow-providers-zendesk/index.rst @@ -48,14 +48,14 @@ :maxdepth: 1 :caption: System tests - System Tests <_api/tests/system/providers/zendesk/index> + System Tests <_api/tests/system/zendesk/index> .. toctree:: :hidden: :maxdepth: 1 :caption: Resources - Example DAGs + Example DAGs PyPI Repository Installing from sources diff --git a/docs/apache-airflow/tutorial/taskflow.rst b/docs/apache-airflow/tutorial/taskflow.rst index aac04f9b5345..e15e1c78045c 100644 --- a/docs/apache-airflow/tutorial/taskflow.rst +++ b/docs/apache-airflow/tutorial/taskflow.rst @@ -307,7 +307,7 @@ Below is an example of using the ``@task.docker`` decorator to run a Python task .. _taskflow/docker_example: -.. exampleinclude:: /../../tests/system/providers/docker/example_taskflow_api_docker_virtualenv.py +.. exampleinclude:: /../../providers/tests/system/docker/example_taskflow_api_docker_virtualenv.py :language: python :dedent: 4 :start-after: [START transform_docker] @@ -338,7 +338,7 @@ Below is an example of using the ``@task.kubernetes`` decorator to run a Python .. _taskflow/kubernetes_example: -.. exampleinclude:: /../../tests/system/providers/cncf/kubernetes/example_kubernetes_decorator.py +.. exampleinclude:: /../../providers/tests/system/cncf/kubernetes/example_kubernetes_decorator.py :language: python :dedent: 4 :start-after: [START howto_operator_kubernetes] diff --git a/docs/build_docs.py b/docs/build_docs.py index f856d6828e8e..82edc9d1632d 100755 --- a/docs/build_docs.py +++ b/docs/build_docs.py @@ -35,7 +35,7 @@ from tabulate import tabulate from docs.exts.docs_build import dev_index_generator, lint_checks -from docs.exts.docs_build.code_utils import CONSOLE_WIDTH, PROVIDER_INIT_FILE +from docs.exts.docs_build.code_utils import CONSOLE_WIDTH from docs.exts.docs_build.docs_builder import DOCS_DIR, AirflowDocsBuilder, get_available_packages from docs.exts.docs_build.errors import DocBuildError, display_errors_summary from docs.exts.docs_build.fetch_inventories import fetch_inventories @@ -566,9 +566,6 @@ def main(): if not package_filters: _promote_new_flags() - if os.path.exists(PROVIDER_INIT_FILE): - os.remove(PROVIDER_INIT_FILE) - print_build_errors_and_exit( all_build_errors, all_spelling_errors, diff --git a/docs/conf.py b/docs/conf.py index 4d01e402195a..a09e54db63d2 100644 --- a/docs/conf.py +++ b/docs/conf.py @@ -32,6 +32,7 @@ # All configuration values have a default; values that are commented out # serve to show the default. 
import json +import logging import os import pathlib import re @@ -74,13 +75,17 @@ ) except StopIteration: raise RuntimeError(f"Could not find provider.yaml file for package: {PACKAGE_NAME}") - PACKAGE_DIR = pathlib.Path(CURRENT_PROVIDER["package-dir"]) + + # Oddity: since we set autoapi_python_use_implicit_namespaces for provider packages, it does a "../" on the + # dir we give it. So we want to set the package dir to be airflow so it goes up to src, else we end up + # with "src" in the output paths of modules which we don't want + PACKAGE_DIR = ROOT_DIR / "providers" / "src" / "airflow" PACKAGE_VERSION = CURRENT_PROVIDER["versions"][0] - SYSTEM_TESTS_DIR = CURRENT_PROVIDER["system-tests-dir"] + SYSTEM_TESTS_DIR = ROOT_DIR / "providers" / "tests" / "system" elif PACKAGE_NAME == "apache-airflow-providers": from provider_yaml_utils import load_package_data - PACKAGE_DIR = ROOT_DIR / "airflow" / "providers" + PACKAGE_DIR = ROOT_DIR / "providers" / "src" PACKAGE_VERSION = "devel" ALL_PROVIDER_YAMLS = load_package_data() SYSTEM_TESTS_DIR = None @@ -108,8 +113,13 @@ global_substitutions = { "version": PACKAGE_VERSION, "airflow-version": airflow.__version__, + "experimental": "This is an :ref:`experimental feature `.", } +if PACKAGE_NAME != "apache-airflow": + global_substitutions["experimental"] = "This is an :external:ref:`experimental feature `." + + # == Sphinx configuration ====================================================== # -- Project information ------------------------------------------------------- @@ -125,13 +135,7 @@ # -- General configuration ----------------------------------------------------- # See: https://www.sphinx-doc.org/en/master/usage/configuration.html -rst_epilog = "\n".join( - f".. |{key}| replace:: {replace}" - for key, replace in { - **global_substitutions, - "experimental": "This is an :ref:`experimental feature `.", - }.items() -) +rst_epilog = "\n".join(f".. |{key}| replace:: {replace}" for key, replace in global_substitutions.items()) smartquotes_excludes = {"builders": ["man", "text", "spelling"]} @@ -139,7 +143,6 @@ # extensions coming with Sphinx (named 'sphinx.ext.*') or your custom # ones. extensions = [ - "provider_init_hack", "sphinx.ext.autodoc", "sphinx.ext.viewcode", "sphinxarg.ext", @@ -740,8 +743,6 @@ def _get_params(root_schema: dict, prefix: str = "", default_section: str = "") if PACKAGE_NAME != "docker-stack": autoapi_dirs.append(PACKAGE_DIR) -if SYSTEM_TESTS_DIR and os.path.exists(SYSTEM_TESTS_DIR): - autoapi_dirs.append(SYSTEM_TESTS_DIR) # A directory that has user-defined templates to override our default templates. if PACKAGE_NAME == "apache-airflow": @@ -755,16 +756,74 @@ def _get_params(root_schema: dict, prefix: str = "", default_section: str = "") "*/node_modules/*", "*/migrations/*", "*/contrib/*", - "**/example_taskflow_api_docker_virtualenv.py", - "**/example_dag_decorator.py", + "*/example_taskflow_api_docker_virtualenv.py", + "*/example_dag_decorator.py", + "*/conftest.py", + "*/tests/__init__.py", + "*/tests/system/__init__.py", + "*/test_aws_auth_manager.py", + # These sub-folders aren't really providers, but we need __init__.py files else various tools (ruff, mypy) # get confused by providers/tests/system/cncf/kubernetes and think that folder is the top level # kubernetes module!
+ "*/providers/tests/__init__.py", + "*/providers/tests/cncf/__init__.py", + "*/providers/tests/common/__init__.py", + "*/providers/tests/apache/__init__.py", + "*/providers/tests/dbt/__init__.py", + "*/providers/tests/microsoft/__init__.py", + "*/providers/tests/system/__init__.py", + "*/providers/tests/system/apache/__init__.py", + "*/providers/tests/system/cncf/__init__.py", + "*/providers/tests/system/common/__init__.py", + "*/providers/tests/system/dbt/__init__.py", + "*/providers/tests/system/microsoft/__init__.py", ] -if PACKAGE_NAME == "apache-airflow": - autoapi_ignore.append("*/airflow/providers/*") -elif PACKAGE_NAME == "docker-stack": - autoapi_ignore.append("*/airflow/providers/*") + +ignore_re = re.compile(r"\[AutoAPI\] .* Ignoring \s (?P/[\w/.]*)", re.VERBOSE) + + +# Make the "Ignoring /..." log messages slightly less verbose +def filter_ignore(record: logging.LogRecord) -> bool: + matches = ignore_re.search(record.msg) + if not matches: + return True + if matches["path"].endswith("__init__.py"): + record.msg = record.msg.replace("__init__.py", "") + return True + return False + + +autoapi_log = logging.getLogger("sphinx.autoapi.mappers.base") +autoapi_log.addFilter(filter_ignore) + +if PACKAGE_NAME.startswith("apache-airflow-providers-"): + autoapi_python_use_implicit_namespaces = True + from provider_yaml_utils import load_package_data + + autoapi_ignore.extend( + ( + "*/airflow/__init__.py", + "*/airflow/providiers/__init__.py", + "*/example_dags/*", + "*/airflow/providers/cncf/kubernetes/backcompat/*", + ) + ) + + for p in load_package_data(include_suspended=True): + if p["package-name"] == PACKAGE_NAME: + continue + autoapi_ignore.extend((p["package-dir"] + "/*", p["system-tests-dir"] + "/*")) + + autoapi_keep_files = True + + if SYSTEM_TESTS_DIR and os.path.exists(SYSTEM_TESTS_DIR): + test_dir = SYSTEM_TESTS_DIR.parent + autoapi_dirs.append(test_dir) + + autoapi_ignore.extend(f"{d}/*" for d in test_dir.glob("*") if d.is_dir() and d.name != "system") else: - autoapi_ignore.append("*/airflow/providers/cncf/kubernetes/backcompat/*") - autoapi_ignore.append("*/example_dags/*") + if SYSTEM_TESTS_DIR and os.path.exists(SYSTEM_TESTS_DIR): + autoapi_dirs.append(SYSTEM_TESTS_DIR) # Keep the AutoAPI generated files on the filesystem after the run. # Useful for debugging. autoapi_keep_files = True diff --git a/docs/exts/docs_build/code_utils.py b/docs/exts/docs_build/code_utils.py index 6aef1ab1f305..3bbade4beb59 100644 --- a/docs/exts/docs_build/code_utils.py +++ b/docs/exts/docs_build/code_utils.py @@ -24,7 +24,6 @@ ROOT_PROJECT_DIR = os.path.abspath( os.path.join(os.path.dirname(os.path.realpath(__file__)), os.pardir, os.pardir, os.pardir) ) -PROVIDER_INIT_FILE = os.path.join(ROOT_PROJECT_DIR, "airflow", "providers", "__init__.py") DOCS_DIR = os.path.join(ROOT_PROJECT_DIR, "docs") AIRFLOW_DIR = os.path.join(ROOT_PROJECT_DIR, "airflow") diff --git a/docs/exts/exampleinclude.py b/docs/exts/exampleinclude.py index eade660515db..ee5d9329d2a2 100644 --- a/docs/exts/exampleinclude.py +++ b/docs/exts/exampleinclude.py @@ -188,7 +188,18 @@ def create_node(env, relative_path, show_button): :param show_button: whether to show "view code" button :return paragraph with the node """ - pagename = "_modules/" + relative_path[:-3] + + # Strip "providers" out of the example title that we include/link to. 
The full path needs to include + # it so we can pull in the code, but we don't want it to show up in the rendered docs + if relative_path.startswith("providers/src/"): + relative_path = relative_path.replace("providers/src/", "", 1) + elif relative_path.startswith("providers/"): + relative_path = relative_path.replace("providers/", "", 1) + + if relative_path.endswith(".py"): + pagename = "_modules/" + relative_path[:-3] + else: + pagename = "_modules/" + relative_path header_classes = ["example-header"] if show_button: diff --git a/docs/exts/provider_init_hack.py b/docs/exts/provider_init_hack.py deleted file mode 100644 index 819a78b95d7b..000000000000 --- a/docs/exts/provider_init_hack.py +++ /dev/null @@ -1,56 +0,0 @@ -# Licensed to the Apache Software Foundation (ASF) under one -# or more contributor license agreements. See the NOTICE file -# distributed with this work for additional information -# regarding copyright ownership. The ASF licenses this file -# to you under the Apache License, Version 2.0 (the -# "License"); you may not use this file except in compliance -# with the License. You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, -# software distributed under the License is distributed on an -# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -# KIND, either express or implied. See the License for the -# specific language governing permissions and limitations -# under the License. -""" -Bugs in sphinx-autoapi using metaclasses prevent us from upgrading to 1.3 -which has implicit namespace support. Until that time, we make it look -like a real package for building docs -""" - -from __future__ import annotations - -import os -from typing import TYPE_CHECKING - -if TYPE_CHECKING: - from sphinx.application import Sphinx - -ROOT_PROJECT_DIR = os.path.abspath( - os.path.join(os.path.dirname(os.path.realpath(__file__)), os.pardir, os.pardir) -) - -PROVIDER_INIT_FILE = os.path.join(ROOT_PROJECT_DIR, "airflow", "providers", "__init__.py") - - -def _create_init_py(app, config): - del app - del config - # This file is deleted by /docs/build_docs.py. If you are not using the script, the file will be - # deleted by pre-commit. - with open(PROVIDER_INIT_FILE, "w"): - pass - - -def setup(app: Sphinx): - """ - Sets the plugin up and returns configuration of the plugin. - - :param app: application. - :return json description of the configuration that is needed by the plugin. 
- """ - app.connect("config-inited", _create_init_py) - - return {"version": "builtin", "parallel_read_safe": True, "parallel_write_safe": True} diff --git a/docs/exts/provider_yaml_utils.py b/docs/exts/provider_yaml_utils.py index 1eaf93a6986e..6e5d3a835e1d 100644 --- a/docs/exts/provider_yaml_utils.py +++ b/docs/exts/provider_yaml_utils.py @@ -18,7 +18,7 @@ import json import os -from glob import glob +from functools import lru_cache from pathlib import Path from typing import Any @@ -26,6 +26,8 @@ import yaml ROOT_DIR = Path(__file__).parents[2].resolve() +AIRFLOW_PROVIDERS_SRC = ROOT_DIR / "providers" / "src" +AIRFLOW_PROVIDERS_NS_PACKAGE = AIRFLOW_PROVIDERS_SRC / "airflow" / "providers" PROVIDER_DATA_SCHEMA_PATH = ROOT_DIR / "airflow" / "provider.yaml.schema.json" @@ -36,24 +38,22 @@ def _load_schema() -> dict[str, Any]: def _filepath_to_module(filepath: str): - return str(Path(filepath).relative_to(ROOT_DIR)).replace("/", ".") + return str(Path(filepath).relative_to(AIRFLOW_PROVIDERS_SRC)).replace("/", ".") def _filepath_to_system_tests(filepath: str): return str( - ROOT_DIR - / "tests" - / "system" - / "providers" - / Path(filepath).relative_to(ROOT_DIR / "airflow" / "providers") + ROOT_DIR / "providers" / "tests" / "system" / Path(filepath).relative_to(AIRFLOW_PROVIDERS_NS_PACKAGE) ) +@lru_cache def get_provider_yaml_paths(): """Returns list of provider.yaml files""" - return sorted(glob(f"{ROOT_DIR}/airflow/providers/**/provider.yaml", recursive=True)) + return sorted(AIRFLOW_PROVIDERS_NS_PACKAGE.rglob("**/provider.yaml")) +@lru_cache def load_package_data(include_suspended: bool = False) -> list[dict[str, Any]]: """ Load all data from providers files diff --git a/docs/exts/providers_extensions.py b/docs/exts/providers_extensions.py index 5de6dcb8eb7b..1b59cc00aca3 100644 --- a/docs/exts/providers_extensions.py +++ b/docs/exts/providers_extensions.py @@ -23,10 +23,8 @@ from pathlib import Path from typing import Any, Iterable -import yaml - # No stub exists for docutils.parsers.rst.directives. See https://github.com/python/typeshed/issues/5755. -from provider_yaml_utils import get_provider_yaml_paths +from provider_yaml_utils import load_package_data from docs.exts.operators_and_hooks_ref import ( DEFAULT_HEADER_SEPARATOR, @@ -64,7 +62,7 @@ def get_import_mappings(tree): def _get_module_class_registry( - module_filepath: str, class_extras: dict[str, Any] + module_filepath: Path, module_name: str, class_extras: dict[str, Any] ) -> dict[str, dict[str, Any]]: """Extracts classes and its information from a Python module file. @@ -80,7 +78,6 @@ def _get_module_class_registry( with open(module_filepath) as file: ast_obj = ast.parse(file.read()) - module_name = module_filepath.replace("/", ".").replace(".py", "").lstrip(".") import_mappings = get_import_mappings(ast_obj) module_class_registry = { f"{module_name}.{node.name}": { @@ -140,16 +137,26 @@ def _get_providers_class_registry() -> dict[str, dict[str, Any]]: :return: A dictionary with provider names as keys and a dictionary of classes as values. 
""" class_registry = {} - for provider_yaml_path in get_provider_yaml_paths(): - provider_yaml_content = yaml.safe_load(Path(provider_yaml_path).read_text()) - for root, _, file_names in os.walk(Path(provider_yaml_path).parent): + for provider_yaml_content in load_package_data(): + provider_pkg_root = Path(provider_yaml_content["package-dir"]) + for root, _, file_names in os.walk(provider_pkg_root): + folder = Path(root) for file_name in file_names: - module_filepath = f"{os.path.relpath(root)}/{file_name}" - if not module_filepath.endswith(".py") or module_filepath == "__init__.py": + if not file_name.endswith(".py") or file_name == "__init__.py": continue + module_filepath = folder.joinpath(file_name) + module_registry = _get_module_class_registry( module_filepath=module_filepath, + module_name=( + provider_yaml_content["python-module"] + + "." + + module_filepath.relative_to(provider_pkg_root) + .with_suffix("") + .as_posix() + .replace("/", ".") + ), class_extras={"provider_name": provider_yaml_content["package-name"]}, ) class_registry.update(module_registry) diff --git a/generated/provider_dependencies.json b/generated/provider_dependencies.json index 2d0ab90a3508..e6a0deca23cb 100644 --- a/generated/provider_dependencies.json +++ b/generated/provider_dependencies.json @@ -345,8 +345,7 @@ "devel-deps": [], "plugins": [], "cross-providers-deps": [ - "cncf.kubernetes", - "standard" + "cncf.kubernetes" ], "excluded-python-versions": [], "state": "ready" @@ -534,9 +533,7 @@ "plugin-class": "airflow.providers.edge.plugins.edge_executor_plugin.EdgeExecutorPlugin" } ], - "cross-providers-deps": [ - "standard" - ], + "cross-providers-deps": [], "excluded-python-versions": [], "state": "not-ready" }, diff --git a/providers/.gitignore b/providers/.gitignore new file mode 100644 index 000000000000..2924614f3a14 --- /dev/null +++ b/providers/.gitignore @@ -0,0 +1,7 @@ +# Ignore init files in these non-provider folders. I.e. we relrease atlassian/jira, but not atlassian +src/airflow/providers/apache/__init__.py +src/airflow/providers/atlassian/__init__.py +src/airflow/providers/cncf/__init__.py +src/airflow/providers/common/__init__.py +src/airflow/providers/dbt/__init__.py +src/airflow/providers/microsoft/__init__.py diff --git a/tests/test_utils/__init__.py b/providers/__init__.py similarity index 74% rename from tests/test_utils/__init__.py rename to providers/__init__.py index 4c4790fa4b47..c53de5451bd0 100644 --- a/tests/test_utils/__init__.py +++ b/providers/__init__.py @@ -1,4 +1,3 @@ -# # Licensed to the Apache Software Foundation (ASF) under one # or more contributor license agreements. See the NOTICE file # distributed with this work for additional information @@ -15,10 +14,10 @@ # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. 
-from __future__ import annotations - -import os -AIRFLOW_MAIN_FOLDER = os.path.realpath( - os.path.join(os.path.dirname(os.path.realpath(__file__)), os.pardir, os.pardir) -) +# This exists so that pytest doesn't get confused about namespace packages +# and think that `tests/conftest.py` and `providers/tests/conftest.py` are +# both "tests.conftest" +# +# This is a temporary solution until https://github.com/apache/airflow/issues/42632 +# is done diff --git a/providers/pyproject.toml b/providers/pyproject.toml new file mode 100644 index 000000000000..093cbbd54714 --- /dev/null +++ b/providers/pyproject.toml @@ -0,0 +1,98 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. + +[build-system] +requires = ["hatchling", "hatch-vcs"] +build-backend = "hatchling.build" + +[project] +name = "local-providers" +version = "0.1.0" +description = "Placeholder package for local/from-sources providers." +requires-python = ">=3.8, <3.13" +classifiers = [ + "Private :: Do Not Upload", +] + +[tool.hatch.publish.index] +# Let's make doubly sure this never goes to PyPI +disable = true + +[tool.hatch.build.targets.wheel] +packages = ["src/airflow"] +exclude = [ + ".gitignore", + ".latest-doc-only-change.txt", + "CHANGELOG.rst", + "MANAGING_PROVIDERS_LIFECYCLE.rst", +] + +[tool.ruff] +extend = "../pyproject.toml" +src = ["src"] +namespace-packages = ["src/airflow/providers"] +extend-exclude = [ + # The files generated by stubgen aren't 100% valid syntax it turns out, and we don't ship them, so we can + # ignore them in ruff + "src/airflow/providers/common/sql/*/*.pyi", ] + +[tool.ruff.lint.per-file-ignores] + +# Ignore Doc rules et al for anything outside of tests +"!src/*" = ["D", "TID253", "S101", "TRY002"] + +# https://github.com/apache/airflow/issues/39252 +"src/airflow/providers/amazon/aws/hooks/eks.py" = ["W605"] + +# All of the modules which have an extra license header (i.e.
that we copy from another project) need to +# ignore E402 -- module level import not at top level +"tests/amazon/aws/auth_manager/security_manager/test_aws_security_manager_override.py" = ["E402"] +"tests/common/io/xcom/test_backend.py" = ["E402"] +"tests/elasticsearch/log/elasticmock/__init__.py" = ["E402"] +"tests/elasticsearch/log/elasticmock/utilities/__init__.py" = ["E402"] +"tests/google/cloud/hooks/vertex_ai/test_batch_prediction_job.py" = ["E402"] +"tests/google/cloud/hooks/vertex_ai/test_hyperparameter_tuning_job.py" = ["E402"] +"tests/google/cloud/hooks/vertex_ai/test_auto_ml.py" = ["E402"] +"tests/google/cloud/hooks/vertex_ai/test_custom_job.py" = ["E402"] +"tests/google/cloud/hooks/vertex_ai/test_dataset.py" = ["E402"] +"tests/google/cloud/hooks/vertex_ai/test_endpoint_service.py" = ["E402"] +"tests/google/cloud/hooks/vertex_ai/test_generative_model.py" = ["E402"] +"tests/google/cloud/hooks/vertex_ai/test_model_service.py" = ["E402"] +"tests/google/cloud/hooks/vertex_ai/test_pipeline_job.py" = ["E402"] +"tests/google/cloud/hooks/vertex_ai/test_prediction_service.py" = ["E402"] +"tests/google/cloud/links/test_translate.py" = ["E402"] +"tests/google/cloud/operators/test_automl.py"= ["E402"] +"tests/google/cloud/operators/test_vertex_ai.py" = ["E402"] +"tests/google/cloud/operators/vertex_ai/test_generative_model.py" = ["E402"] +"tests/google/cloud/triggers/test_vertex_ai.py" = ["E402"] +"tests/openai/hooks/test_openai.py" = ["E402"] +"tests/openai/operators/test_openai.py" = ["E402"] +"tests/openai/triggers/test_openai.py" = ["E402"] +"tests/opensearch/conftest.py" = ["E402"] +"tests/opensearch/hooks/test_opensearch.py" = ["E402"] +"tests/opensearch/log/test_os_json_formatter.py" = ["E402"] +"tests/opensearch/log/test_os_response.py" = ["E402"] +"tests/opensearch/log/test_os_task_handler.py" = ["E402"] +"tests/opensearch/operators/test_opensearch.py" = ["E402"] +"tests/qdrant/hooks/test_qdrant.py" = ["E402"] +"tests/qdrant/operators/test_qdrant.py" = ["E402"] +"tests/snowflake/operators/test_snowflake_sql.py" = ["E402"] +"tests/yandex/**/*.py" = ["E402"] + +# https://github.com/apache/airflow/issues/39252 +"airflow/providers/amazon/aws/hooks/eks.py" = ["W605"] diff --git a/providers/src/airflow/providers/.gitignore b/providers/src/airflow/providers/.gitignore new file mode 100644 index 000000000000..528066d9003e --- /dev/null +++ b/providers/src/airflow/providers/.gitignore @@ -0,0 +1 @@ +/__init__.py diff --git a/airflow/providers/MANAGING_PROVIDERS_LIFECYCLE.rst b/providers/src/airflow/providers/MANAGING_PROVIDERS_LIFECYCLE.rst similarity index 98% rename from airflow/providers/MANAGING_PROVIDERS_LIFECYCLE.rst rename to providers/src/airflow/providers/MANAGING_PROVIDERS_LIFECYCLE.rst index 3d3e95c28b17..c5e6ec128785 100644 --- a/airflow/providers/MANAGING_PROVIDERS_LIFECYCLE.rst +++ b/providers/src/airflow/providers/MANAGING_PROVIDERS_LIFECYCLE.rst @@ -108,7 +108,7 @@ breeze and I'll run unit tests for my Hook. .. code-block:: bash - root@fafd8d630e46:/opt/airflow# python -m pytest tests/providers//hook/test_*.py + root@fafd8d630e46:/opt/airflow# python -m pytest providers/tests//hook/test_*.py Adding chicken-egg providers ---------------------------- @@ -341,23 +341,23 @@ Example failing collection after ``google`` provider has been suspended: .. code-block:: txt - _____ ERROR collecting tests/providers/apache/beam/operators/test_beam.py ______ - ImportError while importing test module '/opt/airflow/tests/providers/apache/beam/operators/test_beam.py'. 
+ _____ ERROR collecting providers/tests/apache/beam/operators/test_beam.py ______ + ImportError while importing test module '/opt/airflow/providers/tests/apache/beam/operators/test_beam.py'. Hint: make sure your test modules/packages have valid Python names. Traceback: /usr/local/lib/python3.8/importlib/__init__.py:127: in import_module return _bootstrap._gcd_import(name[level:], package, level) - tests/providers/apache/beam/operators/test_beam.py:25: in + providers/tests/apache/beam/operators/test_beam.py:25: in from airflow.providers.apache.beam.operators.beam import ( airflow/providers/apache/beam/operators/beam.py:35: in from airflow.providers.google.cloud.hooks.dataflow import ( airflow/providers/google/cloud/hooks/dataflow.py:32: in from google.cloud.dataflow_v1beta3 import GetJobRequest, Job, JobState, JobsV1Beta3AsyncClient, JobView E ModuleNotFoundError: No module named 'google.cloud.dataflow_v1beta3' - _ ERROR collecting tests/providers/microsoft/azure/transfers/test_azure_blob_to_gcs.py _ + _ ERROR collecting providers/tests/microsoft/azure/transfers/test_azure_blob_to_gcs.py _ -The fix is to add this line at the top of the ``tests/providers/apache/beam/operators/test_beam.py`` module: +The fix is to add this line at the top of the ``providers/tests/apache/beam/operators/test_beam.py`` module: .. code-block:: python diff --git a/airflow/providers/airbyte/.latest-doc-only-change.txt b/providers/src/airflow/providers/airbyte/.latest-doc-only-change.txt similarity index 100% rename from airflow/providers/airbyte/.latest-doc-only-change.txt rename to providers/src/airflow/providers/airbyte/.latest-doc-only-change.txt diff --git a/airflow/providers/airbyte/CHANGELOG.rst b/providers/src/airflow/providers/airbyte/CHANGELOG.rst similarity index 100% rename from airflow/providers/airbyte/CHANGELOG.rst rename to providers/src/airflow/providers/airbyte/CHANGELOG.rst diff --git a/airflow/providers/airbyte/__init__.py b/providers/src/airflow/providers/airbyte/__init__.py similarity index 100% rename from airflow/providers/airbyte/__init__.py rename to providers/src/airflow/providers/airbyte/__init__.py diff --git a/airflow/providers/airbyte/hooks/__init__.py b/providers/src/airflow/providers/airbyte/hooks/__init__.py similarity index 100% rename from airflow/providers/airbyte/hooks/__init__.py rename to providers/src/airflow/providers/airbyte/hooks/__init__.py diff --git a/airflow/providers/airbyte/hooks/airbyte.py b/providers/src/airflow/providers/airbyte/hooks/airbyte.py similarity index 100% rename from airflow/providers/airbyte/hooks/airbyte.py rename to providers/src/airflow/providers/airbyte/hooks/airbyte.py diff --git a/airflow/providers/airbyte/operators/__init__.py b/providers/src/airflow/providers/airbyte/operators/__init__.py similarity index 100% rename from airflow/providers/airbyte/operators/__init__.py rename to providers/src/airflow/providers/airbyte/operators/__init__.py diff --git a/airflow/providers/airbyte/operators/airbyte.py b/providers/src/airflow/providers/airbyte/operators/airbyte.py similarity index 100% rename from airflow/providers/airbyte/operators/airbyte.py rename to providers/src/airflow/providers/airbyte/operators/airbyte.py diff --git a/airflow/providers/airbyte/provider.yaml b/providers/src/airflow/providers/airbyte/provider.yaml similarity index 100% rename from airflow/providers/airbyte/provider.yaml rename to providers/src/airflow/providers/airbyte/provider.yaml diff --git a/airflow/providers/alibaba/cloud/operators/__init__.py 
b/providers/src/airflow/providers/airbyte/sensors/__init__.py similarity index 100% rename from airflow/providers/alibaba/cloud/operators/__init__.py rename to providers/src/airflow/providers/airbyte/sensors/__init__.py diff --git a/airflow/providers/airbyte/sensors/airbyte.py b/providers/src/airflow/providers/airbyte/sensors/airbyte.py similarity index 100% rename from airflow/providers/airbyte/sensors/airbyte.py rename to providers/src/airflow/providers/airbyte/sensors/airbyte.py diff --git a/airflow/providers/alibaba/cloud/sensors/__init__.py b/providers/src/airflow/providers/airbyte/triggers/__init__.py similarity index 100% rename from airflow/providers/alibaba/cloud/sensors/__init__.py rename to providers/src/airflow/providers/airbyte/triggers/__init__.py diff --git a/airflow/providers/airbyte/triggers/airbyte.py b/providers/src/airflow/providers/airbyte/triggers/airbyte.py similarity index 100% rename from airflow/providers/airbyte/triggers/airbyte.py rename to providers/src/airflow/providers/airbyte/triggers/airbyte.py diff --git a/airflow/providers/alibaba/.latest-doc-only-change.txt b/providers/src/airflow/providers/alibaba/.latest-doc-only-change.txt similarity index 100% rename from airflow/providers/alibaba/.latest-doc-only-change.txt rename to providers/src/airflow/providers/alibaba/.latest-doc-only-change.txt diff --git a/airflow/providers/alibaba/CHANGELOG.rst b/providers/src/airflow/providers/alibaba/CHANGELOG.rst similarity index 100% rename from airflow/providers/alibaba/CHANGELOG.rst rename to providers/src/airflow/providers/alibaba/CHANGELOG.rst diff --git a/airflow/providers/alibaba/__init__.py b/providers/src/airflow/providers/alibaba/__init__.py similarity index 100% rename from airflow/providers/alibaba/__init__.py rename to providers/src/airflow/providers/alibaba/__init__.py diff --git a/airflow/providers/alibaba/cloud/__init__.py b/providers/src/airflow/providers/alibaba/cloud/__init__.py similarity index 100% rename from airflow/providers/alibaba/cloud/__init__.py rename to providers/src/airflow/providers/alibaba/cloud/__init__.py diff --git a/airflow/providers/alibaba/cloud/hooks/__init__.py b/providers/src/airflow/providers/alibaba/cloud/hooks/__init__.py similarity index 100% rename from airflow/providers/alibaba/cloud/hooks/__init__.py rename to providers/src/airflow/providers/alibaba/cloud/hooks/__init__.py diff --git a/airflow/providers/alibaba/cloud/hooks/analyticdb_spark.py b/providers/src/airflow/providers/alibaba/cloud/hooks/analyticdb_spark.py similarity index 100% rename from airflow/providers/alibaba/cloud/hooks/analyticdb_spark.py rename to providers/src/airflow/providers/alibaba/cloud/hooks/analyticdb_spark.py diff --git a/airflow/providers/alibaba/cloud/hooks/oss.py b/providers/src/airflow/providers/alibaba/cloud/hooks/oss.py similarity index 100% rename from airflow/providers/alibaba/cloud/hooks/oss.py rename to providers/src/airflow/providers/alibaba/cloud/hooks/oss.py diff --git a/airflow/providers/amazon/aws/__init__.py b/providers/src/airflow/providers/alibaba/cloud/log/__init__.py similarity index 100% rename from airflow/providers/amazon/aws/__init__.py rename to providers/src/airflow/providers/alibaba/cloud/log/__init__.py diff --git a/airflow/providers/alibaba/cloud/log/oss_task_handler.py b/providers/src/airflow/providers/alibaba/cloud/log/oss_task_handler.py similarity index 100% rename from airflow/providers/alibaba/cloud/log/oss_task_handler.py rename to providers/src/airflow/providers/alibaba/cloud/log/oss_task_handler.py diff --git 
a/airflow/providers/amazon/aws/assets/__init__.py b/providers/src/airflow/providers/alibaba/cloud/operators/__init__.py similarity index 100% rename from airflow/providers/amazon/aws/assets/__init__.py rename to providers/src/airflow/providers/alibaba/cloud/operators/__init__.py diff --git a/airflow/providers/alibaba/cloud/operators/analyticdb_spark.py b/providers/src/airflow/providers/alibaba/cloud/operators/analyticdb_spark.py similarity index 100% rename from airflow/providers/alibaba/cloud/operators/analyticdb_spark.py rename to providers/src/airflow/providers/alibaba/cloud/operators/analyticdb_spark.py diff --git a/airflow/providers/alibaba/cloud/operators/oss.py b/providers/src/airflow/providers/alibaba/cloud/operators/oss.py similarity index 100% rename from airflow/providers/alibaba/cloud/operators/oss.py rename to providers/src/airflow/providers/alibaba/cloud/operators/oss.py diff --git a/airflow/providers/amazon/aws/auth_manager/__init__.py b/providers/src/airflow/providers/alibaba/cloud/sensors/__init__.py similarity index 100% rename from airflow/providers/amazon/aws/auth_manager/__init__.py rename to providers/src/airflow/providers/alibaba/cloud/sensors/__init__.py diff --git a/airflow/providers/alibaba/cloud/sensors/analyticdb_spark.py b/providers/src/airflow/providers/alibaba/cloud/sensors/analyticdb_spark.py similarity index 100% rename from airflow/providers/alibaba/cloud/sensors/analyticdb_spark.py rename to providers/src/airflow/providers/alibaba/cloud/sensors/analyticdb_spark.py diff --git a/airflow/providers/alibaba/cloud/sensors/oss_key.py b/providers/src/airflow/providers/alibaba/cloud/sensors/oss_key.py similarity index 100% rename from airflow/providers/alibaba/cloud/sensors/oss_key.py rename to providers/src/airflow/providers/alibaba/cloud/sensors/oss_key.py diff --git a/airflow/providers/alibaba/provider.yaml b/providers/src/airflow/providers/alibaba/provider.yaml similarity index 100% rename from airflow/providers/alibaba/provider.yaml rename to providers/src/airflow/providers/alibaba/provider.yaml diff --git a/airflow/providers/amazon/.latest-doc-only-change.txt b/providers/src/airflow/providers/amazon/.latest-doc-only-change.txt similarity index 100% rename from airflow/providers/amazon/.latest-doc-only-change.txt rename to providers/src/airflow/providers/amazon/.latest-doc-only-change.txt diff --git a/airflow/providers/amazon/CHANGELOG.rst b/providers/src/airflow/providers/amazon/CHANGELOG.rst similarity index 100% rename from airflow/providers/amazon/CHANGELOG.rst rename to providers/src/airflow/providers/amazon/CHANGELOG.rst diff --git a/airflow/providers/amazon/__init__.py b/providers/src/airflow/providers/amazon/__init__.py similarity index 100% rename from airflow/providers/amazon/__init__.py rename to providers/src/airflow/providers/amazon/__init__.py diff --git a/airflow/providers/amazon/aws/auth_manager/avp/__init__.py b/providers/src/airflow/providers/amazon/aws/__init__.py similarity index 100% rename from airflow/providers/amazon/aws/auth_manager/avp/__init__.py rename to providers/src/airflow/providers/amazon/aws/__init__.py diff --git a/airflow/providers/amazon/aws/auth_manager/cli/__init__.py b/providers/src/airflow/providers/amazon/aws/assets/__init__.py similarity index 100% rename from airflow/providers/amazon/aws/auth_manager/cli/__init__.py rename to providers/src/airflow/providers/amazon/aws/assets/__init__.py diff --git a/airflow/providers/amazon/aws/assets/s3.py b/providers/src/airflow/providers/amazon/aws/assets/s3.py similarity index 
100% rename from airflow/providers/amazon/aws/assets/s3.py rename to providers/src/airflow/providers/amazon/aws/assets/s3.py diff --git a/airflow/providers/amazon/aws/auth_manager/security_manager/__init__.py b/providers/src/airflow/providers/amazon/aws/auth_manager/__init__.py similarity index 100% rename from airflow/providers/amazon/aws/auth_manager/security_manager/__init__.py rename to providers/src/airflow/providers/amazon/aws/auth_manager/__init__.py diff --git a/airflow/providers/amazon/aws/auth_manager/views/__init__.py b/providers/src/airflow/providers/amazon/aws/auth_manager/avp/__init__.py similarity index 100% rename from airflow/providers/amazon/aws/auth_manager/views/__init__.py rename to providers/src/airflow/providers/amazon/aws/auth_manager/avp/__init__.py diff --git a/airflow/providers/amazon/aws/auth_manager/avp/entities.py b/providers/src/airflow/providers/amazon/aws/auth_manager/avp/entities.py similarity index 100% rename from airflow/providers/amazon/aws/auth_manager/avp/entities.py rename to providers/src/airflow/providers/amazon/aws/auth_manager/avp/entities.py diff --git a/airflow/providers/amazon/aws/auth_manager/avp/facade.py b/providers/src/airflow/providers/amazon/aws/auth_manager/avp/facade.py similarity index 100% rename from airflow/providers/amazon/aws/auth_manager/avp/facade.py rename to providers/src/airflow/providers/amazon/aws/auth_manager/avp/facade.py diff --git a/airflow/providers/amazon/aws/auth_manager/avp/schema.json b/providers/src/airflow/providers/amazon/aws/auth_manager/avp/schema.json similarity index 100% rename from airflow/providers/amazon/aws/auth_manager/avp/schema.json rename to providers/src/airflow/providers/amazon/aws/auth_manager/avp/schema.json diff --git a/airflow/providers/amazon/aws/auth_manager/aws_auth_manager.py b/providers/src/airflow/providers/amazon/aws/auth_manager/aws_auth_manager.py similarity index 100% rename from airflow/providers/amazon/aws/auth_manager/aws_auth_manager.py rename to providers/src/airflow/providers/amazon/aws/auth_manager/aws_auth_manager.py diff --git a/airflow/providers/amazon/aws/executors/__init__.py b/providers/src/airflow/providers/amazon/aws/auth_manager/cli/__init__.py similarity index 100% rename from airflow/providers/amazon/aws/executors/__init__.py rename to providers/src/airflow/providers/amazon/aws/auth_manager/cli/__init__.py diff --git a/airflow/providers/amazon/aws/auth_manager/cli/avp_commands.py b/providers/src/airflow/providers/amazon/aws/auth_manager/cli/avp_commands.py similarity index 100% rename from airflow/providers/amazon/aws/auth_manager/cli/avp_commands.py rename to providers/src/airflow/providers/amazon/aws/auth_manager/cli/avp_commands.py diff --git a/airflow/providers/amazon/aws/auth_manager/cli/definition.py b/providers/src/airflow/providers/amazon/aws/auth_manager/cli/definition.py similarity index 100% rename from airflow/providers/amazon/aws/auth_manager/cli/definition.py rename to providers/src/airflow/providers/amazon/aws/auth_manager/cli/definition.py diff --git a/airflow/providers/amazon/aws/auth_manager/constants.py b/providers/src/airflow/providers/amazon/aws/auth_manager/constants.py similarity index 100% rename from airflow/providers/amazon/aws/auth_manager/constants.py rename to providers/src/airflow/providers/amazon/aws/auth_manager/constants.py diff --git a/airflow/providers/amazon/aws/executors/utils/__init__.py b/providers/src/airflow/providers/amazon/aws/auth_manager/security_manager/__init__.py similarity index 100% rename from 
airflow/providers/amazon/aws/executors/utils/__init__.py rename to providers/src/airflow/providers/amazon/aws/auth_manager/security_manager/__init__.py diff --git a/airflow/providers/amazon/aws/auth_manager/security_manager/aws_security_manager_override.py b/providers/src/airflow/providers/amazon/aws/auth_manager/security_manager/aws_security_manager_override.py similarity index 100% rename from airflow/providers/amazon/aws/auth_manager/security_manager/aws_security_manager_override.py rename to providers/src/airflow/providers/amazon/aws/auth_manager/security_manager/aws_security_manager_override.py diff --git a/airflow/providers/amazon/aws/auth_manager/user.py b/providers/src/airflow/providers/amazon/aws/auth_manager/user.py similarity index 100% rename from airflow/providers/amazon/aws/auth_manager/user.py rename to providers/src/airflow/providers/amazon/aws/auth_manager/user.py diff --git a/airflow/providers/amazon/aws/fs/__init__.py b/providers/src/airflow/providers/amazon/aws/auth_manager/views/__init__.py similarity index 100% rename from airflow/providers/amazon/aws/fs/__init__.py rename to providers/src/airflow/providers/amazon/aws/auth_manager/views/__init__.py diff --git a/airflow/providers/amazon/aws/auth_manager/views/auth.py b/providers/src/airflow/providers/amazon/aws/auth_manager/views/auth.py similarity index 100% rename from airflow/providers/amazon/aws/auth_manager/views/auth.py rename to providers/src/airflow/providers/amazon/aws/auth_manager/views/auth.py diff --git a/airflow/providers/amazon/aws/exceptions.py b/providers/src/airflow/providers/amazon/aws/exceptions.py similarity index 100% rename from airflow/providers/amazon/aws/exceptions.py rename to providers/src/airflow/providers/amazon/aws/exceptions.py diff --git a/airflow/providers/amazon/aws/executors/Dockerfile b/providers/src/airflow/providers/amazon/aws/executors/Dockerfile similarity index 100% rename from airflow/providers/amazon/aws/executors/Dockerfile rename to providers/src/airflow/providers/amazon/aws/executors/Dockerfile diff --git a/airflow/providers/amazon/aws/hooks/__init__.py b/providers/src/airflow/providers/amazon/aws/executors/__init__.py similarity index 100% rename from airflow/providers/amazon/aws/hooks/__init__.py rename to providers/src/airflow/providers/amazon/aws/executors/__init__.py diff --git a/airflow/providers/amazon/aws/executors/batch/__init__.py b/providers/src/airflow/providers/amazon/aws/executors/batch/__init__.py similarity index 100% rename from airflow/providers/amazon/aws/executors/batch/__init__.py rename to providers/src/airflow/providers/amazon/aws/executors/batch/__init__.py diff --git a/airflow/providers/amazon/aws/executors/batch/batch_executor.py b/providers/src/airflow/providers/amazon/aws/executors/batch/batch_executor.py similarity index 100% rename from airflow/providers/amazon/aws/executors/batch/batch_executor.py rename to providers/src/airflow/providers/amazon/aws/executors/batch/batch_executor.py diff --git a/airflow/providers/amazon/aws/executors/batch/batch_executor_config.py b/providers/src/airflow/providers/amazon/aws/executors/batch/batch_executor_config.py similarity index 100% rename from airflow/providers/amazon/aws/executors/batch/batch_executor_config.py rename to providers/src/airflow/providers/amazon/aws/executors/batch/batch_executor_config.py diff --git a/airflow/providers/amazon/aws/executors/batch/boto_schema.py b/providers/src/airflow/providers/amazon/aws/executors/batch/boto_schema.py similarity index 100% rename from 
airflow/providers/amazon/aws/executors/batch/boto_schema.py rename to providers/src/airflow/providers/amazon/aws/executors/batch/boto_schema.py diff --git a/airflow/providers/amazon/aws/executors/batch/utils.py b/providers/src/airflow/providers/amazon/aws/executors/batch/utils.py similarity index 100% rename from airflow/providers/amazon/aws/executors/batch/utils.py rename to providers/src/airflow/providers/amazon/aws/executors/batch/utils.py diff --git a/airflow/providers/amazon/aws/executors/ecs/__init__.py b/providers/src/airflow/providers/amazon/aws/executors/ecs/__init__.py similarity index 100% rename from airflow/providers/amazon/aws/executors/ecs/__init__.py rename to providers/src/airflow/providers/amazon/aws/executors/ecs/__init__.py diff --git a/airflow/providers/amazon/aws/executors/ecs/boto_schema.py b/providers/src/airflow/providers/amazon/aws/executors/ecs/boto_schema.py similarity index 100% rename from airflow/providers/amazon/aws/executors/ecs/boto_schema.py rename to providers/src/airflow/providers/amazon/aws/executors/ecs/boto_schema.py diff --git a/airflow/providers/amazon/aws/executors/ecs/ecs_executor.py b/providers/src/airflow/providers/amazon/aws/executors/ecs/ecs_executor.py similarity index 100% rename from airflow/providers/amazon/aws/executors/ecs/ecs_executor.py rename to providers/src/airflow/providers/amazon/aws/executors/ecs/ecs_executor.py diff --git a/airflow/providers/amazon/aws/executors/ecs/ecs_executor_config.py b/providers/src/airflow/providers/amazon/aws/executors/ecs/ecs_executor_config.py similarity index 100% rename from airflow/providers/amazon/aws/executors/ecs/ecs_executor_config.py rename to providers/src/airflow/providers/amazon/aws/executors/ecs/ecs_executor_config.py diff --git a/airflow/providers/amazon/aws/executors/ecs/utils.py b/providers/src/airflow/providers/amazon/aws/executors/ecs/utils.py similarity index 100% rename from airflow/providers/amazon/aws/executors/ecs/utils.py rename to providers/src/airflow/providers/amazon/aws/executors/ecs/utils.py diff --git a/airflow/providers/amazon/aws/links/__init__.py b/providers/src/airflow/providers/amazon/aws/executors/utils/__init__.py similarity index 100% rename from airflow/providers/amazon/aws/links/__init__.py rename to providers/src/airflow/providers/amazon/aws/executors/utils/__init__.py diff --git a/airflow/providers/amazon/aws/executors/utils/base_config_keys.py b/providers/src/airflow/providers/amazon/aws/executors/utils/base_config_keys.py similarity index 100% rename from airflow/providers/amazon/aws/executors/utils/base_config_keys.py rename to providers/src/airflow/providers/amazon/aws/executors/utils/base_config_keys.py diff --git a/airflow/providers/amazon/aws/executors/utils/exponential_backoff_retry.py b/providers/src/airflow/providers/amazon/aws/executors/utils/exponential_backoff_retry.py similarity index 100% rename from airflow/providers/amazon/aws/executors/utils/exponential_backoff_retry.py rename to providers/src/airflow/providers/amazon/aws/executors/utils/exponential_backoff_retry.py diff --git a/airflow/providers/amazon/aws/log/__init__.py b/providers/src/airflow/providers/amazon/aws/fs/__init__.py similarity index 100% rename from airflow/providers/amazon/aws/log/__init__.py rename to providers/src/airflow/providers/amazon/aws/fs/__init__.py diff --git a/airflow/providers/amazon/aws/fs/s3.py b/providers/src/airflow/providers/amazon/aws/fs/s3.py similarity index 100% rename from airflow/providers/amazon/aws/fs/s3.py rename to 
providers/src/airflow/providers/amazon/aws/fs/s3.py diff --git a/airflow/providers/amazon/aws/notifications/__init__.py b/providers/src/airflow/providers/amazon/aws/hooks/__init__.py similarity index 100% rename from airflow/providers/amazon/aws/notifications/__init__.py rename to providers/src/airflow/providers/amazon/aws/hooks/__init__.py diff --git a/airflow/providers/amazon/aws/hooks/appflow.py b/providers/src/airflow/providers/amazon/aws/hooks/appflow.py similarity index 100% rename from airflow/providers/amazon/aws/hooks/appflow.py rename to providers/src/airflow/providers/amazon/aws/hooks/appflow.py diff --git a/airflow/providers/amazon/aws/hooks/athena.py b/providers/src/airflow/providers/amazon/aws/hooks/athena.py similarity index 100% rename from airflow/providers/amazon/aws/hooks/athena.py rename to providers/src/airflow/providers/amazon/aws/hooks/athena.py diff --git a/airflow/providers/amazon/aws/hooks/athena_sql.py b/providers/src/airflow/providers/amazon/aws/hooks/athena_sql.py similarity index 100% rename from airflow/providers/amazon/aws/hooks/athena_sql.py rename to providers/src/airflow/providers/amazon/aws/hooks/athena_sql.py diff --git a/airflow/providers/amazon/aws/hooks/base_aws.py b/providers/src/airflow/providers/amazon/aws/hooks/base_aws.py similarity index 100% rename from airflow/providers/amazon/aws/hooks/base_aws.py rename to providers/src/airflow/providers/amazon/aws/hooks/base_aws.py diff --git a/airflow/providers/amazon/aws/hooks/batch_client.py b/providers/src/airflow/providers/amazon/aws/hooks/batch_client.py similarity index 100% rename from airflow/providers/amazon/aws/hooks/batch_client.py rename to providers/src/airflow/providers/amazon/aws/hooks/batch_client.py diff --git a/airflow/providers/amazon/aws/hooks/batch_waiters.json b/providers/src/airflow/providers/amazon/aws/hooks/batch_waiters.json similarity index 100% rename from airflow/providers/amazon/aws/hooks/batch_waiters.json rename to providers/src/airflow/providers/amazon/aws/hooks/batch_waiters.json diff --git a/airflow/providers/amazon/aws/hooks/batch_waiters.py b/providers/src/airflow/providers/amazon/aws/hooks/batch_waiters.py similarity index 100% rename from airflow/providers/amazon/aws/hooks/batch_waiters.py rename to providers/src/airflow/providers/amazon/aws/hooks/batch_waiters.py diff --git a/airflow/providers/amazon/aws/hooks/bedrock.py b/providers/src/airflow/providers/amazon/aws/hooks/bedrock.py similarity index 100% rename from airflow/providers/amazon/aws/hooks/bedrock.py rename to providers/src/airflow/providers/amazon/aws/hooks/bedrock.py diff --git a/airflow/providers/amazon/aws/hooks/chime.py b/providers/src/airflow/providers/amazon/aws/hooks/chime.py similarity index 100% rename from airflow/providers/amazon/aws/hooks/chime.py rename to providers/src/airflow/providers/amazon/aws/hooks/chime.py diff --git a/airflow/providers/amazon/aws/hooks/cloud_formation.py b/providers/src/airflow/providers/amazon/aws/hooks/cloud_formation.py similarity index 100% rename from airflow/providers/amazon/aws/hooks/cloud_formation.py rename to providers/src/airflow/providers/amazon/aws/hooks/cloud_formation.py diff --git a/airflow/providers/amazon/aws/hooks/comprehend.py b/providers/src/airflow/providers/amazon/aws/hooks/comprehend.py similarity index 100% rename from airflow/providers/amazon/aws/hooks/comprehend.py rename to providers/src/airflow/providers/amazon/aws/hooks/comprehend.py diff --git a/airflow/providers/amazon/aws/hooks/datasync.py 
b/providers/src/airflow/providers/amazon/aws/hooks/datasync.py similarity index 100% rename from airflow/providers/amazon/aws/hooks/datasync.py rename to providers/src/airflow/providers/amazon/aws/hooks/datasync.py diff --git a/airflow/providers/amazon/aws/hooks/dms.py b/providers/src/airflow/providers/amazon/aws/hooks/dms.py similarity index 100% rename from airflow/providers/amazon/aws/hooks/dms.py rename to providers/src/airflow/providers/amazon/aws/hooks/dms.py diff --git a/airflow/providers/amazon/aws/hooks/dynamodb.py b/providers/src/airflow/providers/amazon/aws/hooks/dynamodb.py similarity index 100% rename from airflow/providers/amazon/aws/hooks/dynamodb.py rename to providers/src/airflow/providers/amazon/aws/hooks/dynamodb.py diff --git a/airflow/providers/amazon/aws/hooks/ec2.py b/providers/src/airflow/providers/amazon/aws/hooks/ec2.py similarity index 100% rename from airflow/providers/amazon/aws/hooks/ec2.py rename to providers/src/airflow/providers/amazon/aws/hooks/ec2.py diff --git a/airflow/providers/amazon/aws/hooks/ecr.py b/providers/src/airflow/providers/amazon/aws/hooks/ecr.py similarity index 100% rename from airflow/providers/amazon/aws/hooks/ecr.py rename to providers/src/airflow/providers/amazon/aws/hooks/ecr.py diff --git a/airflow/providers/amazon/aws/hooks/ecs.py b/providers/src/airflow/providers/amazon/aws/hooks/ecs.py similarity index 100% rename from airflow/providers/amazon/aws/hooks/ecs.py rename to providers/src/airflow/providers/amazon/aws/hooks/ecs.py diff --git a/airflow/providers/amazon/aws/hooks/eks.py b/providers/src/airflow/providers/amazon/aws/hooks/eks.py similarity index 100% rename from airflow/providers/amazon/aws/hooks/eks.py rename to providers/src/airflow/providers/amazon/aws/hooks/eks.py diff --git a/airflow/providers/amazon/aws/hooks/elasticache_replication_group.py b/providers/src/airflow/providers/amazon/aws/hooks/elasticache_replication_group.py similarity index 100% rename from airflow/providers/amazon/aws/hooks/elasticache_replication_group.py rename to providers/src/airflow/providers/amazon/aws/hooks/elasticache_replication_group.py diff --git a/airflow/providers/amazon/aws/hooks/emr.py b/providers/src/airflow/providers/amazon/aws/hooks/emr.py similarity index 100% rename from airflow/providers/amazon/aws/hooks/emr.py rename to providers/src/airflow/providers/amazon/aws/hooks/emr.py diff --git a/airflow/providers/amazon/aws/hooks/eventbridge.py b/providers/src/airflow/providers/amazon/aws/hooks/eventbridge.py similarity index 100% rename from airflow/providers/amazon/aws/hooks/eventbridge.py rename to providers/src/airflow/providers/amazon/aws/hooks/eventbridge.py diff --git a/airflow/providers/amazon/aws/hooks/glacier.py b/providers/src/airflow/providers/amazon/aws/hooks/glacier.py similarity index 100% rename from airflow/providers/amazon/aws/hooks/glacier.py rename to providers/src/airflow/providers/amazon/aws/hooks/glacier.py diff --git a/airflow/providers/amazon/aws/hooks/glue.py b/providers/src/airflow/providers/amazon/aws/hooks/glue.py similarity index 100% rename from airflow/providers/amazon/aws/hooks/glue.py rename to providers/src/airflow/providers/amazon/aws/hooks/glue.py diff --git a/airflow/providers/amazon/aws/hooks/glue_catalog.py b/providers/src/airflow/providers/amazon/aws/hooks/glue_catalog.py similarity index 100% rename from airflow/providers/amazon/aws/hooks/glue_catalog.py rename to providers/src/airflow/providers/amazon/aws/hooks/glue_catalog.py diff --git a/airflow/providers/amazon/aws/hooks/glue_crawler.py 
b/providers/src/airflow/providers/amazon/aws/hooks/glue_crawler.py similarity index 100% rename from airflow/providers/amazon/aws/hooks/glue_crawler.py rename to providers/src/airflow/providers/amazon/aws/hooks/glue_crawler.py diff --git a/airflow/providers/amazon/aws/hooks/glue_databrew.py b/providers/src/airflow/providers/amazon/aws/hooks/glue_databrew.py similarity index 100% rename from airflow/providers/amazon/aws/hooks/glue_databrew.py rename to providers/src/airflow/providers/amazon/aws/hooks/glue_databrew.py diff --git a/airflow/providers/amazon/aws/hooks/kinesis.py b/providers/src/airflow/providers/amazon/aws/hooks/kinesis.py similarity index 100% rename from airflow/providers/amazon/aws/hooks/kinesis.py rename to providers/src/airflow/providers/amazon/aws/hooks/kinesis.py diff --git a/airflow/providers/amazon/aws/hooks/kinesis_analytics.py b/providers/src/airflow/providers/amazon/aws/hooks/kinesis_analytics.py similarity index 100% rename from airflow/providers/amazon/aws/hooks/kinesis_analytics.py rename to providers/src/airflow/providers/amazon/aws/hooks/kinesis_analytics.py diff --git a/airflow/providers/amazon/aws/hooks/lambda_function.py b/providers/src/airflow/providers/amazon/aws/hooks/lambda_function.py similarity index 100% rename from airflow/providers/amazon/aws/hooks/lambda_function.py rename to providers/src/airflow/providers/amazon/aws/hooks/lambda_function.py diff --git a/airflow/providers/amazon/aws/hooks/logs.py b/providers/src/airflow/providers/amazon/aws/hooks/logs.py similarity index 100% rename from airflow/providers/amazon/aws/hooks/logs.py rename to providers/src/airflow/providers/amazon/aws/hooks/logs.py diff --git a/airflow/providers/amazon/aws/hooks/neptune.py b/providers/src/airflow/providers/amazon/aws/hooks/neptune.py similarity index 100% rename from airflow/providers/amazon/aws/hooks/neptune.py rename to providers/src/airflow/providers/amazon/aws/hooks/neptune.py diff --git a/airflow/providers/amazon/aws/hooks/opensearch_serverless.py b/providers/src/airflow/providers/amazon/aws/hooks/opensearch_serverless.py similarity index 100% rename from airflow/providers/amazon/aws/hooks/opensearch_serverless.py rename to providers/src/airflow/providers/amazon/aws/hooks/opensearch_serverless.py diff --git a/airflow/providers/amazon/aws/hooks/quicksight.py b/providers/src/airflow/providers/amazon/aws/hooks/quicksight.py similarity index 100% rename from airflow/providers/amazon/aws/hooks/quicksight.py rename to providers/src/airflow/providers/amazon/aws/hooks/quicksight.py diff --git a/airflow/providers/amazon/aws/hooks/rds.py b/providers/src/airflow/providers/amazon/aws/hooks/rds.py similarity index 100% rename from airflow/providers/amazon/aws/hooks/rds.py rename to providers/src/airflow/providers/amazon/aws/hooks/rds.py diff --git a/airflow/providers/amazon/aws/hooks/redshift_cluster.py b/providers/src/airflow/providers/amazon/aws/hooks/redshift_cluster.py similarity index 100% rename from airflow/providers/amazon/aws/hooks/redshift_cluster.py rename to providers/src/airflow/providers/amazon/aws/hooks/redshift_cluster.py diff --git a/airflow/providers/amazon/aws/hooks/redshift_data.py b/providers/src/airflow/providers/amazon/aws/hooks/redshift_data.py similarity index 100% rename from airflow/providers/amazon/aws/hooks/redshift_data.py rename to providers/src/airflow/providers/amazon/aws/hooks/redshift_data.py diff --git a/airflow/providers/amazon/aws/hooks/redshift_sql.py b/providers/src/airflow/providers/amazon/aws/hooks/redshift_sql.py similarity index 
100% rename from airflow/providers/amazon/aws/hooks/redshift_sql.py rename to providers/src/airflow/providers/amazon/aws/hooks/redshift_sql.py diff --git a/airflow/providers/amazon/aws/hooks/s3.py b/providers/src/airflow/providers/amazon/aws/hooks/s3.py similarity index 100% rename from airflow/providers/amazon/aws/hooks/s3.py rename to providers/src/airflow/providers/amazon/aws/hooks/s3.py diff --git a/airflow/providers/amazon/aws/hooks/sagemaker.py b/providers/src/airflow/providers/amazon/aws/hooks/sagemaker.py similarity index 100% rename from airflow/providers/amazon/aws/hooks/sagemaker.py rename to providers/src/airflow/providers/amazon/aws/hooks/sagemaker.py diff --git a/airflow/providers/amazon/aws/hooks/secrets_manager.py b/providers/src/airflow/providers/amazon/aws/hooks/secrets_manager.py similarity index 100% rename from airflow/providers/amazon/aws/hooks/secrets_manager.py rename to providers/src/airflow/providers/amazon/aws/hooks/secrets_manager.py diff --git a/airflow/providers/amazon/aws/hooks/ses.py b/providers/src/airflow/providers/amazon/aws/hooks/ses.py similarity index 100% rename from airflow/providers/amazon/aws/hooks/ses.py rename to providers/src/airflow/providers/amazon/aws/hooks/ses.py diff --git a/airflow/providers/amazon/aws/hooks/sns.py b/providers/src/airflow/providers/amazon/aws/hooks/sns.py similarity index 100% rename from airflow/providers/amazon/aws/hooks/sns.py rename to providers/src/airflow/providers/amazon/aws/hooks/sns.py diff --git a/airflow/providers/amazon/aws/hooks/sqs.py b/providers/src/airflow/providers/amazon/aws/hooks/sqs.py similarity index 100% rename from airflow/providers/amazon/aws/hooks/sqs.py rename to providers/src/airflow/providers/amazon/aws/hooks/sqs.py diff --git a/airflow/providers/amazon/aws/hooks/ssm.py b/providers/src/airflow/providers/amazon/aws/hooks/ssm.py similarity index 100% rename from airflow/providers/amazon/aws/hooks/ssm.py rename to providers/src/airflow/providers/amazon/aws/hooks/ssm.py diff --git a/airflow/providers/amazon/aws/hooks/step_function.py b/providers/src/airflow/providers/amazon/aws/hooks/step_function.py similarity index 100% rename from airflow/providers/amazon/aws/hooks/step_function.py rename to providers/src/airflow/providers/amazon/aws/hooks/step_function.py diff --git a/airflow/providers/amazon/aws/hooks/sts.py b/providers/src/airflow/providers/amazon/aws/hooks/sts.py similarity index 100% rename from airflow/providers/amazon/aws/hooks/sts.py rename to providers/src/airflow/providers/amazon/aws/hooks/sts.py diff --git a/airflow/providers/amazon/aws/hooks/verified_permissions.py b/providers/src/airflow/providers/amazon/aws/hooks/verified_permissions.py similarity index 100% rename from airflow/providers/amazon/aws/hooks/verified_permissions.py rename to providers/src/airflow/providers/amazon/aws/hooks/verified_permissions.py diff --git a/airflow/providers/amazon/aws/operators/__init__.py b/providers/src/airflow/providers/amazon/aws/links/__init__.py similarity index 100% rename from airflow/providers/amazon/aws/operators/__init__.py rename to providers/src/airflow/providers/amazon/aws/links/__init__.py diff --git a/airflow/providers/amazon/aws/links/athena.py b/providers/src/airflow/providers/amazon/aws/links/athena.py similarity index 100% rename from airflow/providers/amazon/aws/links/athena.py rename to providers/src/airflow/providers/amazon/aws/links/athena.py diff --git a/airflow/providers/amazon/aws/links/base_aws.py b/providers/src/airflow/providers/amazon/aws/links/base_aws.py similarity 
index 100% rename from airflow/providers/amazon/aws/links/base_aws.py rename to providers/src/airflow/providers/amazon/aws/links/base_aws.py diff --git a/airflow/providers/amazon/aws/links/batch.py b/providers/src/airflow/providers/amazon/aws/links/batch.py similarity index 100% rename from airflow/providers/amazon/aws/links/batch.py rename to providers/src/airflow/providers/amazon/aws/links/batch.py diff --git a/airflow/providers/amazon/aws/links/emr.py b/providers/src/airflow/providers/amazon/aws/links/emr.py similarity index 100% rename from airflow/providers/amazon/aws/links/emr.py rename to providers/src/airflow/providers/amazon/aws/links/emr.py diff --git a/airflow/providers/amazon/aws/links/glue.py b/providers/src/airflow/providers/amazon/aws/links/glue.py similarity index 100% rename from airflow/providers/amazon/aws/links/glue.py rename to providers/src/airflow/providers/amazon/aws/links/glue.py diff --git a/airflow/providers/amazon/aws/links/logs.py b/providers/src/airflow/providers/amazon/aws/links/logs.py similarity index 100% rename from airflow/providers/amazon/aws/links/logs.py rename to providers/src/airflow/providers/amazon/aws/links/logs.py diff --git a/airflow/providers/amazon/aws/links/step_function.py b/providers/src/airflow/providers/amazon/aws/links/step_function.py similarity index 100% rename from airflow/providers/amazon/aws/links/step_function.py rename to providers/src/airflow/providers/amazon/aws/links/step_function.py diff --git a/airflow/providers/amazon/aws/secrets/__init__.py b/providers/src/airflow/providers/amazon/aws/log/__init__.py similarity index 100% rename from airflow/providers/amazon/aws/secrets/__init__.py rename to providers/src/airflow/providers/amazon/aws/log/__init__.py diff --git a/airflow/providers/amazon/aws/log/cloudwatch_task_handler.py b/providers/src/airflow/providers/amazon/aws/log/cloudwatch_task_handler.py similarity index 100% rename from airflow/providers/amazon/aws/log/cloudwatch_task_handler.py rename to providers/src/airflow/providers/amazon/aws/log/cloudwatch_task_handler.py diff --git a/airflow/providers/amazon/aws/log/s3_task_handler.py b/providers/src/airflow/providers/amazon/aws/log/s3_task_handler.py similarity index 100% rename from airflow/providers/amazon/aws/log/s3_task_handler.py rename to providers/src/airflow/providers/amazon/aws/log/s3_task_handler.py diff --git a/airflow/providers/amazon/aws/sensors/__init__.py b/providers/src/airflow/providers/amazon/aws/notifications/__init__.py similarity index 100% rename from airflow/providers/amazon/aws/sensors/__init__.py rename to providers/src/airflow/providers/amazon/aws/notifications/__init__.py diff --git a/airflow/providers/amazon/aws/notifications/chime.py b/providers/src/airflow/providers/amazon/aws/notifications/chime.py similarity index 100% rename from airflow/providers/amazon/aws/notifications/chime.py rename to providers/src/airflow/providers/amazon/aws/notifications/chime.py diff --git a/airflow/providers/amazon/aws/notifications/sns.py b/providers/src/airflow/providers/amazon/aws/notifications/sns.py similarity index 100% rename from airflow/providers/amazon/aws/notifications/sns.py rename to providers/src/airflow/providers/amazon/aws/notifications/sns.py diff --git a/airflow/providers/amazon/aws/notifications/sqs.py b/providers/src/airflow/providers/amazon/aws/notifications/sqs.py similarity index 100% rename from airflow/providers/amazon/aws/notifications/sqs.py rename to providers/src/airflow/providers/amazon/aws/notifications/sqs.py diff --git 
a/airflow/providers/amazon/aws/transfers/__init__.py b/providers/src/airflow/providers/amazon/aws/operators/__init__.py similarity index 100% rename from airflow/providers/amazon/aws/transfers/__init__.py rename to providers/src/airflow/providers/amazon/aws/operators/__init__.py diff --git a/airflow/providers/amazon/aws/operators/appflow.py b/providers/src/airflow/providers/amazon/aws/operators/appflow.py similarity index 100% rename from airflow/providers/amazon/aws/operators/appflow.py rename to providers/src/airflow/providers/amazon/aws/operators/appflow.py diff --git a/airflow/providers/amazon/aws/operators/athena.py b/providers/src/airflow/providers/amazon/aws/operators/athena.py similarity index 100% rename from airflow/providers/amazon/aws/operators/athena.py rename to providers/src/airflow/providers/amazon/aws/operators/athena.py diff --git a/airflow/providers/amazon/aws/operators/base_aws.py b/providers/src/airflow/providers/amazon/aws/operators/base_aws.py similarity index 100% rename from airflow/providers/amazon/aws/operators/base_aws.py rename to providers/src/airflow/providers/amazon/aws/operators/base_aws.py diff --git a/airflow/providers/amazon/aws/operators/batch.py b/providers/src/airflow/providers/amazon/aws/operators/batch.py similarity index 100% rename from airflow/providers/amazon/aws/operators/batch.py rename to providers/src/airflow/providers/amazon/aws/operators/batch.py diff --git a/airflow/providers/amazon/aws/operators/bedrock.py b/providers/src/airflow/providers/amazon/aws/operators/bedrock.py similarity index 100% rename from airflow/providers/amazon/aws/operators/bedrock.py rename to providers/src/airflow/providers/amazon/aws/operators/bedrock.py diff --git a/airflow/providers/amazon/aws/operators/cloud_formation.py b/providers/src/airflow/providers/amazon/aws/operators/cloud_formation.py similarity index 100% rename from airflow/providers/amazon/aws/operators/cloud_formation.py rename to providers/src/airflow/providers/amazon/aws/operators/cloud_formation.py diff --git a/airflow/providers/amazon/aws/operators/comprehend.py b/providers/src/airflow/providers/amazon/aws/operators/comprehend.py similarity index 100% rename from airflow/providers/amazon/aws/operators/comprehend.py rename to providers/src/airflow/providers/amazon/aws/operators/comprehend.py diff --git a/airflow/providers/amazon/aws/operators/datasync.py b/providers/src/airflow/providers/amazon/aws/operators/datasync.py similarity index 100% rename from airflow/providers/amazon/aws/operators/datasync.py rename to providers/src/airflow/providers/amazon/aws/operators/datasync.py diff --git a/airflow/providers/amazon/aws/operators/dms.py b/providers/src/airflow/providers/amazon/aws/operators/dms.py similarity index 100% rename from airflow/providers/amazon/aws/operators/dms.py rename to providers/src/airflow/providers/amazon/aws/operators/dms.py diff --git a/airflow/providers/amazon/aws/operators/ec2.py b/providers/src/airflow/providers/amazon/aws/operators/ec2.py similarity index 100% rename from airflow/providers/amazon/aws/operators/ec2.py rename to providers/src/airflow/providers/amazon/aws/operators/ec2.py diff --git a/airflow/providers/amazon/aws/operators/ecs.py b/providers/src/airflow/providers/amazon/aws/operators/ecs.py similarity index 100% rename from airflow/providers/amazon/aws/operators/ecs.py rename to providers/src/airflow/providers/amazon/aws/operators/ecs.py diff --git a/airflow/providers/amazon/aws/operators/eks.py b/providers/src/airflow/providers/amazon/aws/operators/eks.py 
similarity index 100% rename from airflow/providers/amazon/aws/operators/eks.py rename to providers/src/airflow/providers/amazon/aws/operators/eks.py diff --git a/airflow/providers/amazon/aws/operators/emr.py b/providers/src/airflow/providers/amazon/aws/operators/emr.py similarity index 100% rename from airflow/providers/amazon/aws/operators/emr.py rename to providers/src/airflow/providers/amazon/aws/operators/emr.py diff --git a/airflow/providers/amazon/aws/operators/eventbridge.py b/providers/src/airflow/providers/amazon/aws/operators/eventbridge.py similarity index 100% rename from airflow/providers/amazon/aws/operators/eventbridge.py rename to providers/src/airflow/providers/amazon/aws/operators/eventbridge.py diff --git a/airflow/providers/amazon/aws/operators/glacier.py b/providers/src/airflow/providers/amazon/aws/operators/glacier.py similarity index 100% rename from airflow/providers/amazon/aws/operators/glacier.py rename to providers/src/airflow/providers/amazon/aws/operators/glacier.py diff --git a/airflow/providers/amazon/aws/operators/glue.py b/providers/src/airflow/providers/amazon/aws/operators/glue.py similarity index 100% rename from airflow/providers/amazon/aws/operators/glue.py rename to providers/src/airflow/providers/amazon/aws/operators/glue.py diff --git a/airflow/providers/amazon/aws/operators/glue_crawler.py b/providers/src/airflow/providers/amazon/aws/operators/glue_crawler.py similarity index 100% rename from airflow/providers/amazon/aws/operators/glue_crawler.py rename to providers/src/airflow/providers/amazon/aws/operators/glue_crawler.py diff --git a/airflow/providers/amazon/aws/operators/glue_databrew.py b/providers/src/airflow/providers/amazon/aws/operators/glue_databrew.py similarity index 100% rename from airflow/providers/amazon/aws/operators/glue_databrew.py rename to providers/src/airflow/providers/amazon/aws/operators/glue_databrew.py diff --git a/airflow/providers/amazon/aws/operators/kinesis_analytics.py b/providers/src/airflow/providers/amazon/aws/operators/kinesis_analytics.py similarity index 100% rename from airflow/providers/amazon/aws/operators/kinesis_analytics.py rename to providers/src/airflow/providers/amazon/aws/operators/kinesis_analytics.py diff --git a/airflow/providers/amazon/aws/operators/lambda_function.py b/providers/src/airflow/providers/amazon/aws/operators/lambda_function.py similarity index 100% rename from airflow/providers/amazon/aws/operators/lambda_function.py rename to providers/src/airflow/providers/amazon/aws/operators/lambda_function.py diff --git a/airflow/providers/amazon/aws/operators/neptune.py b/providers/src/airflow/providers/amazon/aws/operators/neptune.py similarity index 100% rename from airflow/providers/amazon/aws/operators/neptune.py rename to providers/src/airflow/providers/amazon/aws/operators/neptune.py diff --git a/airflow/providers/amazon/aws/operators/quicksight.py b/providers/src/airflow/providers/amazon/aws/operators/quicksight.py similarity index 100% rename from airflow/providers/amazon/aws/operators/quicksight.py rename to providers/src/airflow/providers/amazon/aws/operators/quicksight.py diff --git a/airflow/providers/amazon/aws/operators/rds.py b/providers/src/airflow/providers/amazon/aws/operators/rds.py similarity index 100% rename from airflow/providers/amazon/aws/operators/rds.py rename to providers/src/airflow/providers/amazon/aws/operators/rds.py diff --git a/airflow/providers/amazon/aws/operators/redshift_cluster.py b/providers/src/airflow/providers/amazon/aws/operators/redshift_cluster.py 
similarity index 100% rename from airflow/providers/amazon/aws/operators/redshift_cluster.py rename to providers/src/airflow/providers/amazon/aws/operators/redshift_cluster.py diff --git a/airflow/providers/amazon/aws/operators/redshift_data.py b/providers/src/airflow/providers/amazon/aws/operators/redshift_data.py similarity index 100% rename from airflow/providers/amazon/aws/operators/redshift_data.py rename to providers/src/airflow/providers/amazon/aws/operators/redshift_data.py diff --git a/airflow/providers/amazon/aws/operators/s3.py b/providers/src/airflow/providers/amazon/aws/operators/s3.py similarity index 100% rename from airflow/providers/amazon/aws/operators/s3.py rename to providers/src/airflow/providers/amazon/aws/operators/s3.py diff --git a/airflow/providers/amazon/aws/operators/sagemaker.py b/providers/src/airflow/providers/amazon/aws/operators/sagemaker.py similarity index 100% rename from airflow/providers/amazon/aws/operators/sagemaker.py rename to providers/src/airflow/providers/amazon/aws/operators/sagemaker.py diff --git a/airflow/providers/amazon/aws/operators/sns.py b/providers/src/airflow/providers/amazon/aws/operators/sns.py similarity index 100% rename from airflow/providers/amazon/aws/operators/sns.py rename to providers/src/airflow/providers/amazon/aws/operators/sns.py diff --git a/airflow/providers/amazon/aws/operators/sqs.py b/providers/src/airflow/providers/amazon/aws/operators/sqs.py similarity index 100% rename from airflow/providers/amazon/aws/operators/sqs.py rename to providers/src/airflow/providers/amazon/aws/operators/sqs.py diff --git a/airflow/providers/amazon/aws/operators/step_function.py b/providers/src/airflow/providers/amazon/aws/operators/step_function.py similarity index 100% rename from airflow/providers/amazon/aws/operators/step_function.py rename to providers/src/airflow/providers/amazon/aws/operators/step_function.py diff --git a/airflow/providers/amazon/aws/waiters/__init__.py b/providers/src/airflow/providers/amazon/aws/secrets/__init__.py similarity index 100% rename from airflow/providers/amazon/aws/waiters/__init__.py rename to providers/src/airflow/providers/amazon/aws/secrets/__init__.py diff --git a/airflow/providers/amazon/aws/secrets/secrets_manager.py b/providers/src/airflow/providers/amazon/aws/secrets/secrets_manager.py similarity index 100% rename from airflow/providers/amazon/aws/secrets/secrets_manager.py rename to providers/src/airflow/providers/amazon/aws/secrets/secrets_manager.py diff --git a/airflow/providers/amazon/aws/secrets/systems_manager.py b/providers/src/airflow/providers/amazon/aws/secrets/systems_manager.py similarity index 100% rename from airflow/providers/amazon/aws/secrets/systems_manager.py rename to providers/src/airflow/providers/amazon/aws/secrets/systems_manager.py diff --git a/airflow/providers/apache/__init__.py b/providers/src/airflow/providers/amazon/aws/sensors/__init__.py similarity index 100% rename from airflow/providers/apache/__init__.py rename to providers/src/airflow/providers/amazon/aws/sensors/__init__.py diff --git a/airflow/providers/amazon/aws/sensors/athena.py b/providers/src/airflow/providers/amazon/aws/sensors/athena.py similarity index 100% rename from airflow/providers/amazon/aws/sensors/athena.py rename to providers/src/airflow/providers/amazon/aws/sensors/athena.py diff --git a/airflow/providers/amazon/aws/sensors/base_aws.py b/providers/src/airflow/providers/amazon/aws/sensors/base_aws.py similarity index 100% rename from airflow/providers/amazon/aws/sensors/base_aws.py 
rename to providers/src/airflow/providers/amazon/aws/sensors/base_aws.py diff --git a/airflow/providers/amazon/aws/sensors/batch.py b/providers/src/airflow/providers/amazon/aws/sensors/batch.py similarity index 100% rename from airflow/providers/amazon/aws/sensors/batch.py rename to providers/src/airflow/providers/amazon/aws/sensors/batch.py diff --git a/airflow/providers/amazon/aws/sensors/bedrock.py b/providers/src/airflow/providers/amazon/aws/sensors/bedrock.py similarity index 100% rename from airflow/providers/amazon/aws/sensors/bedrock.py rename to providers/src/airflow/providers/amazon/aws/sensors/bedrock.py diff --git a/airflow/providers/amazon/aws/sensors/cloud_formation.py b/providers/src/airflow/providers/amazon/aws/sensors/cloud_formation.py similarity index 100% rename from airflow/providers/amazon/aws/sensors/cloud_formation.py rename to providers/src/airflow/providers/amazon/aws/sensors/cloud_formation.py diff --git a/airflow/providers/amazon/aws/sensors/comprehend.py b/providers/src/airflow/providers/amazon/aws/sensors/comprehend.py similarity index 100% rename from airflow/providers/amazon/aws/sensors/comprehend.py rename to providers/src/airflow/providers/amazon/aws/sensors/comprehend.py diff --git a/airflow/providers/amazon/aws/sensors/dms.py b/providers/src/airflow/providers/amazon/aws/sensors/dms.py similarity index 100% rename from airflow/providers/amazon/aws/sensors/dms.py rename to providers/src/airflow/providers/amazon/aws/sensors/dms.py diff --git a/airflow/providers/amazon/aws/sensors/dynamodb.py b/providers/src/airflow/providers/amazon/aws/sensors/dynamodb.py similarity index 100% rename from airflow/providers/amazon/aws/sensors/dynamodb.py rename to providers/src/airflow/providers/amazon/aws/sensors/dynamodb.py diff --git a/airflow/providers/amazon/aws/sensors/ec2.py b/providers/src/airflow/providers/amazon/aws/sensors/ec2.py similarity index 100% rename from airflow/providers/amazon/aws/sensors/ec2.py rename to providers/src/airflow/providers/amazon/aws/sensors/ec2.py diff --git a/airflow/providers/amazon/aws/sensors/ecs.py b/providers/src/airflow/providers/amazon/aws/sensors/ecs.py similarity index 100% rename from airflow/providers/amazon/aws/sensors/ecs.py rename to providers/src/airflow/providers/amazon/aws/sensors/ecs.py diff --git a/airflow/providers/amazon/aws/sensors/eks.py b/providers/src/airflow/providers/amazon/aws/sensors/eks.py similarity index 100% rename from airflow/providers/amazon/aws/sensors/eks.py rename to providers/src/airflow/providers/amazon/aws/sensors/eks.py diff --git a/airflow/providers/amazon/aws/sensors/emr.py b/providers/src/airflow/providers/amazon/aws/sensors/emr.py similarity index 100% rename from airflow/providers/amazon/aws/sensors/emr.py rename to providers/src/airflow/providers/amazon/aws/sensors/emr.py diff --git a/airflow/providers/amazon/aws/sensors/glacier.py b/providers/src/airflow/providers/amazon/aws/sensors/glacier.py similarity index 100% rename from airflow/providers/amazon/aws/sensors/glacier.py rename to providers/src/airflow/providers/amazon/aws/sensors/glacier.py diff --git a/airflow/providers/amazon/aws/sensors/glue.py b/providers/src/airflow/providers/amazon/aws/sensors/glue.py similarity index 100% rename from airflow/providers/amazon/aws/sensors/glue.py rename to providers/src/airflow/providers/amazon/aws/sensors/glue.py diff --git a/airflow/providers/amazon/aws/sensors/glue_catalog_partition.py b/providers/src/airflow/providers/amazon/aws/sensors/glue_catalog_partition.py similarity index 100% rename 
from airflow/providers/amazon/aws/sensors/glue_catalog_partition.py rename to providers/src/airflow/providers/amazon/aws/sensors/glue_catalog_partition.py diff --git a/airflow/providers/amazon/aws/sensors/glue_crawler.py b/providers/src/airflow/providers/amazon/aws/sensors/glue_crawler.py similarity index 100% rename from airflow/providers/amazon/aws/sensors/glue_crawler.py rename to providers/src/airflow/providers/amazon/aws/sensors/glue_crawler.py diff --git a/airflow/providers/amazon/aws/sensors/kinesis_analytics.py b/providers/src/airflow/providers/amazon/aws/sensors/kinesis_analytics.py similarity index 100% rename from airflow/providers/amazon/aws/sensors/kinesis_analytics.py rename to providers/src/airflow/providers/amazon/aws/sensors/kinesis_analytics.py diff --git a/airflow/providers/amazon/aws/sensors/lambda_function.py b/providers/src/airflow/providers/amazon/aws/sensors/lambda_function.py similarity index 100% rename from airflow/providers/amazon/aws/sensors/lambda_function.py rename to providers/src/airflow/providers/amazon/aws/sensors/lambda_function.py diff --git a/airflow/providers/amazon/aws/sensors/opensearch_serverless.py b/providers/src/airflow/providers/amazon/aws/sensors/opensearch_serverless.py similarity index 100% rename from airflow/providers/amazon/aws/sensors/opensearch_serverless.py rename to providers/src/airflow/providers/amazon/aws/sensors/opensearch_serverless.py diff --git a/airflow/providers/amazon/aws/sensors/quicksight.py b/providers/src/airflow/providers/amazon/aws/sensors/quicksight.py similarity index 100% rename from airflow/providers/amazon/aws/sensors/quicksight.py rename to providers/src/airflow/providers/amazon/aws/sensors/quicksight.py diff --git a/airflow/providers/amazon/aws/sensors/rds.py b/providers/src/airflow/providers/amazon/aws/sensors/rds.py similarity index 100% rename from airflow/providers/amazon/aws/sensors/rds.py rename to providers/src/airflow/providers/amazon/aws/sensors/rds.py diff --git a/airflow/providers/amazon/aws/sensors/redshift_cluster.py b/providers/src/airflow/providers/amazon/aws/sensors/redshift_cluster.py similarity index 100% rename from airflow/providers/amazon/aws/sensors/redshift_cluster.py rename to providers/src/airflow/providers/amazon/aws/sensors/redshift_cluster.py diff --git a/airflow/providers/amazon/aws/sensors/s3.py b/providers/src/airflow/providers/amazon/aws/sensors/s3.py similarity index 100% rename from airflow/providers/amazon/aws/sensors/s3.py rename to providers/src/airflow/providers/amazon/aws/sensors/s3.py diff --git a/airflow/providers/amazon/aws/sensors/sagemaker.py b/providers/src/airflow/providers/amazon/aws/sensors/sagemaker.py similarity index 100% rename from airflow/providers/amazon/aws/sensors/sagemaker.py rename to providers/src/airflow/providers/amazon/aws/sensors/sagemaker.py diff --git a/airflow/providers/amazon/aws/sensors/sqs.py b/providers/src/airflow/providers/amazon/aws/sensors/sqs.py similarity index 100% rename from airflow/providers/amazon/aws/sensors/sqs.py rename to providers/src/airflow/providers/amazon/aws/sensors/sqs.py diff --git a/airflow/providers/amazon/aws/sensors/step_function.py b/providers/src/airflow/providers/amazon/aws/sensors/step_function.py similarity index 100% rename from airflow/providers/amazon/aws/sensors/step_function.py rename to providers/src/airflow/providers/amazon/aws/sensors/step_function.py diff --git a/airflow/providers/apache/beam/triggers/__init__.py b/providers/src/airflow/providers/amazon/aws/transfers/__init__.py similarity index 100% 
rename from airflow/providers/apache/beam/triggers/__init__.py rename to providers/src/airflow/providers/amazon/aws/transfers/__init__.py diff --git a/airflow/providers/amazon/aws/transfers/azure_blob_to_s3.py b/providers/src/airflow/providers/amazon/aws/transfers/azure_blob_to_s3.py similarity index 100% rename from airflow/providers/amazon/aws/transfers/azure_blob_to_s3.py rename to providers/src/airflow/providers/amazon/aws/transfers/azure_blob_to_s3.py diff --git a/airflow/providers/amazon/aws/transfers/base.py b/providers/src/airflow/providers/amazon/aws/transfers/base.py similarity index 100% rename from airflow/providers/amazon/aws/transfers/base.py rename to providers/src/airflow/providers/amazon/aws/transfers/base.py diff --git a/airflow/providers/amazon/aws/transfers/dynamodb_to_s3.py b/providers/src/airflow/providers/amazon/aws/transfers/dynamodb_to_s3.py similarity index 100% rename from airflow/providers/amazon/aws/transfers/dynamodb_to_s3.py rename to providers/src/airflow/providers/amazon/aws/transfers/dynamodb_to_s3.py diff --git a/airflow/providers/amazon/aws/transfers/exasol_to_s3.py b/providers/src/airflow/providers/amazon/aws/transfers/exasol_to_s3.py similarity index 100% rename from airflow/providers/amazon/aws/transfers/exasol_to_s3.py rename to providers/src/airflow/providers/amazon/aws/transfers/exasol_to_s3.py diff --git a/airflow/providers/amazon/aws/transfers/ftp_to_s3.py b/providers/src/airflow/providers/amazon/aws/transfers/ftp_to_s3.py similarity index 100% rename from airflow/providers/amazon/aws/transfers/ftp_to_s3.py rename to providers/src/airflow/providers/amazon/aws/transfers/ftp_to_s3.py diff --git a/airflow/providers/amazon/aws/transfers/gcs_to_s3.py b/providers/src/airflow/providers/amazon/aws/transfers/gcs_to_s3.py similarity index 100% rename from airflow/providers/amazon/aws/transfers/gcs_to_s3.py rename to providers/src/airflow/providers/amazon/aws/transfers/gcs_to_s3.py diff --git a/airflow/providers/amazon/aws/transfers/glacier_to_gcs.py b/providers/src/airflow/providers/amazon/aws/transfers/glacier_to_gcs.py similarity index 100% rename from airflow/providers/amazon/aws/transfers/glacier_to_gcs.py rename to providers/src/airflow/providers/amazon/aws/transfers/glacier_to_gcs.py diff --git a/airflow/providers/amazon/aws/transfers/google_api_to_s3.py b/providers/src/airflow/providers/amazon/aws/transfers/google_api_to_s3.py similarity index 100% rename from airflow/providers/amazon/aws/transfers/google_api_to_s3.py rename to providers/src/airflow/providers/amazon/aws/transfers/google_api_to_s3.py diff --git a/airflow/providers/amazon/aws/transfers/hive_to_dynamodb.py b/providers/src/airflow/providers/amazon/aws/transfers/hive_to_dynamodb.py similarity index 100% rename from airflow/providers/amazon/aws/transfers/hive_to_dynamodb.py rename to providers/src/airflow/providers/amazon/aws/transfers/hive_to_dynamodb.py diff --git a/airflow/providers/amazon/aws/transfers/http_to_s3.py b/providers/src/airflow/providers/amazon/aws/transfers/http_to_s3.py similarity index 100% rename from airflow/providers/amazon/aws/transfers/http_to_s3.py rename to providers/src/airflow/providers/amazon/aws/transfers/http_to_s3.py diff --git a/airflow/providers/amazon/aws/transfers/imap_attachment_to_s3.py b/providers/src/airflow/providers/amazon/aws/transfers/imap_attachment_to_s3.py similarity index 100% rename from airflow/providers/amazon/aws/transfers/imap_attachment_to_s3.py rename to providers/src/airflow/providers/amazon/aws/transfers/imap_attachment_to_s3.py 
diff --git a/airflow/providers/amazon/aws/transfers/local_to_s3.py b/providers/src/airflow/providers/amazon/aws/transfers/local_to_s3.py similarity index 100% rename from airflow/providers/amazon/aws/transfers/local_to_s3.py rename to providers/src/airflow/providers/amazon/aws/transfers/local_to_s3.py diff --git a/airflow/providers/amazon/aws/transfers/mongo_to_s3.py b/providers/src/airflow/providers/amazon/aws/transfers/mongo_to_s3.py similarity index 100% rename from airflow/providers/amazon/aws/transfers/mongo_to_s3.py rename to providers/src/airflow/providers/amazon/aws/transfers/mongo_to_s3.py diff --git a/airflow/providers/amazon/aws/transfers/redshift_to_s3.py b/providers/src/airflow/providers/amazon/aws/transfers/redshift_to_s3.py similarity index 100% rename from airflow/providers/amazon/aws/transfers/redshift_to_s3.py rename to providers/src/airflow/providers/amazon/aws/transfers/redshift_to_s3.py diff --git a/airflow/providers/amazon/aws/transfers/s3_to_dynamodb.py b/providers/src/airflow/providers/amazon/aws/transfers/s3_to_dynamodb.py similarity index 100% rename from airflow/providers/amazon/aws/transfers/s3_to_dynamodb.py rename to providers/src/airflow/providers/amazon/aws/transfers/s3_to_dynamodb.py diff --git a/airflow/providers/amazon/aws/transfers/s3_to_ftp.py b/providers/src/airflow/providers/amazon/aws/transfers/s3_to_ftp.py similarity index 100% rename from airflow/providers/amazon/aws/transfers/s3_to_ftp.py rename to providers/src/airflow/providers/amazon/aws/transfers/s3_to_ftp.py diff --git a/airflow/providers/amazon/aws/transfers/s3_to_redshift.py b/providers/src/airflow/providers/amazon/aws/transfers/s3_to_redshift.py similarity index 100% rename from airflow/providers/amazon/aws/transfers/s3_to_redshift.py rename to providers/src/airflow/providers/amazon/aws/transfers/s3_to_redshift.py diff --git a/airflow/providers/amazon/aws/transfers/s3_to_sftp.py b/providers/src/airflow/providers/amazon/aws/transfers/s3_to_sftp.py similarity index 100% rename from airflow/providers/amazon/aws/transfers/s3_to_sftp.py rename to providers/src/airflow/providers/amazon/aws/transfers/s3_to_sftp.py diff --git a/airflow/providers/amazon/aws/transfers/s3_to_sql.py b/providers/src/airflow/providers/amazon/aws/transfers/s3_to_sql.py similarity index 100% rename from airflow/providers/amazon/aws/transfers/s3_to_sql.py rename to providers/src/airflow/providers/amazon/aws/transfers/s3_to_sql.py diff --git a/airflow/providers/amazon/aws/transfers/salesforce_to_s3.py b/providers/src/airflow/providers/amazon/aws/transfers/salesforce_to_s3.py similarity index 100% rename from airflow/providers/amazon/aws/transfers/salesforce_to_s3.py rename to providers/src/airflow/providers/amazon/aws/transfers/salesforce_to_s3.py diff --git a/airflow/providers/amazon/aws/transfers/sftp_to_s3.py b/providers/src/airflow/providers/amazon/aws/transfers/sftp_to_s3.py similarity index 100% rename from airflow/providers/amazon/aws/transfers/sftp_to_s3.py rename to providers/src/airflow/providers/amazon/aws/transfers/sftp_to_s3.py diff --git a/airflow/providers/amazon/aws/transfers/sql_to_s3.py b/providers/src/airflow/providers/amazon/aws/transfers/sql_to_s3.py similarity index 100% rename from airflow/providers/amazon/aws/transfers/sql_to_s3.py rename to providers/src/airflow/providers/amazon/aws/transfers/sql_to_s3.py diff --git a/airflow/providers/amazon/aws/triggers/README.md b/providers/src/airflow/providers/amazon/aws/triggers/README.md similarity index 100% rename from 
airflow/providers/amazon/aws/triggers/README.md rename to providers/src/airflow/providers/amazon/aws/triggers/README.md
diff --git a/airflow/providers/amazon/aws/triggers/__init__.py b/providers/src/airflow/providers/amazon/aws/triggers/__init__.py
similarity index 100%
rename from airflow/providers/amazon/aws/triggers/__init__.py
rename to providers/src/airflow/providers/amazon/aws/triggers/__init__.py
diff --git a/airflow/providers/amazon/aws/triggers/athena.py b/providers/src/airflow/providers/amazon/aws/triggers/athena.py
similarity index 100%
rename from airflow/providers/amazon/aws/triggers/athena.py
rename to providers/src/airflow/providers/amazon/aws/triggers/athena.py
diff --git a/airflow/providers/amazon/aws/triggers/base.py b/providers/src/airflow/providers/amazon/aws/triggers/base.py
similarity index 100%
rename from airflow/providers/amazon/aws/triggers/base.py
rename to providers/src/airflow/providers/amazon/aws/triggers/base.py
diff --git a/airflow/providers/amazon/aws/triggers/batch.py b/providers/src/airflow/providers/amazon/aws/triggers/batch.py
similarity index 100%
rename from airflow/providers/amazon/aws/triggers/batch.py
rename to providers/src/airflow/providers/amazon/aws/triggers/batch.py
diff --git a/airflow/providers/amazon/aws/triggers/bedrock.py b/providers/src/airflow/providers/amazon/aws/triggers/bedrock.py
similarity index 100%
rename from airflow/providers/amazon/aws/triggers/bedrock.py
rename to providers/src/airflow/providers/amazon/aws/triggers/bedrock.py
diff --git a/airflow/providers/amazon/aws/triggers/comprehend.py b/providers/src/airflow/providers/amazon/aws/triggers/comprehend.py
similarity index 100%
rename from airflow/providers/amazon/aws/triggers/comprehend.py
rename to providers/src/airflow/providers/amazon/aws/triggers/comprehend.py
diff --git a/airflow/providers/amazon/aws/triggers/ec2.py b/providers/src/airflow/providers/amazon/aws/triggers/ec2.py
similarity index 100%
rename from airflow/providers/amazon/aws/triggers/ec2.py
rename to providers/src/airflow/providers/amazon/aws/triggers/ec2.py
diff --git a/airflow/providers/amazon/aws/triggers/ecs.py b/providers/src/airflow/providers/amazon/aws/triggers/ecs.py
similarity index 96%
rename from airflow/providers/amazon/aws/triggers/ecs.py
rename to providers/src/airflow/providers/amazon/aws/triggers/ecs.py
index dd86899f2200..11c9cf18043c 100644
--- a/airflow/providers/amazon/aws/triggers/ecs.py
+++ b/providers/src/airflow/providers/amazon/aws/triggers/ecs.py
@@ -18,7 +18,8 @@
 from __future__ import annotations
 
 import asyncio
-from typing import TYPE_CHECKING, Any, AsyncIterator
+from collections.abc import AsyncIterator
+from typing import TYPE_CHECKING, Any
 
 from botocore.exceptions import ClientError, WaiterError
 
@@ -165,11 +166,10 @@ def serialize(self) -> tuple[str, dict[str, Any]]:
         )
 
     async def run(self) -> AsyncIterator[TriggerEvent]:
-        async with EcsHook(
-            aws_conn_id=self.aws_conn_id, region_name=self.region
-        ).async_conn as ecs_client, AwsLogsHook(
-            aws_conn_id=self.aws_conn_id, region_name=self.region
-        ).async_conn as logs_client:
+        async with (
+            EcsHook(aws_conn_id=self.aws_conn_id, region_name=self.region).async_conn as ecs_client,
+            AwsLogsHook(aws_conn_id=self.aws_conn_id, region_name=self.region).async_conn as logs_client,
+        ):
             waiter = ecs_client.get_waiter("tasks_stopped")
             logs_token = None
             while self.waiter_max_attempts:
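Beyond the directory move, hunks like the ecs.py one above also modernize the code: AsyncIterator now comes from collections.abc rather than typing, and the stacked async with managers are grouped in parentheses, a form Python only accepts from 3.10 onward. A minimal runnable sketch of the same grouping, with a hypothetical client() helper standing in for the hook connections used in the hunk:

```python
import asyncio
from contextlib import asynccontextmanager


@asynccontextmanager
async def client(name: str):
    # Hypothetical stand-in for an async connection such as EcsHook(...).async_conn.
    print(f"open {name}")
    try:
        yield name
    finally:
        print(f"close {name}")


async def main() -> None:
    # Python 3.10+: parentheses let one `async with` list several managers,
    # one per line, without nesting or backslash continuations.
    async with (
        client("ecs") as ecs_client,
        client("logs") as logs_client,
    ):
        print("working with", ecs_client, "and", logs_client)


asyncio.run(main())
```

Both managers are entered left to right and exited in reverse order, exactly as in the nested form the hunk removes.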
diff --git a/airflow/providers/amazon/aws/triggers/eks.py b/providers/src/airflow/providers/amazon/aws/triggers/eks.py
similarity index 100%
rename from airflow/providers/amazon/aws/triggers/eks.py
rename to providers/src/airflow/providers/amazon/aws/triggers/eks.py
diff --git a/airflow/providers/amazon/aws/triggers/emr.py b/providers/src/airflow/providers/amazon/aws/triggers/emr.py
similarity index 100%
rename from airflow/providers/amazon/aws/triggers/emr.py
rename to providers/src/airflow/providers/amazon/aws/triggers/emr.py
diff --git a/airflow/providers/amazon/aws/triggers/glue.py b/providers/src/airflow/providers/amazon/aws/triggers/glue.py
similarity index 100%
rename from airflow/providers/amazon/aws/triggers/glue.py
rename to providers/src/airflow/providers/amazon/aws/triggers/glue.py
diff --git a/airflow/providers/amazon/aws/triggers/glue_crawler.py b/providers/src/airflow/providers/amazon/aws/triggers/glue_crawler.py
similarity index 100%
rename from airflow/providers/amazon/aws/triggers/glue_crawler.py
rename to providers/src/airflow/providers/amazon/aws/triggers/glue_crawler.py
diff --git a/airflow/providers/amazon/aws/triggers/glue_databrew.py b/providers/src/airflow/providers/amazon/aws/triggers/glue_databrew.py
similarity index 100%
rename from airflow/providers/amazon/aws/triggers/glue_databrew.py
rename to providers/src/airflow/providers/amazon/aws/triggers/glue_databrew.py
diff --git a/airflow/providers/amazon/aws/triggers/kinesis_analytics.py b/providers/src/airflow/providers/amazon/aws/triggers/kinesis_analytics.py
similarity index 100%
rename from airflow/providers/amazon/aws/triggers/kinesis_analytics.py
rename to providers/src/airflow/providers/amazon/aws/triggers/kinesis_analytics.py
diff --git a/airflow/providers/amazon/aws/triggers/lambda_function.py b/providers/src/airflow/providers/amazon/aws/triggers/lambda_function.py
similarity index 100%
rename from airflow/providers/amazon/aws/triggers/lambda_function.py
rename to providers/src/airflow/providers/amazon/aws/triggers/lambda_function.py
diff --git a/airflow/providers/amazon/aws/triggers/neptune.py b/providers/src/airflow/providers/amazon/aws/triggers/neptune.py
similarity index 100%
rename from airflow/providers/amazon/aws/triggers/neptune.py
rename to providers/src/airflow/providers/amazon/aws/triggers/neptune.py
diff --git a/airflow/providers/amazon/aws/triggers/opensearch_serverless.py b/providers/src/airflow/providers/amazon/aws/triggers/opensearch_serverless.py
similarity index 100%
rename from airflow/providers/amazon/aws/triggers/opensearch_serverless.py
rename to providers/src/airflow/providers/amazon/aws/triggers/opensearch_serverless.py
diff --git a/airflow/providers/amazon/aws/triggers/rds.py b/providers/src/airflow/providers/amazon/aws/triggers/rds.py
similarity index 100%
rename from airflow/providers/amazon/aws/triggers/rds.py
rename to providers/src/airflow/providers/amazon/aws/triggers/rds.py
diff --git a/airflow/providers/amazon/aws/triggers/redshift_cluster.py b/providers/src/airflow/providers/amazon/aws/triggers/redshift_cluster.py
similarity index 100%
rename from airflow/providers/amazon/aws/triggers/redshift_cluster.py
rename to providers/src/airflow/providers/amazon/aws/triggers/redshift_cluster.py
diff --git a/airflow/providers/amazon/aws/triggers/redshift_data.py b/providers/src/airflow/providers/amazon/aws/triggers/redshift_data.py
similarity index 100%
rename from airflow/providers/amazon/aws/triggers/redshift_data.py
rename to providers/src/airflow/providers/amazon/aws/triggers/redshift_data.py
diff --git a/airflow/providers/amazon/aws/triggers/s3.py b/providers/src/airflow/providers/amazon/aws/triggers/s3.py
similarity index 100%
rename from airflow/providers/amazon/aws/triggers/s3.py rename to providers/src/airflow/providers/amazon/aws/triggers/s3.py diff --git a/airflow/providers/amazon/aws/triggers/sagemaker.py b/providers/src/airflow/providers/amazon/aws/triggers/sagemaker.py similarity index 100% rename from airflow/providers/amazon/aws/triggers/sagemaker.py rename to providers/src/airflow/providers/amazon/aws/triggers/sagemaker.py diff --git a/airflow/providers/amazon/aws/triggers/sqs.py b/providers/src/airflow/providers/amazon/aws/triggers/sqs.py similarity index 100% rename from airflow/providers/amazon/aws/triggers/sqs.py rename to providers/src/airflow/providers/amazon/aws/triggers/sqs.py diff --git a/airflow/providers/amazon/aws/triggers/step_function.py b/providers/src/airflow/providers/amazon/aws/triggers/step_function.py similarity index 100% rename from airflow/providers/amazon/aws/triggers/step_function.py rename to providers/src/airflow/providers/amazon/aws/triggers/step_function.py diff --git a/airflow/providers/amazon/aws/utils/__init__.py b/providers/src/airflow/providers/amazon/aws/utils/__init__.py similarity index 100% rename from airflow/providers/amazon/aws/utils/__init__.py rename to providers/src/airflow/providers/amazon/aws/utils/__init__.py diff --git a/airflow/providers/amazon/aws/utils/asset_compat_lineage_collector.py b/providers/src/airflow/providers/amazon/aws/utils/asset_compat_lineage_collector.py similarity index 100% rename from airflow/providers/amazon/aws/utils/asset_compat_lineage_collector.py rename to providers/src/airflow/providers/amazon/aws/utils/asset_compat_lineage_collector.py diff --git a/airflow/providers/amazon/aws/utils/connection_wrapper.py b/providers/src/airflow/providers/amazon/aws/utils/connection_wrapper.py similarity index 100% rename from airflow/providers/amazon/aws/utils/connection_wrapper.py rename to providers/src/airflow/providers/amazon/aws/utils/connection_wrapper.py diff --git a/airflow/providers/amazon/aws/utils/eks_get_token.py b/providers/src/airflow/providers/amazon/aws/utils/eks_get_token.py similarity index 100% rename from airflow/providers/amazon/aws/utils/eks_get_token.py rename to providers/src/airflow/providers/amazon/aws/utils/eks_get_token.py diff --git a/airflow/providers/amazon/aws/utils/emailer.py b/providers/src/airflow/providers/amazon/aws/utils/emailer.py similarity index 100% rename from airflow/providers/amazon/aws/utils/emailer.py rename to providers/src/airflow/providers/amazon/aws/utils/emailer.py diff --git a/airflow/providers/amazon/aws/utils/identifiers.py b/providers/src/airflow/providers/amazon/aws/utils/identifiers.py similarity index 100% rename from airflow/providers/amazon/aws/utils/identifiers.py rename to providers/src/airflow/providers/amazon/aws/utils/identifiers.py diff --git a/airflow/providers/amazon/aws/utils/mixins.py b/providers/src/airflow/providers/amazon/aws/utils/mixins.py similarity index 100% rename from airflow/providers/amazon/aws/utils/mixins.py rename to providers/src/airflow/providers/amazon/aws/utils/mixins.py diff --git a/airflow/providers/amazon/aws/utils/openlineage.py b/providers/src/airflow/providers/amazon/aws/utils/openlineage.py similarity index 100% rename from airflow/providers/amazon/aws/utils/openlineage.py rename to providers/src/airflow/providers/amazon/aws/utils/openlineage.py diff --git a/airflow/providers/amazon/aws/utils/rds.py b/providers/src/airflow/providers/amazon/aws/utils/rds.py similarity index 100% rename from airflow/providers/amazon/aws/utils/rds.py rename to 
providers/src/airflow/providers/amazon/aws/utils/rds.py diff --git a/airflow/providers/amazon/aws/utils/redshift.py b/providers/src/airflow/providers/amazon/aws/utils/redshift.py similarity index 100% rename from airflow/providers/amazon/aws/utils/redshift.py rename to providers/src/airflow/providers/amazon/aws/utils/redshift.py diff --git a/airflow/providers/amazon/aws/utils/sagemaker.py b/providers/src/airflow/providers/amazon/aws/utils/sagemaker.py similarity index 100% rename from airflow/providers/amazon/aws/utils/sagemaker.py rename to providers/src/airflow/providers/amazon/aws/utils/sagemaker.py diff --git a/airflow/providers/amazon/aws/utils/sqs.py b/providers/src/airflow/providers/amazon/aws/utils/sqs.py similarity index 100% rename from airflow/providers/amazon/aws/utils/sqs.py rename to providers/src/airflow/providers/amazon/aws/utils/sqs.py diff --git a/airflow/providers/amazon/aws/utils/suppress.py b/providers/src/airflow/providers/amazon/aws/utils/suppress.py similarity index 100% rename from airflow/providers/amazon/aws/utils/suppress.py rename to providers/src/airflow/providers/amazon/aws/utils/suppress.py diff --git a/airflow/providers/amazon/aws/utils/tags.py b/providers/src/airflow/providers/amazon/aws/utils/tags.py similarity index 100% rename from airflow/providers/amazon/aws/utils/tags.py rename to providers/src/airflow/providers/amazon/aws/utils/tags.py diff --git a/airflow/providers/amazon/aws/utils/task_log_fetcher.py b/providers/src/airflow/providers/amazon/aws/utils/task_log_fetcher.py similarity index 100% rename from airflow/providers/amazon/aws/utils/task_log_fetcher.py rename to providers/src/airflow/providers/amazon/aws/utils/task_log_fetcher.py diff --git a/airflow/providers/amazon/aws/utils/waiter.py b/providers/src/airflow/providers/amazon/aws/utils/waiter.py similarity index 100% rename from airflow/providers/amazon/aws/utils/waiter.py rename to providers/src/airflow/providers/amazon/aws/utils/waiter.py diff --git a/airflow/providers/amazon/aws/utils/waiter_with_logging.py b/providers/src/airflow/providers/amazon/aws/utils/waiter_with_logging.py similarity index 100% rename from airflow/providers/amazon/aws/utils/waiter_with_logging.py rename to providers/src/airflow/providers/amazon/aws/utils/waiter_with_logging.py diff --git a/airflow/providers/amazon/aws/waiters/README.md b/providers/src/airflow/providers/amazon/aws/waiters/README.md similarity index 100% rename from airflow/providers/amazon/aws/waiters/README.md rename to providers/src/airflow/providers/amazon/aws/waiters/README.md diff --git a/airflow/providers/apache/cassandra/hooks/__init__.py b/providers/src/airflow/providers/amazon/aws/waiters/__init__.py similarity index 100% rename from airflow/providers/apache/cassandra/hooks/__init__.py rename to providers/src/airflow/providers/amazon/aws/waiters/__init__.py diff --git a/airflow/providers/amazon/aws/waiters/appflow.json b/providers/src/airflow/providers/amazon/aws/waiters/appflow.json similarity index 100% rename from airflow/providers/amazon/aws/waiters/appflow.json rename to providers/src/airflow/providers/amazon/aws/waiters/appflow.json diff --git a/airflow/providers/amazon/aws/waiters/athena.json b/providers/src/airflow/providers/amazon/aws/waiters/athena.json similarity index 100% rename from airflow/providers/amazon/aws/waiters/athena.json rename to providers/src/airflow/providers/amazon/aws/waiters/athena.json diff --git a/airflow/providers/amazon/aws/waiters/base_waiter.py 
b/providers/src/airflow/providers/amazon/aws/waiters/base_waiter.py similarity index 100% rename from airflow/providers/amazon/aws/waiters/base_waiter.py rename to providers/src/airflow/providers/amazon/aws/waiters/base_waiter.py diff --git a/airflow/providers/amazon/aws/waiters/batch.json b/providers/src/airflow/providers/amazon/aws/waiters/batch.json similarity index 100% rename from airflow/providers/amazon/aws/waiters/batch.json rename to providers/src/airflow/providers/amazon/aws/waiters/batch.json diff --git a/airflow/providers/amazon/aws/waiters/bedrock-agent.json b/providers/src/airflow/providers/amazon/aws/waiters/bedrock-agent.json similarity index 100% rename from airflow/providers/amazon/aws/waiters/bedrock-agent.json rename to providers/src/airflow/providers/amazon/aws/waiters/bedrock-agent.json diff --git a/airflow/providers/amazon/aws/waiters/bedrock.json b/providers/src/airflow/providers/amazon/aws/waiters/bedrock.json similarity index 100% rename from airflow/providers/amazon/aws/waiters/bedrock.json rename to providers/src/airflow/providers/amazon/aws/waiters/bedrock.json diff --git a/airflow/providers/amazon/aws/waiters/comprehend.json b/providers/src/airflow/providers/amazon/aws/waiters/comprehend.json similarity index 100% rename from airflow/providers/amazon/aws/waiters/comprehend.json rename to providers/src/airflow/providers/amazon/aws/waiters/comprehend.json diff --git a/airflow/providers/amazon/aws/waiters/databrew.json b/providers/src/airflow/providers/amazon/aws/waiters/databrew.json similarity index 100% rename from airflow/providers/amazon/aws/waiters/databrew.json rename to providers/src/airflow/providers/amazon/aws/waiters/databrew.json diff --git a/airflow/providers/amazon/aws/waiters/dynamodb.json b/providers/src/airflow/providers/amazon/aws/waiters/dynamodb.json similarity index 100% rename from airflow/providers/amazon/aws/waiters/dynamodb.json rename to providers/src/airflow/providers/amazon/aws/waiters/dynamodb.json diff --git a/airflow/providers/amazon/aws/waiters/ecs.json b/providers/src/airflow/providers/amazon/aws/waiters/ecs.json similarity index 100% rename from airflow/providers/amazon/aws/waiters/ecs.json rename to providers/src/airflow/providers/amazon/aws/waiters/ecs.json diff --git a/airflow/providers/amazon/aws/waiters/eks.json b/providers/src/airflow/providers/amazon/aws/waiters/eks.json similarity index 100% rename from airflow/providers/amazon/aws/waiters/eks.json rename to providers/src/airflow/providers/amazon/aws/waiters/eks.json diff --git a/airflow/providers/amazon/aws/waiters/emr-containers.json b/providers/src/airflow/providers/amazon/aws/waiters/emr-containers.json similarity index 100% rename from airflow/providers/amazon/aws/waiters/emr-containers.json rename to providers/src/airflow/providers/amazon/aws/waiters/emr-containers.json diff --git a/airflow/providers/amazon/aws/waiters/emr-serverless.json b/providers/src/airflow/providers/amazon/aws/waiters/emr-serverless.json similarity index 100% rename from airflow/providers/amazon/aws/waiters/emr-serverless.json rename to providers/src/airflow/providers/amazon/aws/waiters/emr-serverless.json diff --git a/airflow/providers/amazon/aws/waiters/emr.json b/providers/src/airflow/providers/amazon/aws/waiters/emr.json similarity index 100% rename from airflow/providers/amazon/aws/waiters/emr.json rename to providers/src/airflow/providers/amazon/aws/waiters/emr.json diff --git a/airflow/providers/amazon/aws/waiters/glue.json b/providers/src/airflow/providers/amazon/aws/waiters/glue.json 
similarity index 100% rename from airflow/providers/amazon/aws/waiters/glue.json rename to providers/src/airflow/providers/amazon/aws/waiters/glue.json diff --git a/airflow/providers/amazon/aws/waiters/kinesisanalyticsv2.json b/providers/src/airflow/providers/amazon/aws/waiters/kinesisanalyticsv2.json similarity index 100% rename from airflow/providers/amazon/aws/waiters/kinesisanalyticsv2.json rename to providers/src/airflow/providers/amazon/aws/waiters/kinesisanalyticsv2.json diff --git a/airflow/providers/amazon/aws/waiters/neptune.json b/providers/src/airflow/providers/amazon/aws/waiters/neptune.json similarity index 100% rename from airflow/providers/amazon/aws/waiters/neptune.json rename to providers/src/airflow/providers/amazon/aws/waiters/neptune.json diff --git a/airflow/providers/amazon/aws/waiters/opensearchserverless.json b/providers/src/airflow/providers/amazon/aws/waiters/opensearchserverless.json similarity index 100% rename from airflow/providers/amazon/aws/waiters/opensearchserverless.json rename to providers/src/airflow/providers/amazon/aws/waiters/opensearchserverless.json diff --git a/airflow/providers/amazon/aws/waiters/rds.json b/providers/src/airflow/providers/amazon/aws/waiters/rds.json similarity index 100% rename from airflow/providers/amazon/aws/waiters/rds.json rename to providers/src/airflow/providers/amazon/aws/waiters/rds.json diff --git a/airflow/providers/amazon/aws/waiters/redshift.json b/providers/src/airflow/providers/amazon/aws/waiters/redshift.json similarity index 100% rename from airflow/providers/amazon/aws/waiters/redshift.json rename to providers/src/airflow/providers/amazon/aws/waiters/redshift.json diff --git a/airflow/providers/amazon/aws/waiters/sagemaker.json b/providers/src/airflow/providers/amazon/aws/waiters/sagemaker.json similarity index 100% rename from airflow/providers/amazon/aws/waiters/sagemaker.json rename to providers/src/airflow/providers/amazon/aws/waiters/sagemaker.json diff --git a/airflow/providers/amazon/aws/waiters/stepfunctions.json b/providers/src/airflow/providers/amazon/aws/waiters/stepfunctions.json similarity index 100% rename from airflow/providers/amazon/aws/waiters/stepfunctions.json rename to providers/src/airflow/providers/amazon/aws/waiters/stepfunctions.json diff --git a/airflow/providers/amazon/provider.yaml b/providers/src/airflow/providers/amazon/provider.yaml similarity index 100% rename from airflow/providers/amazon/provider.yaml rename to providers/src/airflow/providers/amazon/provider.yaml diff --git a/airflow/providers/apache/beam/.latest-doc-only-change.txt b/providers/src/airflow/providers/apache/beam/.latest-doc-only-change.txt similarity index 100% rename from airflow/providers/apache/beam/.latest-doc-only-change.txt rename to providers/src/airflow/providers/apache/beam/.latest-doc-only-change.txt diff --git a/airflow/providers/apache/beam/CHANGELOG.rst b/providers/src/airflow/providers/apache/beam/CHANGELOG.rst similarity index 100% rename from airflow/providers/apache/beam/CHANGELOG.rst rename to providers/src/airflow/providers/apache/beam/CHANGELOG.rst diff --git a/airflow/providers/apache/beam/README.md b/providers/src/airflow/providers/apache/beam/README.md similarity index 100% rename from airflow/providers/apache/beam/README.md rename to providers/src/airflow/providers/apache/beam/README.md diff --git a/airflow/providers/apache/beam/__init__.py b/providers/src/airflow/providers/apache/beam/__init__.py similarity index 100% rename from airflow/providers/apache/beam/__init__.py rename to 
providers/src/airflow/providers/apache/beam/__init__.py diff --git a/airflow/providers/apache/beam/hooks/__init__.py b/providers/src/airflow/providers/apache/beam/hooks/__init__.py similarity index 100% rename from airflow/providers/apache/beam/hooks/__init__.py rename to providers/src/airflow/providers/apache/beam/hooks/__init__.py diff --git a/airflow/providers/apache/beam/hooks/beam.py b/providers/src/airflow/providers/apache/beam/hooks/beam.py similarity index 100% rename from airflow/providers/apache/beam/hooks/beam.py rename to providers/src/airflow/providers/apache/beam/hooks/beam.py diff --git a/airflow/providers/apache/beam/operators/__init__.py b/providers/src/airflow/providers/apache/beam/operators/__init__.py similarity index 100% rename from airflow/providers/apache/beam/operators/__init__.py rename to providers/src/airflow/providers/apache/beam/operators/__init__.py diff --git a/airflow/providers/apache/beam/operators/beam.py b/providers/src/airflow/providers/apache/beam/operators/beam.py similarity index 100% rename from airflow/providers/apache/beam/operators/beam.py rename to providers/src/airflow/providers/apache/beam/operators/beam.py diff --git a/airflow/providers/apache/beam/provider.yaml b/providers/src/airflow/providers/apache/beam/provider.yaml similarity index 100% rename from airflow/providers/apache/beam/provider.yaml rename to providers/src/airflow/providers/apache/beam/provider.yaml diff --git a/airflow/providers/apache/cassandra/sensors/__init__.py b/providers/src/airflow/providers/apache/beam/triggers/__init__.py similarity index 100% rename from airflow/providers/apache/cassandra/sensors/__init__.py rename to providers/src/airflow/providers/apache/beam/triggers/__init__.py diff --git a/airflow/providers/apache/beam/triggers/beam.py b/providers/src/airflow/providers/apache/beam/triggers/beam.py similarity index 100% rename from airflow/providers/apache/beam/triggers/beam.py rename to providers/src/airflow/providers/apache/beam/triggers/beam.py diff --git a/airflow/providers/apache/cassandra/.latest-doc-only-change.txt b/providers/src/airflow/providers/apache/cassandra/.latest-doc-only-change.txt similarity index 100% rename from airflow/providers/apache/cassandra/.latest-doc-only-change.txt rename to providers/src/airflow/providers/apache/cassandra/.latest-doc-only-change.txt diff --git a/airflow/providers/apache/cassandra/CHANGELOG.rst b/providers/src/airflow/providers/apache/cassandra/CHANGELOG.rst similarity index 100% rename from airflow/providers/apache/cassandra/CHANGELOG.rst rename to providers/src/airflow/providers/apache/cassandra/CHANGELOG.rst diff --git a/airflow/providers/apache/cassandra/__init__.py b/providers/src/airflow/providers/apache/cassandra/__init__.py similarity index 100% rename from airflow/providers/apache/cassandra/__init__.py rename to providers/src/airflow/providers/apache/cassandra/__init__.py diff --git a/airflow/providers/apache/druid/transfers/__init__.py b/providers/src/airflow/providers/apache/cassandra/hooks/__init__.py similarity index 100% rename from airflow/providers/apache/druid/transfers/__init__.py rename to providers/src/airflow/providers/apache/cassandra/hooks/__init__.py diff --git a/airflow/providers/apache/cassandra/hooks/cassandra.py b/providers/src/airflow/providers/apache/cassandra/hooks/cassandra.py similarity index 100% rename from airflow/providers/apache/cassandra/hooks/cassandra.py rename to providers/src/airflow/providers/apache/cassandra/hooks/cassandra.py diff --git 
a/airflow/providers/apache/cassandra/provider.yaml b/providers/src/airflow/providers/apache/cassandra/provider.yaml similarity index 100% rename from airflow/providers/apache/cassandra/provider.yaml rename to providers/src/airflow/providers/apache/cassandra/provider.yaml diff --git a/airflow/providers/apache/hdfs/log/__init__.py b/providers/src/airflow/providers/apache/cassandra/sensors/__init__.py similarity index 100% rename from airflow/providers/apache/hdfs/log/__init__.py rename to providers/src/airflow/providers/apache/cassandra/sensors/__init__.py diff --git a/airflow/providers/apache/cassandra/sensors/record.py b/providers/src/airflow/providers/apache/cassandra/sensors/record.py similarity index 100% rename from airflow/providers/apache/cassandra/sensors/record.py rename to providers/src/airflow/providers/apache/cassandra/sensors/record.py diff --git a/airflow/providers/apache/cassandra/sensors/table.py b/providers/src/airflow/providers/apache/cassandra/sensors/table.py similarity index 100% rename from airflow/providers/apache/cassandra/sensors/table.py rename to providers/src/airflow/providers/apache/cassandra/sensors/table.py diff --git a/airflow/providers/apache/drill/.latest-doc-only-change.txt b/providers/src/airflow/providers/apache/drill/.latest-doc-only-change.txt similarity index 100% rename from airflow/providers/apache/drill/.latest-doc-only-change.txt rename to providers/src/airflow/providers/apache/drill/.latest-doc-only-change.txt diff --git a/airflow/providers/apache/drill/CHANGELOG.rst b/providers/src/airflow/providers/apache/drill/CHANGELOG.rst similarity index 100% rename from airflow/providers/apache/drill/CHANGELOG.rst rename to providers/src/airflow/providers/apache/drill/CHANGELOG.rst diff --git a/airflow/providers/apache/drill/__init__.py b/providers/src/airflow/providers/apache/drill/__init__.py similarity index 100% rename from airflow/providers/apache/drill/__init__.py rename to providers/src/airflow/providers/apache/drill/__init__.py diff --git a/airflow/providers/apache/drill/hooks/__init__.py b/providers/src/airflow/providers/apache/drill/hooks/__init__.py similarity index 100% rename from airflow/providers/apache/drill/hooks/__init__.py rename to providers/src/airflow/providers/apache/drill/hooks/__init__.py diff --git a/airflow/providers/apache/drill/hooks/drill.py b/providers/src/airflow/providers/apache/drill/hooks/drill.py similarity index 100% rename from airflow/providers/apache/drill/hooks/drill.py rename to providers/src/airflow/providers/apache/drill/hooks/drill.py diff --git a/airflow/providers/apache/drill/operators/__init__.py b/providers/src/airflow/providers/apache/drill/operators/__init__.py similarity index 100% rename from airflow/providers/apache/drill/operators/__init__.py rename to providers/src/airflow/providers/apache/drill/operators/__init__.py diff --git a/airflow/providers/apache/drill/operators/drill.py b/providers/src/airflow/providers/apache/drill/operators/drill.py similarity index 100% rename from airflow/providers/apache/drill/operators/drill.py rename to providers/src/airflow/providers/apache/drill/operators/drill.py diff --git a/airflow/providers/apache/drill/provider.yaml b/providers/src/airflow/providers/apache/drill/provider.yaml similarity index 100% rename from airflow/providers/apache/drill/provider.yaml rename to providers/src/airflow/providers/apache/drill/provider.yaml diff --git a/airflow/providers/apache/druid/.latest-doc-only-change.txt 
b/providers/src/airflow/providers/apache/druid/.latest-doc-only-change.txt similarity index 100% rename from airflow/providers/apache/druid/.latest-doc-only-change.txt rename to providers/src/airflow/providers/apache/druid/.latest-doc-only-change.txt diff --git a/airflow/providers/apache/druid/CHANGELOG.rst b/providers/src/airflow/providers/apache/druid/CHANGELOG.rst similarity index 100% rename from airflow/providers/apache/druid/CHANGELOG.rst rename to providers/src/airflow/providers/apache/druid/CHANGELOG.rst diff --git a/airflow/providers/apache/druid/__init__.py b/providers/src/airflow/providers/apache/druid/__init__.py similarity index 100% rename from airflow/providers/apache/druid/__init__.py rename to providers/src/airflow/providers/apache/druid/__init__.py diff --git a/airflow/providers/apache/druid/hooks/__init__.py b/providers/src/airflow/providers/apache/druid/hooks/__init__.py similarity index 100% rename from airflow/providers/apache/druid/hooks/__init__.py rename to providers/src/airflow/providers/apache/druid/hooks/__init__.py diff --git a/airflow/providers/apache/druid/hooks/druid.py b/providers/src/airflow/providers/apache/druid/hooks/druid.py similarity index 100% rename from airflow/providers/apache/druid/hooks/druid.py rename to providers/src/airflow/providers/apache/druid/hooks/druid.py diff --git a/airflow/providers/apache/druid/operators/__init__.py b/providers/src/airflow/providers/apache/druid/operators/__init__.py similarity index 100% rename from airflow/providers/apache/druid/operators/__init__.py rename to providers/src/airflow/providers/apache/druid/operators/__init__.py diff --git a/airflow/providers/apache/druid/operators/druid.py b/providers/src/airflow/providers/apache/druid/operators/druid.py similarity index 100% rename from airflow/providers/apache/druid/operators/druid.py rename to providers/src/airflow/providers/apache/druid/operators/druid.py diff --git a/airflow/providers/apache/druid/operators/druid_check.py b/providers/src/airflow/providers/apache/druid/operators/druid_check.py similarity index 100% rename from airflow/providers/apache/druid/operators/druid_check.py rename to providers/src/airflow/providers/apache/druid/operators/druid_check.py diff --git a/airflow/providers/apache/druid/provider.yaml b/providers/src/airflow/providers/apache/druid/provider.yaml similarity index 100% rename from airflow/providers/apache/druid/provider.yaml rename to providers/src/airflow/providers/apache/druid/provider.yaml diff --git a/airflow/providers/apache/hive/transfers/__init__.py b/providers/src/airflow/providers/apache/druid/transfers/__init__.py similarity index 100% rename from airflow/providers/apache/hive/transfers/__init__.py rename to providers/src/airflow/providers/apache/druid/transfers/__init__.py diff --git a/airflow/providers/apache/druid/transfers/hive_to_druid.py b/providers/src/airflow/providers/apache/druid/transfers/hive_to_druid.py similarity index 100% rename from airflow/providers/apache/druid/transfers/hive_to_druid.py rename to providers/src/airflow/providers/apache/druid/transfers/hive_to_druid.py diff --git a/airflow/providers/apache/flink/.latest-doc-only-change.txt b/providers/src/airflow/providers/apache/flink/.latest-doc-only-change.txt similarity index 100% rename from airflow/providers/apache/flink/.latest-doc-only-change.txt rename to providers/src/airflow/providers/apache/flink/.latest-doc-only-change.txt diff --git a/airflow/providers/apache/flink/CHANGELOG.rst b/providers/src/airflow/providers/apache/flink/CHANGELOG.rst 
similarity index 100% rename from airflow/providers/apache/flink/CHANGELOG.rst rename to providers/src/airflow/providers/apache/flink/CHANGELOG.rst diff --git a/airflow/providers/apache/flink/__init__.py b/providers/src/airflow/providers/apache/flink/__init__.py similarity index 100% rename from airflow/providers/apache/flink/__init__.py rename to providers/src/airflow/providers/apache/flink/__init__.py diff --git a/airflow/providers/apache/flink/hooks/__init__.py b/providers/src/airflow/providers/apache/flink/hooks/__init__.py similarity index 100% rename from airflow/providers/apache/flink/hooks/__init__.py rename to providers/src/airflow/providers/apache/flink/hooks/__init__.py diff --git a/airflow/providers/apache/flink/operators/__init__.py b/providers/src/airflow/providers/apache/flink/operators/__init__.py similarity index 100% rename from airflow/providers/apache/flink/operators/__init__.py rename to providers/src/airflow/providers/apache/flink/operators/__init__.py diff --git a/airflow/providers/apache/flink/operators/flink_kubernetes.py b/providers/src/airflow/providers/apache/flink/operators/flink_kubernetes.py similarity index 100% rename from airflow/providers/apache/flink/operators/flink_kubernetes.py rename to providers/src/airflow/providers/apache/flink/operators/flink_kubernetes.py diff --git a/airflow/providers/apache/flink/provider.yaml b/providers/src/airflow/providers/apache/flink/provider.yaml similarity index 100% rename from airflow/providers/apache/flink/provider.yaml rename to providers/src/airflow/providers/apache/flink/provider.yaml diff --git a/airflow/providers/apache/flink/sensors/__init__.py b/providers/src/airflow/providers/apache/flink/sensors/__init__.py similarity index 100% rename from airflow/providers/apache/flink/sensors/__init__.py rename to providers/src/airflow/providers/apache/flink/sensors/__init__.py diff --git a/airflow/providers/apache/flink/sensors/flink_kubernetes.py b/providers/src/airflow/providers/apache/flink/sensors/flink_kubernetes.py similarity index 100% rename from airflow/providers/apache/flink/sensors/flink_kubernetes.py rename to providers/src/airflow/providers/apache/flink/sensors/flink_kubernetes.py diff --git a/airflow/providers/apache/hdfs/.latest-doc-only-change.txt b/providers/src/airflow/providers/apache/hdfs/.latest-doc-only-change.txt similarity index 100% rename from airflow/providers/apache/hdfs/.latest-doc-only-change.txt rename to providers/src/airflow/providers/apache/hdfs/.latest-doc-only-change.txt diff --git a/airflow/providers/apache/hdfs/CHANGELOG.rst b/providers/src/airflow/providers/apache/hdfs/CHANGELOG.rst similarity index 100% rename from airflow/providers/apache/hdfs/CHANGELOG.rst rename to providers/src/airflow/providers/apache/hdfs/CHANGELOG.rst diff --git a/airflow/providers/apache/hdfs/__init__.py b/providers/src/airflow/providers/apache/hdfs/__init__.py similarity index 100% rename from airflow/providers/apache/hdfs/__init__.py rename to providers/src/airflow/providers/apache/hdfs/__init__.py diff --git a/airflow/providers/apache/hdfs/hooks/__init__.py b/providers/src/airflow/providers/apache/hdfs/hooks/__init__.py similarity index 100% rename from airflow/providers/apache/hdfs/hooks/__init__.py rename to providers/src/airflow/providers/apache/hdfs/hooks/__init__.py diff --git a/airflow/providers/apache/hdfs/hooks/hdfs.py b/providers/src/airflow/providers/apache/hdfs/hooks/hdfs.py similarity index 100% rename from airflow/providers/apache/hdfs/hooks/hdfs.py rename to 
providers/src/airflow/providers/apache/hdfs/hooks/hdfs.py diff --git a/airflow/providers/apache/hdfs/hooks/webhdfs.py b/providers/src/airflow/providers/apache/hdfs/hooks/webhdfs.py similarity index 100% rename from airflow/providers/apache/hdfs/hooks/webhdfs.py rename to providers/src/airflow/providers/apache/hdfs/hooks/webhdfs.py diff --git a/airflow/providers/apache/kafka/hooks/__init__.py b/providers/src/airflow/providers/apache/hdfs/log/__init__.py similarity index 100% rename from airflow/providers/apache/kafka/hooks/__init__.py rename to providers/src/airflow/providers/apache/hdfs/log/__init__.py diff --git a/airflow/providers/apache/hdfs/log/hdfs_task_handler.py b/providers/src/airflow/providers/apache/hdfs/log/hdfs_task_handler.py similarity index 100% rename from airflow/providers/apache/hdfs/log/hdfs_task_handler.py rename to providers/src/airflow/providers/apache/hdfs/log/hdfs_task_handler.py diff --git a/airflow/providers/apache/hdfs/provider.yaml b/providers/src/airflow/providers/apache/hdfs/provider.yaml similarity index 100% rename from airflow/providers/apache/hdfs/provider.yaml rename to providers/src/airflow/providers/apache/hdfs/provider.yaml diff --git a/airflow/providers/apache/hdfs/sensors/__init__.py b/providers/src/airflow/providers/apache/hdfs/sensors/__init__.py similarity index 100% rename from airflow/providers/apache/hdfs/sensors/__init__.py rename to providers/src/airflow/providers/apache/hdfs/sensors/__init__.py diff --git a/airflow/providers/apache/hdfs/sensors/hdfs.py b/providers/src/airflow/providers/apache/hdfs/sensors/hdfs.py similarity index 100% rename from airflow/providers/apache/hdfs/sensors/hdfs.py rename to providers/src/airflow/providers/apache/hdfs/sensors/hdfs.py diff --git a/airflow/providers/apache/hdfs/sensors/web_hdfs.py b/providers/src/airflow/providers/apache/hdfs/sensors/web_hdfs.py similarity index 100% rename from airflow/providers/apache/hdfs/sensors/web_hdfs.py rename to providers/src/airflow/providers/apache/hdfs/sensors/web_hdfs.py diff --git a/airflow/providers/apache/hive/.latest-doc-only-change.txt b/providers/src/airflow/providers/apache/hive/.latest-doc-only-change.txt similarity index 100% rename from airflow/providers/apache/hive/.latest-doc-only-change.txt rename to providers/src/airflow/providers/apache/hive/.latest-doc-only-change.txt diff --git a/airflow/providers/apache/hive/CHANGELOG.rst b/providers/src/airflow/providers/apache/hive/CHANGELOG.rst similarity index 100% rename from airflow/providers/apache/hive/CHANGELOG.rst rename to providers/src/airflow/providers/apache/hive/CHANGELOG.rst diff --git a/airflow/providers/apache/hive/__init__.py b/providers/src/airflow/providers/apache/hive/__init__.py similarity index 100% rename from airflow/providers/apache/hive/__init__.py rename to providers/src/airflow/providers/apache/hive/__init__.py diff --git a/airflow/providers/apache/hive/hooks/__init__.py b/providers/src/airflow/providers/apache/hive/hooks/__init__.py similarity index 100% rename from airflow/providers/apache/hive/hooks/__init__.py rename to providers/src/airflow/providers/apache/hive/hooks/__init__.py diff --git a/airflow/providers/apache/hive/hooks/hive.py b/providers/src/airflow/providers/apache/hive/hooks/hive.py similarity index 99% rename from airflow/providers/apache/hive/hooks/hive.py rename to providers/src/airflow/providers/apache/hive/hooks/hive.py index e08c4d6128d8..d768743cd459 100644 --- a/airflow/providers/apache/hive/hooks/hive.py +++ 
b/providers/src/airflow/providers/apache/hive/hooks/hive.py
@@ -23,8 +23,9 @@
 import socket
 import subprocess
 import time
+from collections.abc import Iterable, Mapping
 from tempfile import NamedTemporaryFile, TemporaryDirectory
-from typing import TYPE_CHECKING, Any, Iterable, Mapping
+from typing import TYPE_CHECKING, Any
 
 if TYPE_CHECKING:
     import pandas as pd
@@ -411,9 +412,10 @@ def _infer_field_types_from_df(df: pd.DataFrame) -> dict[Any, Any]:
         if pandas_kwargs is None:
             pandas_kwargs = {}
 
-        with TemporaryDirectory(prefix="airflow_hiveop_") as tmp_dir, NamedTemporaryFile(
-            dir=tmp_dir, mode="w"
-        ) as f:
+        with (
+            TemporaryDirectory(prefix="airflow_hiveop_") as tmp_dir,
+            NamedTemporaryFile(dir=tmp_dir, mode="w") as f,
+        ):
             if field_dict is None:
                 field_dict = _infer_field_types_from_df(df)
 
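The hive.py hunk above shows the PR's other recurring cleanup: abstract container types are imported from collections.abc instead of typing, whose re-exports (typing.Iterable, typing.Mapping, and friends) have been deprecated aliases since Python 3.9 under PEP 585. A self-contained sketch of the convention; the summarize() function is illustrative only, not part of the Airflow sources:

```python
from __future__ import annotations

# collections.abc is the canonical home of the container ABCs; the typing
# re-exports removed in the hunk above are deprecated since Python 3.9.
from collections.abc import Iterable, Mapping
from typing import TYPE_CHECKING, Any

if TYPE_CHECKING:
    # Heavy dependency kept out of runtime imports, mirroring the hunk above.
    import pandas as pd


def summarize(rows: Iterable[Mapping[str, Any]]) -> dict[str, int]:
    """Count how often each key appears across a stream of records."""
    counts: dict[str, int] = {}
    for row in rows:
        for key in row:
            counts[key] = counts.get(key, 0) + 1
    return counts


print(summarize([{"a": 1}, {"a": 2, "b": 3}]))  # -> {'a': 2, 'b': 1}
```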
diff --git a/airflow/providers/apache/hive/macros/__init__.py b/providers/src/airflow/providers/apache/hive/macros/__init__.py
similarity index 100%
rename from airflow/providers/apache/hive/macros/__init__.py
rename to providers/src/airflow/providers/apache/hive/macros/__init__.py
diff --git a/airflow/providers/apache/hive/macros/hive.py b/providers/src/airflow/providers/apache/hive/macros/hive.py
similarity index 100%
rename from airflow/providers/apache/hive/macros/hive.py
rename to providers/src/airflow/providers/apache/hive/macros/hive.py
diff --git a/airflow/providers/apache/hive/operators/__init__.py b/providers/src/airflow/providers/apache/hive/operators/__init__.py
similarity index 100%
rename from airflow/providers/apache/hive/operators/__init__.py
rename to providers/src/airflow/providers/apache/hive/operators/__init__.py
diff --git a/airflow/providers/apache/hive/operators/hive.py b/providers/src/airflow/providers/apache/hive/operators/hive.py
similarity index 100%
rename from airflow/providers/apache/hive/operators/hive.py
rename to providers/src/airflow/providers/apache/hive/operators/hive.py
diff --git a/airflow/providers/apache/hive/operators/hive_stats.py b/providers/src/airflow/providers/apache/hive/operators/hive_stats.py
similarity index 100%
rename from airflow/providers/apache/hive/operators/hive_stats.py
rename to providers/src/airflow/providers/apache/hive/operators/hive_stats.py
diff --git a/airflow/providers/apache/hive/plugins/__init__.py b/providers/src/airflow/providers/apache/hive/plugins/__init__.py
similarity index 100%
rename from airflow/providers/apache/hive/plugins/__init__.py
rename to providers/src/airflow/providers/apache/hive/plugins/__init__.py
diff --git a/airflow/providers/apache/hive/plugins/hive.py b/providers/src/airflow/providers/apache/hive/plugins/hive.py
similarity index 100%
rename from airflow/providers/apache/hive/plugins/hive.py
rename to providers/src/airflow/providers/apache/hive/plugins/hive.py
diff --git a/airflow/providers/apache/hive/provider.yaml b/providers/src/airflow/providers/apache/hive/provider.yaml
similarity index 100%
rename from airflow/providers/apache/hive/provider.yaml
rename to providers/src/airflow/providers/apache/hive/provider.yaml
diff --git a/airflow/providers/apache/hive/sensors/__init__.py b/providers/src/airflow/providers/apache/hive/sensors/__init__.py
similarity index 100%
rename from airflow/providers/apache/hive/sensors/__init__.py
rename to providers/src/airflow/providers/apache/hive/sensors/__init__.py
diff --git a/airflow/providers/apache/hive/sensors/hive_partition.py b/providers/src/airflow/providers/apache/hive/sensors/hive_partition.py
similarity index 100%
rename from airflow/providers/apache/hive/sensors/hive_partition.py
rename to providers/src/airflow/providers/apache/hive/sensors/hive_partition.py
diff --git a/airflow/providers/apache/hive/sensors/metastore_partition.py b/providers/src/airflow/providers/apache/hive/sensors/metastore_partition.py
similarity index 100%
rename from airflow/providers/apache/hive/sensors/metastore_partition.py
rename to providers/src/airflow/providers/apache/hive/sensors/metastore_partition.py
diff --git a/airflow/providers/apache/hive/sensors/named_hive_partition.py b/providers/src/airflow/providers/apache/hive/sensors/named_hive_partition.py
similarity index 100%
rename from airflow/providers/apache/hive/sensors/named_hive_partition.py
rename to providers/src/airflow/providers/apache/hive/sensors/named_hive_partition.py
diff --git a/airflow/providers/apache/kafka/operators/__init__.py b/providers/src/airflow/providers/apache/hive/transfers/__init__.py
similarity index 100%
rename from airflow/providers/apache/kafka/operators/__init__.py
rename to providers/src/airflow/providers/apache/hive/transfers/__init__.py
diff --git a/airflow/providers/apache/hive/transfers/hive_to_mysql.py b/providers/src/airflow/providers/apache/hive/transfers/hive_to_mysql.py
similarity index 100%
rename from airflow/providers/apache/hive/transfers/hive_to_mysql.py
rename to providers/src/airflow/providers/apache/hive/transfers/hive_to_mysql.py
diff --git a/airflow/providers/apache/hive/transfers/hive_to_samba.py b/providers/src/airflow/providers/apache/hive/transfers/hive_to_samba.py
similarity index 100%
rename from airflow/providers/apache/hive/transfers/hive_to_samba.py
rename to providers/src/airflow/providers/apache/hive/transfers/hive_to_samba.py
diff --git a/airflow/providers/apache/hive/transfers/mssql_to_hive.py b/providers/src/airflow/providers/apache/hive/transfers/mssql_to_hive.py
similarity index 100%
rename from airflow/providers/apache/hive/transfers/mssql_to_hive.py
rename to providers/src/airflow/providers/apache/hive/transfers/mssql_to_hive.py
diff --git a/airflow/providers/apache/hive/transfers/mysql_to_hive.py b/providers/src/airflow/providers/apache/hive/transfers/mysql_to_hive.py
similarity index 100%
rename from airflow/providers/apache/hive/transfers/mysql_to_hive.py
rename to providers/src/airflow/providers/apache/hive/transfers/mysql_to_hive.py
diff --git a/airflow/providers/apache/hive/transfers/s3_to_hive.py b/providers/src/airflow/providers/apache/hive/transfers/s3_to_hive.py
similarity index 97%
rename from airflow/providers/apache/hive/transfers/s3_to_hive.py
rename to providers/src/airflow/providers/apache/hive/transfers/s3_to_hive.py
index ebc953689701..6285103d370b 100644
--- a/airflow/providers/apache/hive/transfers/s3_to_hive.py
+++ b/providers/src/airflow/providers/apache/hive/transfers/s3_to_hive.py
@@ -23,8 +23,9 @@
 import gzip
 import os
 import tempfile
+from collections.abc import Sequence
 from tempfile import NamedTemporaryFile, TemporaryDirectory
-from typing import TYPE_CHECKING, Any, Sequence
+from typing import TYPE_CHECKING, Any
 
 from airflow.exceptions import AirflowException
 from airflow.models import BaseOperator
@@ -161,9 +162,10 @@ def execute(self, context: Context):
         if self.select_expression and self.input_compressed and file_ext.lower() != ".gz":
             raise AirflowException("GZIP is the only compression format Amazon S3 Select supports")
 
-        with TemporaryDirectory(prefix="tmps32hive_") as tmp_dir, NamedTemporaryFile(
-            mode="wb", dir=tmp_dir, suffix=file_ext
-        ) as f:
+        with (
+            TemporaryDirectory(prefix="tmps32hive_") as tmp_dir,
+            NamedTemporaryFile(mode="wb", dir=tmp_dir, suffix=file_ext) as f,
+        ):
             self.log.info("Dumping S3 key %s contents to local file %s", s3_key_object.key, f.name)
             if self.select_expression:
                 option = {}
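s3_to_hive.py gets the same treatment in its synchronous form: the two temporary-file managers become one parenthesized with statement. For code that must still parse on Python < 3.10, where that syntax is a SyntaxError, contextlib.ExitStack is the usual equivalent; this is a hypothetical sketch, not a pattern taken from this PR:

```python
from contextlib import ExitStack
from tempfile import NamedTemporaryFile, TemporaryDirectory

# ExitStack collects context managers one by one and releases them in
# reverse order when the block exits, like a single grouped `with`.
with ExitStack() as stack:
    tmp_dir = stack.enter_context(TemporaryDirectory(prefix="tmps32hive_"))
    f = stack.enter_context(NamedTemporaryFile(mode="wb", dir=tmp_dir, suffix=".gz"))
    f.write(b"payload")  # the file lives inside the managed temporary directory
```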
diff --git a/airflow/providers/apache/hive/transfers/vertica_to_hive.py b/providers/src/airflow/providers/apache/hive/transfers/vertica_to_hive.py
similarity index 100%
rename from airflow/providers/apache/hive/transfers/vertica_to_hive.py
rename to providers/src/airflow/providers/apache/hive/transfers/vertica_to_hive.py
diff --git a/airflow/providers/apache/iceberg/.latest-doc-only-change.txt b/providers/src/airflow/providers/apache/iceberg/.latest-doc-only-change.txt
similarity index 100%
rename from airflow/providers/apache/iceberg/.latest-doc-only-change.txt
rename to providers/src/airflow/providers/apache/iceberg/.latest-doc-only-change.txt
diff --git a/airflow/providers/apache/iceberg/CHANGELOG.rst b/providers/src/airflow/providers/apache/iceberg/CHANGELOG.rst
similarity index 100%
rename from airflow/providers/apache/iceberg/CHANGELOG.rst
rename to providers/src/airflow/providers/apache/iceberg/CHANGELOG.rst
diff --git a/airflow/providers/apache/iceberg/__init__.py b/providers/src/airflow/providers/apache/iceberg/__init__.py
similarity index 100%
rename from airflow/providers/apache/iceberg/__init__.py
rename to providers/src/airflow/providers/apache/iceberg/__init__.py
diff --git a/airflow/providers/apache/iceberg/hooks/__init__.py b/providers/src/airflow/providers/apache/iceberg/hooks/__init__.py
similarity index 100%
rename from airflow/providers/apache/iceberg/hooks/__init__.py
rename to providers/src/airflow/providers/apache/iceberg/hooks/__init__.py
diff --git a/airflow/providers/apache/iceberg/hooks/iceberg.py b/providers/src/airflow/providers/apache/iceberg/hooks/iceberg.py
similarity index 100%
rename from airflow/providers/apache/iceberg/hooks/iceberg.py
rename to providers/src/airflow/providers/apache/iceberg/hooks/iceberg.py
diff --git a/airflow/providers/apache/iceberg/provider.yaml b/providers/src/airflow/providers/apache/iceberg/provider.yaml
similarity index 100%
rename from airflow/providers/apache/iceberg/provider.yaml
rename to providers/src/airflow/providers/apache/iceberg/provider.yaml
diff --git a/airflow/providers/apache/impala/.latest-doc-only-change.txt b/providers/src/airflow/providers/apache/impala/.latest-doc-only-change.txt
similarity index 100%
rename from airflow/providers/apache/impala/.latest-doc-only-change.txt
rename to providers/src/airflow/providers/apache/impala/.latest-doc-only-change.txt
diff --git a/airflow/providers/apache/impala/CHANGELOG.rst b/providers/src/airflow/providers/apache/impala/CHANGELOG.rst
similarity index 100%
rename from airflow/providers/apache/impala/CHANGELOG.rst
rename to providers/src/airflow/providers/apache/impala/CHANGELOG.rst
diff --git a/airflow/providers/apache/impala/__init__.py b/providers/src/airflow/providers/apache/impala/__init__.py
similarity index 100%
rename from airflow/providers/apache/impala/__init__.py
rename to providers/src/airflow/providers/apache/impala/__init__.py
diff --git a/airflow/providers/apache/impala/hooks/__init__.py b/providers/src/airflow/providers/apache/impala/hooks/__init__.py
similarity index 100%
rename from airflow/providers/apache/impala/hooks/__init__.py
rename to providers/src/airflow/providers/apache/impala/hooks/__init__.py
diff --git a/airflow/providers/apache/impala/hooks/impala.py b/providers/src/airflow/providers/apache/impala/hooks/impala.py
similarity index 100%
rename from airflow/providers/apache/impala/hooks/impala.py
rename to providers/src/airflow/providers/apache/impala/hooks/impala.py
diff --git a/airflow/providers/apache/impala/provider.yaml b/providers/src/airflow/providers/apache/impala/provider.yaml
similarity index 100%
rename from airflow/providers/apache/impala/provider.yaml
rename to providers/src/airflow/providers/apache/impala/provider.yaml
diff --git a/airflow/providers/apache/kafka/.latest-doc-only-change.txt b/providers/src/airflow/providers/apache/kafka/.latest-doc-only-change.txt
similarity index 100%
rename from airflow/providers/apache/kafka/.latest-doc-only-change.txt
rename to providers/src/airflow/providers/apache/kafka/.latest-doc-only-change.txt
diff --git a/airflow/providers/apache/kafka/CHANGELOG.rst b/providers/src/airflow/providers/apache/kafka/CHANGELOG.rst
similarity index 100%
rename from airflow/providers/apache/kafka/CHANGELOG.rst
rename to providers/src/airflow/providers/apache/kafka/CHANGELOG.rst
diff --git a/airflow/providers/apache/kafka/__init__.py b/providers/src/airflow/providers/apache/kafka/__init__.py
similarity index 100%
rename from airflow/providers/apache/kafka/__init__.py
rename to providers/src/airflow/providers/apache/kafka/__init__.py
diff --git a/airflow/providers/apache/kafka/sensors/__init__.py b/providers/src/airflow/providers/apache/kafka/hooks/__init__.py
similarity index 100%
rename from airflow/providers/apache/kafka/sensors/__init__.py
rename to providers/src/airflow/providers/apache/kafka/hooks/__init__.py
diff --git a/airflow/providers/apache/kafka/hooks/base.py b/providers/src/airflow/providers/apache/kafka/hooks/base.py
similarity index 100%
rename from airflow/providers/apache/kafka/hooks/base.py
rename to providers/src/airflow/providers/apache/kafka/hooks/base.py
diff --git a/airflow/providers/apache/kafka/hooks/client.py b/providers/src/airflow/providers/apache/kafka/hooks/client.py
similarity index 100%
rename from airflow/providers/apache/kafka/hooks/client.py
rename to providers/src/airflow/providers/apache/kafka/hooks/client.py
diff --git a/airflow/providers/apache/kafka/hooks/consume.py b/providers/src/airflow/providers/apache/kafka/hooks/consume.py
similarity index 100%
rename from airflow/providers/apache/kafka/hooks/consume.py
rename to providers/src/airflow/providers/apache/kafka/hooks/consume.py
diff --git a/airflow/providers/apache/kafka/hooks/produce.py b/providers/src/airflow/providers/apache/kafka/hooks/produce.py
similarity index 100%
rename from airflow/providers/apache/kafka/hooks/produce.py
rename to providers/src/airflow/providers/apache/kafka/hooks/produce.py
diff --git a/airflow/providers/apache/kafka/triggers/__init__.py b/providers/src/airflow/providers/apache/kafka/operators/__init__.py
similarity index 100%
rename from airflow/providers/apache/kafka/triggers/__init__.py
rename to providers/src/airflow/providers/apache/kafka/operators/__init__.py
diff --git a/airflow/providers/apache/kafka/operators/consume.py b/providers/src/airflow/providers/apache/kafka/operators/consume.py
similarity index 100%
rename from airflow/providers/apache/kafka/operators/consume.py
rename to providers/src/airflow/providers/apache/kafka/operators/consume.py
diff --git a/airflow/providers/apache/kafka/operators/produce.py b/providers/src/airflow/providers/apache/kafka/operators/produce.py
similarity index 100%
rename from airflow/providers/apache/kafka/operators/produce.py
rename to
providers/src/airflow/providers/apache/kafka/operators/produce.py diff --git a/airflow/providers/apache/kafka/provider.yaml b/providers/src/airflow/providers/apache/kafka/provider.yaml similarity index 100% rename from airflow/providers/apache/kafka/provider.yaml rename to providers/src/airflow/providers/apache/kafka/provider.yaml diff --git a/airflow/providers/apache/kylin/hooks/__init__.py b/providers/src/airflow/providers/apache/kafka/sensors/__init__.py similarity index 100% rename from airflow/providers/apache/kylin/hooks/__init__.py rename to providers/src/airflow/providers/apache/kafka/sensors/__init__.py diff --git a/airflow/providers/apache/kafka/sensors/kafka.py b/providers/src/airflow/providers/apache/kafka/sensors/kafka.py similarity index 100% rename from airflow/providers/apache/kafka/sensors/kafka.py rename to providers/src/airflow/providers/apache/kafka/sensors/kafka.py diff --git a/airflow/providers/apache/kylin/operators/__init__.py b/providers/src/airflow/providers/apache/kafka/triggers/__init__.py similarity index 100% rename from airflow/providers/apache/kylin/operators/__init__.py rename to providers/src/airflow/providers/apache/kafka/triggers/__init__.py diff --git a/airflow/providers/apache/kafka/triggers/await_message.py b/providers/src/airflow/providers/apache/kafka/triggers/await_message.py similarity index 100% rename from airflow/providers/apache/kafka/triggers/await_message.py rename to providers/src/airflow/providers/apache/kafka/triggers/await_message.py diff --git a/airflow/providers/apache/kylin/.latest-doc-only-change.txt b/providers/src/airflow/providers/apache/kylin/.latest-doc-only-change.txt similarity index 100% rename from airflow/providers/apache/kylin/.latest-doc-only-change.txt rename to providers/src/airflow/providers/apache/kylin/.latest-doc-only-change.txt diff --git a/airflow/providers/apache/kylin/CHANGELOG.rst b/providers/src/airflow/providers/apache/kylin/CHANGELOG.rst similarity index 100% rename from airflow/providers/apache/kylin/CHANGELOG.rst rename to providers/src/airflow/providers/apache/kylin/CHANGELOG.rst diff --git a/airflow/providers/apache/kylin/__init__.py b/providers/src/airflow/providers/apache/kylin/__init__.py similarity index 100% rename from airflow/providers/apache/kylin/__init__.py rename to providers/src/airflow/providers/apache/kylin/__init__.py diff --git a/airflow/providers/apache/livy/hooks/__init__.py b/providers/src/airflow/providers/apache/kylin/hooks/__init__.py similarity index 100% rename from airflow/providers/apache/livy/hooks/__init__.py rename to providers/src/airflow/providers/apache/kylin/hooks/__init__.py diff --git a/airflow/providers/apache/kylin/hooks/kylin.py b/providers/src/airflow/providers/apache/kylin/hooks/kylin.py similarity index 100% rename from airflow/providers/apache/kylin/hooks/kylin.py rename to providers/src/airflow/providers/apache/kylin/hooks/kylin.py diff --git a/airflow/providers/apache/livy/operators/__init__.py b/providers/src/airflow/providers/apache/kylin/operators/__init__.py similarity index 100% rename from airflow/providers/apache/livy/operators/__init__.py rename to providers/src/airflow/providers/apache/kylin/operators/__init__.py diff --git a/airflow/providers/apache/kylin/operators/kylin_cube.py b/providers/src/airflow/providers/apache/kylin/operators/kylin_cube.py similarity index 100% rename from airflow/providers/apache/kylin/operators/kylin_cube.py rename to providers/src/airflow/providers/apache/kylin/operators/kylin_cube.py diff --git 
a/airflow/providers/apache/kylin/provider.yaml b/providers/src/airflow/providers/apache/kylin/provider.yaml similarity index 100% rename from airflow/providers/apache/kylin/provider.yaml rename to providers/src/airflow/providers/apache/kylin/provider.yaml diff --git a/airflow/providers/apache/livy/.latest-doc-only-change.txt b/providers/src/airflow/providers/apache/livy/.latest-doc-only-change.txt similarity index 100% rename from airflow/providers/apache/livy/.latest-doc-only-change.txt rename to providers/src/airflow/providers/apache/livy/.latest-doc-only-change.txt diff --git a/airflow/providers/apache/livy/CHANGELOG.rst b/providers/src/airflow/providers/apache/livy/CHANGELOG.rst similarity index 100% rename from airflow/providers/apache/livy/CHANGELOG.rst rename to providers/src/airflow/providers/apache/livy/CHANGELOG.rst diff --git a/airflow/providers/apache/livy/__init__.py b/providers/src/airflow/providers/apache/livy/__init__.py similarity index 100% rename from airflow/providers/apache/livy/__init__.py rename to providers/src/airflow/providers/apache/livy/__init__.py diff --git a/airflow/providers/apache/livy/sensors/__init__.py b/providers/src/airflow/providers/apache/livy/hooks/__init__.py similarity index 100% rename from airflow/providers/apache/livy/sensors/__init__.py rename to providers/src/airflow/providers/apache/livy/hooks/__init__.py diff --git a/airflow/providers/apache/livy/hooks/livy.py b/providers/src/airflow/providers/apache/livy/hooks/livy.py similarity index 100% rename from airflow/providers/apache/livy/hooks/livy.py rename to providers/src/airflow/providers/apache/livy/hooks/livy.py diff --git a/airflow/providers/apache/livy/triggers/__init__.py b/providers/src/airflow/providers/apache/livy/operators/__init__.py similarity index 100% rename from airflow/providers/apache/livy/triggers/__init__.py rename to providers/src/airflow/providers/apache/livy/operators/__init__.py diff --git a/airflow/providers/apache/livy/operators/livy.py b/providers/src/airflow/providers/apache/livy/operators/livy.py similarity index 100% rename from airflow/providers/apache/livy/operators/livy.py rename to providers/src/airflow/providers/apache/livy/operators/livy.py diff --git a/airflow/providers/apache/livy/provider.yaml b/providers/src/airflow/providers/apache/livy/provider.yaml similarity index 100% rename from airflow/providers/apache/livy/provider.yaml rename to providers/src/airflow/providers/apache/livy/provider.yaml diff --git a/airflow/providers/arangodb/example_dags/__init__.py b/providers/src/airflow/providers/apache/livy/sensors/__init__.py similarity index 100% rename from airflow/providers/arangodb/example_dags/__init__.py rename to providers/src/airflow/providers/apache/livy/sensors/__init__.py diff --git a/airflow/providers/apache/livy/sensors/livy.py b/providers/src/airflow/providers/apache/livy/sensors/livy.py similarity index 100% rename from airflow/providers/apache/livy/sensors/livy.py rename to providers/src/airflow/providers/apache/livy/sensors/livy.py diff --git a/airflow/providers/arangodb/hooks/__init__.py b/providers/src/airflow/providers/apache/livy/triggers/__init__.py similarity index 100% rename from airflow/providers/arangodb/hooks/__init__.py rename to providers/src/airflow/providers/apache/livy/triggers/__init__.py diff --git a/airflow/providers/apache/livy/triggers/livy.py b/providers/src/airflow/providers/apache/livy/triggers/livy.py similarity index 100% rename from airflow/providers/apache/livy/triggers/livy.py rename to 
providers/src/airflow/providers/apache/livy/triggers/livy.py diff --git a/airflow/providers/apache/pig/.latest-doc-only-change.txt b/providers/src/airflow/providers/apache/pig/.latest-doc-only-change.txt similarity index 100% rename from airflow/providers/apache/pig/.latest-doc-only-change.txt rename to providers/src/airflow/providers/apache/pig/.latest-doc-only-change.txt diff --git a/airflow/providers/apache/pig/CHANGELOG.rst b/providers/src/airflow/providers/apache/pig/CHANGELOG.rst similarity index 100% rename from airflow/providers/apache/pig/CHANGELOG.rst rename to providers/src/airflow/providers/apache/pig/CHANGELOG.rst diff --git a/airflow/providers/apache/pig/__init__.py b/providers/src/airflow/providers/apache/pig/__init__.py similarity index 100% rename from airflow/providers/apache/pig/__init__.py rename to providers/src/airflow/providers/apache/pig/__init__.py diff --git a/airflow/providers/apache/pig/hooks/__init__.py b/providers/src/airflow/providers/apache/pig/hooks/__init__.py similarity index 100% rename from airflow/providers/apache/pig/hooks/__init__.py rename to providers/src/airflow/providers/apache/pig/hooks/__init__.py diff --git a/airflow/providers/apache/pig/hooks/pig.py b/providers/src/airflow/providers/apache/pig/hooks/pig.py similarity index 100% rename from airflow/providers/apache/pig/hooks/pig.py rename to providers/src/airflow/providers/apache/pig/hooks/pig.py diff --git a/airflow/providers/apache/pig/operators/__init__.py b/providers/src/airflow/providers/apache/pig/operators/__init__.py similarity index 100% rename from airflow/providers/apache/pig/operators/__init__.py rename to providers/src/airflow/providers/apache/pig/operators/__init__.py diff --git a/airflow/providers/apache/pig/operators/pig.py b/providers/src/airflow/providers/apache/pig/operators/pig.py similarity index 100% rename from airflow/providers/apache/pig/operators/pig.py rename to providers/src/airflow/providers/apache/pig/operators/pig.py diff --git a/airflow/providers/apache/pig/provider.yaml b/providers/src/airflow/providers/apache/pig/provider.yaml similarity index 100% rename from airflow/providers/apache/pig/provider.yaml rename to providers/src/airflow/providers/apache/pig/provider.yaml diff --git a/airflow/providers/apache/pinot/.latest-doc-only-change.txt b/providers/src/airflow/providers/apache/pinot/.latest-doc-only-change.txt similarity index 100% rename from airflow/providers/apache/pinot/.latest-doc-only-change.txt rename to providers/src/airflow/providers/apache/pinot/.latest-doc-only-change.txt diff --git a/airflow/providers/apache/pinot/CHANGELOG.rst b/providers/src/airflow/providers/apache/pinot/CHANGELOG.rst similarity index 100% rename from airflow/providers/apache/pinot/CHANGELOG.rst rename to providers/src/airflow/providers/apache/pinot/CHANGELOG.rst diff --git a/airflow/providers/apache/pinot/__init__.py b/providers/src/airflow/providers/apache/pinot/__init__.py similarity index 100% rename from airflow/providers/apache/pinot/__init__.py rename to providers/src/airflow/providers/apache/pinot/__init__.py diff --git a/airflow/providers/apache/pinot/hooks/__init__.py b/providers/src/airflow/providers/apache/pinot/hooks/__init__.py similarity index 100% rename from airflow/providers/apache/pinot/hooks/__init__.py rename to providers/src/airflow/providers/apache/pinot/hooks/__init__.py diff --git a/airflow/providers/apache/pinot/hooks/pinot.py b/providers/src/airflow/providers/apache/pinot/hooks/pinot.py similarity index 100% rename from 
airflow/providers/apache/pinot/hooks/pinot.py rename to providers/src/airflow/providers/apache/pinot/hooks/pinot.py diff --git a/airflow/providers/apache/pinot/provider.yaml b/providers/src/airflow/providers/apache/pinot/provider.yaml similarity index 100% rename from airflow/providers/apache/pinot/provider.yaml rename to providers/src/airflow/providers/apache/pinot/provider.yaml diff --git a/airflow/providers/apache/spark/.latest-doc-only-change.txt b/providers/src/airflow/providers/apache/spark/.latest-doc-only-change.txt similarity index 100% rename from airflow/providers/apache/spark/.latest-doc-only-change.txt rename to providers/src/airflow/providers/apache/spark/.latest-doc-only-change.txt diff --git a/airflow/providers/apache/spark/CHANGELOG.rst b/providers/src/airflow/providers/apache/spark/CHANGELOG.rst similarity index 100% rename from airflow/providers/apache/spark/CHANGELOG.rst rename to providers/src/airflow/providers/apache/spark/CHANGELOG.rst diff --git a/airflow/providers/apache/spark/__init__.py b/providers/src/airflow/providers/apache/spark/__init__.py similarity index 100% rename from airflow/providers/apache/spark/__init__.py rename to providers/src/airflow/providers/apache/spark/__init__.py diff --git a/airflow/providers/apache/spark/decorators/__init__.py b/providers/src/airflow/providers/apache/spark/decorators/__init__.py similarity index 100% rename from airflow/providers/apache/spark/decorators/__init__.py rename to providers/src/airflow/providers/apache/spark/decorators/__init__.py diff --git a/airflow/providers/apache/spark/decorators/pyspark.py b/providers/src/airflow/providers/apache/spark/decorators/pyspark.py similarity index 100% rename from airflow/providers/apache/spark/decorators/pyspark.py rename to providers/src/airflow/providers/apache/spark/decorators/pyspark.py diff --git a/airflow/providers/apache/spark/hooks/__init__.py b/providers/src/airflow/providers/apache/spark/hooks/__init__.py similarity index 100% rename from airflow/providers/apache/spark/hooks/__init__.py rename to providers/src/airflow/providers/apache/spark/hooks/__init__.py diff --git a/airflow/providers/apache/spark/hooks/spark_connect.py b/providers/src/airflow/providers/apache/spark/hooks/spark_connect.py similarity index 100% rename from airflow/providers/apache/spark/hooks/spark_connect.py rename to providers/src/airflow/providers/apache/spark/hooks/spark_connect.py diff --git a/airflow/providers/apache/spark/hooks/spark_jdbc.py b/providers/src/airflow/providers/apache/spark/hooks/spark_jdbc.py similarity index 100% rename from airflow/providers/apache/spark/hooks/spark_jdbc.py rename to providers/src/airflow/providers/apache/spark/hooks/spark_jdbc.py diff --git a/airflow/providers/apache/spark/hooks/spark_jdbc_script.py b/providers/src/airflow/providers/apache/spark/hooks/spark_jdbc_script.py similarity index 100% rename from airflow/providers/apache/spark/hooks/spark_jdbc_script.py rename to providers/src/airflow/providers/apache/spark/hooks/spark_jdbc_script.py diff --git a/airflow/providers/apache/spark/hooks/spark_sql.py b/providers/src/airflow/providers/apache/spark/hooks/spark_sql.py similarity index 100% rename from airflow/providers/apache/spark/hooks/spark_sql.py rename to providers/src/airflow/providers/apache/spark/hooks/spark_sql.py diff --git a/airflow/providers/apache/spark/hooks/spark_submit.py b/providers/src/airflow/providers/apache/spark/hooks/spark_submit.py similarity index 100% rename from airflow/providers/apache/spark/hooks/spark_submit.py rename to 
providers/src/airflow/providers/apache/spark/hooks/spark_submit.py diff --git a/airflow/providers/apache/spark/operators/__init__.py b/providers/src/airflow/providers/apache/spark/operators/__init__.py similarity index 100% rename from airflow/providers/apache/spark/operators/__init__.py rename to providers/src/airflow/providers/apache/spark/operators/__init__.py diff --git a/airflow/providers/apache/spark/operators/spark_jdbc.py b/providers/src/airflow/providers/apache/spark/operators/spark_jdbc.py similarity index 100% rename from airflow/providers/apache/spark/operators/spark_jdbc.py rename to providers/src/airflow/providers/apache/spark/operators/spark_jdbc.py diff --git a/airflow/providers/apache/spark/operators/spark_sql.py b/providers/src/airflow/providers/apache/spark/operators/spark_sql.py similarity index 100% rename from airflow/providers/apache/spark/operators/spark_sql.py rename to providers/src/airflow/providers/apache/spark/operators/spark_sql.py diff --git a/airflow/providers/apache/spark/operators/spark_submit.py b/providers/src/airflow/providers/apache/spark/operators/spark_submit.py similarity index 100% rename from airflow/providers/apache/spark/operators/spark_submit.py rename to providers/src/airflow/providers/apache/spark/operators/spark_submit.py diff --git a/airflow/providers/apache/spark/provider.yaml b/providers/src/airflow/providers/apache/spark/provider.yaml similarity index 100% rename from airflow/providers/apache/spark/provider.yaml rename to providers/src/airflow/providers/apache/spark/provider.yaml diff --git a/airflow/providers/apprise/.latest-doc-only-change.txt b/providers/src/airflow/providers/apprise/.latest-doc-only-change.txt similarity index 100% rename from airflow/providers/apprise/.latest-doc-only-change.txt rename to providers/src/airflow/providers/apprise/.latest-doc-only-change.txt diff --git a/airflow/providers/apprise/CHANGELOG.rst b/providers/src/airflow/providers/apprise/CHANGELOG.rst similarity index 100% rename from airflow/providers/apprise/CHANGELOG.rst rename to providers/src/airflow/providers/apprise/CHANGELOG.rst diff --git a/airflow/providers/apprise/__init__.py b/providers/src/airflow/providers/apprise/__init__.py similarity index 100% rename from airflow/providers/apprise/__init__.py rename to providers/src/airflow/providers/apprise/__init__.py diff --git a/airflow/providers/apprise/hooks/__init__.py b/providers/src/airflow/providers/apprise/hooks/__init__.py similarity index 100% rename from airflow/providers/apprise/hooks/__init__.py rename to providers/src/airflow/providers/apprise/hooks/__init__.py diff --git a/airflow/providers/apprise/hooks/apprise.py b/providers/src/airflow/providers/apprise/hooks/apprise.py similarity index 100% rename from airflow/providers/apprise/hooks/apprise.py rename to providers/src/airflow/providers/apprise/hooks/apprise.py diff --git a/airflow/providers/apprise/notifications/__init__.py b/providers/src/airflow/providers/apprise/notifications/__init__.py similarity index 100% rename from airflow/providers/apprise/notifications/__init__.py rename to providers/src/airflow/providers/apprise/notifications/__init__.py diff --git a/airflow/providers/apprise/notifications/apprise.py b/providers/src/airflow/providers/apprise/notifications/apprise.py similarity index 100% rename from airflow/providers/apprise/notifications/apprise.py rename to providers/src/airflow/providers/apprise/notifications/apprise.py diff --git a/airflow/providers/apprise/provider.yaml 
b/providers/src/airflow/providers/apprise/provider.yaml similarity index 100% rename from airflow/providers/apprise/provider.yaml rename to providers/src/airflow/providers/apprise/provider.yaml diff --git a/airflow/providers/arangodb/.latest-doc-only-change.txt b/providers/src/airflow/providers/arangodb/.latest-doc-only-change.txt similarity index 100% rename from airflow/providers/arangodb/.latest-doc-only-change.txt rename to providers/src/airflow/providers/arangodb/.latest-doc-only-change.txt diff --git a/airflow/providers/arangodb/CHANGELOG.rst b/providers/src/airflow/providers/arangodb/CHANGELOG.rst similarity index 100% rename from airflow/providers/arangodb/CHANGELOG.rst rename to providers/src/airflow/providers/arangodb/CHANGELOG.rst diff --git a/airflow/providers/arangodb/__init__.py b/providers/src/airflow/providers/arangodb/__init__.py similarity index 100% rename from airflow/providers/arangodb/__init__.py rename to providers/src/airflow/providers/arangodb/__init__.py diff --git a/airflow/providers/arangodb/operators/__init__.py b/providers/src/airflow/providers/arangodb/example_dags/__init__.py similarity index 100% rename from airflow/providers/arangodb/operators/__init__.py rename to providers/src/airflow/providers/arangodb/example_dags/__init__.py diff --git a/airflow/providers/arangodb/example_dags/example_arangodb.py b/providers/src/airflow/providers/arangodb/example_dags/example_arangodb.py similarity index 100% rename from airflow/providers/arangodb/example_dags/example_arangodb.py rename to providers/src/airflow/providers/arangodb/example_dags/example_arangodb.py diff --git a/airflow/providers/arangodb/sensors/__init__.py b/providers/src/airflow/providers/arangodb/hooks/__init__.py similarity index 100% rename from airflow/providers/arangodb/sensors/__init__.py rename to providers/src/airflow/providers/arangodb/hooks/__init__.py diff --git a/airflow/providers/arangodb/hooks/arangodb.py b/providers/src/airflow/providers/arangodb/hooks/arangodb.py similarity index 100% rename from airflow/providers/arangodb/hooks/arangodb.py rename to providers/src/airflow/providers/arangodb/hooks/arangodb.py diff --git a/airflow/providers/atlassian/__init__.py b/providers/src/airflow/providers/arangodb/operators/__init__.py similarity index 100% rename from airflow/providers/atlassian/__init__.py rename to providers/src/airflow/providers/arangodb/operators/__init__.py diff --git a/airflow/providers/arangodb/operators/arangodb.py b/providers/src/airflow/providers/arangodb/operators/arangodb.py similarity index 100% rename from airflow/providers/arangodb/operators/arangodb.py rename to providers/src/airflow/providers/arangodb/operators/arangodb.py diff --git a/airflow/providers/arangodb/provider.yaml b/providers/src/airflow/providers/arangodb/provider.yaml similarity index 100% rename from airflow/providers/arangodb/provider.yaml rename to providers/src/airflow/providers/arangodb/provider.yaml diff --git a/airflow/providers/atlassian/jira/hooks/__init__.py b/providers/src/airflow/providers/arangodb/sensors/__init__.py similarity index 100% rename from airflow/providers/atlassian/jira/hooks/__init__.py rename to providers/src/airflow/providers/arangodb/sensors/__init__.py diff --git a/airflow/providers/arangodb/sensors/arangodb.py b/providers/src/airflow/providers/arangodb/sensors/arangodb.py similarity index 100% rename from airflow/providers/arangodb/sensors/arangodb.py rename to providers/src/airflow/providers/arangodb/sensors/arangodb.py diff --git 
a/airflow/providers/asana/.latest-doc-only-change.txt b/providers/src/airflow/providers/asana/.latest-doc-only-change.txt similarity index 100% rename from airflow/providers/asana/.latest-doc-only-change.txt rename to providers/src/airflow/providers/asana/.latest-doc-only-change.txt diff --git a/airflow/providers/asana/CHANGELOG.rst b/providers/src/airflow/providers/asana/CHANGELOG.rst similarity index 100% rename from airflow/providers/asana/CHANGELOG.rst rename to providers/src/airflow/providers/asana/CHANGELOG.rst diff --git a/airflow/providers/asana/README.md b/providers/src/airflow/providers/asana/README.md similarity index 100% rename from airflow/providers/asana/README.md rename to providers/src/airflow/providers/asana/README.md diff --git a/airflow/providers/asana/__init__.py b/providers/src/airflow/providers/asana/__init__.py similarity index 100% rename from airflow/providers/asana/__init__.py rename to providers/src/airflow/providers/asana/__init__.py diff --git a/airflow/providers/asana/hooks/__init__.py b/providers/src/airflow/providers/asana/hooks/__init__.py similarity index 100% rename from airflow/providers/asana/hooks/__init__.py rename to providers/src/airflow/providers/asana/hooks/__init__.py diff --git a/airflow/providers/asana/hooks/asana.py b/providers/src/airflow/providers/asana/hooks/asana.py similarity index 100% rename from airflow/providers/asana/hooks/asana.py rename to providers/src/airflow/providers/asana/hooks/asana.py diff --git a/airflow/providers/asana/operators/__init__.py b/providers/src/airflow/providers/asana/operators/__init__.py similarity index 100% rename from airflow/providers/asana/operators/__init__.py rename to providers/src/airflow/providers/asana/operators/__init__.py diff --git a/airflow/providers/asana/operators/asana_tasks.py b/providers/src/airflow/providers/asana/operators/asana_tasks.py similarity index 100% rename from airflow/providers/asana/operators/asana_tasks.py rename to providers/src/airflow/providers/asana/operators/asana_tasks.py diff --git a/airflow/providers/asana/provider.yaml b/providers/src/airflow/providers/asana/provider.yaml similarity index 100% rename from airflow/providers/asana/provider.yaml rename to providers/src/airflow/providers/asana/provider.yaml diff --git a/airflow/providers/atlassian/jira/.latest-doc-only-change.txt b/providers/src/airflow/providers/atlassian/jira/.latest-doc-only-change.txt similarity index 100% rename from airflow/providers/atlassian/jira/.latest-doc-only-change.txt rename to providers/src/airflow/providers/atlassian/jira/.latest-doc-only-change.txt diff --git a/airflow/providers/atlassian/jira/CHANGELOG.rst b/providers/src/airflow/providers/atlassian/jira/CHANGELOG.rst similarity index 100% rename from airflow/providers/atlassian/jira/CHANGELOG.rst rename to providers/src/airflow/providers/atlassian/jira/CHANGELOG.rst diff --git a/airflow/providers/atlassian/jira/__init__.py b/providers/src/airflow/providers/atlassian/jira/__init__.py similarity index 100% rename from airflow/providers/atlassian/jira/__init__.py rename to providers/src/airflow/providers/atlassian/jira/__init__.py diff --git a/airflow/providers/atlassian/jira/notifications/__init__.py b/providers/src/airflow/providers/atlassian/jira/hooks/__init__.py similarity index 100% rename from airflow/providers/atlassian/jira/notifications/__init__.py rename to providers/src/airflow/providers/atlassian/jira/hooks/__init__.py diff --git a/airflow/providers/atlassian/jira/hooks/jira.py 
b/providers/src/airflow/providers/atlassian/jira/hooks/jira.py similarity index 100% rename from airflow/providers/atlassian/jira/hooks/jira.py rename to providers/src/airflow/providers/atlassian/jira/hooks/jira.py diff --git a/airflow/providers/atlassian/jira/operators/__init__.py b/providers/src/airflow/providers/atlassian/jira/notifications/__init__.py similarity index 100% rename from airflow/providers/atlassian/jira/operators/__init__.py rename to providers/src/airflow/providers/atlassian/jira/notifications/__init__.py diff --git a/airflow/providers/atlassian/jira/notifications/jira.py b/providers/src/airflow/providers/atlassian/jira/notifications/jira.py similarity index 100% rename from airflow/providers/atlassian/jira/notifications/jira.py rename to providers/src/airflow/providers/atlassian/jira/notifications/jira.py diff --git a/airflow/providers/atlassian/jira/sensors/__init__.py b/providers/src/airflow/providers/atlassian/jira/operators/__init__.py similarity index 100% rename from airflow/providers/atlassian/jira/sensors/__init__.py rename to providers/src/airflow/providers/atlassian/jira/operators/__init__.py diff --git a/airflow/providers/atlassian/jira/operators/jira.py b/providers/src/airflow/providers/atlassian/jira/operators/jira.py similarity index 100% rename from airflow/providers/atlassian/jira/operators/jira.py rename to providers/src/airflow/providers/atlassian/jira/operators/jira.py diff --git a/airflow/providers/atlassian/jira/provider.yaml b/providers/src/airflow/providers/atlassian/jira/provider.yaml similarity index 100% rename from airflow/providers/atlassian/jira/provider.yaml rename to providers/src/airflow/providers/atlassian/jira/provider.yaml diff --git a/airflow/providers/celery/cli/__init__.py b/providers/src/airflow/providers/atlassian/jira/sensors/__init__.py similarity index 100% rename from airflow/providers/celery/cli/__init__.py rename to providers/src/airflow/providers/atlassian/jira/sensors/__init__.py diff --git a/airflow/providers/atlassian/jira/sensors/jira.py b/providers/src/airflow/providers/atlassian/jira/sensors/jira.py similarity index 100% rename from airflow/providers/atlassian/jira/sensors/jira.py rename to providers/src/airflow/providers/atlassian/jira/sensors/jira.py diff --git a/airflow/providers/celery/.latest-doc-only-change.txt b/providers/src/airflow/providers/celery/.latest-doc-only-change.txt similarity index 100% rename from airflow/providers/celery/.latest-doc-only-change.txt rename to providers/src/airflow/providers/celery/.latest-doc-only-change.txt diff --git a/airflow/providers/celery/CHANGELOG.rst b/providers/src/airflow/providers/celery/CHANGELOG.rst similarity index 100% rename from airflow/providers/celery/CHANGELOG.rst rename to providers/src/airflow/providers/celery/CHANGELOG.rst diff --git a/airflow/providers/celery/__init__.py b/providers/src/airflow/providers/celery/__init__.py similarity index 100% rename from airflow/providers/celery/__init__.py rename to providers/src/airflow/providers/celery/__init__.py diff --git a/airflow/providers/cncf/kubernetes/backcompat/__init__.py b/providers/src/airflow/providers/celery/cli/__init__.py similarity index 100% rename from airflow/providers/cncf/kubernetes/backcompat/__init__.py rename to providers/src/airflow/providers/celery/cli/__init__.py diff --git a/airflow/providers/celery/cli/celery_command.py b/providers/src/airflow/providers/celery/cli/celery_command.py similarity index 100% rename from airflow/providers/celery/cli/celery_command.py rename to 
providers/src/airflow/providers/celery/cli/celery_command.py diff --git a/airflow/providers/celery/executors/__init__.py b/providers/src/airflow/providers/celery/executors/__init__.py similarity index 100% rename from airflow/providers/celery/executors/__init__.py rename to providers/src/airflow/providers/celery/executors/__init__.py diff --git a/airflow/providers/celery/executors/celery_executor.py b/providers/src/airflow/providers/celery/executors/celery_executor.py similarity index 100% rename from airflow/providers/celery/executors/celery_executor.py rename to providers/src/airflow/providers/celery/executors/celery_executor.py diff --git a/airflow/providers/celery/executors/celery_executor_utils.py b/providers/src/airflow/providers/celery/executors/celery_executor_utils.py similarity index 100% rename from airflow/providers/celery/executors/celery_executor_utils.py rename to providers/src/airflow/providers/celery/executors/celery_executor_utils.py diff --git a/airflow/providers/celery/executors/celery_kubernetes_executor.py b/providers/src/airflow/providers/celery/executors/celery_kubernetes_executor.py similarity index 100% rename from airflow/providers/celery/executors/celery_kubernetes_executor.py rename to providers/src/airflow/providers/celery/executors/celery_kubernetes_executor.py diff --git a/airflow/providers/celery/executors/default_celery.py b/providers/src/airflow/providers/celery/executors/default_celery.py similarity index 100% rename from airflow/providers/celery/executors/default_celery.py rename to providers/src/airflow/providers/celery/executors/default_celery.py diff --git a/airflow/providers/celery/provider.yaml b/providers/src/airflow/providers/celery/provider.yaml similarity index 100% rename from airflow/providers/celery/provider.yaml rename to providers/src/airflow/providers/celery/provider.yaml diff --git a/airflow/providers/celery/sensors/__init__.py b/providers/src/airflow/providers/celery/sensors/__init__.py similarity index 100% rename from airflow/providers/celery/sensors/__init__.py rename to providers/src/airflow/providers/celery/sensors/__init__.py diff --git a/airflow/providers/celery/sensors/celery_queue.py b/providers/src/airflow/providers/celery/sensors/celery_queue.py similarity index 100% rename from airflow/providers/celery/sensors/celery_queue.py rename to providers/src/airflow/providers/celery/sensors/celery_queue.py diff --git a/airflow/providers/cloudant/.latest-doc-only-change.txt b/providers/src/airflow/providers/cloudant/.latest-doc-only-change.txt similarity index 100% rename from airflow/providers/cloudant/.latest-doc-only-change.txt rename to providers/src/airflow/providers/cloudant/.latest-doc-only-change.txt diff --git a/airflow/providers/cloudant/CHANGELOG.rst b/providers/src/airflow/providers/cloudant/CHANGELOG.rst similarity index 100% rename from airflow/providers/cloudant/CHANGELOG.rst rename to providers/src/airflow/providers/cloudant/CHANGELOG.rst diff --git a/airflow/providers/cloudant/__init__.py b/providers/src/airflow/providers/cloudant/__init__.py similarity index 100% rename from airflow/providers/cloudant/__init__.py rename to providers/src/airflow/providers/cloudant/__init__.py diff --git a/airflow/providers/cloudant/cloudant_fake.py b/providers/src/airflow/providers/cloudant/cloudant_fake.py similarity index 100% rename from airflow/providers/cloudant/cloudant_fake.py rename to providers/src/airflow/providers/cloudant/cloudant_fake.py diff --git a/airflow/providers/cloudant/hooks/__init__.py 
b/providers/src/airflow/providers/cloudant/hooks/__init__.py similarity index 100% rename from airflow/providers/cloudant/hooks/__init__.py rename to providers/src/airflow/providers/cloudant/hooks/__init__.py diff --git a/airflow/providers/cloudant/hooks/cloudant.py b/providers/src/airflow/providers/cloudant/hooks/cloudant.py similarity index 100% rename from airflow/providers/cloudant/hooks/cloudant.py rename to providers/src/airflow/providers/cloudant/hooks/cloudant.py diff --git a/airflow/providers/cloudant/provider.yaml b/providers/src/airflow/providers/cloudant/provider.yaml similarity index 100% rename from airflow/providers/cloudant/provider.yaml rename to providers/src/airflow/providers/cloudant/provider.yaml diff --git a/airflow/providers/cncf/kubernetes/.latest-doc-only-change.txt b/providers/src/airflow/providers/cncf/kubernetes/.latest-doc-only-change.txt similarity index 100% rename from airflow/providers/cncf/kubernetes/.latest-doc-only-change.txt rename to providers/src/airflow/providers/cncf/kubernetes/.latest-doc-only-change.txt diff --git a/airflow/providers/cncf/kubernetes/CHANGELOG.rst b/providers/src/airflow/providers/cncf/kubernetes/CHANGELOG.rst similarity index 100% rename from airflow/providers/cncf/kubernetes/CHANGELOG.rst rename to providers/src/airflow/providers/cncf/kubernetes/CHANGELOG.rst diff --git a/airflow/providers/cncf/kubernetes/__init__.py b/providers/src/airflow/providers/cncf/kubernetes/__init__.py similarity index 100% rename from airflow/providers/cncf/kubernetes/__init__.py rename to providers/src/airflow/providers/cncf/kubernetes/__init__.py diff --git a/airflow/providers/cncf/kubernetes/cli/__init__.py b/providers/src/airflow/providers/cncf/kubernetes/backcompat/__init__.py similarity index 100% rename from airflow/providers/cncf/kubernetes/cli/__init__.py rename to providers/src/airflow/providers/cncf/kubernetes/backcompat/__init__.py diff --git a/airflow/providers/cncf/kubernetes/backcompat/backwards_compat_converters.py b/providers/src/airflow/providers/cncf/kubernetes/backcompat/backwards_compat_converters.py similarity index 100% rename from airflow/providers/cncf/kubernetes/backcompat/backwards_compat_converters.py rename to providers/src/airflow/providers/cncf/kubernetes/backcompat/backwards_compat_converters.py diff --git a/airflow/providers/cncf/kubernetes/callbacks.py b/providers/src/airflow/providers/cncf/kubernetes/callbacks.py similarity index 100% rename from airflow/providers/cncf/kubernetes/callbacks.py rename to providers/src/airflow/providers/cncf/kubernetes/callbacks.py diff --git a/airflow/providers/cncf/kubernetes/hooks/__init__.py b/providers/src/airflow/providers/cncf/kubernetes/cli/__init__.py similarity index 100% rename from airflow/providers/cncf/kubernetes/hooks/__init__.py rename to providers/src/airflow/providers/cncf/kubernetes/cli/__init__.py diff --git a/airflow/providers/cncf/kubernetes/cli/kubernetes_command.py b/providers/src/airflow/providers/cncf/kubernetes/cli/kubernetes_command.py similarity index 100% rename from airflow/providers/cncf/kubernetes/cli/kubernetes_command.py rename to providers/src/airflow/providers/cncf/kubernetes/cli/kubernetes_command.py diff --git a/airflow/providers/cncf/__init__.py b/providers/src/airflow/providers/cncf/kubernetes/decorators/__init__.py similarity index 100% rename from airflow/providers/cncf/__init__.py rename to providers/src/airflow/providers/cncf/kubernetes/decorators/__init__.py diff --git a/airflow/providers/cncf/kubernetes/decorators/kubernetes.py 
b/providers/src/airflow/providers/cncf/kubernetes/decorators/kubernetes.py similarity index 100% rename from airflow/providers/cncf/kubernetes/decorators/kubernetes.py rename to providers/src/airflow/providers/cncf/kubernetes/decorators/kubernetes.py diff --git a/airflow/providers/cncf/kubernetes/decorators/__init__.py b/providers/src/airflow/providers/cncf/kubernetes/executors/__init__.py similarity index 100% rename from airflow/providers/cncf/kubernetes/decorators/__init__.py rename to providers/src/airflow/providers/cncf/kubernetes/executors/__init__.py diff --git a/airflow/providers/cncf/kubernetes/executors/kubernetes_executor.py b/providers/src/airflow/providers/cncf/kubernetes/executors/kubernetes_executor.py similarity index 100% rename from airflow/providers/cncf/kubernetes/executors/kubernetes_executor.py rename to providers/src/airflow/providers/cncf/kubernetes/executors/kubernetes_executor.py diff --git a/airflow/providers/cncf/kubernetes/executors/kubernetes_executor_types.py b/providers/src/airflow/providers/cncf/kubernetes/executors/kubernetes_executor_types.py similarity index 100% rename from airflow/providers/cncf/kubernetes/executors/kubernetes_executor_types.py rename to providers/src/airflow/providers/cncf/kubernetes/executors/kubernetes_executor_types.py diff --git a/airflow/providers/cncf/kubernetes/executors/kubernetes_executor_utils.py b/providers/src/airflow/providers/cncf/kubernetes/executors/kubernetes_executor_utils.py similarity index 100% rename from airflow/providers/cncf/kubernetes/executors/kubernetes_executor_utils.py rename to providers/src/airflow/providers/cncf/kubernetes/executors/kubernetes_executor_utils.py diff --git a/airflow/providers/cncf/kubernetes/executors/local_kubernetes_executor.py b/providers/src/airflow/providers/cncf/kubernetes/executors/local_kubernetes_executor.py similarity index 100% rename from airflow/providers/cncf/kubernetes/executors/local_kubernetes_executor.py rename to providers/src/airflow/providers/cncf/kubernetes/executors/local_kubernetes_executor.py diff --git a/airflow/providers/cncf/kubernetes/kubernetes_executor_templates/__init__.py b/providers/src/airflow/providers/cncf/kubernetes/hooks/__init__.py similarity index 100% rename from airflow/providers/cncf/kubernetes/kubernetes_executor_templates/__init__.py rename to providers/src/airflow/providers/cncf/kubernetes/hooks/__init__.py diff --git a/airflow/providers/cncf/kubernetes/hooks/kubernetes.py b/providers/src/airflow/providers/cncf/kubernetes/hooks/kubernetes.py similarity index 100% rename from airflow/providers/cncf/kubernetes/hooks/kubernetes.py rename to providers/src/airflow/providers/cncf/kubernetes/hooks/kubernetes.py diff --git a/airflow/providers/cncf/kubernetes/k8s_model.py b/providers/src/airflow/providers/cncf/kubernetes/k8s_model.py similarity index 100% rename from airflow/providers/cncf/kubernetes/k8s_model.py rename to providers/src/airflow/providers/cncf/kubernetes/k8s_model.py diff --git a/airflow/providers/cncf/kubernetes/kube_client.py b/providers/src/airflow/providers/cncf/kubernetes/kube_client.py similarity index 100% rename from airflow/providers/cncf/kubernetes/kube_client.py rename to providers/src/airflow/providers/cncf/kubernetes/kube_client.py diff --git a/airflow/providers/cncf/kubernetes/kube_config.py b/providers/src/airflow/providers/cncf/kubernetes/kube_config.py similarity index 100% rename from airflow/providers/cncf/kubernetes/kube_config.py rename to providers/src/airflow/providers/cncf/kubernetes/kube_config.py diff --git 
a/airflow/providers/cncf/kubernetes/pod_template_file_examples/__init__.py b/providers/src/airflow/providers/cncf/kubernetes/kubernetes_executor_templates/__init__.py similarity index 100% rename from airflow/providers/cncf/kubernetes/pod_template_file_examples/__init__.py rename to providers/src/airflow/providers/cncf/kubernetes/kubernetes_executor_templates/__init__.py diff --git a/airflow/providers/cncf/kubernetes/kubernetes_executor_templates/basic_template.yaml b/providers/src/airflow/providers/cncf/kubernetes/kubernetes_executor_templates/basic_template.yaml similarity index 100% rename from airflow/providers/cncf/kubernetes/kubernetes_executor_templates/basic_template.yaml rename to providers/src/airflow/providers/cncf/kubernetes/kubernetes_executor_templates/basic_template.yaml diff --git a/airflow/providers/cncf/kubernetes/kubernetes_helper_functions.py b/providers/src/airflow/providers/cncf/kubernetes/kubernetes_helper_functions.py similarity index 100% rename from airflow/providers/cncf/kubernetes/kubernetes_helper_functions.py rename to providers/src/airflow/providers/cncf/kubernetes/kubernetes_helper_functions.py diff --git a/airflow/providers/cncf/kubernetes/executors/__init__.py b/providers/src/airflow/providers/cncf/kubernetes/operators/__init__.py similarity index 100% rename from airflow/providers/cncf/kubernetes/executors/__init__.py rename to providers/src/airflow/providers/cncf/kubernetes/operators/__init__.py diff --git a/airflow/providers/cncf/kubernetes/operators/custom_object_launcher.py b/providers/src/airflow/providers/cncf/kubernetes/operators/custom_object_launcher.py similarity index 100% rename from airflow/providers/cncf/kubernetes/operators/custom_object_launcher.py rename to providers/src/airflow/providers/cncf/kubernetes/operators/custom_object_launcher.py diff --git a/airflow/providers/cncf/kubernetes/operators/job.py b/providers/src/airflow/providers/cncf/kubernetes/operators/job.py similarity index 100% rename from airflow/providers/cncf/kubernetes/operators/job.py rename to providers/src/airflow/providers/cncf/kubernetes/operators/job.py diff --git a/airflow/providers/cncf/kubernetes/operators/kubernetes_pod.py b/providers/src/airflow/providers/cncf/kubernetes/operators/kubernetes_pod.py similarity index 100% rename from airflow/providers/cncf/kubernetes/operators/kubernetes_pod.py rename to providers/src/airflow/providers/cncf/kubernetes/operators/kubernetes_pod.py diff --git a/airflow/providers/cncf/kubernetes/operators/pod.py b/providers/src/airflow/providers/cncf/kubernetes/operators/pod.py similarity index 100% rename from airflow/providers/cncf/kubernetes/operators/pod.py rename to providers/src/airflow/providers/cncf/kubernetes/operators/pod.py diff --git a/airflow/providers/cncf/kubernetes/operators/resource.py b/providers/src/airflow/providers/cncf/kubernetes/operators/resource.py similarity index 100% rename from airflow/providers/cncf/kubernetes/operators/resource.py rename to providers/src/airflow/providers/cncf/kubernetes/operators/resource.py diff --git a/airflow/providers/cncf/kubernetes/operators/spark_kubernetes.py b/providers/src/airflow/providers/cncf/kubernetes/operators/spark_kubernetes.py similarity index 100% rename from airflow/providers/cncf/kubernetes/operators/spark_kubernetes.py rename to providers/src/airflow/providers/cncf/kubernetes/operators/spark_kubernetes.py diff --git a/airflow/providers/cncf/kubernetes/pod_generator.py b/providers/src/airflow/providers/cncf/kubernetes/pod_generator.py similarity index 100% rename 
from airflow/providers/cncf/kubernetes/pod_generator.py rename to providers/src/airflow/providers/cncf/kubernetes/pod_generator.py diff --git a/airflow/providers/cncf/kubernetes/pod_generator_deprecated.py b/providers/src/airflow/providers/cncf/kubernetes/pod_generator_deprecated.py similarity index 100% rename from airflow/providers/cncf/kubernetes/pod_generator_deprecated.py rename to providers/src/airflow/providers/cncf/kubernetes/pod_generator_deprecated.py diff --git a/airflow/providers/cncf/kubernetes/pod_launcher_deprecated.py b/providers/src/airflow/providers/cncf/kubernetes/pod_launcher_deprecated.py similarity index 100% rename from airflow/providers/cncf/kubernetes/pod_launcher_deprecated.py rename to providers/src/airflow/providers/cncf/kubernetes/pod_launcher_deprecated.py diff --git a/airflow/providers/cncf/kubernetes/resource_convert/__init__.py b/providers/src/airflow/providers/cncf/kubernetes/pod_template_file_examples/__init__.py similarity index 100% rename from airflow/providers/cncf/kubernetes/resource_convert/__init__.py rename to providers/src/airflow/providers/cncf/kubernetes/pod_template_file_examples/__init__.py diff --git a/airflow/providers/cncf/kubernetes/pod_template_file_examples/dags_in_image_template.yaml b/providers/src/airflow/providers/cncf/kubernetes/pod_template_file_examples/dags_in_image_template.yaml similarity index 100% rename from airflow/providers/cncf/kubernetes/pod_template_file_examples/dags_in_image_template.yaml rename to providers/src/airflow/providers/cncf/kubernetes/pod_template_file_examples/dags_in_image_template.yaml diff --git a/airflow/providers/cncf/kubernetes/pod_template_file_examples/dags_in_volume_template.yaml b/providers/src/airflow/providers/cncf/kubernetes/pod_template_file_examples/dags_in_volume_template.yaml similarity index 100% rename from airflow/providers/cncf/kubernetes/pod_template_file_examples/dags_in_volume_template.yaml rename to providers/src/airflow/providers/cncf/kubernetes/pod_template_file_examples/dags_in_volume_template.yaml diff --git a/airflow/providers/cncf/kubernetes/pod_template_file_examples/git_sync_template.yaml b/providers/src/airflow/providers/cncf/kubernetes/pod_template_file_examples/git_sync_template.yaml similarity index 100% rename from airflow/providers/cncf/kubernetes/pod_template_file_examples/git_sync_template.yaml rename to providers/src/airflow/providers/cncf/kubernetes/pod_template_file_examples/git_sync_template.yaml diff --git a/airflow/providers/cncf/kubernetes/provider.yaml b/providers/src/airflow/providers/cncf/kubernetes/provider.yaml similarity index 100% rename from airflow/providers/cncf/kubernetes/provider.yaml rename to providers/src/airflow/providers/cncf/kubernetes/provider.yaml diff --git a/airflow/providers/cncf/kubernetes/python_kubernetes_script.jinja2 b/providers/src/airflow/providers/cncf/kubernetes/python_kubernetes_script.jinja2 similarity index 100% rename from airflow/providers/cncf/kubernetes/python_kubernetes_script.jinja2 rename to providers/src/airflow/providers/cncf/kubernetes/python_kubernetes_script.jinja2 diff --git a/airflow/providers/cncf/kubernetes/python_kubernetes_script.py b/providers/src/airflow/providers/cncf/kubernetes/python_kubernetes_script.py similarity index 100% rename from airflow/providers/cncf/kubernetes/python_kubernetes_script.py rename to providers/src/airflow/providers/cncf/kubernetes/python_kubernetes_script.py diff --git a/airflow/providers/cncf/kubernetes/sensors/__init__.py 
b/providers/src/airflow/providers/cncf/kubernetes/resource_convert/__init__.py similarity index 100% rename from airflow/providers/cncf/kubernetes/sensors/__init__.py rename to providers/src/airflow/providers/cncf/kubernetes/resource_convert/__init__.py diff --git a/airflow/providers/cncf/kubernetes/resource_convert/configmap.py b/providers/src/airflow/providers/cncf/kubernetes/resource_convert/configmap.py similarity index 100% rename from airflow/providers/cncf/kubernetes/resource_convert/configmap.py rename to providers/src/airflow/providers/cncf/kubernetes/resource_convert/configmap.py diff --git a/airflow/providers/cncf/kubernetes/resource_convert/env_variable.py b/providers/src/airflow/providers/cncf/kubernetes/resource_convert/env_variable.py similarity index 100% rename from airflow/providers/cncf/kubernetes/resource_convert/env_variable.py rename to providers/src/airflow/providers/cncf/kubernetes/resource_convert/env_variable.py diff --git a/airflow/providers/cncf/kubernetes/resource_convert/secret.py b/providers/src/airflow/providers/cncf/kubernetes/resource_convert/secret.py similarity index 100% rename from airflow/providers/cncf/kubernetes/resource_convert/secret.py rename to providers/src/airflow/providers/cncf/kubernetes/resource_convert/secret.py diff --git a/airflow/providers/cncf/kubernetes/secret.py b/providers/src/airflow/providers/cncf/kubernetes/secret.py similarity index 100% rename from airflow/providers/cncf/kubernetes/secret.py rename to providers/src/airflow/providers/cncf/kubernetes/secret.py diff --git a/airflow/providers/cncf/kubernetes/triggers/__init__.py b/providers/src/airflow/providers/cncf/kubernetes/sensors/__init__.py similarity index 100% rename from airflow/providers/cncf/kubernetes/triggers/__init__.py rename to providers/src/airflow/providers/cncf/kubernetes/sensors/__init__.py diff --git a/airflow/providers/cncf/kubernetes/sensors/spark_kubernetes.py b/providers/src/airflow/providers/cncf/kubernetes/sensors/spark_kubernetes.py similarity index 100% rename from airflow/providers/cncf/kubernetes/sensors/spark_kubernetes.py rename to providers/src/airflow/providers/cncf/kubernetes/sensors/spark_kubernetes.py diff --git a/airflow/providers/cncf/kubernetes/template_rendering.py b/providers/src/airflow/providers/cncf/kubernetes/template_rendering.py similarity index 100% rename from airflow/providers/cncf/kubernetes/template_rendering.py rename to providers/src/airflow/providers/cncf/kubernetes/template_rendering.py diff --git a/airflow/providers/cohere/operators/__init__.py b/providers/src/airflow/providers/cncf/kubernetes/triggers/__init__.py similarity index 100% rename from airflow/providers/cohere/operators/__init__.py rename to providers/src/airflow/providers/cncf/kubernetes/triggers/__init__.py diff --git a/airflow/providers/cncf/kubernetes/triggers/job.py b/providers/src/airflow/providers/cncf/kubernetes/triggers/job.py similarity index 100% rename from airflow/providers/cncf/kubernetes/triggers/job.py rename to providers/src/airflow/providers/cncf/kubernetes/triggers/job.py diff --git a/airflow/providers/cncf/kubernetes/triggers/kubernetes_pod.py b/providers/src/airflow/providers/cncf/kubernetes/triggers/kubernetes_pod.py similarity index 100% rename from airflow/providers/cncf/kubernetes/triggers/kubernetes_pod.py rename to providers/src/airflow/providers/cncf/kubernetes/triggers/kubernetes_pod.py diff --git a/airflow/providers/cncf/kubernetes/triggers/pod.py b/providers/src/airflow/providers/cncf/kubernetes/triggers/pod.py similarity index 
100% rename from airflow/providers/cncf/kubernetes/triggers/pod.py rename to providers/src/airflow/providers/cncf/kubernetes/triggers/pod.py diff --git a/airflow/providers/cncf/kubernetes/utils/__init__.py b/providers/src/airflow/providers/cncf/kubernetes/utils/__init__.py similarity index 100% rename from airflow/providers/cncf/kubernetes/utils/__init__.py rename to providers/src/airflow/providers/cncf/kubernetes/utils/__init__.py diff --git a/airflow/providers/cncf/kubernetes/utils/delete_from.py b/providers/src/airflow/providers/cncf/kubernetes/utils/delete_from.py similarity index 100% rename from airflow/providers/cncf/kubernetes/utils/delete_from.py rename to providers/src/airflow/providers/cncf/kubernetes/utils/delete_from.py diff --git a/airflow/providers/cncf/kubernetes/utils/k8s_resource_iterator.py b/providers/src/airflow/providers/cncf/kubernetes/utils/k8s_resource_iterator.py similarity index 100% rename from airflow/providers/cncf/kubernetes/utils/k8s_resource_iterator.py rename to providers/src/airflow/providers/cncf/kubernetes/utils/k8s_resource_iterator.py diff --git a/airflow/providers/cncf/kubernetes/utils/pod_manager.py b/providers/src/airflow/providers/cncf/kubernetes/utils/pod_manager.py similarity index 100% rename from airflow/providers/cncf/kubernetes/utils/pod_manager.py rename to providers/src/airflow/providers/cncf/kubernetes/utils/pod_manager.py diff --git a/airflow/providers/cncf/kubernetes/utils/xcom_sidecar.py b/providers/src/airflow/providers/cncf/kubernetes/utils/xcom_sidecar.py similarity index 100% rename from airflow/providers/cncf/kubernetes/utils/xcom_sidecar.py rename to providers/src/airflow/providers/cncf/kubernetes/utils/xcom_sidecar.py diff --git a/airflow/providers/cohere/.latest-doc-only-change.txt b/providers/src/airflow/providers/cohere/.latest-doc-only-change.txt similarity index 100% rename from airflow/providers/cohere/.latest-doc-only-change.txt rename to providers/src/airflow/providers/cohere/.latest-doc-only-change.txt diff --git a/airflow/providers/cohere/CHANGELOG.rst b/providers/src/airflow/providers/cohere/CHANGELOG.rst similarity index 100% rename from airflow/providers/cohere/CHANGELOG.rst rename to providers/src/airflow/providers/cohere/CHANGELOG.rst diff --git a/airflow/providers/cohere/__init__.py b/providers/src/airflow/providers/cohere/__init__.py similarity index 100% rename from airflow/providers/cohere/__init__.py rename to providers/src/airflow/providers/cohere/__init__.py diff --git a/airflow/providers/cohere/hooks/__init__.py b/providers/src/airflow/providers/cohere/hooks/__init__.py similarity index 100% rename from airflow/providers/cohere/hooks/__init__.py rename to providers/src/airflow/providers/cohere/hooks/__init__.py diff --git a/airflow/providers/cohere/hooks/cohere.py b/providers/src/airflow/providers/cohere/hooks/cohere.py similarity index 100% rename from airflow/providers/cohere/hooks/cohere.py rename to providers/src/airflow/providers/cohere/hooks/cohere.py diff --git a/airflow/providers/common/__init__.py b/providers/src/airflow/providers/cohere/operators/__init__.py similarity index 100% rename from airflow/providers/common/__init__.py rename to providers/src/airflow/providers/cohere/operators/__init__.py diff --git a/airflow/providers/cohere/operators/embedding.py b/providers/src/airflow/providers/cohere/operators/embedding.py similarity index 100% rename from airflow/providers/cohere/operators/embedding.py rename to providers/src/airflow/providers/cohere/operators/embedding.py diff --git 
a/airflow/providers/cohere/provider.yaml b/providers/src/airflow/providers/cohere/provider.yaml similarity index 100% rename from airflow/providers/cohere/provider.yaml rename to providers/src/airflow/providers/cohere/provider.yaml diff --git a/airflow/providers/common/compat/.latest-doc-only-change.txt b/providers/src/airflow/providers/common/compat/.latest-doc-only-change.txt similarity index 100% rename from airflow/providers/common/compat/.latest-doc-only-change.txt rename to providers/src/airflow/providers/common/compat/.latest-doc-only-change.txt diff --git a/airflow/providers/common/compat/CHANGELOG.rst b/providers/src/airflow/providers/common/compat/CHANGELOG.rst similarity index 100% rename from airflow/providers/common/compat/CHANGELOG.rst rename to providers/src/airflow/providers/common/compat/CHANGELOG.rst diff --git a/airflow/providers/common/compat/__init__.py b/providers/src/airflow/providers/common/compat/__init__.py similarity index 100% rename from airflow/providers/common/compat/__init__.py rename to providers/src/airflow/providers/common/compat/__init__.py diff --git a/airflow/providers/common/compat/assets/__init__.py b/providers/src/airflow/providers/common/compat/assets/__init__.py similarity index 100% rename from airflow/providers/common/compat/assets/__init__.py rename to providers/src/airflow/providers/common/compat/assets/__init__.py diff --git a/airflow/providers/common/compat/lineage/__init__.py b/providers/src/airflow/providers/common/compat/lineage/__init__.py similarity index 100% rename from airflow/providers/common/compat/lineage/__init__.py rename to providers/src/airflow/providers/common/compat/lineage/__init__.py diff --git a/airflow/providers/common/compat/lineage/hook.py b/providers/src/airflow/providers/common/compat/lineage/hook.py similarity index 100% rename from airflow/providers/common/compat/lineage/hook.py rename to providers/src/airflow/providers/common/compat/lineage/hook.py diff --git a/airflow/providers/common/compat/openlineage/__init__.py b/providers/src/airflow/providers/common/compat/openlineage/__init__.py similarity index 100% rename from airflow/providers/common/compat/openlineage/__init__.py rename to providers/src/airflow/providers/common/compat/openlineage/__init__.py diff --git a/airflow/providers/common/compat/openlineage/facet.py b/providers/src/airflow/providers/common/compat/openlineage/facet.py similarity index 100% rename from airflow/providers/common/compat/openlineage/facet.py rename to providers/src/airflow/providers/common/compat/openlineage/facet.py diff --git a/airflow/providers/common/compat/openlineage/utils/__init__.py b/providers/src/airflow/providers/common/compat/openlineage/utils/__init__.py similarity index 100% rename from airflow/providers/common/compat/openlineage/utils/__init__.py rename to providers/src/airflow/providers/common/compat/openlineage/utils/__init__.py diff --git a/airflow/providers/common/compat/openlineage/utils/utils.py b/providers/src/airflow/providers/common/compat/openlineage/utils/utils.py similarity index 100% rename from airflow/providers/common/compat/openlineage/utils/utils.py rename to providers/src/airflow/providers/common/compat/openlineage/utils/utils.py diff --git a/airflow/providers/common/compat/provider.yaml b/providers/src/airflow/providers/common/compat/provider.yaml similarity index 100% rename from airflow/providers/common/compat/provider.yaml rename to providers/src/airflow/providers/common/compat/provider.yaml diff --git 
a/airflow/providers/common/compat/security/__init__.py b/providers/src/airflow/providers/common/compat/security/__init__.py similarity index 100% rename from airflow/providers/common/compat/security/__init__.py rename to providers/src/airflow/providers/common/compat/security/__init__.py diff --git a/airflow/providers/common/compat/security/permissions.py b/providers/src/airflow/providers/common/compat/security/permissions.py similarity index 100% rename from airflow/providers/common/compat/security/permissions.py rename to providers/src/airflow/providers/common/compat/security/permissions.py diff --git a/airflow/providers/common/io/.latest-doc-only-change.txt b/providers/src/airflow/providers/common/io/.latest-doc-only-change.txt similarity index 100% rename from airflow/providers/common/io/.latest-doc-only-change.txt rename to providers/src/airflow/providers/common/io/.latest-doc-only-change.txt diff --git a/airflow/providers/common/io/CHANGELOG.rst b/providers/src/airflow/providers/common/io/CHANGELOG.rst similarity index 100% rename from airflow/providers/common/io/CHANGELOG.rst rename to providers/src/airflow/providers/common/io/CHANGELOG.rst diff --git a/airflow/providers/common/io/__init__.py b/providers/src/airflow/providers/common/io/__init__.py similarity index 100% rename from airflow/providers/common/io/__init__.py rename to providers/src/airflow/providers/common/io/__init__.py diff --git a/airflow/providers/common/io/assets/__init__.py b/providers/src/airflow/providers/common/io/assets/__init__.py similarity index 100% rename from airflow/providers/common/io/assets/__init__.py rename to providers/src/airflow/providers/common/io/assets/__init__.py diff --git a/airflow/providers/common/io/operators/__init__.py b/providers/src/airflow/providers/common/io/assets/assets/__init__.py similarity index 100% rename from airflow/providers/common/io/operators/__init__.py rename to providers/src/airflow/providers/common/io/assets/assets/__init__.py diff --git a/airflow/providers/common/io/assets/file.py b/providers/src/airflow/providers/common/io/assets/file.py similarity index 100% rename from airflow/providers/common/io/assets/file.py rename to providers/src/airflow/providers/common/io/assets/file.py diff --git a/airflow/providers/common/sql/hooks/__init__.py b/providers/src/airflow/providers/common/io/operators/__init__.py similarity index 100% rename from airflow/providers/common/sql/hooks/__init__.py rename to providers/src/airflow/providers/common/io/operators/__init__.py diff --git a/airflow/providers/common/io/operators/file_transfer.py b/providers/src/airflow/providers/common/io/operators/file_transfer.py similarity index 100% rename from airflow/providers/common/io/operators/file_transfer.py rename to providers/src/airflow/providers/common/io/operators/file_transfer.py diff --git a/airflow/providers/common/io/provider.yaml b/providers/src/airflow/providers/common/io/provider.yaml similarity index 100% rename from airflow/providers/common/io/provider.yaml rename to providers/src/airflow/providers/common/io/provider.yaml diff --git a/airflow/providers/common/io/xcom/__init__.py b/providers/src/airflow/providers/common/io/xcom/__init__.py similarity index 100% rename from airflow/providers/common/io/xcom/__init__.py rename to providers/src/airflow/providers/common/io/xcom/__init__.py diff --git a/airflow/providers/common/io/xcom/backend.py b/providers/src/airflow/providers/common/io/xcom/backend.py similarity index 100% rename from airflow/providers/common/io/xcom/backend.py rename 
to providers/src/airflow/providers/common/io/xcom/backend.py diff --git a/airflow/providers/common/sql/.latest-doc-only-change.txt b/providers/src/airflow/providers/common/sql/.latest-doc-only-change.txt similarity index 100% rename from airflow/providers/common/sql/.latest-doc-only-change.txt rename to providers/src/airflow/providers/common/sql/.latest-doc-only-change.txt diff --git a/airflow/providers/common/sql/CHANGELOG.rst b/providers/src/airflow/providers/common/sql/CHANGELOG.rst similarity index 100% rename from airflow/providers/common/sql/CHANGELOG.rst rename to providers/src/airflow/providers/common/sql/CHANGELOG.rst diff --git a/airflow/providers/common/sql/README_API.md b/providers/src/airflow/providers/common/sql/README_API.md similarity index 100% rename from airflow/providers/common/sql/README_API.md rename to providers/src/airflow/providers/common/sql/README_API.md diff --git a/airflow/providers/common/sql/__init__.py b/providers/src/airflow/providers/common/sql/__init__.py similarity index 100% rename from airflow/providers/common/sql/__init__.py rename to providers/src/airflow/providers/common/sql/__init__.py diff --git a/airflow/providers/common/sql/doc/adr/0001-record-architecture-decisions.md b/providers/src/airflow/providers/common/sql/doc/adr/0001-record-architecture-decisions.md similarity index 100% rename from airflow/providers/common/sql/doc/adr/0001-record-architecture-decisions.md rename to providers/src/airflow/providers/common/sql/doc/adr/0001-record-architecture-decisions.md diff --git a/airflow/providers/common/sql/doc/adr/0002-return-common-data-structure-from-dbapihook-derived-hooks.md b/providers/src/airflow/providers/common/sql/doc/adr/0002-return-common-data-structure-from-dbapihook-derived-hooks.md similarity index 100% rename from airflow/providers/common/sql/doc/adr/0002-return-common-data-structure-from-dbapihook-derived-hooks.md rename to providers/src/airflow/providers/common/sql/doc/adr/0002-return-common-data-structure-from-dbapihook-derived-hooks.md diff --git a/airflow/providers/common/sql/operators/__init__.py b/providers/src/airflow/providers/common/sql/hooks/__init__.py similarity index 100% rename from airflow/providers/common/sql/operators/__init__.py rename to providers/src/airflow/providers/common/sql/hooks/__init__.py diff --git a/airflow/providers/common/sql/hooks/sql.py b/providers/src/airflow/providers/common/sql/hooks/sql.py similarity index 99% rename from airflow/providers/common/sql/hooks/sql.py rename to providers/src/airflow/providers/common/sql/hooks/sql.py index 7983808d0d57..afb66ddd13a0 100644 --- a/airflow/providers/common/sql/hooks/sql.py +++ b/providers/src/airflow/providers/common/sql/hooks/sql.py @@ -190,7 +190,7 @@ def get_conn_id(self) -> str: return getattr(self, self.conn_name_attr) @cached_property - def placeholder(self): + def placeholder(self) -> str: placeholder = self.connection_extra.get("placeholder") if placeholder: if placeholder in SQL_PLACEHOLDERS: @@ -235,9 +235,11 @@ def connection_extra_lower(self) -> dict: """ return {k.lower(): v for k, v in self.connection_extra.items()} - def get_conn(self): + def get_conn(self) -> Any: """Return a connection object.""" db = self.connection + if self.connector is None: + raise RuntimeError(f"{type(self).__name__} didn't have `self.connector` set!") return self.connector.connect(host=db.host, port=db.port, username=db.login, schema=db.schema) def get_uri(self) -> str: @@ -570,7 +572,7 @@ def get_autocommit(self, conn) -> bool: """ return getattr(conn, 
"autocommit", False) and self.supports_autocommit - def get_cursor(self): + def get_cursor(self) -> Any: """Return a cursor.""" return self.get_conn().cursor() diff --git a/airflow/providers/common/sql/hooks/sql.pyi b/providers/src/airflow/providers/common/sql/hooks/sql.pyi similarity index 92% rename from airflow/providers/common/sql/hooks/sql.pyi rename to providers/src/airflow/providers/common/sql/hooks/sql.pyi index e54b03399141..0039733d966a 100644 --- a/airflow/providers/common/sql/hooks/sql.pyi +++ b/providers/src/airflow/providers/common/sql/hooks/sql.pyi @@ -31,19 +31,14 @@ Definition of the public interface for airflow.providers.common.sql.hooks.sql isort:skip_file """ -from _typeshed import Incomplete -from airflow.exceptions import ( - AirflowException as AirflowException, - AirflowOptionalProviderFeatureException as AirflowOptionalProviderFeatureException, - AirflowProviderDeprecationWarning as AirflowProviderDeprecationWarning, -) +from _typeshed import Incomplete as Incomplete from airflow.hooks.base import BaseHook as BaseHook from airflow.models import Connection as Connection from airflow.providers.openlineage.extractors import OperatorLineage as OperatorLineage from airflow.providers.openlineage.sqlparser import DatabaseInfo as DatabaseInfo from functools import cached_property as cached_property from pandas import DataFrame as DataFrame -from sqlalchemy.engine import Inspector, URL as URL +from sqlalchemy.engine import Inspector as Inspector, URL as URL from typing import Any, Callable, Generator, Iterable, Mapping, Protocol, Sequence, TypeVar, overload T = TypeVar("T") @@ -67,7 +62,7 @@ class DbApiHook(BaseHook): def __init__(self, *args, schema: str | None = None, log_sql: bool = True, **kwargs) -> None: ... def get_conn_id(self) -> str: ... @cached_property - def placeholder(self): ... + def placeholder(self) -> str: ... @property def connection(self) -> Connection: ... @connection.setter @@ -76,7 +71,7 @@ class DbApiHook(BaseHook): def connection_extra(self) -> dict: ... @cached_property def connection_extra_lower(self) -> dict: ... - def get_conn(self): ... + def get_conn(self) -> Any: ... def get_uri(self) -> str: ... @property def sqlalchemy_url(self) -> URL: ... @@ -123,7 +118,7 @@ class DbApiHook(BaseHook): ) -> tuple | list[tuple] | list[list[tuple] | tuple] | None: ... def set_autocommit(self, conn, autocommit) -> None: ... def get_autocommit(self, conn) -> bool: ... - def get_cursor(self): ... + def get_cursor(self) -> Any: ... def insert_rows( self, table, @@ -138,7 +133,7 @@ class DbApiHook(BaseHook): ): ... def bulk_dump(self, table, tmp_file) -> None: ... def bulk_load(self, table, tmp_file) -> None: ... - def test_connection(self): ... + def test_connection(self) -> None: ... def get_openlineage_database_info(self, connection) -> DatabaseInfo | None: ... def get_openlineage_database_dialect(self, connection) -> str: ... def get_openlineage_default_schema(self) -> str | None: ... 
diff --git a/airflow/providers/common/sql/sensors/__init__.py b/providers/src/airflow/providers/common/sql/operators/__init__.py similarity index 100% rename from airflow/providers/common/sql/sensors/__init__.py rename to providers/src/airflow/providers/common/sql/operators/__init__.py
diff --git a/airflow/providers/common/sql/operators/sql.py b/providers/src/airflow/providers/common/sql/operators/sql.py similarity index 100% rename from airflow/providers/common/sql/operators/sql.py rename to providers/src/airflow/providers/common/sql/operators/sql.py
diff --git a/airflow/providers/common/sql/operators/sql.pyi b/providers/src/airflow/providers/common/sql/operators/sql.pyi
similarity index 92%
rename from airflow/providers/common/sql/operators/sql.pyi
rename to providers/src/airflow/providers/common/sql/operators/sql.pyi
index 0a63ccaa7cc3..1b97cec5023c 100644
--- a/airflow/providers/common/sql/operators/sql.pyi
+++ b/providers/src/airflow/providers/common/sql/operators/sql.pyi
@@ -31,25 +31,13 @@
 Definition of the public interface for airflow.providers.common.sql.operators.sql
 isort:skip_file
 """
-from _typeshed import Incomplete
-from airflow.exceptions import (
-    AirflowException as AirflowException,
-    AirflowFailException as AirflowFailException,
-)
-from airflow.hooks.base import BaseHook as BaseHook
+from _typeshed import Incomplete as Incomplete
 from airflow.models import BaseOperator as BaseOperator, SkipMixin as SkipMixin
-from airflow.providers.common.sql.hooks.sql import (
-    DbApiHook as DbApiHook,
-    fetch_all_handler as fetch_all_handler,
-    return_single_query_results as return_single_query_results,
-)
+from airflow.providers.common.sql.hooks.sql import DbApiHook as DbApiHook
 from airflow.providers.openlineage.extractors import OperatorLineage as OperatorLineage
 from airflow.utils.context import Context as Context
-from airflow.utils.helpers import merge_dicts as merge_dicts
-from functools import cached_property as cached_property
 from typing import Any, Callable, Iterable, Mapping, Sequence, SupportsAbs
 
-def _parse_boolean(val: str) -> str | bool: ...
 def parse_boolean(val: str) -> str | bool: ...
 
 class BaseSQLOperator(BaseOperator):
@@ -97,7 +85,7 @@
         show_return_value_in_logs: bool = False,
         **kwargs,
     ) -> None: ...
-    def execute(self, context): ...
+    def execute(self, context) -> None: ...
     def prepare_template(self) -> None: ...
     def get_openlineage_facets_on_start(self) -> OperatorLineage | None: ...
     def get_openlineage_facets_on_complete(self, task_instance) -> OperatorLineage | None: ...
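
The operators/sql.pyi hunk above also trims the public surface: the private _parse_boolean disappears from the stub while parse_boolean stays, typed (val: str) -> str | bool. A sketch of what that signature implies, boolean-like strings collapse to bool and anything else is handed back unchanged; the accepted token lists below are an assumption for illustration, not a quote of the provider source:

def parse_boolean(val: str) -> str | bool:
    # Hypothetical mirror of the stubbed signature, not the provider's code.
    lowered = val.lower()
    if lowered in ("y", "yes", "t", "true", "on", "1"):
        return True
    if lowered in ("n", "no", "f", "false", "off", "0"):
        return False
    return val  # not boolean-like: return the original string


assert parse_boolean("True") is True
assert parse_boolean("off") is False
assert parse_boolean("maybe") == "maybe"
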
diff --git a/airflow/providers/common/sql/provider.yaml b/providers/src/airflow/providers/common/sql/provider.yaml similarity index 100% rename from airflow/providers/common/sql/provider.yaml rename to providers/src/airflow/providers/common/sql/provider.yaml
diff --git a/airflow/providers/databricks/plugins/__init__.py b/providers/src/airflow/providers/common/sql/sensors/__init__.py similarity index 100% rename from airflow/providers/databricks/plugins/__init__.py rename to providers/src/airflow/providers/common/sql/sensors/__init__.py
diff --git a/airflow/providers/common/sql/sensors/sql.py b/providers/src/airflow/providers/common/sql/sensors/sql.py similarity index 100% rename from airflow/providers/common/sql/sensors/sql.py rename to providers/src/airflow/providers/common/sql/sensors/sql.py
diff --git a/airflow/providers/common/sql/sensors/sql.pyi b/providers/src/airflow/providers/common/sql/sensors/sql.pyi
similarity index 91%
rename from airflow/providers/common/sql/sensors/sql.pyi
rename to providers/src/airflow/providers/common/sql/sensors/sql.pyi
index db92f6d6e02a..4dcbbd0edbc9 100644
--- a/airflow/providers/common/sql/sensors/sql.pyi
+++ b/providers/src/airflow/providers/common/sql/sensors/sql.pyi
@@ -31,10 +31,7 @@
 Definition of the public interface for airflow.providers.common.sql.sensors.sql
 isort:skip_file
 """
-from _typeshed import Incomplete
-from airflow.exceptions import AirflowException as AirflowException
-from airflow.hooks.base import BaseHook as BaseHook
-from airflow.providers.common.sql.hooks.sql import DbApiHook as DbApiHook
+from _typeshed import Incomplete as Incomplete
 from airflow.sensors.base import BaseSensorOperator as BaseSensorOperator
 from airflow.utils.context import Context as Context
 from typing import Any, Callable, Mapping, Sequence
diff --git a/airflow/providers/databricks/.latest-doc-only-change.txt b/providers/src/airflow/providers/databricks/.latest-doc-only-change.txt similarity index 100% rename from airflow/providers/databricks/.latest-doc-only-change.txt rename to providers/src/airflow/providers/databricks/.latest-doc-only-change.txt
diff --git a/airflow/providers/databricks/CHANGELOG.rst b/providers/src/airflow/providers/databricks/CHANGELOG.rst similarity index 100% rename from airflow/providers/databricks/CHANGELOG.rst rename to providers/src/airflow/providers/databricks/CHANGELOG.rst
diff --git a/airflow/providers/databricks/__init__.py b/providers/src/airflow/providers/databricks/__init__.py similarity index 100% rename from airflow/providers/databricks/__init__.py rename to providers/src/airflow/providers/databricks/__init__.py
diff --git a/airflow/providers/cncf/kubernetes/operators/__init__.py b/providers/src/airflow/providers/databricks/hooks/__init__.py similarity index 100% rename from airflow/providers/cncf/kubernetes/operators/__init__.py rename to providers/src/airflow/providers/databricks/hooks/__init__.py
diff --git a/airflow/providers/databricks/hooks/databricks.py b/providers/src/airflow/providers/databricks/hooks/databricks.py similarity index 100% rename from airflow/providers/databricks/hooks/databricks.py rename to providers/src/airflow/providers/databricks/hooks/databricks.py
diff --git a/airflow/providers/databricks/hooks/databricks_base.py b/providers/src/airflow/providers/databricks/hooks/databricks_base.py similarity index 100% rename from airflow/providers/databricks/hooks/databricks_base.py rename to providers/src/airflow/providers/databricks/hooks/databricks_base.py
diff --git
a/airflow/providers/databricks/hooks/databricks_sql.py b/providers/src/airflow/providers/databricks/hooks/databricks_sql.py similarity index 100% rename from airflow/providers/databricks/hooks/databricks_sql.py rename to providers/src/airflow/providers/databricks/hooks/databricks_sql.py diff --git a/airflow/providers/databricks/hooks/__init__.py b/providers/src/airflow/providers/databricks/operators/__init__.py similarity index 100% rename from airflow/providers/databricks/hooks/__init__.py rename to providers/src/airflow/providers/databricks/operators/__init__.py diff --git a/airflow/providers/databricks/operators/databricks.py b/providers/src/airflow/providers/databricks/operators/databricks.py similarity index 100% rename from airflow/providers/databricks/operators/databricks.py rename to providers/src/airflow/providers/databricks/operators/databricks.py diff --git a/airflow/providers/databricks/operators/databricks_repos.py b/providers/src/airflow/providers/databricks/operators/databricks_repos.py similarity index 100% rename from airflow/providers/databricks/operators/databricks_repos.py rename to providers/src/airflow/providers/databricks/operators/databricks_repos.py diff --git a/airflow/providers/databricks/operators/databricks_sql.py b/providers/src/airflow/providers/databricks/operators/databricks_sql.py similarity index 100% rename from airflow/providers/databricks/operators/databricks_sql.py rename to providers/src/airflow/providers/databricks/operators/databricks_sql.py diff --git a/airflow/providers/databricks/operators/databricks_workflow.py b/providers/src/airflow/providers/databricks/operators/databricks_workflow.py similarity index 100% rename from airflow/providers/databricks/operators/databricks_workflow.py rename to providers/src/airflow/providers/databricks/operators/databricks_workflow.py diff --git a/airflow/providers/databricks/sensors/__init__.py b/providers/src/airflow/providers/databricks/plugins/__init__.py similarity index 100% rename from airflow/providers/databricks/sensors/__init__.py rename to providers/src/airflow/providers/databricks/plugins/__init__.py diff --git a/airflow/providers/databricks/plugins/databricks_workflow.py b/providers/src/airflow/providers/databricks/plugins/databricks_workflow.py similarity index 100% rename from airflow/providers/databricks/plugins/databricks_workflow.py rename to providers/src/airflow/providers/databricks/plugins/databricks_workflow.py diff --git a/airflow/providers/databricks/provider.yaml b/providers/src/airflow/providers/databricks/provider.yaml similarity index 100% rename from airflow/providers/databricks/provider.yaml rename to providers/src/airflow/providers/databricks/provider.yaml diff --git a/airflow/providers/databricks/utils/__init__.py b/providers/src/airflow/providers/databricks/sensors/__init__.py similarity index 100% rename from airflow/providers/databricks/utils/__init__.py rename to providers/src/airflow/providers/databricks/sensors/__init__.py diff --git a/airflow/providers/databricks/sensors/databricks_partition.py b/providers/src/airflow/providers/databricks/sensors/databricks_partition.py similarity index 100% rename from airflow/providers/databricks/sensors/databricks_partition.py rename to providers/src/airflow/providers/databricks/sensors/databricks_partition.py diff --git a/airflow/providers/databricks/sensors/databricks_sql.py b/providers/src/airflow/providers/databricks/sensors/databricks_sql.py similarity index 100% rename from airflow/providers/databricks/sensors/databricks_sql.py 
rename to providers/src/airflow/providers/databricks/sensors/databricks_sql.py diff --git a/airflow/providers/databricks/operators/__init__.py b/providers/src/airflow/providers/databricks/triggers/__init__.py similarity index 100% rename from airflow/providers/databricks/operators/__init__.py rename to providers/src/airflow/providers/databricks/triggers/__init__.py diff --git a/airflow/providers/databricks/triggers/databricks.py b/providers/src/airflow/providers/databricks/triggers/databricks.py similarity index 100% rename from airflow/providers/databricks/triggers/databricks.py rename to providers/src/airflow/providers/databricks/triggers/databricks.py diff --git a/airflow/providers/dbt/__init__.py b/providers/src/airflow/providers/databricks/utils/__init__.py similarity index 100% rename from airflow/providers/dbt/__init__.py rename to providers/src/airflow/providers/databricks/utils/__init__.py diff --git a/airflow/providers/databricks/utils/databricks.py b/providers/src/airflow/providers/databricks/utils/databricks.py similarity index 100% rename from airflow/providers/databricks/utils/databricks.py rename to providers/src/airflow/providers/databricks/utils/databricks.py diff --git a/airflow/providers/datadog/.latest-doc-only-change.txt b/providers/src/airflow/providers/datadog/.latest-doc-only-change.txt similarity index 100% rename from airflow/providers/datadog/.latest-doc-only-change.txt rename to providers/src/airflow/providers/datadog/.latest-doc-only-change.txt diff --git a/airflow/providers/datadog/CHANGELOG.rst b/providers/src/airflow/providers/datadog/CHANGELOG.rst similarity index 100% rename from airflow/providers/datadog/CHANGELOG.rst rename to providers/src/airflow/providers/datadog/CHANGELOG.rst diff --git a/airflow/providers/datadog/__init__.py b/providers/src/airflow/providers/datadog/__init__.py similarity index 100% rename from airflow/providers/datadog/__init__.py rename to providers/src/airflow/providers/datadog/__init__.py diff --git a/airflow/providers/databricks/triggers/__init__.py b/providers/src/airflow/providers/datadog/hooks/__init__.py similarity index 100% rename from airflow/providers/databricks/triggers/__init__.py rename to providers/src/airflow/providers/datadog/hooks/__init__.py diff --git a/airflow/providers/datadog/hooks/datadog.py b/providers/src/airflow/providers/datadog/hooks/datadog.py similarity index 100% rename from airflow/providers/datadog/hooks/datadog.py rename to providers/src/airflow/providers/datadog/hooks/datadog.py diff --git a/airflow/providers/datadog/provider.yaml b/providers/src/airflow/providers/datadog/provider.yaml similarity index 100% rename from airflow/providers/datadog/provider.yaml rename to providers/src/airflow/providers/datadog/provider.yaml diff --git a/airflow/providers/datadog/hooks/__init__.py b/providers/src/airflow/providers/datadog/sensors/__init__.py similarity index 100% rename from airflow/providers/datadog/hooks/__init__.py rename to providers/src/airflow/providers/datadog/sensors/__init__.py diff --git a/airflow/providers/datadog/sensors/datadog.py b/providers/src/airflow/providers/datadog/sensors/datadog.py similarity index 100% rename from airflow/providers/datadog/sensors/datadog.py rename to providers/src/airflow/providers/datadog/sensors/datadog.py diff --git a/airflow/providers/dbt/cloud/.latest-doc-only-change.txt b/providers/src/airflow/providers/dbt/cloud/.latest-doc-only-change.txt similarity index 100% rename from airflow/providers/dbt/cloud/.latest-doc-only-change.txt rename to 
providers/src/airflow/providers/dbt/cloud/.latest-doc-only-change.txt
diff --git a/airflow/providers/dbt/cloud/CHANGELOG.rst b/providers/src/airflow/providers/dbt/cloud/CHANGELOG.rst similarity index 100% rename from airflow/providers/dbt/cloud/CHANGELOG.rst rename to providers/src/airflow/providers/dbt/cloud/CHANGELOG.rst
diff --git a/airflow/providers/dbt/cloud/__init__.py b/providers/src/airflow/providers/dbt/cloud/__init__.py similarity index 100% rename from airflow/providers/dbt/cloud/__init__.py rename to providers/src/airflow/providers/dbt/cloud/__init__.py
diff --git a/airflow/providers/dbt/cloud/hooks/__init__.py b/providers/src/airflow/providers/dbt/cloud/hooks/__init__.py similarity index 100% rename from airflow/providers/dbt/cloud/hooks/__init__.py rename to providers/src/airflow/providers/dbt/cloud/hooks/__init__.py
diff --git a/airflow/providers/dbt/cloud/hooks/dbt.py b/providers/src/airflow/providers/dbt/cloud/hooks/dbt.py
similarity index 99%
rename from airflow/providers/dbt/cloud/hooks/dbt.py
rename to providers/src/airflow/providers/dbt/cloud/hooks/dbt.py
index 4007054be69c..7c38001c2a76 100644
--- a/airflow/providers/dbt/cloud/hooks/dbt.py
+++ b/providers/src/airflow/providers/dbt/cloud/hooks/dbt.py
@@ -20,10 +20,11 @@
 import json
 import time
 import warnings
+from collections.abc import Sequence
 from enum import Enum
 from functools import cached_property, wraps
 from inspect import signature
-from typing import TYPE_CHECKING, Any, Callable, Sequence, Set, TypeVar, cast
+from typing import TYPE_CHECKING, Any, Callable, TypeVar, cast
 
 import aiohttp
 from asgiref.sync import sync_to_async
@@ -117,7 +118,7 @@ class DbtCloudJobRunStatus(Enum):
     @classmethod
     def check_is_valid(cls, statuses: int | Sequence[int] | set[int]):
         """Validate input statuses are a known value."""
-        if isinstance(statuses, (Sequence, Set)):
+        if isinstance(statuses, (Sequence, set)):
             for status in statuses:
                 cls(status)
         else:
diff --git a/airflow/providers/dbt/cloud/operators/__init__.py b/providers/src/airflow/providers/dbt/cloud/operators/__init__.py similarity index 100% rename from airflow/providers/dbt/cloud/operators/__init__.py rename to providers/src/airflow/providers/dbt/cloud/operators/__init__.py
diff --git a/airflow/providers/dbt/cloud/operators/dbt.py b/providers/src/airflow/providers/dbt/cloud/operators/dbt.py similarity index 100% rename from airflow/providers/dbt/cloud/operators/dbt.py rename to providers/src/airflow/providers/dbt/cloud/operators/dbt.py
diff --git a/airflow/providers/dbt/cloud/provider.yaml b/providers/src/airflow/providers/dbt/cloud/provider.yaml similarity index 100% rename from airflow/providers/dbt/cloud/provider.yaml rename to providers/src/airflow/providers/dbt/cloud/provider.yaml
diff --git a/airflow/providers/datadog/sensors/__init__.py b/providers/src/airflow/providers/dbt/cloud/sensors/__init__.py similarity index 100% rename from airflow/providers/datadog/sensors/__init__.py rename to providers/src/airflow/providers/dbt/cloud/sensors/__init__.py
diff --git a/airflow/providers/dbt/cloud/sensors/dbt.py b/providers/src/airflow/providers/dbt/cloud/sensors/dbt.py similarity index 100% rename from airflow/providers/dbt/cloud/sensors/dbt.py rename to providers/src/airflow/providers/dbt/cloud/sensors/dbt.py
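
The dbt.py hunk above is a small compatibility fix riding along with the move: Sequence now comes from collections.abc rather than the deprecated typing alias, and the isinstance check uses the builtin set instead of typing.Set. A runnable sketch of the resulting validation pattern; JobRunStatus and its values are stand-ins rather than the real DbtCloudJobRunStatus:

from __future__ import annotations

from collections.abc import Sequence
from enum import Enum


class JobRunStatus(Enum):  # hypothetical stand-in
    QUEUED = 1
    SUCCESS = 10


def check_is_valid(statuses: int | Sequence[int] | set[int]) -> None:
    # Containers are validated element by element, a bare int directly;
    # constructing the Enum raises ValueError for unknown values.
    if isinstance(statuses, (Sequence, set)):
        for status in statuses:
            JobRunStatus(status)
    else:
        JobRunStatus(statuses)


check_is_valid(10)       # ok
check_is_valid([1, 10])  # ok
check_is_valid({1})      # ok; check_is_valid(99) would raise ValueError
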
diff --git a/airflow/providers/dbt/cloud/triggers/__init__.py b/providers/src/airflow/providers/dbt/cloud/triggers/__init__.py similarity index 100% rename from airflow/providers/dbt/cloud/triggers/__init__.py rename to providers/src/airflow/providers/dbt/cloud/triggers/__init__.py
diff --git a/airflow/providers/dbt/cloud/triggers/dbt.py b/providers/src/airflow/providers/dbt/cloud/triggers/dbt.py similarity index 100% rename from airflow/providers/dbt/cloud/triggers/dbt.py rename to providers/src/airflow/providers/dbt/cloud/triggers/dbt.py
diff --git a/airflow/providers/dbt/cloud/utils/__init__.py b/providers/src/airflow/providers/dbt/cloud/utils/__init__.py similarity index 100% rename from airflow/providers/dbt/cloud/utils/__init__.py rename to providers/src/airflow/providers/dbt/cloud/utils/__init__.py
diff --git a/airflow/providers/dbt/cloud/utils/openlineage.py b/providers/src/airflow/providers/dbt/cloud/utils/openlineage.py similarity index 100% rename from airflow/providers/dbt/cloud/utils/openlineage.py rename to providers/src/airflow/providers/dbt/cloud/utils/openlineage.py
diff --git a/airflow/providers/dingding/.latest-doc-only-change.txt b/providers/src/airflow/providers/dingding/.latest-doc-only-change.txt similarity index 100% rename from airflow/providers/dingding/.latest-doc-only-change.txt rename to providers/src/airflow/providers/dingding/.latest-doc-only-change.txt
diff --git a/airflow/providers/dingding/CHANGELOG.rst b/providers/src/airflow/providers/dingding/CHANGELOG.rst similarity index 100% rename from airflow/providers/dingding/CHANGELOG.rst rename to providers/src/airflow/providers/dingding/CHANGELOG.rst
diff --git a/airflow/providers/dingding/__init__.py b/providers/src/airflow/providers/dingding/__init__.py similarity index 100% rename from airflow/providers/dingding/__init__.py rename to providers/src/airflow/providers/dingding/__init__.py
diff --git a/airflow/providers/dbt/cloud/sensors/__init__.py b/providers/src/airflow/providers/dingding/hooks/__init__.py similarity index 100% rename from airflow/providers/dbt/cloud/sensors/__init__.py rename to providers/src/airflow/providers/dingding/hooks/__init__.py
diff --git a/airflow/providers/dingding/hooks/dingding.py b/providers/src/airflow/providers/dingding/hooks/dingding.py similarity index 100% rename from airflow/providers/dingding/hooks/dingding.py rename to providers/src/airflow/providers/dingding/hooks/dingding.py
diff --git a/airflow/providers/dingding/hooks/__init__.py b/providers/src/airflow/providers/dingding/operators/__init__.py similarity index 100% rename from airflow/providers/dingding/hooks/__init__.py rename to providers/src/airflow/providers/dingding/operators/__init__.py
diff --git a/airflow/providers/dingding/operators/dingding.py b/providers/src/airflow/providers/dingding/operators/dingding.py similarity index 100% rename from airflow/providers/dingding/operators/dingding.py rename to providers/src/airflow/providers/dingding/operators/dingding.py
diff --git a/airflow/providers/dingding/provider.yaml b/providers/src/airflow/providers/dingding/provider.yaml similarity index 100% rename from airflow/providers/dingding/provider.yaml rename to providers/src/airflow/providers/dingding/provider.yaml
diff --git a/airflow/providers/discord/.latest-doc-only-change.txt b/providers/src/airflow/providers/discord/.latest-doc-only-change.txt similarity index 100% rename from airflow/providers/discord/.latest-doc-only-change.txt rename to providers/src/airflow/providers/discord/.latest-doc-only-change.txt
diff --git a/airflow/providers/discord/CHANGELOG.rst b/providers/src/airflow/providers/discord/CHANGELOG.rst similarity index 100% rename from airflow/providers/discord/CHANGELOG.rst rename to
providers/src/airflow/providers/discord/CHANGELOG.rst diff --git a/airflow/providers/discord/__init__.py b/providers/src/airflow/providers/discord/__init__.py similarity index 100% rename from airflow/providers/discord/__init__.py rename to providers/src/airflow/providers/discord/__init__.py diff --git a/airflow/providers/dingding/operators/__init__.py b/providers/src/airflow/providers/discord/hooks/__init__.py similarity index 100% rename from airflow/providers/dingding/operators/__init__.py rename to providers/src/airflow/providers/discord/hooks/__init__.py diff --git a/airflow/providers/discord/hooks/discord_webhook.py b/providers/src/airflow/providers/discord/hooks/discord_webhook.py similarity index 100% rename from airflow/providers/discord/hooks/discord_webhook.py rename to providers/src/airflow/providers/discord/hooks/discord_webhook.py diff --git a/airflow/providers/discord/hooks/__init__.py b/providers/src/airflow/providers/discord/notifications/__init__.py similarity index 100% rename from airflow/providers/discord/hooks/__init__.py rename to providers/src/airflow/providers/discord/notifications/__init__.py diff --git a/airflow/providers/discord/notifications/discord.py b/providers/src/airflow/providers/discord/notifications/discord.py similarity index 100% rename from airflow/providers/discord/notifications/discord.py rename to providers/src/airflow/providers/discord/notifications/discord.py diff --git a/airflow/providers/discord/notifications/__init__.py b/providers/src/airflow/providers/discord/operators/__init__.py similarity index 100% rename from airflow/providers/discord/notifications/__init__.py rename to providers/src/airflow/providers/discord/operators/__init__.py diff --git a/airflow/providers/discord/operators/discord_webhook.py b/providers/src/airflow/providers/discord/operators/discord_webhook.py similarity index 100% rename from airflow/providers/discord/operators/discord_webhook.py rename to providers/src/airflow/providers/discord/operators/discord_webhook.py diff --git a/airflow/providers/discord/provider.yaml b/providers/src/airflow/providers/discord/provider.yaml similarity index 100% rename from airflow/providers/discord/provider.yaml rename to providers/src/airflow/providers/discord/provider.yaml diff --git a/airflow/providers/docker/.latest-doc-only-change.txt b/providers/src/airflow/providers/docker/.latest-doc-only-change.txt similarity index 100% rename from airflow/providers/docker/.latest-doc-only-change.txt rename to providers/src/airflow/providers/docker/.latest-doc-only-change.txt diff --git a/airflow/providers/docker/CHANGELOG.rst b/providers/src/airflow/providers/docker/CHANGELOG.rst similarity index 100% rename from airflow/providers/docker/CHANGELOG.rst rename to providers/src/airflow/providers/docker/CHANGELOG.rst diff --git a/airflow/providers/docker/__init__.py b/providers/src/airflow/providers/docker/__init__.py similarity index 100% rename from airflow/providers/docker/__init__.py rename to providers/src/airflow/providers/docker/__init__.py diff --git a/airflow/providers/discord/operators/__init__.py b/providers/src/airflow/providers/docker/decorators/__init__.py similarity index 100% rename from airflow/providers/discord/operators/__init__.py rename to providers/src/airflow/providers/docker/decorators/__init__.py diff --git a/airflow/providers/docker/decorators/docker.py b/providers/src/airflow/providers/docker/decorators/docker.py similarity index 100% rename from airflow/providers/docker/decorators/docker.py rename to 
providers/src/airflow/providers/docker/decorators/docker.py diff --git a/airflow/providers/docker/exceptions.py b/providers/src/airflow/providers/docker/exceptions.py similarity index 100% rename from airflow/providers/docker/exceptions.py rename to providers/src/airflow/providers/docker/exceptions.py diff --git a/airflow/providers/docker/decorators/__init__.py b/providers/src/airflow/providers/docker/hooks/__init__.py similarity index 100% rename from airflow/providers/docker/decorators/__init__.py rename to providers/src/airflow/providers/docker/hooks/__init__.py diff --git a/airflow/providers/docker/hooks/docker.py b/providers/src/airflow/providers/docker/hooks/docker.py similarity index 100% rename from airflow/providers/docker/hooks/docker.py rename to providers/src/airflow/providers/docker/hooks/docker.py diff --git a/airflow/providers/docker/hooks/__init__.py b/providers/src/airflow/providers/docker/operators/__init__.py similarity index 100% rename from airflow/providers/docker/hooks/__init__.py rename to providers/src/airflow/providers/docker/operators/__init__.py diff --git a/airflow/providers/docker/operators/docker.py b/providers/src/airflow/providers/docker/operators/docker.py similarity index 100% rename from airflow/providers/docker/operators/docker.py rename to providers/src/airflow/providers/docker/operators/docker.py diff --git a/airflow/providers/docker/operators/docker_swarm.py b/providers/src/airflow/providers/docker/operators/docker_swarm.py similarity index 100% rename from airflow/providers/docker/operators/docker_swarm.py rename to providers/src/airflow/providers/docker/operators/docker_swarm.py diff --git a/airflow/providers/docker/provider.yaml b/providers/src/airflow/providers/docker/provider.yaml similarity index 100% rename from airflow/providers/docker/provider.yaml rename to providers/src/airflow/providers/docker/provider.yaml diff --git a/airflow/providers/edge/CHANGELOG.rst b/providers/src/airflow/providers/edge/CHANGELOG.rst similarity index 100% rename from airflow/providers/edge/CHANGELOG.rst rename to providers/src/airflow/providers/edge/CHANGELOG.rst diff --git a/airflow/providers/edge/__init__.py b/providers/src/airflow/providers/edge/__init__.py similarity index 100% rename from airflow/providers/edge/__init__.py rename to providers/src/airflow/providers/edge/__init__.py diff --git a/airflow/providers/edge/api_endpoints/__init__.py b/providers/src/airflow/providers/edge/api_endpoints/__init__.py similarity index 100% rename from airflow/providers/edge/api_endpoints/__init__.py rename to providers/src/airflow/providers/edge/api_endpoints/__init__.py diff --git a/airflow/providers/edge/api_endpoints/health_endpoint.py b/providers/src/airflow/providers/edge/api_endpoints/health_endpoint.py similarity index 100% rename from airflow/providers/edge/api_endpoints/health_endpoint.py rename to providers/src/airflow/providers/edge/api_endpoints/health_endpoint.py diff --git a/airflow/providers/edge/api_endpoints/rpc_api_endpoint.py b/providers/src/airflow/providers/edge/api_endpoints/rpc_api_endpoint.py similarity index 100% rename from airflow/providers/edge/api_endpoints/rpc_api_endpoint.py rename to providers/src/airflow/providers/edge/api_endpoints/rpc_api_endpoint.py diff --git a/airflow/providers/edge/cli/__init__.py b/providers/src/airflow/providers/edge/cli/__init__.py similarity index 100% rename from airflow/providers/edge/cli/__init__.py rename to providers/src/airflow/providers/edge/cli/__init__.py diff --git 
a/airflow/providers/edge/cli/edge_command.py b/providers/src/airflow/providers/edge/cli/edge_command.py similarity index 100% rename from airflow/providers/edge/cli/edge_command.py rename to providers/src/airflow/providers/edge/cli/edge_command.py diff --git a/airflow/providers/edge/example_dags/__init__.py b/providers/src/airflow/providers/edge/example_dags/__init__.py similarity index 100% rename from airflow/providers/edge/example_dags/__init__.py rename to providers/src/airflow/providers/edge/example_dags/__init__.py diff --git a/airflow/providers/edge/example_dags/integration_test.py b/providers/src/airflow/providers/edge/example_dags/integration_test.py similarity index 100% rename from airflow/providers/edge/example_dags/integration_test.py rename to providers/src/airflow/providers/edge/example_dags/integration_test.py diff --git a/airflow/providers/edge/models/__init__.py b/providers/src/airflow/providers/edge/models/__init__.py similarity index 100% rename from airflow/providers/edge/models/__init__.py rename to providers/src/airflow/providers/edge/models/__init__.py diff --git a/airflow/providers/edge/models/edge_job.py b/providers/src/airflow/providers/edge/models/edge_job.py similarity index 100% rename from airflow/providers/edge/models/edge_job.py rename to providers/src/airflow/providers/edge/models/edge_job.py diff --git a/airflow/providers/edge/models/edge_logs.py b/providers/src/airflow/providers/edge/models/edge_logs.py similarity index 100% rename from airflow/providers/edge/models/edge_logs.py rename to providers/src/airflow/providers/edge/models/edge_logs.py diff --git a/airflow/providers/edge/models/edge_worker.py b/providers/src/airflow/providers/edge/models/edge_worker.py similarity index 100% rename from airflow/providers/edge/models/edge_worker.py rename to providers/src/airflow/providers/edge/models/edge_worker.py diff --git a/airflow/providers/edge/openapi/__init__.py b/providers/src/airflow/providers/edge/openapi/__init__.py similarity index 100% rename from airflow/providers/edge/openapi/__init__.py rename to providers/src/airflow/providers/edge/openapi/__init__.py diff --git a/airflow/providers/edge/openapi/edge_worker_api_v1.yaml b/providers/src/airflow/providers/edge/openapi/edge_worker_api_v1.yaml similarity index 100% rename from airflow/providers/edge/openapi/edge_worker_api_v1.yaml rename to providers/src/airflow/providers/edge/openapi/edge_worker_api_v1.yaml diff --git a/airflow/providers/edge/plugins/__init__.py b/providers/src/airflow/providers/edge/plugins/__init__.py similarity index 100% rename from airflow/providers/edge/plugins/__init__.py rename to providers/src/airflow/providers/edge/plugins/__init__.py diff --git a/airflow/providers/edge/plugins/edge_executor_plugin.py b/providers/src/airflow/providers/edge/plugins/edge_executor_plugin.py similarity index 100% rename from airflow/providers/edge/plugins/edge_executor_plugin.py rename to providers/src/airflow/providers/edge/plugins/edge_executor_plugin.py diff --git a/airflow/providers/edge/plugins/templates/edge_worker_hosts.html b/providers/src/airflow/providers/edge/plugins/templates/edge_worker_hosts.html similarity index 100% rename from airflow/providers/edge/plugins/templates/edge_worker_hosts.html rename to providers/src/airflow/providers/edge/plugins/templates/edge_worker_hosts.html diff --git a/airflow/providers/edge/plugins/templates/edge_worker_jobs.html b/providers/src/airflow/providers/edge/plugins/templates/edge_worker_jobs.html similarity index 100% rename from 
airflow/providers/edge/plugins/templates/edge_worker_jobs.html rename to providers/src/airflow/providers/edge/plugins/templates/edge_worker_jobs.html diff --git a/airflow/providers/edge/provider.yaml b/providers/src/airflow/providers/edge/provider.yaml similarity index 100% rename from airflow/providers/edge/provider.yaml rename to providers/src/airflow/providers/edge/provider.yaml diff --git a/airflow/providers/elasticsearch/.latest-doc-only-change.txt b/providers/src/airflow/providers/elasticsearch/.latest-doc-only-change.txt similarity index 100% rename from airflow/providers/elasticsearch/.latest-doc-only-change.txt rename to providers/src/airflow/providers/elasticsearch/.latest-doc-only-change.txt diff --git a/airflow/providers/elasticsearch/CHANGELOG.rst b/providers/src/airflow/providers/elasticsearch/CHANGELOG.rst similarity index 100% rename from airflow/providers/elasticsearch/CHANGELOG.rst rename to providers/src/airflow/providers/elasticsearch/CHANGELOG.rst diff --git a/airflow/providers/elasticsearch/__init__.py b/providers/src/airflow/providers/elasticsearch/__init__.py similarity index 100% rename from airflow/providers/elasticsearch/__init__.py rename to providers/src/airflow/providers/elasticsearch/__init__.py diff --git a/airflow/providers/docker/operators/__init__.py b/providers/src/airflow/providers/elasticsearch/hooks/__init__.py similarity index 100% rename from airflow/providers/docker/operators/__init__.py rename to providers/src/airflow/providers/elasticsearch/hooks/__init__.py diff --git a/airflow/providers/elasticsearch/hooks/elasticsearch.py b/providers/src/airflow/providers/elasticsearch/hooks/elasticsearch.py similarity index 100% rename from airflow/providers/elasticsearch/hooks/elasticsearch.py rename to providers/src/airflow/providers/elasticsearch/hooks/elasticsearch.py diff --git a/airflow/providers/elasticsearch/log/__init__.py b/providers/src/airflow/providers/elasticsearch/log/__init__.py similarity index 100% rename from airflow/providers/elasticsearch/log/__init__.py rename to providers/src/airflow/providers/elasticsearch/log/__init__.py diff --git a/airflow/providers/elasticsearch/log/es_json_formatter.py b/providers/src/airflow/providers/elasticsearch/log/es_json_formatter.py similarity index 100% rename from airflow/providers/elasticsearch/log/es_json_formatter.py rename to providers/src/airflow/providers/elasticsearch/log/es_json_formatter.py diff --git a/airflow/providers/elasticsearch/log/es_response.py b/providers/src/airflow/providers/elasticsearch/log/es_response.py similarity index 100% rename from airflow/providers/elasticsearch/log/es_response.py rename to providers/src/airflow/providers/elasticsearch/log/es_response.py diff --git a/airflow/providers/elasticsearch/log/es_task_handler.py b/providers/src/airflow/providers/elasticsearch/log/es_task_handler.py similarity index 100% rename from airflow/providers/elasticsearch/log/es_task_handler.py rename to providers/src/airflow/providers/elasticsearch/log/es_task_handler.py diff --git a/airflow/providers/elasticsearch/provider.yaml b/providers/src/airflow/providers/elasticsearch/provider.yaml similarity index 100% rename from airflow/providers/elasticsearch/provider.yaml rename to providers/src/airflow/providers/elasticsearch/provider.yaml diff --git a/airflow/providers/exasol/.latest-doc-only-change.txt b/providers/src/airflow/providers/exasol/.latest-doc-only-change.txt similarity index 100% rename from airflow/providers/exasol/.latest-doc-only-change.txt rename to 
providers/src/airflow/providers/exasol/.latest-doc-only-change.txt diff --git a/airflow/providers/exasol/CHANGELOG.rst b/providers/src/airflow/providers/exasol/CHANGELOG.rst similarity index 100% rename from airflow/providers/exasol/CHANGELOG.rst rename to providers/src/airflow/providers/exasol/CHANGELOG.rst diff --git a/airflow/providers/exasol/__init__.py b/providers/src/airflow/providers/exasol/__init__.py similarity index 100% rename from airflow/providers/exasol/__init__.py rename to providers/src/airflow/providers/exasol/__init__.py diff --git a/airflow/providers/elasticsearch/hooks/__init__.py b/providers/src/airflow/providers/exasol/hooks/__init__.py similarity index 100% rename from airflow/providers/elasticsearch/hooks/__init__.py rename to providers/src/airflow/providers/exasol/hooks/__init__.py diff --git a/airflow/providers/exasol/hooks/exasol.py b/providers/src/airflow/providers/exasol/hooks/exasol.py similarity index 100% rename from airflow/providers/exasol/hooks/exasol.py rename to providers/src/airflow/providers/exasol/hooks/exasol.py diff --git a/airflow/providers/exasol/hooks/__init__.py b/providers/src/airflow/providers/exasol/operators/__init__.py similarity index 100% rename from airflow/providers/exasol/hooks/__init__.py rename to providers/src/airflow/providers/exasol/operators/__init__.py diff --git a/airflow/providers/exasol/operators/exasol.py b/providers/src/airflow/providers/exasol/operators/exasol.py similarity index 100% rename from airflow/providers/exasol/operators/exasol.py rename to providers/src/airflow/providers/exasol/operators/exasol.py diff --git a/airflow/providers/exasol/provider.yaml b/providers/src/airflow/providers/exasol/provider.yaml similarity index 100% rename from airflow/providers/exasol/provider.yaml rename to providers/src/airflow/providers/exasol/provider.yaml diff --git a/airflow/providers/fab/CHANGELOG.rst b/providers/src/airflow/providers/fab/CHANGELOG.rst similarity index 100% rename from airflow/providers/fab/CHANGELOG.rst rename to providers/src/airflow/providers/fab/CHANGELOG.rst diff --git a/airflow/providers/fab/__init__.py b/providers/src/airflow/providers/fab/__init__.py similarity index 100% rename from airflow/providers/fab/__init__.py rename to providers/src/airflow/providers/fab/__init__.py diff --git a/airflow/providers/fab/alembic.ini b/providers/src/airflow/providers/fab/alembic.ini similarity index 100% rename from airflow/providers/fab/alembic.ini rename to providers/src/airflow/providers/fab/alembic.ini diff --git a/airflow/providers/exasol/operators/__init__.py b/providers/src/airflow/providers/fab/auth_manager/__init__.py similarity index 100% rename from airflow/providers/exasol/operators/__init__.py rename to providers/src/airflow/providers/fab/auth_manager/__init__.py diff --git a/airflow/providers/fab/auth_manager/__init__.py b/providers/src/airflow/providers/fab/auth_manager/api/__init__.py similarity index 100% rename from airflow/providers/fab/auth_manager/__init__.py rename to providers/src/airflow/providers/fab/auth_manager/api/__init__.py diff --git a/airflow/providers/fab/auth_manager/api/__init__.py b/providers/src/airflow/providers/fab/auth_manager/api/auth/__init__.py similarity index 100% rename from airflow/providers/fab/auth_manager/api/__init__.py rename to providers/src/airflow/providers/fab/auth_manager/api/auth/__init__.py diff --git a/airflow/providers/fab/auth_manager/api/auth/__init__.py b/providers/src/airflow/providers/fab/auth_manager/api/auth/backend/__init__.py similarity index 100% 
rename from airflow/providers/fab/auth_manager/api/auth/__init__.py rename to providers/src/airflow/providers/fab/auth_manager/api/auth/backend/__init__.py diff --git a/airflow/providers/fab/auth_manager/api/auth/backend/basic_auth.py b/providers/src/airflow/providers/fab/auth_manager/api/auth/backend/basic_auth.py similarity index 100% rename from airflow/providers/fab/auth_manager/api/auth/backend/basic_auth.py rename to providers/src/airflow/providers/fab/auth_manager/api/auth/backend/basic_auth.py diff --git a/airflow/providers/fab/auth_manager/api/auth/backend/kerberos_auth.py b/providers/src/airflow/providers/fab/auth_manager/api/auth/backend/kerberos_auth.py similarity index 100% rename from airflow/providers/fab/auth_manager/api/auth/backend/kerberos_auth.py rename to providers/src/airflow/providers/fab/auth_manager/api/auth/backend/kerberos_auth.py diff --git a/airflow/providers/fab/auth_manager/api_endpoints/__init__.py b/providers/src/airflow/providers/fab/auth_manager/api_endpoints/__init__.py similarity index 100% rename from airflow/providers/fab/auth_manager/api_endpoints/__init__.py rename to providers/src/airflow/providers/fab/auth_manager/api_endpoints/__init__.py diff --git a/airflow/providers/fab/auth_manager/api_endpoints/role_and_permission_endpoint.py b/providers/src/airflow/providers/fab/auth_manager/api_endpoints/role_and_permission_endpoint.py similarity index 100% rename from airflow/providers/fab/auth_manager/api_endpoints/role_and_permission_endpoint.py rename to providers/src/airflow/providers/fab/auth_manager/api_endpoints/role_and_permission_endpoint.py diff --git a/airflow/providers/fab/auth_manager/api_endpoints/user_endpoint.py b/providers/src/airflow/providers/fab/auth_manager/api_endpoints/user_endpoint.py similarity index 100% rename from airflow/providers/fab/auth_manager/api_endpoints/user_endpoint.py rename to providers/src/airflow/providers/fab/auth_manager/api_endpoints/user_endpoint.py diff --git a/airflow/providers/fab/auth_manager/api/auth/backend/__init__.py b/providers/src/airflow/providers/fab/auth_manager/cli_commands/__init__.py similarity index 100% rename from airflow/providers/fab/auth_manager/api/auth/backend/__init__.py rename to providers/src/airflow/providers/fab/auth_manager/cli_commands/__init__.py diff --git a/airflow/providers/fab/auth_manager/cli_commands/db_command.py b/providers/src/airflow/providers/fab/auth_manager/cli_commands/db_command.py similarity index 100% rename from airflow/providers/fab/auth_manager/cli_commands/db_command.py rename to providers/src/airflow/providers/fab/auth_manager/cli_commands/db_command.py diff --git a/airflow/providers/fab/auth_manager/cli_commands/definition.py b/providers/src/airflow/providers/fab/auth_manager/cli_commands/definition.py similarity index 100% rename from airflow/providers/fab/auth_manager/cli_commands/definition.py rename to providers/src/airflow/providers/fab/auth_manager/cli_commands/definition.py diff --git a/airflow/providers/fab/auth_manager/cli_commands/role_command.py b/providers/src/airflow/providers/fab/auth_manager/cli_commands/role_command.py similarity index 100% rename from airflow/providers/fab/auth_manager/cli_commands/role_command.py rename to providers/src/airflow/providers/fab/auth_manager/cli_commands/role_command.py diff --git a/airflow/providers/fab/auth_manager/cli_commands/sync_perm_command.py b/providers/src/airflow/providers/fab/auth_manager/cli_commands/sync_perm_command.py similarity index 100% rename from 
airflow/providers/fab/auth_manager/cli_commands/sync_perm_command.py rename to providers/src/airflow/providers/fab/auth_manager/cli_commands/sync_perm_command.py
diff --git a/airflow/providers/fab/auth_manager/cli_commands/user_command.py b/providers/src/airflow/providers/fab/auth_manager/cli_commands/user_command.py similarity index 100% rename from airflow/providers/fab/auth_manager/cli_commands/user_command.py rename to providers/src/airflow/providers/fab/auth_manager/cli_commands/user_command.py
diff --git a/airflow/providers/fab/auth_manager/cli_commands/utils.py b/providers/src/airflow/providers/fab/auth_manager/cli_commands/utils.py similarity index 100% rename from airflow/providers/fab/auth_manager/cli_commands/utils.py rename to providers/src/airflow/providers/fab/auth_manager/cli_commands/utils.py
diff --git a/airflow/providers/fab/auth_manager/cli_commands/__init__.py b/providers/src/airflow/providers/fab/auth_manager/decorators/__init__.py similarity index 100% rename from airflow/providers/fab/auth_manager/cli_commands/__init__.py rename to providers/src/airflow/providers/fab/auth_manager/decorators/__init__.py
diff --git a/airflow/providers/fab/auth_manager/decorators/auth.py b/providers/src/airflow/providers/fab/auth_manager/decorators/auth.py similarity index 100% rename from airflow/providers/fab/auth_manager/decorators/auth.py rename to providers/src/airflow/providers/fab/auth_manager/decorators/auth.py
diff --git a/airflow/providers/fab/auth_manager/fab_auth_manager.py b/providers/src/airflow/providers/fab/auth_manager/fab_auth_manager.py similarity index 100% rename from airflow/providers/fab/auth_manager/fab_auth_manager.py rename to providers/src/airflow/providers/fab/auth_manager/fab_auth_manager.py
diff --git a/airflow/providers/fab/auth_manager/models/__init__.py b/providers/src/airflow/providers/fab/auth_manager/models/__init__.py similarity index 100% rename from airflow/providers/fab/auth_manager/models/__init__.py rename to providers/src/airflow/providers/fab/auth_manager/models/__init__.py
diff --git a/airflow/providers/fab/auth_manager/models/anonymous_user.py b/providers/src/airflow/providers/fab/auth_manager/models/anonymous_user.py similarity index 100% rename from airflow/providers/fab/auth_manager/models/anonymous_user.py rename to providers/src/airflow/providers/fab/auth_manager/models/anonymous_user.py
diff --git a/airflow/providers/fab/auth_manager/models/db.py b/providers/src/airflow/providers/fab/auth_manager/models/db.py
similarity index 94%
rename from airflow/providers/fab/auth_manager/models/db.py
rename to providers/src/airflow/providers/fab/auth_manager/models/db.py
index efca4a104182..ce0efef55a1c 100644
--- a/airflow/providers/fab/auth_manager/models/db.py
+++ b/providers/src/airflow/providers/fab/auth_manager/models/db.py
@@ -16,16 +16,15 @@
 # under the License.
 from __future__ import annotations
 
-import os
+from pathlib import Path
 
-import airflow
 from airflow import settings
 from airflow.exceptions import AirflowException
 from airflow.providers.fab.auth_manager.models import metadata
 from airflow.utils.db import _offline_migration, print_happy_cat
 from airflow.utils.db_manager import BaseDBManager
 
-PACKAGE_DIR = os.path.dirname(airflow.__file__)
+PACKAGE_DIR = Path(__file__).parents[2]
 
 _REVISION_HEADS_MAP: dict[str, str] = {
     "1.4.0": "6709f7a774b9",
@@ -37,8 +36,8 @@ class FABDBManager(BaseDBManager):
     metadata = metadata
     version_table_name = "alembic_version_fab"
 
-    migration_dir = os.path.join(PACKAGE_DIR, "providers/fab/migrations")
-    alembic_file = os.path.join(PACKAGE_DIR, "providers/fab/alembic.ini")
+    migration_dir = (PACKAGE_DIR / "migrations").as_posix()
+    alembic_file = (PACKAGE_DIR / "alembic.ini").as_posix()
     supports_table_dropping = True
 
     def upgradedb(self, to_revision=None, from_revision=None, show_sql_only=False):
diff --git a/airflow/providers/fab/auth_manager/openapi/__init__.py b/providers/src/airflow/providers/fab/auth_manager/openapi/__init__.py similarity index 100% rename from airflow/providers/fab/auth_manager/openapi/__init__.py rename to providers/src/airflow/providers/fab/auth_manager/openapi/__init__.py
diff --git a/airflow/providers/fab/auth_manager/openapi/v1.yaml b/providers/src/airflow/providers/fab/auth_manager/openapi/v1.yaml similarity index 100% rename from airflow/providers/fab/auth_manager/openapi/v1.yaml rename to providers/src/airflow/providers/fab/auth_manager/openapi/v1.yaml
diff --git a/airflow/providers/fab/auth_manager/decorators/__init__.py b/providers/src/airflow/providers/fab/auth_manager/security_manager/__init__.py similarity index 100% rename from airflow/providers/fab/auth_manager/decorators/__init__.py rename to providers/src/airflow/providers/fab/auth_manager/security_manager/__init__.py
diff --git a/airflow/providers/fab/auth_manager/security_manager/constants.py b/providers/src/airflow/providers/fab/auth_manager/security_manager/constants.py similarity index 100% rename from airflow/providers/fab/auth_manager/security_manager/constants.py rename to providers/src/airflow/providers/fab/auth_manager/security_manager/constants.py
diff --git a/airflow/providers/fab/auth_manager/security_manager/override.py b/providers/src/airflow/providers/fab/auth_manager/security_manager/override.py similarity index 100% rename from airflow/providers/fab/auth_manager/security_manager/override.py rename to providers/src/airflow/providers/fab/auth_manager/security_manager/override.py
diff --git a/airflow/providers/fab/auth_manager/views/__init__.py b/providers/src/airflow/providers/fab/auth_manager/views/__init__.py similarity index 100% rename from airflow/providers/fab/auth_manager/views/__init__.py rename to providers/src/airflow/providers/fab/auth_manager/views/__init__.py
diff --git a/airflow/providers/fab/auth_manager/views/permissions.py b/providers/src/airflow/providers/fab/auth_manager/views/permissions.py similarity index 100% rename from airflow/providers/fab/auth_manager/views/permissions.py rename to providers/src/airflow/providers/fab/auth_manager/views/permissions.py
diff --git a/airflow/providers/fab/auth_manager/views/roles_list.py b/providers/src/airflow/providers/fab/auth_manager/views/roles_list.py similarity index 100% rename from airflow/providers/fab/auth_manager/views/roles_list.py rename to providers/src/airflow/providers/fab/auth_manager/views/roles_list.py
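
The db.py hunk above also changes how FABDBManager finds its Alembic assets: with the sources relocated under providers/src/, anchoring on airflow.__file__ no longer works, so PACKAGE_DIR is resolved relative to db.py itself. A sketch of the path arithmetic; the installation prefix is made up, and PurePosixPath is used only to keep the output deterministic:

from pathlib import PurePosixPath

# .../fab/auth_manager/models/db.py -> parents[2] climbs models,
# auth_manager, then fab, landing on the package directory itself.
db_py = PurePosixPath("/site-packages/airflow/providers/fab/auth_manager/models/db.py")
package_dir = db_py.parents[2]

print(package_dir)                              # /site-packages/airflow/providers/fab
print((package_dir / "migrations").as_posix())  # .../providers/fab/migrations
print((package_dir / "alembic.ini").as_posix()) # .../providers/fab/alembic.ini
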
diff --git a/airflow/providers/fab/auth_manager/views/user.py b/providers/src/airflow/providers/fab/auth_manager/views/user.py similarity index 100% rename from airflow/providers/fab/auth_manager/views/user.py rename to providers/src/airflow/providers/fab/auth_manager/views/user.py
diff --git a/airflow/providers/fab/auth_manager/views/user_edit.py b/providers/src/airflow/providers/fab/auth_manager/views/user_edit.py similarity index 100% rename from airflow/providers/fab/auth_manager/views/user_edit.py rename to providers/src/airflow/providers/fab/auth_manager/views/user_edit.py
diff --git a/airflow/providers/fab/auth_manager/views/user_stats.py b/providers/src/airflow/providers/fab/auth_manager/views/user_stats.py similarity index 100% rename from airflow/providers/fab/auth_manager/views/user_stats.py rename to providers/src/airflow/providers/fab/auth_manager/views/user_stats.py
diff --git a/airflow/providers/fab/migrations/README b/providers/src/airflow/providers/fab/migrations/README similarity index 100% rename from airflow/providers/fab/migrations/README rename to providers/src/airflow/providers/fab/migrations/README
diff --git a/airflow/providers/fab/migrations/__init__.py b/providers/src/airflow/providers/fab/migrations/__init__.py similarity index 100% rename from airflow/providers/fab/migrations/__init__.py rename to providers/src/airflow/providers/fab/migrations/__init__.py
diff --git a/airflow/providers/fab/migrations/env.py b/providers/src/airflow/providers/fab/migrations/env.py similarity index 100% rename from airflow/providers/fab/migrations/env.py rename to providers/src/airflow/providers/fab/migrations/env.py
diff --git a/airflow/providers/fab/migrations/script.py.mako b/providers/src/airflow/providers/fab/migrations/script.py.mako similarity index 100% rename from airflow/providers/fab/migrations/script.py.mako rename to providers/src/airflow/providers/fab/migrations/script.py.mako
diff --git a/airflow/providers/fab/migrations/versions/0001_1_4_0_placeholder_migration.py b/providers/src/airflow/providers/fab/migrations/versions/0001_1_4_0_placeholder_migration.py similarity index 100% rename from airflow/providers/fab/migrations/versions/0001_1_4_0_placeholder_migration.py rename to providers/src/airflow/providers/fab/migrations/versions/0001_1_4_0_placeholder_migration.py
diff --git a/airflow/providers/fab/migrations/versions/__init__.py b/providers/src/airflow/providers/fab/migrations/versions/__init__.py similarity index 100% rename from airflow/providers/fab/migrations/versions/__init__.py rename to providers/src/airflow/providers/fab/migrations/versions/__init__.py
diff --git a/airflow/providers/fab/provider.yaml b/providers/src/airflow/providers/fab/provider.yaml similarity index 100% rename from airflow/providers/fab/provider.yaml rename to providers/src/airflow/providers/fab/provider.yaml
diff --git a/airflow/providers/facebook/.latest-doc-only-change.txt b/providers/src/airflow/providers/facebook/.latest-doc-only-change.txt similarity index 100% rename from airflow/providers/facebook/.latest-doc-only-change.txt rename to providers/src/airflow/providers/facebook/.latest-doc-only-change.txt
diff --git a/airflow/providers/facebook/CHANGELOG.rst b/providers/src/airflow/providers/facebook/CHANGELOG.rst similarity index 100% rename from airflow/providers/facebook/CHANGELOG.rst rename to providers/src/airflow/providers/facebook/CHANGELOG.rst
diff --git a/airflow/providers/facebook/__init__.py b/providers/src/airflow/providers/facebook/__init__.py similarity index 100% rename from
airflow/providers/facebook/__init__.py
rename to providers/src/airflow/providers/facebook/__init__.py
diff --git a/airflow/providers/facebook/ads/__init__.py b/providers/src/airflow/providers/facebook/ads/__init__.py
similarity index 100%
rename from airflow/providers/facebook/ads/__init__.py
rename to providers/src/airflow/providers/facebook/ads/__init__.py
diff --git a/airflow/providers/facebook/ads/hooks/__init__.py b/providers/src/airflow/providers/facebook/ads/hooks/__init__.py
similarity index 100%
rename from airflow/providers/facebook/ads/hooks/__init__.py
rename to providers/src/airflow/providers/facebook/ads/hooks/__init__.py
diff --git a/airflow/providers/facebook/ads/hooks/ads.py b/providers/src/airflow/providers/facebook/ads/hooks/ads.py
similarity index 100%
rename from airflow/providers/facebook/ads/hooks/ads.py
rename to providers/src/airflow/providers/facebook/ads/hooks/ads.py
diff --git a/airflow/providers/facebook/provider.yaml b/providers/src/airflow/providers/facebook/provider.yaml
similarity index 100%
rename from airflow/providers/facebook/provider.yaml
rename to providers/src/airflow/providers/facebook/provider.yaml
diff --git a/airflow/providers/ftp/.latest-doc-only-change.txt b/providers/src/airflow/providers/ftp/.latest-doc-only-change.txt
similarity index 100%
rename from airflow/providers/ftp/.latest-doc-only-change.txt
rename to providers/src/airflow/providers/ftp/.latest-doc-only-change.txt
diff --git a/airflow/providers/ftp/CHANGELOG.rst b/providers/src/airflow/providers/ftp/CHANGELOG.rst
similarity index 100%
rename from airflow/providers/ftp/CHANGELOG.rst
rename to providers/src/airflow/providers/ftp/CHANGELOG.rst
diff --git a/airflow/providers/ftp/__init__.py b/providers/src/airflow/providers/ftp/__init__.py
similarity index 100%
rename from airflow/providers/ftp/__init__.py
rename to providers/src/airflow/providers/ftp/__init__.py
diff --git a/airflow/providers/fab/auth_manager/security_manager/__init__.py b/providers/src/airflow/providers/ftp/hooks/__init__.py
similarity index 100%
rename from airflow/providers/fab/auth_manager/security_manager/__init__.py
rename to providers/src/airflow/providers/ftp/hooks/__init__.py
diff --git a/airflow/providers/ftp/hooks/ftp.py b/providers/src/airflow/providers/ftp/hooks/ftp.py
similarity index 100%
rename from airflow/providers/ftp/hooks/ftp.py
rename to providers/src/airflow/providers/ftp/hooks/ftp.py
diff --git a/airflow/providers/ftp/operators/__init__.py b/providers/src/airflow/providers/ftp/operators/__init__.py
similarity index 100%
rename from airflow/providers/ftp/operators/__init__.py
rename to providers/src/airflow/providers/ftp/operators/__init__.py
diff --git a/airflow/providers/ftp/operators/ftp.py b/providers/src/airflow/providers/ftp/operators/ftp.py
similarity index 100%
rename from airflow/providers/ftp/operators/ftp.py
rename to providers/src/airflow/providers/ftp/operators/ftp.py
diff --git a/airflow/providers/ftp/provider.yaml b/providers/src/airflow/providers/ftp/provider.yaml
similarity index 100%
rename from airflow/providers/ftp/provider.yaml
rename to providers/src/airflow/providers/ftp/provider.yaml
diff --git a/airflow/providers/ftp/hooks/__init__.py b/providers/src/airflow/providers/ftp/sensors/__init__.py
similarity index 100%
rename from airflow/providers/ftp/hooks/__init__.py
rename to providers/src/airflow/providers/ftp/sensors/__init__.py
diff --git a/airflow/providers/ftp/sensors/ftp.py b/providers/src/airflow/providers/ftp/sensors/ftp.py
similarity index 100%
rename from airflow/providers/ftp/sensors/ftp.py
rename to providers/src/airflow/providers/ftp/sensors/ftp.py
diff --git a/airflow/providers/github/.latest-doc-only-change.txt b/providers/src/airflow/providers/github/.latest-doc-only-change.txt
similarity index 100%
rename from airflow/providers/github/.latest-doc-only-change.txt
rename to providers/src/airflow/providers/github/.latest-doc-only-change.txt
diff --git a/airflow/providers/github/CHANGELOG.rst b/providers/src/airflow/providers/github/CHANGELOG.rst
similarity index 100%
rename from airflow/providers/github/CHANGELOG.rst
rename to providers/src/airflow/providers/github/CHANGELOG.rst
diff --git a/airflow/providers/github/__init__.py b/providers/src/airflow/providers/github/__init__.py
similarity index 100%
rename from airflow/providers/github/__init__.py
rename to providers/src/airflow/providers/github/__init__.py
diff --git a/airflow/providers/github/hooks/__init__.py b/providers/src/airflow/providers/github/hooks/__init__.py
similarity index 100%
rename from airflow/providers/github/hooks/__init__.py
rename to providers/src/airflow/providers/github/hooks/__init__.py
diff --git a/airflow/providers/github/hooks/github.py b/providers/src/airflow/providers/github/hooks/github.py
similarity index 100%
rename from airflow/providers/github/hooks/github.py
rename to providers/src/airflow/providers/github/hooks/github.py
diff --git a/airflow/providers/github/operators/__init__.py b/providers/src/airflow/providers/github/operators/__init__.py
similarity index 100%
rename from airflow/providers/github/operators/__init__.py
rename to providers/src/airflow/providers/github/operators/__init__.py
diff --git a/airflow/providers/github/operators/github.py b/providers/src/airflow/providers/github/operators/github.py
similarity index 100%
rename from airflow/providers/github/operators/github.py
rename to providers/src/airflow/providers/github/operators/github.py
diff --git a/airflow/providers/github/provider.yaml b/providers/src/airflow/providers/github/provider.yaml
similarity index 100%
rename from airflow/providers/github/provider.yaml
rename to providers/src/airflow/providers/github/provider.yaml
diff --git a/airflow/providers/github/sensors/__init__.py b/providers/src/airflow/providers/github/sensors/__init__.py
similarity index 100%
rename from airflow/providers/github/sensors/__init__.py
rename to providers/src/airflow/providers/github/sensors/__init__.py
diff --git a/airflow/providers/github/sensors/github.py b/providers/src/airflow/providers/github/sensors/github.py
similarity index 100%
rename from airflow/providers/github/sensors/github.py
rename to providers/src/airflow/providers/github/sensors/github.py
diff --git a/airflow/providers/google/.latest-doc-only-change.txt b/providers/src/airflow/providers/google/.latest-doc-only-change.txt
similarity index 100%
rename from airflow/providers/google/.latest-doc-only-change.txt
rename to providers/src/airflow/providers/google/.latest-doc-only-change.txt
diff --git a/airflow/providers/google/CHANGELOG.rst b/providers/src/airflow/providers/google/CHANGELOG.rst
similarity index 100%
rename from airflow/providers/google/CHANGELOG.rst
rename to providers/src/airflow/providers/google/CHANGELOG.rst
diff --git a/airflow/providers/google/__init__.py b/providers/src/airflow/providers/google/__init__.py
similarity index 100%
rename from airflow/providers/google/__init__.py
rename to providers/src/airflow/providers/google/__init__.py
diff --git a/airflow/providers/google/ads/.gitignore b/providers/src/airflow/providers/google/ads/.gitignore
similarity index 100%
rename from airflow/providers/google/ads/.gitignore
rename to providers/src/airflow/providers/google/ads/.gitignore
diff --git a/airflow/providers/google/ads/__init__.py b/providers/src/airflow/providers/google/ads/__init__.py
similarity index 100%
rename from airflow/providers/google/ads/__init__.py
rename to providers/src/airflow/providers/google/ads/__init__.py
diff --git a/airflow/providers/google/ads/hooks/__init__.py b/providers/src/airflow/providers/google/ads/hooks/__init__.py
similarity index 100%
rename from airflow/providers/google/ads/hooks/__init__.py
rename to providers/src/airflow/providers/google/ads/hooks/__init__.py
diff --git a/airflow/providers/google/ads/hooks/ads.py b/providers/src/airflow/providers/google/ads/hooks/ads.py
similarity index 100%
rename from airflow/providers/google/ads/hooks/ads.py
rename to providers/src/airflow/providers/google/ads/hooks/ads.py
diff --git a/airflow/providers/google/ads/operators/__init__.py b/providers/src/airflow/providers/google/ads/operators/__init__.py
similarity index 100%
rename from airflow/providers/google/ads/operators/__init__.py
rename to providers/src/airflow/providers/google/ads/operators/__init__.py
diff --git a/airflow/providers/google/ads/operators/ads.py b/providers/src/airflow/providers/google/ads/operators/ads.py
similarity index 100%
rename from airflow/providers/google/ads/operators/ads.py
rename to providers/src/airflow/providers/google/ads/operators/ads.py
diff --git a/airflow/providers/google/ads/transfers/__init__.py b/providers/src/airflow/providers/google/ads/transfers/__init__.py
similarity index 100%
rename from airflow/providers/google/ads/transfers/__init__.py
rename to providers/src/airflow/providers/google/ads/transfers/__init__.py
diff --git a/airflow/providers/google/ads/transfers/ads_to_gcs.py b/providers/src/airflow/providers/google/ads/transfers/ads_to_gcs.py
similarity index 100%
rename from airflow/providers/google/ads/transfers/ads_to_gcs.py
rename to providers/src/airflow/providers/google/ads/transfers/ads_to_gcs.py
diff --git a/airflow/providers/google/cloud/__init__.py b/providers/src/airflow/providers/google/cloud/__init__.py
similarity index 100%
rename from airflow/providers/google/cloud/__init__.py
rename to providers/src/airflow/providers/google/cloud/__init__.py
diff --git a/airflow/providers/google/cloud/_internal_client/__init__.py b/providers/src/airflow/providers/google/cloud/_internal_client/__init__.py
similarity index 100%
rename from airflow/providers/google/cloud/_internal_client/__init__.py
rename to providers/src/airflow/providers/google/cloud/_internal_client/__init__.py
diff --git a/airflow/providers/google/cloud/_internal_client/secret_manager_client.py b/providers/src/airflow/providers/google/cloud/_internal_client/secret_manager_client.py
similarity index 100%
rename from airflow/providers/google/cloud/_internal_client/secret_manager_client.py
rename to providers/src/airflow/providers/google/cloud/_internal_client/secret_manager_client.py
diff --git a/airflow/providers/google/cloud/example_dags/__init__.py b/providers/src/airflow/providers/google/cloud/example_dags/__init__.py
similarity index 100%
rename from airflow/providers/google/cloud/example_dags/__init__.py
rename to providers/src/airflow/providers/google/cloud/example_dags/__init__.py
diff --git a/airflow/providers/google/cloud/example_dags/example_cloud_task.py b/providers/src/airflow/providers/google/cloud/example_dags/example_cloud_task.py
similarity index 100%
rename from airflow/providers/google/cloud/example_dags/example_cloud_task.py
rename to providers/src/airflow/providers/google/cloud/example_dags/example_cloud_task.py
diff --git a/airflow/providers/google/cloud/example_dags/example_facebook_ads_to_gcs.py b/providers/src/airflow/providers/google/cloud/example_dags/example_facebook_ads_to_gcs.py
similarity index 100%
rename from airflow/providers/google/cloud/example_dags/example_facebook_ads_to_gcs.py
rename to providers/src/airflow/providers/google/cloud/example_dags/example_facebook_ads_to_gcs.py
diff --git a/airflow/providers/google/cloud/example_dags/example_looker.py b/providers/src/airflow/providers/google/cloud/example_dags/example_looker.py
similarity index 100%
rename from airflow/providers/google/cloud/example_dags/example_looker.py
rename to providers/src/airflow/providers/google/cloud/example_dags/example_looker.py
diff --git a/airflow/providers/google/cloud/example_dags/example_presto_to_gcs.py b/providers/src/airflow/providers/google/cloud/example_dags/example_presto_to_gcs.py
similarity index 100%
rename from airflow/providers/google/cloud/example_dags/example_presto_to_gcs.py
rename to providers/src/airflow/providers/google/cloud/example_dags/example_presto_to_gcs.py
diff --git a/airflow/providers/google/cloud/example_dags/example_salesforce_to_gcs.py b/providers/src/airflow/providers/google/cloud/example_dags/example_salesforce_to_gcs.py
similarity index 100%
rename from airflow/providers/google/cloud/example_dags/example_salesforce_to_gcs.py
rename to providers/src/airflow/providers/google/cloud/example_dags/example_salesforce_to_gcs.py
diff --git a/airflow/providers/google/cloud/fs/__init__.py b/providers/src/airflow/providers/google/cloud/fs/__init__.py
similarity index 100%
rename from airflow/providers/google/cloud/fs/__init__.py
rename to providers/src/airflow/providers/google/cloud/fs/__init__.py
diff --git a/airflow/providers/google/cloud/fs/gcs.py b/providers/src/airflow/providers/google/cloud/fs/gcs.py
similarity index 100%
rename from airflow/providers/google/cloud/fs/gcs.py
rename to providers/src/airflow/providers/google/cloud/fs/gcs.py
diff --git a/airflow/providers/google/cloud/hooks/__init__.py b/providers/src/airflow/providers/google/cloud/hooks/__init__.py
similarity index 100%
rename from airflow/providers/google/cloud/hooks/__init__.py
rename to providers/src/airflow/providers/google/cloud/hooks/__init__.py
diff --git a/airflow/providers/google/cloud/hooks/automl.py b/providers/src/airflow/providers/google/cloud/hooks/automl.py
similarity index 100%
rename from airflow/providers/google/cloud/hooks/automl.py
rename to providers/src/airflow/providers/google/cloud/hooks/automl.py
diff --git a/airflow/providers/google/cloud/hooks/bigquery.py b/providers/src/airflow/providers/google/cloud/hooks/bigquery.py
similarity index 100%
rename from airflow/providers/google/cloud/hooks/bigquery.py
rename to providers/src/airflow/providers/google/cloud/hooks/bigquery.py
diff --git a/airflow/providers/google/cloud/hooks/bigquery_dts.py b/providers/src/airflow/providers/google/cloud/hooks/bigquery_dts.py
similarity index 100%
rename from airflow/providers/google/cloud/hooks/bigquery_dts.py
rename to providers/src/airflow/providers/google/cloud/hooks/bigquery_dts.py
diff --git a/airflow/providers/google/cloud/hooks/bigtable.py b/providers/src/airflow/providers/google/cloud/hooks/bigtable.py
similarity index 100%
rename from airflow/providers/google/cloud/hooks/bigtable.py
rename to providers/src/airflow/providers/google/cloud/hooks/bigtable.py
diff --git a/airflow/providers/google/cloud/hooks/cloud_batch.py b/providers/src/airflow/providers/google/cloud/hooks/cloud_batch.py
similarity index 100%
rename from airflow/providers/google/cloud/hooks/cloud_batch.py
rename to providers/src/airflow/providers/google/cloud/hooks/cloud_batch.py
diff --git a/airflow/providers/google/cloud/hooks/cloud_build.py b/providers/src/airflow/providers/google/cloud/hooks/cloud_build.py
similarity index 100%
rename from airflow/providers/google/cloud/hooks/cloud_build.py
rename to providers/src/airflow/providers/google/cloud/hooks/cloud_build.py
diff --git a/airflow/providers/google/cloud/hooks/cloud_composer.py b/providers/src/airflow/providers/google/cloud/hooks/cloud_composer.py
similarity index 100%
rename from airflow/providers/google/cloud/hooks/cloud_composer.py
rename to providers/src/airflow/providers/google/cloud/hooks/cloud_composer.py
diff --git a/airflow/providers/google/cloud/hooks/cloud_memorystore.py b/providers/src/airflow/providers/google/cloud/hooks/cloud_memorystore.py
similarity index 100%
rename from airflow/providers/google/cloud/hooks/cloud_memorystore.py
rename to providers/src/airflow/providers/google/cloud/hooks/cloud_memorystore.py
diff --git a/airflow/providers/google/cloud/hooks/cloud_run.py b/providers/src/airflow/providers/google/cloud/hooks/cloud_run.py
similarity index 100%
rename from airflow/providers/google/cloud/hooks/cloud_run.py
rename to providers/src/airflow/providers/google/cloud/hooks/cloud_run.py
diff --git a/airflow/providers/google/cloud/hooks/cloud_sql.py b/providers/src/airflow/providers/google/cloud/hooks/cloud_sql.py
similarity index 100%
rename from airflow/providers/google/cloud/hooks/cloud_sql.py
rename to providers/src/airflow/providers/google/cloud/hooks/cloud_sql.py
diff --git a/airflow/providers/google/cloud/hooks/cloud_storage_transfer_service.py b/providers/src/airflow/providers/google/cloud/hooks/cloud_storage_transfer_service.py
similarity index 100%
rename from airflow/providers/google/cloud/hooks/cloud_storage_transfer_service.py
rename to providers/src/airflow/providers/google/cloud/hooks/cloud_storage_transfer_service.py
diff --git a/airflow/providers/google/cloud/hooks/compute.py b/providers/src/airflow/providers/google/cloud/hooks/compute.py
similarity index 100%
rename from airflow/providers/google/cloud/hooks/compute.py
rename to providers/src/airflow/providers/google/cloud/hooks/compute.py
diff --git a/airflow/providers/google/cloud/hooks/compute_ssh.py b/providers/src/airflow/providers/google/cloud/hooks/compute_ssh.py
similarity index 100%
rename from airflow/providers/google/cloud/hooks/compute_ssh.py
rename to providers/src/airflow/providers/google/cloud/hooks/compute_ssh.py
diff --git a/airflow/providers/google/cloud/hooks/datacatalog.py b/providers/src/airflow/providers/google/cloud/hooks/datacatalog.py
similarity index 100%
rename from airflow/providers/google/cloud/hooks/datacatalog.py
rename to providers/src/airflow/providers/google/cloud/hooks/datacatalog.py
diff --git a/airflow/providers/google/cloud/hooks/dataflow.py b/providers/src/airflow/providers/google/cloud/hooks/dataflow.py
similarity index 100%
rename from airflow/providers/google/cloud/hooks/dataflow.py
rename to providers/src/airflow/providers/google/cloud/hooks/dataflow.py
diff --git a/airflow/providers/google/cloud/hooks/dataform.py b/providers/src/airflow/providers/google/cloud/hooks/dataform.py
similarity index 100%
rename from airflow/providers/google/cloud/hooks/dataform.py
rename to providers/src/airflow/providers/google/cloud/hooks/dataform.py
diff --git a/airflow/providers/google/cloud/hooks/datafusion.py b/providers/src/airflow/providers/google/cloud/hooks/datafusion.py
similarity index 100%
rename from airflow/providers/google/cloud/hooks/datafusion.py
rename to providers/src/airflow/providers/google/cloud/hooks/datafusion.py
diff --git a/airflow/providers/google/cloud/hooks/datapipeline.py b/providers/src/airflow/providers/google/cloud/hooks/datapipeline.py
similarity index 100%
rename from airflow/providers/google/cloud/hooks/datapipeline.py
rename to providers/src/airflow/providers/google/cloud/hooks/datapipeline.py
diff --git a/airflow/providers/google/cloud/hooks/dataplex.py b/providers/src/airflow/providers/google/cloud/hooks/dataplex.py
similarity index 100%
rename from airflow/providers/google/cloud/hooks/dataplex.py
rename to providers/src/airflow/providers/google/cloud/hooks/dataplex.py
diff --git a/airflow/providers/google/cloud/hooks/dataprep.py b/providers/src/airflow/providers/google/cloud/hooks/dataprep.py
similarity index 100%
rename from airflow/providers/google/cloud/hooks/dataprep.py
rename to providers/src/airflow/providers/google/cloud/hooks/dataprep.py
diff --git a/airflow/providers/google/cloud/hooks/dataproc.py b/providers/src/airflow/providers/google/cloud/hooks/dataproc.py
similarity index 100%
rename from airflow/providers/google/cloud/hooks/dataproc.py
rename to providers/src/airflow/providers/google/cloud/hooks/dataproc.py
diff --git a/airflow/providers/google/cloud/hooks/dataproc_metastore.py b/providers/src/airflow/providers/google/cloud/hooks/dataproc_metastore.py
similarity index 100%
rename from airflow/providers/google/cloud/hooks/dataproc_metastore.py
rename to providers/src/airflow/providers/google/cloud/hooks/dataproc_metastore.py
diff --git a/airflow/providers/google/cloud/hooks/datastore.py b/providers/src/airflow/providers/google/cloud/hooks/datastore.py
similarity index 100%
rename from airflow/providers/google/cloud/hooks/datastore.py
rename to providers/src/airflow/providers/google/cloud/hooks/datastore.py
diff --git a/airflow/providers/google/cloud/hooks/dlp.py b/providers/src/airflow/providers/google/cloud/hooks/dlp.py
similarity index 100%
rename from airflow/providers/google/cloud/hooks/dlp.py
rename to providers/src/airflow/providers/google/cloud/hooks/dlp.py
diff --git a/airflow/providers/google/cloud/hooks/functions.py b/providers/src/airflow/providers/google/cloud/hooks/functions.py
similarity index 100%
rename from airflow/providers/google/cloud/hooks/functions.py
rename to providers/src/airflow/providers/google/cloud/hooks/functions.py
diff --git a/airflow/providers/google/cloud/hooks/gcs.py b/providers/src/airflow/providers/google/cloud/hooks/gcs.py
similarity index 100%
rename from airflow/providers/google/cloud/hooks/gcs.py
rename to providers/src/airflow/providers/google/cloud/hooks/gcs.py
diff --git a/airflow/providers/google/cloud/hooks/gdm.py b/providers/src/airflow/providers/google/cloud/hooks/gdm.py
similarity index 100%
rename from airflow/providers/google/cloud/hooks/gdm.py
rename to providers/src/airflow/providers/google/cloud/hooks/gdm.py
diff --git a/airflow/providers/google/cloud/hooks/kms.py b/providers/src/airflow/providers/google/cloud/hooks/kms.py
similarity index 100%
rename from airflow/providers/google/cloud/hooks/kms.py
rename to providers/src/airflow/providers/google/cloud/hooks/kms.py
diff --git a/airflow/providers/google/cloud/hooks/kubernetes_engine.py b/providers/src/airflow/providers/google/cloud/hooks/kubernetes_engine.py
similarity index 100%
rename from airflow/providers/google/cloud/hooks/kubernetes_engine.py
rename to providers/src/airflow/providers/google/cloud/hooks/kubernetes_engine.py
diff --git a/airflow/providers/google/cloud/hooks/life_sciences.py b/providers/src/airflow/providers/google/cloud/hooks/life_sciences.py
similarity index 100%
rename from airflow/providers/google/cloud/hooks/life_sciences.py
rename to providers/src/airflow/providers/google/cloud/hooks/life_sciences.py
diff --git a/airflow/providers/google/cloud/hooks/looker.py b/providers/src/airflow/providers/google/cloud/hooks/looker.py
similarity index 100%
rename from airflow/providers/google/cloud/hooks/looker.py
rename to providers/src/airflow/providers/google/cloud/hooks/looker.py
diff --git a/airflow/providers/google/cloud/hooks/mlengine.py b/providers/src/airflow/providers/google/cloud/hooks/mlengine.py
similarity index 100%
rename from airflow/providers/google/cloud/hooks/mlengine.py
rename to providers/src/airflow/providers/google/cloud/hooks/mlengine.py
diff --git a/airflow/providers/google/cloud/hooks/natural_language.py b/providers/src/airflow/providers/google/cloud/hooks/natural_language.py
similarity index 100%
rename from airflow/providers/google/cloud/hooks/natural_language.py
rename to providers/src/airflow/providers/google/cloud/hooks/natural_language.py
diff --git a/airflow/providers/google/cloud/hooks/os_login.py b/providers/src/airflow/providers/google/cloud/hooks/os_login.py
similarity index 100%
rename from airflow/providers/google/cloud/hooks/os_login.py
rename to providers/src/airflow/providers/google/cloud/hooks/os_login.py
diff --git a/airflow/providers/google/cloud/hooks/pubsub.py b/providers/src/airflow/providers/google/cloud/hooks/pubsub.py
similarity index 100%
rename from airflow/providers/google/cloud/hooks/pubsub.py
rename to providers/src/airflow/providers/google/cloud/hooks/pubsub.py
diff --git a/airflow/providers/google/cloud/hooks/secret_manager.py b/providers/src/airflow/providers/google/cloud/hooks/secret_manager.py
similarity index 100%
rename from airflow/providers/google/cloud/hooks/secret_manager.py
rename to providers/src/airflow/providers/google/cloud/hooks/secret_manager.py
diff --git a/airflow/providers/google/cloud/hooks/spanner.py b/providers/src/airflow/providers/google/cloud/hooks/spanner.py
similarity index 100%
rename from airflow/providers/google/cloud/hooks/spanner.py
rename to providers/src/airflow/providers/google/cloud/hooks/spanner.py
diff --git a/airflow/providers/google/cloud/hooks/speech_to_text.py b/providers/src/airflow/providers/google/cloud/hooks/speech_to_text.py
similarity index 100%
rename from airflow/providers/google/cloud/hooks/speech_to_text.py
rename to providers/src/airflow/providers/google/cloud/hooks/speech_to_text.py
diff --git a/airflow/providers/google/cloud/hooks/stackdriver.py b/providers/src/airflow/providers/google/cloud/hooks/stackdriver.py
similarity index 100%
rename from airflow/providers/google/cloud/hooks/stackdriver.py
rename to providers/src/airflow/providers/google/cloud/hooks/stackdriver.py
diff --git a/airflow/providers/google/cloud/hooks/tasks.py b/providers/src/airflow/providers/google/cloud/hooks/tasks.py
similarity index 100%
rename from airflow/providers/google/cloud/hooks/tasks.py
rename to providers/src/airflow/providers/google/cloud/hooks/tasks.py
diff --git a/airflow/providers/google/cloud/hooks/text_to_speech.py b/providers/src/airflow/providers/google/cloud/hooks/text_to_speech.py
similarity index 100%
rename from airflow/providers/google/cloud/hooks/text_to_speech.py
rename to providers/src/airflow/providers/google/cloud/hooks/text_to_speech.py
diff --git a/airflow/providers/google/cloud/hooks/translate.py b/providers/src/airflow/providers/google/cloud/hooks/translate.py
similarity index 100%
rename from airflow/providers/google/cloud/hooks/translate.py
rename to providers/src/airflow/providers/google/cloud/hooks/translate.py
diff --git a/airflow/providers/google/cloud/hooks/vertex_ai/__init__.py b/providers/src/airflow/providers/google/cloud/hooks/vertex_ai/__init__.py
similarity index 100%
rename from airflow/providers/google/cloud/hooks/vertex_ai/__init__.py
rename to providers/src/airflow/providers/google/cloud/hooks/vertex_ai/__init__.py
diff --git a/airflow/providers/google/cloud/hooks/vertex_ai/auto_ml.py b/providers/src/airflow/providers/google/cloud/hooks/vertex_ai/auto_ml.py
similarity index 100%
rename from airflow/providers/google/cloud/hooks/vertex_ai/auto_ml.py
rename to providers/src/airflow/providers/google/cloud/hooks/vertex_ai/auto_ml.py
diff --git a/airflow/providers/google/cloud/hooks/vertex_ai/batch_prediction_job.py b/providers/src/airflow/providers/google/cloud/hooks/vertex_ai/batch_prediction_job.py
similarity index 100%
rename from airflow/providers/google/cloud/hooks/vertex_ai/batch_prediction_job.py
rename to providers/src/airflow/providers/google/cloud/hooks/vertex_ai/batch_prediction_job.py
diff --git a/airflow/providers/google/cloud/hooks/vertex_ai/custom_job.py b/providers/src/airflow/providers/google/cloud/hooks/vertex_ai/custom_job.py
similarity index 100%
rename from airflow/providers/google/cloud/hooks/vertex_ai/custom_job.py
rename to providers/src/airflow/providers/google/cloud/hooks/vertex_ai/custom_job.py
diff --git a/airflow/providers/google/cloud/hooks/vertex_ai/dataset.py b/providers/src/airflow/providers/google/cloud/hooks/vertex_ai/dataset.py
similarity index 100%
rename from airflow/providers/google/cloud/hooks/vertex_ai/dataset.py
rename to providers/src/airflow/providers/google/cloud/hooks/vertex_ai/dataset.py
diff --git a/airflow/providers/google/cloud/hooks/vertex_ai/endpoint_service.py b/providers/src/airflow/providers/google/cloud/hooks/vertex_ai/endpoint_service.py
similarity index 100%
rename from airflow/providers/google/cloud/hooks/vertex_ai/endpoint_service.py
rename to providers/src/airflow/providers/google/cloud/hooks/vertex_ai/endpoint_service.py
diff --git a/airflow/providers/google/cloud/hooks/vertex_ai/generative_model.py b/providers/src/airflow/providers/google/cloud/hooks/vertex_ai/generative_model.py
similarity index 100%
rename from airflow/providers/google/cloud/hooks/vertex_ai/generative_model.py
rename to providers/src/airflow/providers/google/cloud/hooks/vertex_ai/generative_model.py
diff --git a/airflow/providers/google/cloud/hooks/vertex_ai/hyperparameter_tuning_job.py b/providers/src/airflow/providers/google/cloud/hooks/vertex_ai/hyperparameter_tuning_job.py
similarity index 100%
rename from airflow/providers/google/cloud/hooks/vertex_ai/hyperparameter_tuning_job.py
rename to providers/src/airflow/providers/google/cloud/hooks/vertex_ai/hyperparameter_tuning_job.py
diff --git a/airflow/providers/google/cloud/hooks/vertex_ai/model_service.py b/providers/src/airflow/providers/google/cloud/hooks/vertex_ai/model_service.py
similarity index 100%
rename from airflow/providers/google/cloud/hooks/vertex_ai/model_service.py
rename to providers/src/airflow/providers/google/cloud/hooks/vertex_ai/model_service.py
diff --git a/airflow/providers/google/cloud/hooks/vertex_ai/pipeline_job.py b/providers/src/airflow/providers/google/cloud/hooks/vertex_ai/pipeline_job.py
similarity index 100%
rename from airflow/providers/google/cloud/hooks/vertex_ai/pipeline_job.py
rename to providers/src/airflow/providers/google/cloud/hooks/vertex_ai/pipeline_job.py
diff --git a/airflow/providers/google/cloud/hooks/vertex_ai/prediction_service.py b/providers/src/airflow/providers/google/cloud/hooks/vertex_ai/prediction_service.py
similarity index 100%
rename from airflow/providers/google/cloud/hooks/vertex_ai/prediction_service.py
rename to providers/src/airflow/providers/google/cloud/hooks/vertex_ai/prediction_service.py
diff --git a/airflow/providers/google/cloud/hooks/video_intelligence.py b/providers/src/airflow/providers/google/cloud/hooks/video_intelligence.py
similarity index 100%
rename from airflow/providers/google/cloud/hooks/video_intelligence.py
rename to providers/src/airflow/providers/google/cloud/hooks/video_intelligence.py
diff --git a/airflow/providers/google/cloud/hooks/vision.py b/providers/src/airflow/providers/google/cloud/hooks/vision.py
similarity index 100%
rename from airflow/providers/google/cloud/hooks/vision.py
rename to providers/src/airflow/providers/google/cloud/hooks/vision.py
diff --git a/airflow/providers/google/cloud/hooks/workflows.py b/providers/src/airflow/providers/google/cloud/hooks/workflows.py
similarity index 100%
rename from airflow/providers/google/cloud/hooks/workflows.py
rename to providers/src/airflow/providers/google/cloud/hooks/workflows.py
diff --git a/airflow/providers/google/cloud/links/__init__.py b/providers/src/airflow/providers/google/cloud/links/__init__.py
similarity index 100%
rename from airflow/providers/google/cloud/links/__init__.py
rename to providers/src/airflow/providers/google/cloud/links/__init__.py
diff --git a/airflow/providers/google/cloud/links/automl.py b/providers/src/airflow/providers/google/cloud/links/automl.py
similarity index 100%
rename from airflow/providers/google/cloud/links/automl.py
rename to providers/src/airflow/providers/google/cloud/links/automl.py
diff --git a/airflow/providers/google/cloud/links/base.py b/providers/src/airflow/providers/google/cloud/links/base.py
similarity index 100%
rename from airflow/providers/google/cloud/links/base.py
rename to providers/src/airflow/providers/google/cloud/links/base.py
diff --git a/airflow/providers/google/cloud/links/bigquery.py b/providers/src/airflow/providers/google/cloud/links/bigquery.py
similarity index 100%
rename from airflow/providers/google/cloud/links/bigquery.py
rename to providers/src/airflow/providers/google/cloud/links/bigquery.py
diff --git a/airflow/providers/google/cloud/links/bigquery_dts.py b/providers/src/airflow/providers/google/cloud/links/bigquery_dts.py
similarity index 100%
rename from airflow/providers/google/cloud/links/bigquery_dts.py
rename to providers/src/airflow/providers/google/cloud/links/bigquery_dts.py
diff --git a/airflow/providers/google/cloud/links/bigtable.py b/providers/src/airflow/providers/google/cloud/links/bigtable.py
similarity index 100%
rename from airflow/providers/google/cloud/links/bigtable.py
rename to providers/src/airflow/providers/google/cloud/links/bigtable.py
diff --git a/airflow/providers/google/cloud/links/cloud_build.py b/providers/src/airflow/providers/google/cloud/links/cloud_build.py
similarity index 100%
rename from airflow/providers/google/cloud/links/cloud_build.py
rename to providers/src/airflow/providers/google/cloud/links/cloud_build.py
diff --git a/airflow/providers/google/cloud/links/cloud_functions.py b/providers/src/airflow/providers/google/cloud/links/cloud_functions.py
similarity index 100%
rename from airflow/providers/google/cloud/links/cloud_functions.py
rename to providers/src/airflow/providers/google/cloud/links/cloud_functions.py
diff --git a/airflow/providers/google/cloud/links/cloud_memorystore.py b/providers/src/airflow/providers/google/cloud/links/cloud_memorystore.py
similarity index 100%
rename from airflow/providers/google/cloud/links/cloud_memorystore.py
rename to providers/src/airflow/providers/google/cloud/links/cloud_memorystore.py
diff --git a/airflow/providers/google/cloud/links/cloud_sql.py b/providers/src/airflow/providers/google/cloud/links/cloud_sql.py
similarity index 100%
rename from airflow/providers/google/cloud/links/cloud_sql.py
rename to providers/src/airflow/providers/google/cloud/links/cloud_sql.py
diff --git a/airflow/providers/google/cloud/links/cloud_storage_transfer.py b/providers/src/airflow/providers/google/cloud/links/cloud_storage_transfer.py
similarity index 100%
rename from airflow/providers/google/cloud/links/cloud_storage_transfer.py
rename to providers/src/airflow/providers/google/cloud/links/cloud_storage_transfer.py
diff --git a/airflow/providers/google/cloud/links/cloud_tasks.py b/providers/src/airflow/providers/google/cloud/links/cloud_tasks.py
similarity index 100%
rename from airflow/providers/google/cloud/links/cloud_tasks.py
rename to providers/src/airflow/providers/google/cloud/links/cloud_tasks.py
diff --git a/airflow/providers/google/cloud/links/compute.py b/providers/src/airflow/providers/google/cloud/links/compute.py
similarity index 100%
rename from airflow/providers/google/cloud/links/compute.py
rename to providers/src/airflow/providers/google/cloud/links/compute.py
diff --git a/airflow/providers/google/cloud/links/data_loss_prevention.py b/providers/src/airflow/providers/google/cloud/links/data_loss_prevention.py
similarity index 100%
rename from airflow/providers/google/cloud/links/data_loss_prevention.py
rename to providers/src/airflow/providers/google/cloud/links/data_loss_prevention.py
diff --git a/airflow/providers/google/cloud/links/datacatalog.py b/providers/src/airflow/providers/google/cloud/links/datacatalog.py
similarity index 100%
rename from airflow/providers/google/cloud/links/datacatalog.py
rename to providers/src/airflow/providers/google/cloud/links/datacatalog.py
diff --git a/airflow/providers/google/cloud/links/dataflow.py b/providers/src/airflow/providers/google/cloud/links/dataflow.py
similarity index 100%
rename from airflow/providers/google/cloud/links/dataflow.py
rename to providers/src/airflow/providers/google/cloud/links/dataflow.py
diff --git a/airflow/providers/google/cloud/links/dataform.py b/providers/src/airflow/providers/google/cloud/links/dataform.py
similarity index 100%
rename from airflow/providers/google/cloud/links/dataform.py
rename to providers/src/airflow/providers/google/cloud/links/dataform.py
diff --git a/airflow/providers/google/cloud/links/datafusion.py b/providers/src/airflow/providers/google/cloud/links/datafusion.py
similarity index 100%
rename from airflow/providers/google/cloud/links/datafusion.py
rename to providers/src/airflow/providers/google/cloud/links/datafusion.py
diff --git a/airflow/providers/google/cloud/links/dataplex.py b/providers/src/airflow/providers/google/cloud/links/dataplex.py
similarity index 100%
rename from airflow/providers/google/cloud/links/dataplex.py
rename to providers/src/airflow/providers/google/cloud/links/dataplex.py
diff --git a/airflow/providers/google/cloud/links/dataprep.py b/providers/src/airflow/providers/google/cloud/links/dataprep.py
similarity index 100%
rename from airflow/providers/google/cloud/links/dataprep.py
rename to providers/src/airflow/providers/google/cloud/links/dataprep.py
diff --git a/airflow/providers/google/cloud/links/dataproc.py b/providers/src/airflow/providers/google/cloud/links/dataproc.py
similarity index 100%
rename from airflow/providers/google/cloud/links/dataproc.py
rename to providers/src/airflow/providers/google/cloud/links/dataproc.py
diff --git a/airflow/providers/google/cloud/links/datastore.py b/providers/src/airflow/providers/google/cloud/links/datastore.py
similarity index 100%
rename from airflow/providers/google/cloud/links/datastore.py
rename to providers/src/airflow/providers/google/cloud/links/datastore.py
diff --git a/airflow/providers/google/cloud/links/kubernetes_engine.py b/providers/src/airflow/providers/google/cloud/links/kubernetes_engine.py
similarity index 100%
rename from airflow/providers/google/cloud/links/kubernetes_engine.py
rename to providers/src/airflow/providers/google/cloud/links/kubernetes_engine.py
diff --git a/airflow/providers/google/cloud/links/life_sciences.py b/providers/src/airflow/providers/google/cloud/links/life_sciences.py
similarity index 100%
rename from airflow/providers/google/cloud/links/life_sciences.py
rename to providers/src/airflow/providers/google/cloud/links/life_sciences.py
diff --git a/airflow/providers/google/cloud/links/mlengine.py b/providers/src/airflow/providers/google/cloud/links/mlengine.py
similarity index 100%
rename from airflow/providers/google/cloud/links/mlengine.py
rename to providers/src/airflow/providers/google/cloud/links/mlengine.py
diff --git a/airflow/providers/google/cloud/links/pubsub.py b/providers/src/airflow/providers/google/cloud/links/pubsub.py
similarity index 100%
rename from airflow/providers/google/cloud/links/pubsub.py
rename to providers/src/airflow/providers/google/cloud/links/pubsub.py
diff --git a/airflow/providers/google/cloud/links/spanner.py b/providers/src/airflow/providers/google/cloud/links/spanner.py
similarity index 100%
rename from airflow/providers/google/cloud/links/spanner.py
rename to providers/src/airflow/providers/google/cloud/links/spanner.py
diff --git a/airflow/providers/google/cloud/links/stackdriver.py b/providers/src/airflow/providers/google/cloud/links/stackdriver.py
similarity index 100%
rename from airflow/providers/google/cloud/links/stackdriver.py
rename to providers/src/airflow/providers/google/cloud/links/stackdriver.py
diff --git a/airflow/providers/google/cloud/links/translate.py b/providers/src/airflow/providers/google/cloud/links/translate.py
similarity index 100%
rename from airflow/providers/google/cloud/links/translate.py
rename to providers/src/airflow/providers/google/cloud/links/translate.py
diff --git a/airflow/providers/google/cloud/links/vertex_ai.py b/providers/src/airflow/providers/google/cloud/links/vertex_ai.py
similarity index 100%
rename from airflow/providers/google/cloud/links/vertex_ai.py
rename to providers/src/airflow/providers/google/cloud/links/vertex_ai.py
diff --git a/airflow/providers/google/cloud/links/workflows.py b/providers/src/airflow/providers/google/cloud/links/workflows.py
similarity index 100%
rename from airflow/providers/google/cloud/links/workflows.py
rename to providers/src/airflow/providers/google/cloud/links/workflows.py
diff --git a/airflow/providers/google/cloud/log/__init__.py b/providers/src/airflow/providers/google/cloud/log/__init__.py
similarity index 100%
rename from airflow/providers/google/cloud/log/__init__.py
rename to providers/src/airflow/providers/google/cloud/log/__init__.py
diff --git a/airflow/providers/google/cloud/log/gcs_task_handler.py b/providers/src/airflow/providers/google/cloud/log/gcs_task_handler.py
similarity index 100%
rename from airflow/providers/google/cloud/log/gcs_task_handler.py
rename to providers/src/airflow/providers/google/cloud/log/gcs_task_handler.py
diff --git a/airflow/providers/google/cloud/log/stackdriver_task_handler.py b/providers/src/airflow/providers/google/cloud/log/stackdriver_task_handler.py
similarity index 100%
rename from airflow/providers/google/cloud/log/stackdriver_task_handler.py
rename to providers/src/airflow/providers/google/cloud/log/stackdriver_task_handler.py
diff --git a/airflow/providers/google/cloud/openlineage/BigQueryErrorRunFacet.json b/providers/src/airflow/providers/google/cloud/openlineage/BigQueryErrorRunFacet.json
similarity index 100%
rename from airflow/providers/google/cloud/openlineage/BigQueryErrorRunFacet.json
rename to providers/src/airflow/providers/google/cloud/openlineage/BigQueryErrorRunFacet.json
diff --git a/airflow/providers/google/cloud/openlineage/BigQueryJobRunFacet.json b/providers/src/airflow/providers/google/cloud/openlineage/BigQueryJobRunFacet.json
similarity index 100%
rename from airflow/providers/google/cloud/openlineage/BigQueryJobRunFacet.json
rename to providers/src/airflow/providers/google/cloud/openlineage/BigQueryJobRunFacet.json
diff --git a/airflow/providers/google/cloud/openlineage/__init__.py b/providers/src/airflow/providers/google/cloud/openlineage/__init__.py
similarity index 100%
rename from airflow/providers/google/cloud/openlineage/__init__.py
rename to providers/src/airflow/providers/google/cloud/openlineage/__init__.py
diff --git a/airflow/providers/google/cloud/openlineage/mixins.py b/providers/src/airflow/providers/google/cloud/openlineage/mixins.py
similarity index 100%
rename from airflow/providers/google/cloud/openlineage/mixins.py
rename to providers/src/airflow/providers/google/cloud/openlineage/mixins.py
diff --git a/airflow/providers/google/cloud/openlineage/utils.py b/providers/src/airflow/providers/google/cloud/openlineage/utils.py
similarity index 100%
rename from airflow/providers/google/cloud/openlineage/utils.py
rename to providers/src/airflow/providers/google/cloud/openlineage/utils.py
diff --git a/airflow/providers/google/cloud/operators/__init__.py b/providers/src/airflow/providers/google/cloud/operators/__init__.py
similarity index 100%
rename from airflow/providers/google/cloud/operators/__init__.py
rename to providers/src/airflow/providers/google/cloud/operators/__init__.py
diff --git a/airflow/providers/google/cloud/operators/automl.py b/providers/src/airflow/providers/google/cloud/operators/automl.py
similarity index 100%
rename from airflow/providers/google/cloud/operators/automl.py
rename to providers/src/airflow/providers/google/cloud/operators/automl.py
diff --git a/airflow/providers/google/cloud/operators/bigquery.py b/providers/src/airflow/providers/google/cloud/operators/bigquery.py
similarity index 99%
rename from airflow/providers/google/cloud/operators/bigquery.py
rename to providers/src/airflow/providers/google/cloud/operators/bigquery.py
index 1637f51d8f89..876ff8d51f16 100644
--- a/airflow/providers/google/cloud/operators/bigquery.py
+++ b/providers/src/airflow/providers/google/cloud/operators/bigquery.py
@@ -35,7 +35,7 @@
 from airflow.exceptions import AirflowException, AirflowProviderDeprecationWarning, AirflowSkipException
 from airflow.models import BaseOperator, BaseOperatorLink
 from airflow.models.xcom import XCom
-from airflow.providers.common.sql.operators.sql import (
+from airflow.providers.common.sql.operators.sql import (  # type: ignore[attr-defined] # for _parse_boolean
     SQLCheckOperator,
     SQLColumnCheckOperator,
     SQLIntervalCheckOperator,
diff --git a/airflow/providers/google/cloud/operators/bigquery_dts.py b/providers/src/airflow/providers/google/cloud/operators/bigquery_dts.py
similarity index 100%
rename from airflow/providers/google/cloud/operators/bigquery_dts.py
rename to providers/src/airflow/providers/google/cloud/operators/bigquery_dts.py
diff --git a/airflow/providers/google/cloud/operators/bigtable.py b/providers/src/airflow/providers/google/cloud/operators/bigtable.py
similarity index 100%
rename from airflow/providers/google/cloud/operators/bigtable.py
rename to providers/src/airflow/providers/google/cloud/operators/bigtable.py
diff --git a/airflow/providers/google/cloud/operators/cloud_base.py b/providers/src/airflow/providers/google/cloud/operators/cloud_base.py
similarity index 100%
rename from airflow/providers/google/cloud/operators/cloud_base.py
rename to providers/src/airflow/providers/google/cloud/operators/cloud_base.py
diff --git a/airflow/providers/google/cloud/operators/cloud_batch.py b/providers/src/airflow/providers/google/cloud/operators/cloud_batch.py
similarity index 100%
rename from airflow/providers/google/cloud/operators/cloud_batch.py
rename to providers/src/airflow/providers/google/cloud/operators/cloud_batch.py
diff --git a/airflow/providers/google/cloud/operators/cloud_build.py b/providers/src/airflow/providers/google/cloud/operators/cloud_build.py
similarity index 100%
rename from airflow/providers/google/cloud/operators/cloud_build.py
rename to providers/src/airflow/providers/google/cloud/operators/cloud_build.py
diff --git a/airflow/providers/google/cloud/operators/cloud_composer.py b/providers/src/airflow/providers/google/cloud/operators/cloud_composer.py
similarity index 100%
rename from airflow/providers/google/cloud/operators/cloud_composer.py
rename to providers/src/airflow/providers/google/cloud/operators/cloud_composer.py
diff --git a/airflow/providers/google/cloud/operators/cloud_memorystore.py b/providers/src/airflow/providers/google/cloud/operators/cloud_memorystore.py
similarity index 100%
rename from airflow/providers/google/cloud/operators/cloud_memorystore.py
rename to providers/src/airflow/providers/google/cloud/operators/cloud_memorystore.py
diff --git a/airflow/providers/google/cloud/operators/cloud_run.py b/providers/src/airflow/providers/google/cloud/operators/cloud_run.py
similarity index 100%
rename from airflow/providers/google/cloud/operators/cloud_run.py
rename to providers/src/airflow/providers/google/cloud/operators/cloud_run.py
diff --git a/airflow/providers/google/cloud/operators/cloud_sql.py b/providers/src/airflow/providers/google/cloud/operators/cloud_sql.py
similarity index 100%
rename from airflow/providers/google/cloud/operators/cloud_sql.py
rename to providers/src/airflow/providers/google/cloud/operators/cloud_sql.py
diff --git a/airflow/providers/google/cloud/operators/cloud_storage_transfer_service.py b/providers/src/airflow/providers/google/cloud/operators/cloud_storage_transfer_service.py
similarity index 100%
rename from airflow/providers/google/cloud/operators/cloud_storage_transfer_service.py
rename to providers/src/airflow/providers/google/cloud/operators/cloud_storage_transfer_service.py
diff --git a/airflow/providers/google/cloud/operators/compute.py b/providers/src/airflow/providers/google/cloud/operators/compute.py
similarity index 100%
rename from airflow/providers/google/cloud/operators/compute.py
rename to providers/src/airflow/providers/google/cloud/operators/compute.py
diff --git a/airflow/providers/google/cloud/operators/datacatalog.py b/providers/src/airflow/providers/google/cloud/operators/datacatalog.py
similarity index 100%
rename from airflow/providers/google/cloud/operators/datacatalog.py
rename to providers/src/airflow/providers/google/cloud/operators/datacatalog.py
diff --git a/airflow/providers/google/cloud/operators/dataflow.py b/providers/src/airflow/providers/google/cloud/operators/dataflow.py
similarity index 100%
rename from airflow/providers/google/cloud/operators/dataflow.py
rename to providers/src/airflow/providers/google/cloud/operators/dataflow.py
diff --git a/airflow/providers/google/cloud/operators/dataform.py b/providers/src/airflow/providers/google/cloud/operators/dataform.py
similarity index 100%
rename from airflow/providers/google/cloud/operators/dataform.py
rename to providers/src/airflow/providers/google/cloud/operators/dataform.py
diff --git a/airflow/providers/google/cloud/operators/datafusion.py b/providers/src/airflow/providers/google/cloud/operators/datafusion.py
similarity index 100%
rename from airflow/providers/google/cloud/operators/datafusion.py
rename to providers/src/airflow/providers/google/cloud/operators/datafusion.py
diff --git a/airflow/providers/google/cloud/operators/datapipeline.py b/providers/src/airflow/providers/google/cloud/operators/datapipeline.py
similarity index 100%
rename from airflow/providers/google/cloud/operators/datapipeline.py
rename to providers/src/airflow/providers/google/cloud/operators/datapipeline.py
diff --git a/airflow/providers/google/cloud/operators/dataplex.py b/providers/src/airflow/providers/google/cloud/operators/dataplex.py
similarity index 100%
rename from airflow/providers/google/cloud/operators/dataplex.py
rename to providers/src/airflow/providers/google/cloud/operators/dataplex.py
diff --git a/airflow/providers/google/cloud/operators/dataprep.py b/providers/src/airflow/providers/google/cloud/operators/dataprep.py
similarity index 100%
rename from airflow/providers/google/cloud/operators/dataprep.py
rename to providers/src/airflow/providers/google/cloud/operators/dataprep.py
diff --git a/airflow/providers/google/cloud/operators/dataproc.py b/providers/src/airflow/providers/google/cloud/operators/dataproc.py
similarity index 100%
rename from airflow/providers/google/cloud/operators/dataproc.py
rename to providers/src/airflow/providers/google/cloud/operators/dataproc.py
diff --git a/airflow/providers/google/cloud/operators/dataproc_metastore.py b/providers/src/airflow/providers/google/cloud/operators/dataproc_metastore.py
similarity index 100%
rename from airflow/providers/google/cloud/operators/dataproc_metastore.py
rename to providers/src/airflow/providers/google/cloud/operators/dataproc_metastore.py
diff --git a/airflow/providers/google/cloud/operators/datastore.py b/providers/src/airflow/providers/google/cloud/operators/datastore.py
similarity index 100%
rename from airflow/providers/google/cloud/operators/datastore.py
rename to providers/src/airflow/providers/google/cloud/operators/datastore.py
diff --git a/airflow/providers/google/cloud/operators/dlp.py b/providers/src/airflow/providers/google/cloud/operators/dlp.py
similarity index 100%
rename from airflow/providers/google/cloud/operators/dlp.py
rename to providers/src/airflow/providers/google/cloud/operators/dlp.py
diff --git a/airflow/providers/google/cloud/operators/functions.py b/providers/src/airflow/providers/google/cloud/operators/functions.py
similarity index 100%
rename from airflow/providers/google/cloud/operators/functions.py
rename to providers/src/airflow/providers/google/cloud/operators/functions.py
diff --git a/airflow/providers/google/cloud/operators/gcs.py b/providers/src/airflow/providers/google/cloud/operators/gcs.py
similarity index 100%
rename from airflow/providers/google/cloud/operators/gcs.py
rename to providers/src/airflow/providers/google/cloud/operators/gcs.py
diff --git a/airflow/providers/google/cloud/operators/kubernetes_engine.py b/providers/src/airflow/providers/google/cloud/operators/kubernetes_engine.py
similarity index 100%
rename from airflow/providers/google/cloud/operators/kubernetes_engine.py
rename to providers/src/airflow/providers/google/cloud/operators/kubernetes_engine.py
diff --git a/airflow/providers/google/cloud/operators/life_sciences.py b/providers/src/airflow/providers/google/cloud/operators/life_sciences.py
similarity index 100%
rename from airflow/providers/google/cloud/operators/life_sciences.py
rename to providers/src/airflow/providers/google/cloud/operators/life_sciences.py
diff --git a/airflow/providers/google/cloud/operators/looker.py b/providers/src/airflow/providers/google/cloud/operators/looker.py
similarity index 100%
rename from airflow/providers/google/cloud/operators/looker.py
rename to providers/src/airflow/providers/google/cloud/operators/looker.py
diff --git a/airflow/providers/google/cloud/operators/mlengine.py b/providers/src/airflow/providers/google/cloud/operators/mlengine.py
similarity index 100%
rename from airflow/providers/google/cloud/operators/mlengine.py
rename to providers/src/airflow/providers/google/cloud/operators/mlengine.py
diff --git a/airflow/providers/google/cloud/operators/natural_language.py b/providers/src/airflow/providers/google/cloud/operators/natural_language.py
similarity index 100%
rename from airflow/providers/google/cloud/operators/natural_language.py
rename to providers/src/airflow/providers/google/cloud/operators/natural_language.py
diff --git a/airflow/providers/google/cloud/operators/pubsub.py b/providers/src/airflow/providers/google/cloud/operators/pubsub.py
similarity index 100%
rename from airflow/providers/google/cloud/operators/pubsub.py
rename to providers/src/airflow/providers/google/cloud/operators/pubsub.py
diff --git a/airflow/providers/google/cloud/operators/spanner.py b/providers/src/airflow/providers/google/cloud/operators/spanner.py
similarity index 100%
rename from airflow/providers/google/cloud/operators/spanner.py
rename to providers/src/airflow/providers/google/cloud/operators/spanner.py
diff --git a/airflow/providers/google/cloud/operators/speech_to_text.py b/providers/src/airflow/providers/google/cloud/operators/speech_to_text.py
similarity index 100%
rename from airflow/providers/google/cloud/operators/speech_to_text.py
rename to providers/src/airflow/providers/google/cloud/operators/speech_to_text.py
diff --git a/airflow/providers/google/cloud/operators/stackdriver.py b/providers/src/airflow/providers/google/cloud/operators/stackdriver.py
similarity index 100%
rename from airflow/providers/google/cloud/operators/stackdriver.py
rename to providers/src/airflow/providers/google/cloud/operators/stackdriver.py
diff --git a/airflow/providers/google/cloud/operators/tasks.py b/providers/src/airflow/providers/google/cloud/operators/tasks.py
similarity index 100%
rename from airflow/providers/google/cloud/operators/tasks.py
rename to providers/src/airflow/providers/google/cloud/operators/tasks.py
diff --git a/airflow/providers/google/cloud/operators/text_to_speech.py b/providers/src/airflow/providers/google/cloud/operators/text_to_speech.py
similarity index 100%
rename from airflow/providers/google/cloud/operators/text_to_speech.py
rename to providers/src/airflow/providers/google/cloud/operators/text_to_speech.py
diff --git a/airflow/providers/google/cloud/operators/translate.py b/providers/src/airflow/providers/google/cloud/operators/translate.py
similarity index 100%
rename from airflow/providers/google/cloud/operators/translate.py
rename to providers/src/airflow/providers/google/cloud/operators/translate.py
diff --git a/airflow/providers/google/cloud/operators/translate_speech.py b/providers/src/airflow/providers/google/cloud/operators/translate_speech.py
similarity index 100%
rename from airflow/providers/google/cloud/operators/translate_speech.py
rename to providers/src/airflow/providers/google/cloud/operators/translate_speech.py
diff --git a/airflow/providers/google/cloud/operators/vertex_ai/__init__.py b/providers/src/airflow/providers/google/cloud/operators/vertex_ai/__init__.py
similarity index 100%
rename from airflow/providers/google/cloud/operators/vertex_ai/__init__.py
rename to providers/src/airflow/providers/google/cloud/operators/vertex_ai/__init__.py
diff --git a/airflow/providers/google/cloud/operators/vertex_ai/auto_ml.py b/providers/src/airflow/providers/google/cloud/operators/vertex_ai/auto_ml.py
similarity index 100%
rename from airflow/providers/google/cloud/operators/vertex_ai/auto_ml.py
rename to providers/src/airflow/providers/google/cloud/operators/vertex_ai/auto_ml.py
diff --git a/airflow/providers/google/cloud/operators/vertex_ai/batch_prediction_job.py b/providers/src/airflow/providers/google/cloud/operators/vertex_ai/batch_prediction_job.py
similarity index 100%
rename from airflow/providers/google/cloud/operators/vertex_ai/batch_prediction_job.py
rename to providers/src/airflow/providers/google/cloud/operators/vertex_ai/batch_prediction_job.py
diff --git a/airflow/providers/google/cloud/operators/vertex_ai/custom_job.py b/providers/src/airflow/providers/google/cloud/operators/vertex_ai/custom_job.py
similarity index 100%
rename from airflow/providers/google/cloud/operators/vertex_ai/custom_job.py
rename to providers/src/airflow/providers/google/cloud/operators/vertex_ai/custom_job.py
diff --git a/airflow/providers/google/cloud/operators/vertex_ai/dataset.py b/providers/src/airflow/providers/google/cloud/operators/vertex_ai/dataset.py
similarity index 100%
rename from airflow/providers/google/cloud/operators/vertex_ai/dataset.py
rename to providers/src/airflow/providers/google/cloud/operators/vertex_ai/dataset.py
diff --git a/airflow/providers/google/cloud/operators/vertex_ai/endpoint_service.py b/providers/src/airflow/providers/google/cloud/operators/vertex_ai/endpoint_service.py
similarity index 100%
rename from airflow/providers/google/cloud/operators/vertex_ai/endpoint_service.py
rename to providers/src/airflow/providers/google/cloud/operators/vertex_ai/endpoint_service.py
diff --git a/airflow/providers/google/cloud/operators/vertex_ai/generative_model.py b/providers/src/airflow/providers/google/cloud/operators/vertex_ai/generative_model.py
similarity index 100%
rename from airflow/providers/google/cloud/operators/vertex_ai/generative_model.py
rename to providers/src/airflow/providers/google/cloud/operators/vertex_ai/generative_model.py
diff --git a/airflow/providers/google/cloud/operators/vertex_ai/hyperparameter_tuning_job.py b/providers/src/airflow/providers/google/cloud/operators/vertex_ai/hyperparameter_tuning_job.py
similarity index 100%
rename from airflow/providers/google/cloud/operators/vertex_ai/hyperparameter_tuning_job.py
rename to providers/src/airflow/providers/google/cloud/operators/vertex_ai/hyperparameter_tuning_job.py
diff --git a/airflow/providers/google/cloud/operators/vertex_ai/model_service.py b/providers/src/airflow/providers/google/cloud/operators/vertex_ai/model_service.py
similarity index 100%
rename from airflow/providers/google/cloud/operators/vertex_ai/model_service.py
rename to providers/src/airflow/providers/google/cloud/operators/vertex_ai/model_service.py
diff --git a/airflow/providers/google/cloud/operators/vertex_ai/pipeline_job.py b/providers/src/airflow/providers/google/cloud/operators/vertex_ai/pipeline_job.py
similarity index 100%
rename from airflow/providers/google/cloud/operators/vertex_ai/pipeline_job.py
rename to providers/src/airflow/providers/google/cloud/operators/vertex_ai/pipeline_job.py
diff --git a/airflow/providers/google/cloud/operators/video_intelligence.py b/providers/src/airflow/providers/google/cloud/operators/video_intelligence.py
similarity index 100%
rename from airflow/providers/google/cloud/operators/video_intelligence.py
rename to providers/src/airflow/providers/google/cloud/operators/video_intelligence.py
diff --git a/airflow/providers/google/cloud/operators/vision.py b/providers/src/airflow/providers/google/cloud/operators/vision.py
similarity index 100%
rename from airflow/providers/google/cloud/operators/vision.py
rename to providers/src/airflow/providers/google/cloud/operators/vision.py
diff --git a/airflow/providers/google/cloud/operators/workflows.py b/providers/src/airflow/providers/google/cloud/operators/workflows.py
similarity index 100%
rename from airflow/providers/google/cloud/operators/workflows.py
rename to providers/src/airflow/providers/google/cloud/operators/workflows.py
diff --git a/airflow/providers/google/cloud/secrets/__init__.py b/providers/src/airflow/providers/google/cloud/secrets/__init__.py
similarity index 100%
rename from airflow/providers/google/cloud/secrets/__init__.py
rename to providers/src/airflow/providers/google/cloud/secrets/__init__.py
diff --git a/airflow/providers/google/cloud/secrets/secret_manager.py b/providers/src/airflow/providers/google/cloud/secrets/secret_manager.py
similarity index 100%
rename from airflow/providers/google/cloud/secrets/secret_manager.py
rename to providers/src/airflow/providers/google/cloud/secrets/secret_manager.py
diff --git a/airflow/providers/google/cloud/sensors/__init__.py b/providers/src/airflow/providers/google/cloud/sensors/__init__.py
similarity index 100%
rename from airflow/providers/google/cloud/sensors/__init__.py
rename to providers/src/airflow/providers/google/cloud/sensors/__init__.py
diff --git a/airflow/providers/google/cloud/sensors/bigquery.py b/providers/src/airflow/providers/google/cloud/sensors/bigquery.py
similarity index 100%
rename from airflow/providers/google/cloud/sensors/bigquery.py
rename to providers/src/airflow/providers/google/cloud/sensors/bigquery.py
diff --git a/airflow/providers/google/cloud/sensors/bigquery_dts.py b/providers/src/airflow/providers/google/cloud/sensors/bigquery_dts.py
similarity index 100%
rename from airflow/providers/google/cloud/sensors/bigquery_dts.py
rename to providers/src/airflow/providers/google/cloud/sensors/bigquery_dts.py
diff --git a/airflow/providers/google/cloud/sensors/bigtable.py b/providers/src/airflow/providers/google/cloud/sensors/bigtable.py
similarity index 100%
rename from airflow/providers/google/cloud/sensors/bigtable.py
rename to providers/src/airflow/providers/google/cloud/sensors/bigtable.py
diff --git a/airflow/providers/google/cloud/sensors/cloud_composer.py b/providers/src/airflow/providers/google/cloud/sensors/cloud_composer.py
similarity index 100%
rename from airflow/providers/google/cloud/sensors/cloud_composer.py
rename to providers/src/airflow/providers/google/cloud/sensors/cloud_composer.py
diff --git a/airflow/providers/google/cloud/sensors/cloud_storage_transfer_service.py b/providers/src/airflow/providers/google/cloud/sensors/cloud_storage_transfer_service.py
similarity index 100%
rename from airflow/providers/google/cloud/sensors/cloud_storage_transfer_service.py
rename to providers/src/airflow/providers/google/cloud/sensors/cloud_storage_transfer_service.py
diff --git a/airflow/providers/google/cloud/sensors/dataflow.py b/providers/src/airflow/providers/google/cloud/sensors/dataflow.py
similarity index 100%
rename from airflow/providers/google/cloud/sensors/dataflow.py
rename to providers/src/airflow/providers/google/cloud/sensors/dataflow.py
diff --git a/airflow/providers/google/cloud/sensors/dataform.py b/providers/src/airflow/providers/google/cloud/sensors/dataform.py
similarity index 100%
rename from airflow/providers/google/cloud/sensors/dataform.py
rename to providers/src/airflow/providers/google/cloud/sensors/dataform.py
diff --git a/airflow/providers/google/cloud/sensors/datafusion.py b/providers/src/airflow/providers/google/cloud/sensors/datafusion.py
similarity index 100%
rename from airflow/providers/google/cloud/sensors/datafusion.py
rename to providers/src/airflow/providers/google/cloud/sensors/datafusion.py
diff --git a/airflow/providers/google/cloud/sensors/dataplex.py b/providers/src/airflow/providers/google/cloud/sensors/dataplex.py
similarity index 100%
rename from airflow/providers/google/cloud/sensors/dataplex.py
rename to providers/src/airflow/providers/google/cloud/sensors/dataplex.py
diff --git a/airflow/providers/google/cloud/sensors/dataprep.py b/providers/src/airflow/providers/google/cloud/sensors/dataprep.py
similarity index 100%
rename from airflow/providers/google/cloud/sensors/dataprep.py
rename to providers/src/airflow/providers/google/cloud/sensors/dataprep.py
diff --git a/airflow/providers/google/cloud/sensors/dataproc.py b/providers/src/airflow/providers/google/cloud/sensors/dataproc.py
similarity index 100%
rename from airflow/providers/google/cloud/sensors/dataproc.py
rename to providers/src/airflow/providers/google/cloud/sensors/dataproc.py
diff --git a/airflow/providers/google/cloud/sensors/dataproc_metastore.py b/providers/src/airflow/providers/google/cloud/sensors/dataproc_metastore.py
similarity index 100%
rename from airflow/providers/google/cloud/sensors/dataproc_metastore.py
rename to providers/src/airflow/providers/google/cloud/sensors/dataproc_metastore.py
diff --git a/airflow/providers/google/cloud/sensors/gcs.py b/providers/src/airflow/providers/google/cloud/sensors/gcs.py
similarity index 100%
rename from airflow/providers/google/cloud/sensors/gcs.py
rename to providers/src/airflow/providers/google/cloud/sensors/gcs.py
diff --git a/airflow/providers/google/cloud/sensors/looker.py b/providers/src/airflow/providers/google/cloud/sensors/looker.py
similarity index 100%
rename from airflow/providers/google/cloud/sensors/looker.py
rename to providers/src/airflow/providers/google/cloud/sensors/looker.py
diff --git a/airflow/providers/google/cloud/sensors/pubsub.py b/providers/src/airflow/providers/google/cloud/sensors/pubsub.py
similarity index 100%
rename from airflow/providers/google/cloud/sensors/pubsub.py
rename to providers/src/airflow/providers/google/cloud/sensors/pubsub.py
diff --git a/airflow/providers/google/cloud/sensors/tasks.py b/providers/src/airflow/providers/google/cloud/sensors/tasks.py
similarity index 100%
rename from airflow/providers/google/cloud/sensors/tasks.py
rename to providers/src/airflow/providers/google/cloud/sensors/tasks.py
diff --git a/airflow/providers/google/cloud/sensors/workflows.py b/providers/src/airflow/providers/google/cloud/sensors/workflows.py
similarity index 100%
rename from airflow/providers/google/cloud/sensors/workflows.py
rename to providers/src/airflow/providers/google/cloud/sensors/workflows.py
diff --git a/airflow/providers/google/cloud/transfers/__init__.py b/providers/src/airflow/providers/google/cloud/transfers/__init__.py
similarity index 100%
rename from airflow/providers/google/cloud/transfers/__init__.py
rename to providers/src/airflow/providers/google/cloud/transfers/__init__.py
diff --git a/airflow/providers/google/cloud/transfers/adls_to_gcs.py b/providers/src/airflow/providers/google/cloud/transfers/adls_to_gcs.py
similarity index 100%
rename from airflow/providers/google/cloud/transfers/adls_to_gcs.py
rename to providers/src/airflow/providers/google/cloud/transfers/adls_to_gcs.py
diff --git a/airflow/providers/google/cloud/transfers/azure_blob_to_gcs.py b/providers/src/airflow/providers/google/cloud/transfers/azure_blob_to_gcs.py
similarity index 100%
rename from airflow/providers/google/cloud/transfers/azure_blob_to_gcs.py
rename to providers/src/airflow/providers/google/cloud/transfers/azure_blob_to_gcs.py
diff --git a/airflow/providers/google/cloud/transfers/azure_fileshare_to_gcs.py b/providers/src/airflow/providers/google/cloud/transfers/azure_fileshare_to_gcs.py
similarity index 100%
rename from airflow/providers/google/cloud/transfers/azure_fileshare_to_gcs.py
rename to providers/src/airflow/providers/google/cloud/transfers/azure_fileshare_to_gcs.py
diff --git a/airflow/providers/google/cloud/transfers/bigquery_to_bigquery.py b/providers/src/airflow/providers/google/cloud/transfers/bigquery_to_bigquery.py
similarity index 100%
rename from airflow/providers/google/cloud/transfers/bigquery_to_bigquery.py
rename to providers/src/airflow/providers/google/cloud/transfers/bigquery_to_bigquery.py
diff --git a/airflow/providers/google/cloud/transfers/bigquery_to_gcs.py b/providers/src/airflow/providers/google/cloud/transfers/bigquery_to_gcs.py
similarity index 100%
rename from airflow/providers/google/cloud/transfers/bigquery_to_gcs.py
rename to providers/src/airflow/providers/google/cloud/transfers/bigquery_to_gcs.py
diff --git a/airflow/providers/google/cloud/transfers/bigquery_to_mssql.py b/providers/src/airflow/providers/google/cloud/transfers/bigquery_to_mssql.py
similarity index 100%
rename from airflow/providers/google/cloud/transfers/bigquery_to_mssql.py
rename to providers/src/airflow/providers/google/cloud/transfers/bigquery_to_mssql.py
diff --git a/airflow/providers/google/cloud/transfers/bigquery_to_mysql.py b/providers/src/airflow/providers/google/cloud/transfers/bigquery_to_mysql.py
similarity index 100%
rename from airflow/providers/google/cloud/transfers/bigquery_to_mysql.py
rename to providers/src/airflow/providers/google/cloud/transfers/bigquery_to_mysql.py
diff --git a/airflow/providers/google/cloud/transfers/bigquery_to_postgres.py b/providers/src/airflow/providers/google/cloud/transfers/bigquery_to_postgres.py
similarity index 100%
rename from airflow/providers/google/cloud/transfers/bigquery_to_postgres.py
rename to providers/src/airflow/providers/google/cloud/transfers/bigquery_to_postgres.py
diff --git a/airflow/providers/google/cloud/transfers/bigquery_to_sql.py b/providers/src/airflow/providers/google/cloud/transfers/bigquery_to_sql.py
similarity index 100%
rename from airflow/providers/google/cloud/transfers/bigquery_to_sql.py
rename to providers/src/airflow/providers/google/cloud/transfers/bigquery_to_sql.py
diff --git a/airflow/providers/google/cloud/transfers/calendar_to_gcs.py b/providers/src/airflow/providers/google/cloud/transfers/calendar_to_gcs.py
similarity index 100%
rename from airflow/providers/google/cloud/transfers/calendar_to_gcs.py
rename to providers/src/airflow/providers/google/cloud/transfers/calendar_to_gcs.py
diff --git a/airflow/providers/google/cloud/transfers/cassandra_to_gcs.py b/providers/src/airflow/providers/google/cloud/transfers/cassandra_to_gcs.py
similarity index 100%
rename from airflow/providers/google/cloud/transfers/cassandra_to_gcs.py
rename to providers/src/airflow/providers/google/cloud/transfers/cassandra_to_gcs.py
diff --git a/airflow/providers/google/cloud/transfers/facebook_ads_to_gcs.py b/providers/src/airflow/providers/google/cloud/transfers/facebook_ads_to_gcs.py
similarity index 100%
rename from airflow/providers/google/cloud/transfers/facebook_ads_to_gcs.py
rename to providers/src/airflow/providers/google/cloud/transfers/facebook_ads_to_gcs.py
diff --git a/airflow/providers/google/cloud/transfers/gcs_to_bigquery.py b/providers/src/airflow/providers/google/cloud/transfers/gcs_to_bigquery.py
similarity index 100%
rename from airflow/providers/google/cloud/transfers/gcs_to_bigquery.py
rename to providers/src/airflow/providers/google/cloud/transfers/gcs_to_bigquery.py
diff --git a/airflow/providers/google/cloud/transfers/gcs_to_gcs.py b/providers/src/airflow/providers/google/cloud/transfers/gcs_to_gcs.py
similarity index 100%
rename from airflow/providers/google/cloud/transfers/gcs_to_gcs.py
rename to providers/src/airflow/providers/google/cloud/transfers/gcs_to_gcs.py
diff --git a/airflow/providers/google/cloud/transfers/gcs_to_local.py b/providers/src/airflow/providers/google/cloud/transfers/gcs_to_local.py
similarity index 100%
rename from airflow/providers/google/cloud/transfers/gcs_to_local.py
rename to providers/src/airflow/providers/google/cloud/transfers/gcs_to_local.py
diff --git a/airflow/providers/google/cloud/transfers/gcs_to_sftp.py b/providers/src/airflow/providers/google/cloud/transfers/gcs_to_sftp.py
similarity index 100%
rename from airflow/providers/google/cloud/transfers/gcs_to_sftp.py
rename to providers/src/airflow/providers/google/cloud/transfers/gcs_to_sftp.py
diff --git a/airflow/providers/google/cloud/transfers/gdrive_to_gcs.py b/providers/src/airflow/providers/google/cloud/transfers/gdrive_to_gcs.py
similarity index 100%
rename from airflow/providers/google/cloud/transfers/gdrive_to_gcs.py
rename to providers/src/airflow/providers/google/cloud/transfers/gdrive_to_gcs.py
diff --git a/airflow/providers/google/cloud/transfers/gdrive_to_local.py b/providers/src/airflow/providers/google/cloud/transfers/gdrive_to_local.py
similarity index 100%
rename from airflow/providers/google/cloud/transfers/gdrive_to_local.py
rename to providers/src/airflow/providers/google/cloud/transfers/gdrive_to_local.py
diff --git a/airflow/providers/google/cloud/transfers/local_to_gcs.py b/providers/src/airflow/providers/google/cloud/transfers/local_to_gcs.py
similarity index 100%
rename from airflow/providers/google/cloud/transfers/local_to_gcs.py
rename to providers/src/airflow/providers/google/cloud/transfers/local_to_gcs.py
diff --git a/airflow/providers/google/cloud/transfers/mssql_to_gcs.py b/providers/src/airflow/providers/google/cloud/transfers/mssql_to_gcs.py
similarity index 100%
rename from airflow/providers/google/cloud/transfers/mssql_to_gcs.py
rename to providers/src/airflow/providers/google/cloud/transfers/mssql_to_gcs.py
diff --git a/airflow/providers/google/cloud/transfers/mysql_to_gcs.py b/providers/src/airflow/providers/google/cloud/transfers/mysql_to_gcs.py
similarity index 100%
rename from airflow/providers/google/cloud/transfers/mysql_to_gcs.py
rename to providers/src/airflow/providers/google/cloud/transfers/mysql_to_gcs.py
diff --git a/airflow/providers/google/cloud/transfers/oracle_to_gcs.py b/providers/src/airflow/providers/google/cloud/transfers/oracle_to_gcs.py
similarity index 100%
rename from airflow/providers/google/cloud/transfers/oracle_to_gcs.py
rename to providers/src/airflow/providers/google/cloud/transfers/oracle_to_gcs.py
diff --git a/airflow/providers/google/cloud/transfers/postgres_to_gcs.py b/providers/src/airflow/providers/google/cloud/transfers/postgres_to_gcs.py
similarity index 100%
rename from airflow/providers/google/cloud/transfers/postgres_to_gcs.py
rename to providers/src/airflow/providers/google/cloud/transfers/postgres_to_gcs.py
diff --git a/airflow/providers/google/cloud/transfers/presto_to_gcs.py b/providers/src/airflow/providers/google/cloud/transfers/presto_to_gcs.py
similarity index 100%
rename from airflow/providers/google/cloud/transfers/presto_to_gcs.py
rename to providers/src/airflow/providers/google/cloud/transfers/presto_to_gcs.py
diff --git a/airflow/providers/google/cloud/transfers/s3_to_gcs.py b/providers/src/airflow/providers/google/cloud/transfers/s3_to_gcs.py
similarity index 100%
rename from airflow/providers/google/cloud/transfers/s3_to_gcs.py
rename to providers/src/airflow/providers/google/cloud/transfers/s3_to_gcs.py
diff --git a/airflow/providers/google/cloud/transfers/salesforce_to_gcs.py b/providers/src/airflow/providers/google/cloud/transfers/salesforce_to_gcs.py
similarity index 100%
rename from airflow/providers/google/cloud/transfers/salesforce_to_gcs.py
rename to providers/src/airflow/providers/google/cloud/transfers/salesforce_to_gcs.py
diff --git a/airflow/providers/google/cloud/transfers/sftp_to_gcs.py b/providers/src/airflow/providers/google/cloud/transfers/sftp_to_gcs.py
similarity index 100%
rename from airflow/providers/google/cloud/transfers/sftp_to_gcs.py
rename to
providers/src/airflow/providers/google/cloud/transfers/sftp_to_gcs.py diff --git a/airflow/providers/google/cloud/transfers/sheets_to_gcs.py b/providers/src/airflow/providers/google/cloud/transfers/sheets_to_gcs.py similarity index 100% rename from airflow/providers/google/cloud/transfers/sheets_to_gcs.py rename to providers/src/airflow/providers/google/cloud/transfers/sheets_to_gcs.py diff --git a/airflow/providers/google/cloud/transfers/sql_to_gcs.py b/providers/src/airflow/providers/google/cloud/transfers/sql_to_gcs.py similarity index 100% rename from airflow/providers/google/cloud/transfers/sql_to_gcs.py rename to providers/src/airflow/providers/google/cloud/transfers/sql_to_gcs.py diff --git a/airflow/providers/google/cloud/transfers/trino_to_gcs.py b/providers/src/airflow/providers/google/cloud/transfers/trino_to_gcs.py similarity index 100% rename from airflow/providers/google/cloud/transfers/trino_to_gcs.py rename to providers/src/airflow/providers/google/cloud/transfers/trino_to_gcs.py diff --git a/airflow/providers/google/cloud/triggers/__init__.py b/providers/src/airflow/providers/google/cloud/triggers/__init__.py similarity index 100% rename from airflow/providers/google/cloud/triggers/__init__.py rename to providers/src/airflow/providers/google/cloud/triggers/__init__.py diff --git a/airflow/providers/google/cloud/triggers/bigquery.py b/providers/src/airflow/providers/google/cloud/triggers/bigquery.py similarity index 100% rename from airflow/providers/google/cloud/triggers/bigquery.py rename to providers/src/airflow/providers/google/cloud/triggers/bigquery.py diff --git a/airflow/providers/google/cloud/triggers/bigquery_dts.py b/providers/src/airflow/providers/google/cloud/triggers/bigquery_dts.py similarity index 100% rename from airflow/providers/google/cloud/triggers/bigquery_dts.py rename to providers/src/airflow/providers/google/cloud/triggers/bigquery_dts.py diff --git a/airflow/providers/google/cloud/triggers/cloud_batch.py b/providers/src/airflow/providers/google/cloud/triggers/cloud_batch.py similarity index 100% rename from airflow/providers/google/cloud/triggers/cloud_batch.py rename to providers/src/airflow/providers/google/cloud/triggers/cloud_batch.py diff --git a/airflow/providers/google/cloud/triggers/cloud_build.py b/providers/src/airflow/providers/google/cloud/triggers/cloud_build.py similarity index 100% rename from airflow/providers/google/cloud/triggers/cloud_build.py rename to providers/src/airflow/providers/google/cloud/triggers/cloud_build.py diff --git a/airflow/providers/google/cloud/triggers/cloud_composer.py b/providers/src/airflow/providers/google/cloud/triggers/cloud_composer.py similarity index 100% rename from airflow/providers/google/cloud/triggers/cloud_composer.py rename to providers/src/airflow/providers/google/cloud/triggers/cloud_composer.py diff --git a/airflow/providers/google/cloud/triggers/cloud_run.py b/providers/src/airflow/providers/google/cloud/triggers/cloud_run.py similarity index 100% rename from airflow/providers/google/cloud/triggers/cloud_run.py rename to providers/src/airflow/providers/google/cloud/triggers/cloud_run.py diff --git a/airflow/providers/google/cloud/triggers/cloud_sql.py b/providers/src/airflow/providers/google/cloud/triggers/cloud_sql.py similarity index 100% rename from airflow/providers/google/cloud/triggers/cloud_sql.py rename to providers/src/airflow/providers/google/cloud/triggers/cloud_sql.py diff --git a/airflow/providers/google/cloud/triggers/cloud_storage_transfer_service.py 
b/providers/src/airflow/providers/google/cloud/triggers/cloud_storage_transfer_service.py similarity index 100% rename from airflow/providers/google/cloud/triggers/cloud_storage_transfer_service.py rename to providers/src/airflow/providers/google/cloud/triggers/cloud_storage_transfer_service.py diff --git a/airflow/providers/google/cloud/triggers/dataflow.py b/providers/src/airflow/providers/google/cloud/triggers/dataflow.py similarity index 100% rename from airflow/providers/google/cloud/triggers/dataflow.py rename to providers/src/airflow/providers/google/cloud/triggers/dataflow.py diff --git a/airflow/providers/google/cloud/triggers/datafusion.py b/providers/src/airflow/providers/google/cloud/triggers/datafusion.py similarity index 100% rename from airflow/providers/google/cloud/triggers/datafusion.py rename to providers/src/airflow/providers/google/cloud/triggers/datafusion.py diff --git a/airflow/providers/google/cloud/triggers/dataplex.py b/providers/src/airflow/providers/google/cloud/triggers/dataplex.py similarity index 100% rename from airflow/providers/google/cloud/triggers/dataplex.py rename to providers/src/airflow/providers/google/cloud/triggers/dataplex.py diff --git a/airflow/providers/google/cloud/triggers/dataproc.py b/providers/src/airflow/providers/google/cloud/triggers/dataproc.py similarity index 100% rename from airflow/providers/google/cloud/triggers/dataproc.py rename to providers/src/airflow/providers/google/cloud/triggers/dataproc.py diff --git a/airflow/providers/google/cloud/triggers/gcs.py b/providers/src/airflow/providers/google/cloud/triggers/gcs.py similarity index 100% rename from airflow/providers/google/cloud/triggers/gcs.py rename to providers/src/airflow/providers/google/cloud/triggers/gcs.py diff --git a/airflow/providers/google/cloud/triggers/kubernetes_engine.py b/providers/src/airflow/providers/google/cloud/triggers/kubernetes_engine.py similarity index 100% rename from airflow/providers/google/cloud/triggers/kubernetes_engine.py rename to providers/src/airflow/providers/google/cloud/triggers/kubernetes_engine.py diff --git a/airflow/providers/google/cloud/triggers/mlengine.py b/providers/src/airflow/providers/google/cloud/triggers/mlengine.py similarity index 100% rename from airflow/providers/google/cloud/triggers/mlengine.py rename to providers/src/airflow/providers/google/cloud/triggers/mlengine.py diff --git a/airflow/providers/google/cloud/triggers/pubsub.py b/providers/src/airflow/providers/google/cloud/triggers/pubsub.py similarity index 100% rename from airflow/providers/google/cloud/triggers/pubsub.py rename to providers/src/airflow/providers/google/cloud/triggers/pubsub.py diff --git a/airflow/providers/google/cloud/triggers/vertex_ai.py b/providers/src/airflow/providers/google/cloud/triggers/vertex_ai.py similarity index 100% rename from airflow/providers/google/cloud/triggers/vertex_ai.py rename to providers/src/airflow/providers/google/cloud/triggers/vertex_ai.py diff --git a/airflow/providers/ftp/sensors/__init__.py b/providers/src/airflow/providers/google/cloud/utils/__init__.py similarity index 100% rename from airflow/providers/ftp/sensors/__init__.py rename to providers/src/airflow/providers/google/cloud/utils/__init__.py diff --git a/airflow/providers/google/cloud/utils/bigquery.py b/providers/src/airflow/providers/google/cloud/utils/bigquery.py similarity index 100% rename from airflow/providers/google/cloud/utils/bigquery.py rename to providers/src/airflow/providers/google/cloud/utils/bigquery.py diff --git 
a/airflow/providers/google/cloud/utils/bigquery_get_data.py b/providers/src/airflow/providers/google/cloud/utils/bigquery_get_data.py similarity index 100% rename from airflow/providers/google/cloud/utils/bigquery_get_data.py rename to providers/src/airflow/providers/google/cloud/utils/bigquery_get_data.py diff --git a/airflow/providers/google/cloud/utils/credentials_provider.py b/providers/src/airflow/providers/google/cloud/utils/credentials_provider.py similarity index 100% rename from airflow/providers/google/cloud/utils/credentials_provider.py rename to providers/src/airflow/providers/google/cloud/utils/credentials_provider.py diff --git a/airflow/providers/google/cloud/utils/dataform.py b/providers/src/airflow/providers/google/cloud/utils/dataform.py similarity index 100% rename from airflow/providers/google/cloud/utils/dataform.py rename to providers/src/airflow/providers/google/cloud/utils/dataform.py diff --git a/airflow/providers/google/cloud/utils/datafusion.py b/providers/src/airflow/providers/google/cloud/utils/datafusion.py similarity index 100% rename from airflow/providers/google/cloud/utils/datafusion.py rename to providers/src/airflow/providers/google/cloud/utils/datafusion.py diff --git a/airflow/providers/google/cloud/utils/dataproc.py b/providers/src/airflow/providers/google/cloud/utils/dataproc.py similarity index 100% rename from airflow/providers/google/cloud/utils/dataproc.py rename to providers/src/airflow/providers/google/cloud/utils/dataproc.py diff --git a/airflow/providers/google/cloud/utils/external_token_supplier.py b/providers/src/airflow/providers/google/cloud/utils/external_token_supplier.py similarity index 100% rename from airflow/providers/google/cloud/utils/external_token_supplier.py rename to providers/src/airflow/providers/google/cloud/utils/external_token_supplier.py diff --git a/airflow/providers/google/cloud/utils/field_sanitizer.py b/providers/src/airflow/providers/google/cloud/utils/field_sanitizer.py similarity index 100% rename from airflow/providers/google/cloud/utils/field_sanitizer.py rename to providers/src/airflow/providers/google/cloud/utils/field_sanitizer.py diff --git a/airflow/providers/google/cloud/utils/field_validator.py b/providers/src/airflow/providers/google/cloud/utils/field_validator.py similarity index 100% rename from airflow/providers/google/cloud/utils/field_validator.py rename to providers/src/airflow/providers/google/cloud/utils/field_validator.py diff --git a/airflow/providers/google/cloud/utils/helpers.py b/providers/src/airflow/providers/google/cloud/utils/helpers.py similarity index 100% rename from airflow/providers/google/cloud/utils/helpers.py rename to providers/src/airflow/providers/google/cloud/utils/helpers.py diff --git a/airflow/providers/google/cloud/utils/mlengine_operator_utils.py b/providers/src/airflow/providers/google/cloud/utils/mlengine_operator_utils.py similarity index 100% rename from airflow/providers/google/cloud/utils/mlengine_operator_utils.py rename to providers/src/airflow/providers/google/cloud/utils/mlengine_operator_utils.py diff --git a/airflow/providers/google/cloud/utils/mlengine_prediction_summary.py b/providers/src/airflow/providers/google/cloud/utils/mlengine_prediction_summary.py similarity index 100% rename from airflow/providers/google/cloud/utils/mlengine_prediction_summary.py rename to providers/src/airflow/providers/google/cloud/utils/mlengine_prediction_summary.py diff --git a/airflow/providers/google/common/__init__.py 
b/providers/src/airflow/providers/google/common/__init__.py similarity index 100% rename from airflow/providers/google/common/__init__.py rename to providers/src/airflow/providers/google/common/__init__.py diff --git a/airflow/providers/google/common/auth_backend/__init__.py b/providers/src/airflow/providers/google/common/auth_backend/__init__.py similarity index 100% rename from airflow/providers/google/common/auth_backend/__init__.py rename to providers/src/airflow/providers/google/common/auth_backend/__init__.py diff --git a/airflow/providers/google/common/auth_backend/google_openid.py b/providers/src/airflow/providers/google/common/auth_backend/google_openid.py similarity index 100% rename from airflow/providers/google/common/auth_backend/google_openid.py rename to providers/src/airflow/providers/google/common/auth_backend/google_openid.py diff --git a/airflow/providers/google/common/consts.py b/providers/src/airflow/providers/google/common/consts.py similarity index 100% rename from airflow/providers/google/common/consts.py rename to providers/src/airflow/providers/google/common/consts.py diff --git a/airflow/providers/google/common/deprecated.py b/providers/src/airflow/providers/google/common/deprecated.py similarity index 100% rename from airflow/providers/google/common/deprecated.py rename to providers/src/airflow/providers/google/common/deprecated.py diff --git a/airflow/providers/google/common/hooks/__init__.py b/providers/src/airflow/providers/google/common/hooks/__init__.py similarity index 100% rename from airflow/providers/google/common/hooks/__init__.py rename to providers/src/airflow/providers/google/common/hooks/__init__.py diff --git a/airflow/providers/google/common/hooks/base_google.py b/providers/src/airflow/providers/google/common/hooks/base_google.py similarity index 100% rename from airflow/providers/google/common/hooks/base_google.py rename to providers/src/airflow/providers/google/common/hooks/base_google.py diff --git a/airflow/providers/google/common/hooks/discovery_api.py b/providers/src/airflow/providers/google/common/hooks/discovery_api.py similarity index 100% rename from airflow/providers/google/common/hooks/discovery_api.py rename to providers/src/airflow/providers/google/common/hooks/discovery_api.py diff --git a/airflow/providers/google/common/links/__init__.py b/providers/src/airflow/providers/google/common/links/__init__.py similarity index 100% rename from airflow/providers/google/common/links/__init__.py rename to providers/src/airflow/providers/google/common/links/__init__.py diff --git a/airflow/providers/google/common/links/storage.py b/providers/src/airflow/providers/google/common/links/storage.py similarity index 100% rename from airflow/providers/google/common/links/storage.py rename to providers/src/airflow/providers/google/common/links/storage.py diff --git a/airflow/providers/google/common/utils/__init__.py b/providers/src/airflow/providers/google/common/utils/__init__.py similarity index 100% rename from airflow/providers/google/common/utils/__init__.py rename to providers/src/airflow/providers/google/common/utils/__init__.py diff --git a/airflow/providers/google/common/utils/id_token_credentials.py b/providers/src/airflow/providers/google/common/utils/id_token_credentials.py similarity index 100% rename from airflow/providers/google/common/utils/id_token_credentials.py rename to providers/src/airflow/providers/google/common/utils/id_token_credentials.py diff --git a/airflow/providers/google/datasets/__init__.py 
b/providers/src/airflow/providers/google/datasets/__init__.py similarity index 100% rename from airflow/providers/google/datasets/__init__.py rename to providers/src/airflow/providers/google/datasets/__init__.py diff --git a/airflow/providers/google/datasets/bigquery.py b/providers/src/airflow/providers/google/datasets/bigquery.py similarity index 100% rename from airflow/providers/google/datasets/bigquery.py rename to providers/src/airflow/providers/google/datasets/bigquery.py diff --git a/airflow/providers/google/firebase/__init__.py b/providers/src/airflow/providers/google/firebase/__init__.py similarity index 100% rename from airflow/providers/google/firebase/__init__.py rename to providers/src/airflow/providers/google/firebase/__init__.py diff --git a/airflow/providers/google/firebase/hooks/__init__.py b/providers/src/airflow/providers/google/firebase/hooks/__init__.py similarity index 100% rename from airflow/providers/google/firebase/hooks/__init__.py rename to providers/src/airflow/providers/google/firebase/hooks/__init__.py diff --git a/airflow/providers/google/firebase/hooks/firestore.py b/providers/src/airflow/providers/google/firebase/hooks/firestore.py similarity index 100% rename from airflow/providers/google/firebase/hooks/firestore.py rename to providers/src/airflow/providers/google/firebase/hooks/firestore.py diff --git a/airflow/providers/google/firebase/operators/__init__.py b/providers/src/airflow/providers/google/firebase/operators/__init__.py similarity index 100% rename from airflow/providers/google/firebase/operators/__init__.py rename to providers/src/airflow/providers/google/firebase/operators/__init__.py diff --git a/airflow/providers/google/firebase/operators/firestore.py b/providers/src/airflow/providers/google/firebase/operators/firestore.py similarity index 100% rename from airflow/providers/google/firebase/operators/firestore.py rename to providers/src/airflow/providers/google/firebase/operators/firestore.py diff --git a/airflow/providers/google/go_module_utils.py b/providers/src/airflow/providers/google/go_module_utils.py similarity index 100% rename from airflow/providers/google/go_module_utils.py rename to providers/src/airflow/providers/google/go_module_utils.py diff --git a/airflow/providers/google/leveldb/__init__.py b/providers/src/airflow/providers/google/leveldb/__init__.py similarity index 100% rename from airflow/providers/google/leveldb/__init__.py rename to providers/src/airflow/providers/google/leveldb/__init__.py diff --git a/airflow/providers/google/leveldb/hooks/__init__.py b/providers/src/airflow/providers/google/leveldb/hooks/__init__.py similarity index 100% rename from airflow/providers/google/leveldb/hooks/__init__.py rename to providers/src/airflow/providers/google/leveldb/hooks/__init__.py diff --git a/airflow/providers/google/leveldb/hooks/leveldb.py b/providers/src/airflow/providers/google/leveldb/hooks/leveldb.py similarity index 100% rename from airflow/providers/google/leveldb/hooks/leveldb.py rename to providers/src/airflow/providers/google/leveldb/hooks/leveldb.py diff --git a/airflow/providers/google/leveldb/operators/__init__.py b/providers/src/airflow/providers/google/leveldb/operators/__init__.py similarity index 100% rename from airflow/providers/google/leveldb/operators/__init__.py rename to providers/src/airflow/providers/google/leveldb/operators/__init__.py diff --git a/airflow/providers/google/leveldb/operators/leveldb.py b/providers/src/airflow/providers/google/leveldb/operators/leveldb.py similarity index 100% rename 
from airflow/providers/google/leveldb/operators/leveldb.py rename to providers/src/airflow/providers/google/leveldb/operators/leveldb.py diff --git a/airflow/providers/google/marketing_platform/__init__.py b/providers/src/airflow/providers/google/marketing_platform/__init__.py similarity index 100% rename from airflow/providers/google/marketing_platform/__init__.py rename to providers/src/airflow/providers/google/marketing_platform/__init__.py diff --git a/airflow/providers/google/marketing_platform/example_dags/__init__.py b/providers/src/airflow/providers/google/marketing_platform/example_dags/__init__.py similarity index 100% rename from airflow/providers/google/marketing_platform/example_dags/__init__.py rename to providers/src/airflow/providers/google/marketing_platform/example_dags/__init__.py diff --git a/airflow/providers/google/marketing_platform/example_dags/example_display_video.py b/providers/src/airflow/providers/google/marketing_platform/example_dags/example_display_video.py similarity index 100% rename from airflow/providers/google/marketing_platform/example_dags/example_display_video.py rename to providers/src/airflow/providers/google/marketing_platform/example_dags/example_display_video.py diff --git a/airflow/providers/google/marketing_platform/hooks/__init__.py b/providers/src/airflow/providers/google/marketing_platform/hooks/__init__.py similarity index 100% rename from airflow/providers/google/marketing_platform/hooks/__init__.py rename to providers/src/airflow/providers/google/marketing_platform/hooks/__init__.py diff --git a/airflow/providers/google/marketing_platform/hooks/analytics.py b/providers/src/airflow/providers/google/marketing_platform/hooks/analytics.py similarity index 100% rename from airflow/providers/google/marketing_platform/hooks/analytics.py rename to providers/src/airflow/providers/google/marketing_platform/hooks/analytics.py diff --git a/airflow/providers/google/marketing_platform/hooks/analytics_admin.py b/providers/src/airflow/providers/google/marketing_platform/hooks/analytics_admin.py similarity index 100% rename from airflow/providers/google/marketing_platform/hooks/analytics_admin.py rename to providers/src/airflow/providers/google/marketing_platform/hooks/analytics_admin.py diff --git a/airflow/providers/google/marketing_platform/hooks/campaign_manager.py b/providers/src/airflow/providers/google/marketing_platform/hooks/campaign_manager.py similarity index 100% rename from airflow/providers/google/marketing_platform/hooks/campaign_manager.py rename to providers/src/airflow/providers/google/marketing_platform/hooks/campaign_manager.py diff --git a/airflow/providers/google/marketing_platform/hooks/display_video.py b/providers/src/airflow/providers/google/marketing_platform/hooks/display_video.py similarity index 100% rename from airflow/providers/google/marketing_platform/hooks/display_video.py rename to providers/src/airflow/providers/google/marketing_platform/hooks/display_video.py diff --git a/airflow/providers/google/marketing_platform/hooks/search_ads.py b/providers/src/airflow/providers/google/marketing_platform/hooks/search_ads.py similarity index 100% rename from airflow/providers/google/marketing_platform/hooks/search_ads.py rename to providers/src/airflow/providers/google/marketing_platform/hooks/search_ads.py diff --git a/airflow/providers/google/marketing_platform/links/__init__.py b/providers/src/airflow/providers/google/marketing_platform/links/__init__.py similarity index 100% rename from 
airflow/providers/google/marketing_platform/links/__init__.py rename to providers/src/airflow/providers/google/marketing_platform/links/__init__.py diff --git a/airflow/providers/google/marketing_platform/links/analytics_admin.py b/providers/src/airflow/providers/google/marketing_platform/links/analytics_admin.py similarity index 100% rename from airflow/providers/google/marketing_platform/links/analytics_admin.py rename to providers/src/airflow/providers/google/marketing_platform/links/analytics_admin.py diff --git a/airflow/providers/google/marketing_platform/operators/__init__.py b/providers/src/airflow/providers/google/marketing_platform/operators/__init__.py similarity index 100% rename from airflow/providers/google/marketing_platform/operators/__init__.py rename to providers/src/airflow/providers/google/marketing_platform/operators/__init__.py diff --git a/airflow/providers/google/marketing_platform/operators/analytics.py b/providers/src/airflow/providers/google/marketing_platform/operators/analytics.py similarity index 100% rename from airflow/providers/google/marketing_platform/operators/analytics.py rename to providers/src/airflow/providers/google/marketing_platform/operators/analytics.py diff --git a/airflow/providers/google/marketing_platform/operators/analytics_admin.py b/providers/src/airflow/providers/google/marketing_platform/operators/analytics_admin.py similarity index 100% rename from airflow/providers/google/marketing_platform/operators/analytics_admin.py rename to providers/src/airflow/providers/google/marketing_platform/operators/analytics_admin.py diff --git a/airflow/providers/google/marketing_platform/operators/campaign_manager.py b/providers/src/airflow/providers/google/marketing_platform/operators/campaign_manager.py similarity index 100% rename from airflow/providers/google/marketing_platform/operators/campaign_manager.py rename to providers/src/airflow/providers/google/marketing_platform/operators/campaign_manager.py diff --git a/airflow/providers/google/marketing_platform/operators/display_video.py b/providers/src/airflow/providers/google/marketing_platform/operators/display_video.py similarity index 100% rename from airflow/providers/google/marketing_platform/operators/display_video.py rename to providers/src/airflow/providers/google/marketing_platform/operators/display_video.py diff --git a/airflow/providers/google/marketing_platform/operators/search_ads.py b/providers/src/airflow/providers/google/marketing_platform/operators/search_ads.py similarity index 100% rename from airflow/providers/google/marketing_platform/operators/search_ads.py rename to providers/src/airflow/providers/google/marketing_platform/operators/search_ads.py diff --git a/airflow/providers/google/marketing_platform/sensors/__init__.py b/providers/src/airflow/providers/google/marketing_platform/sensors/__init__.py similarity index 100% rename from airflow/providers/google/marketing_platform/sensors/__init__.py rename to providers/src/airflow/providers/google/marketing_platform/sensors/__init__.py diff --git a/airflow/providers/google/marketing_platform/sensors/campaign_manager.py b/providers/src/airflow/providers/google/marketing_platform/sensors/campaign_manager.py similarity index 100% rename from airflow/providers/google/marketing_platform/sensors/campaign_manager.py rename to providers/src/airflow/providers/google/marketing_platform/sensors/campaign_manager.py diff --git a/airflow/providers/google/marketing_platform/sensors/display_video.py 
b/providers/src/airflow/providers/google/marketing_platform/sensors/display_video.py similarity index 100% rename from airflow/providers/google/marketing_platform/sensors/display_video.py rename to providers/src/airflow/providers/google/marketing_platform/sensors/display_video.py diff --git a/airflow/providers/google/provider.yaml b/providers/src/airflow/providers/google/provider.yaml similarity index 100% rename from airflow/providers/google/provider.yaml rename to providers/src/airflow/providers/google/provider.yaml diff --git a/airflow/providers/google/cloud/utils/__init__.py b/providers/src/airflow/providers/google/suite/__init__.py similarity index 100% rename from airflow/providers/google/cloud/utils/__init__.py rename to providers/src/airflow/providers/google/suite/__init__.py diff --git a/airflow/providers/google/suite/__init__.py b/providers/src/airflow/providers/google/suite/hooks/__init__.py similarity index 100% rename from airflow/providers/google/suite/__init__.py rename to providers/src/airflow/providers/google/suite/hooks/__init__.py diff --git a/airflow/providers/google/suite/hooks/calendar.py b/providers/src/airflow/providers/google/suite/hooks/calendar.py similarity index 100% rename from airflow/providers/google/suite/hooks/calendar.py rename to providers/src/airflow/providers/google/suite/hooks/calendar.py diff --git a/airflow/providers/google/suite/hooks/drive.py b/providers/src/airflow/providers/google/suite/hooks/drive.py similarity index 100% rename from airflow/providers/google/suite/hooks/drive.py rename to providers/src/airflow/providers/google/suite/hooks/drive.py diff --git a/airflow/providers/google/suite/hooks/sheets.py b/providers/src/airflow/providers/google/suite/hooks/sheets.py similarity index 100% rename from airflow/providers/google/suite/hooks/sheets.py rename to providers/src/airflow/providers/google/suite/hooks/sheets.py diff --git a/airflow/providers/google/suite/hooks/__init__.py b/providers/src/airflow/providers/google/suite/operators/__init__.py similarity index 100% rename from airflow/providers/google/suite/hooks/__init__.py rename to providers/src/airflow/providers/google/suite/operators/__init__.py diff --git a/airflow/providers/google/suite/operators/sheets.py b/providers/src/airflow/providers/google/suite/operators/sheets.py similarity index 100% rename from airflow/providers/google/suite/operators/sheets.py rename to providers/src/airflow/providers/google/suite/operators/sheets.py diff --git a/airflow/providers/google/suite/sensors/__init__.py b/providers/src/airflow/providers/google/suite/sensors/__init__.py similarity index 100% rename from airflow/providers/google/suite/sensors/__init__.py rename to providers/src/airflow/providers/google/suite/sensors/__init__.py diff --git a/airflow/providers/google/suite/sensors/drive.py b/providers/src/airflow/providers/google/suite/sensors/drive.py similarity index 100% rename from airflow/providers/google/suite/sensors/drive.py rename to providers/src/airflow/providers/google/suite/sensors/drive.py diff --git a/airflow/providers/google/suite/transfers/__init__.py b/providers/src/airflow/providers/google/suite/transfers/__init__.py similarity index 100% rename from airflow/providers/google/suite/transfers/__init__.py rename to providers/src/airflow/providers/google/suite/transfers/__init__.py diff --git a/airflow/providers/google/suite/transfers/gcs_to_gdrive.py b/providers/src/airflow/providers/google/suite/transfers/gcs_to_gdrive.py similarity index 100% rename from 
airflow/providers/google/suite/transfers/gcs_to_gdrive.py rename to providers/src/airflow/providers/google/suite/transfers/gcs_to_gdrive.py diff --git a/airflow/providers/google/suite/transfers/gcs_to_sheets.py b/providers/src/airflow/providers/google/suite/transfers/gcs_to_sheets.py similarity index 100% rename from airflow/providers/google/suite/transfers/gcs_to_sheets.py rename to providers/src/airflow/providers/google/suite/transfers/gcs_to_sheets.py diff --git a/airflow/providers/google/suite/transfers/local_to_drive.py b/providers/src/airflow/providers/google/suite/transfers/local_to_drive.py similarity index 100% rename from airflow/providers/google/suite/transfers/local_to_drive.py rename to providers/src/airflow/providers/google/suite/transfers/local_to_drive.py diff --git a/airflow/providers/google/suite/transfers/sql_to_sheets.py b/providers/src/airflow/providers/google/suite/transfers/sql_to_sheets.py similarity index 100% rename from airflow/providers/google/suite/transfers/sql_to_sheets.py rename to providers/src/airflow/providers/google/suite/transfers/sql_to_sheets.py diff --git a/airflow/providers/grpc/.latest-doc-only-change.txt b/providers/src/airflow/providers/grpc/.latest-doc-only-change.txt similarity index 100% rename from airflow/providers/grpc/.latest-doc-only-change.txt rename to providers/src/airflow/providers/grpc/.latest-doc-only-change.txt diff --git a/airflow/providers/grpc/CHANGELOG.rst b/providers/src/airflow/providers/grpc/CHANGELOG.rst similarity index 100% rename from airflow/providers/grpc/CHANGELOG.rst rename to providers/src/airflow/providers/grpc/CHANGELOG.rst diff --git a/airflow/providers/grpc/__init__.py b/providers/src/airflow/providers/grpc/__init__.py similarity index 100% rename from airflow/providers/grpc/__init__.py rename to providers/src/airflow/providers/grpc/__init__.py diff --git a/airflow/providers/google/suite/operators/__init__.py b/providers/src/airflow/providers/grpc/hooks/__init__.py similarity index 100% rename from airflow/providers/google/suite/operators/__init__.py rename to providers/src/airflow/providers/grpc/hooks/__init__.py diff --git a/airflow/providers/grpc/hooks/grpc.py b/providers/src/airflow/providers/grpc/hooks/grpc.py similarity index 100% rename from airflow/providers/grpc/hooks/grpc.py rename to providers/src/airflow/providers/grpc/hooks/grpc.py diff --git a/airflow/providers/grpc/hooks/__init__.py b/providers/src/airflow/providers/grpc/operators/__init__.py similarity index 100% rename from airflow/providers/grpc/hooks/__init__.py rename to providers/src/airflow/providers/grpc/operators/__init__.py diff --git a/airflow/providers/grpc/operators/grpc.py b/providers/src/airflow/providers/grpc/operators/grpc.py similarity index 100% rename from airflow/providers/grpc/operators/grpc.py rename to providers/src/airflow/providers/grpc/operators/grpc.py diff --git a/airflow/providers/grpc/provider.yaml b/providers/src/airflow/providers/grpc/provider.yaml similarity index 100% rename from airflow/providers/grpc/provider.yaml rename to providers/src/airflow/providers/grpc/provider.yaml diff --git a/airflow/providers/hashicorp/.latest-doc-only-change.txt b/providers/src/airflow/providers/hashicorp/.latest-doc-only-change.txt similarity index 100% rename from airflow/providers/hashicorp/.latest-doc-only-change.txt rename to providers/src/airflow/providers/hashicorp/.latest-doc-only-change.txt diff --git a/airflow/providers/hashicorp/CHANGELOG.rst b/providers/src/airflow/providers/hashicorp/CHANGELOG.rst similarity index 100% 
rename from airflow/providers/hashicorp/CHANGELOG.rst rename to providers/src/airflow/providers/hashicorp/CHANGELOG.rst diff --git a/airflow/providers/hashicorp/__init__.py b/providers/src/airflow/providers/hashicorp/__init__.py similarity index 100% rename from airflow/providers/hashicorp/__init__.py rename to providers/src/airflow/providers/hashicorp/__init__.py diff --git a/airflow/providers/hashicorp/_internal_client/__init__.py b/providers/src/airflow/providers/hashicorp/_internal_client/__init__.py similarity index 100% rename from airflow/providers/hashicorp/_internal_client/__init__.py rename to providers/src/airflow/providers/hashicorp/_internal_client/__init__.py diff --git a/airflow/providers/hashicorp/_internal_client/vault_client.py b/providers/src/airflow/providers/hashicorp/_internal_client/vault_client.py similarity index 100% rename from airflow/providers/hashicorp/_internal_client/vault_client.py rename to providers/src/airflow/providers/hashicorp/_internal_client/vault_client.py diff --git a/airflow/providers/hashicorp/hooks/__init__.py b/providers/src/airflow/providers/hashicorp/hooks/__init__.py similarity index 100% rename from airflow/providers/hashicorp/hooks/__init__.py rename to providers/src/airflow/providers/hashicorp/hooks/__init__.py diff --git a/airflow/providers/hashicorp/hooks/vault.py b/providers/src/airflow/providers/hashicorp/hooks/vault.py similarity index 100% rename from airflow/providers/hashicorp/hooks/vault.py rename to providers/src/airflow/providers/hashicorp/hooks/vault.py diff --git a/airflow/providers/hashicorp/provider.yaml b/providers/src/airflow/providers/hashicorp/provider.yaml similarity index 100% rename from airflow/providers/hashicorp/provider.yaml rename to providers/src/airflow/providers/hashicorp/provider.yaml diff --git a/airflow/providers/hashicorp/secrets/__init__.py b/providers/src/airflow/providers/hashicorp/secrets/__init__.py similarity index 100% rename from airflow/providers/hashicorp/secrets/__init__.py rename to providers/src/airflow/providers/hashicorp/secrets/__init__.py diff --git a/airflow/providers/hashicorp/secrets/vault.py b/providers/src/airflow/providers/hashicorp/secrets/vault.py similarity index 100% rename from airflow/providers/hashicorp/secrets/vault.py rename to providers/src/airflow/providers/hashicorp/secrets/vault.py diff --git a/airflow/providers/http/.latest-doc-only-change.txt b/providers/src/airflow/providers/http/.latest-doc-only-change.txt similarity index 100% rename from airflow/providers/http/.latest-doc-only-change.txt rename to providers/src/airflow/providers/http/.latest-doc-only-change.txt diff --git a/airflow/providers/http/CHANGELOG.rst b/providers/src/airflow/providers/http/CHANGELOG.rst similarity index 100% rename from airflow/providers/http/CHANGELOG.rst rename to providers/src/airflow/providers/http/CHANGELOG.rst diff --git a/airflow/providers/http/__init__.py b/providers/src/airflow/providers/http/__init__.py similarity index 100% rename from airflow/providers/http/__init__.py rename to providers/src/airflow/providers/http/__init__.py diff --git a/airflow/providers/grpc/operators/__init__.py b/providers/src/airflow/providers/http/hooks/__init__.py similarity index 100% rename from airflow/providers/grpc/operators/__init__.py rename to providers/src/airflow/providers/http/hooks/__init__.py diff --git a/airflow/providers/http/hooks/http.py b/providers/src/airflow/providers/http/hooks/http.py similarity index 100% rename from airflow/providers/http/hooks/http.py rename to 
providers/src/airflow/providers/http/hooks/http.py diff --git a/airflow/providers/http/hooks/__init__.py b/providers/src/airflow/providers/http/operators/__init__.py similarity index 100% rename from airflow/providers/http/hooks/__init__.py rename to providers/src/airflow/providers/http/operators/__init__.py diff --git a/airflow/providers/http/operators/http.py b/providers/src/airflow/providers/http/operators/http.py similarity index 100% rename from airflow/providers/http/operators/http.py rename to providers/src/airflow/providers/http/operators/http.py diff --git a/airflow/providers/http/provider.yaml b/providers/src/airflow/providers/http/provider.yaml similarity index 100% rename from airflow/providers/http/provider.yaml rename to providers/src/airflow/providers/http/provider.yaml diff --git a/airflow/providers/http/operators/__init__.py b/providers/src/airflow/providers/http/sensors/__init__.py similarity index 100% rename from airflow/providers/http/operators/__init__.py rename to providers/src/airflow/providers/http/sensors/__init__.py diff --git a/airflow/providers/http/sensors/http.py b/providers/src/airflow/providers/http/sensors/http.py similarity index 100% rename from airflow/providers/http/sensors/http.py rename to providers/src/airflow/providers/http/sensors/http.py diff --git a/airflow/providers/http/sensors/__init__.py b/providers/src/airflow/providers/http/triggers/__init__.py similarity index 100% rename from airflow/providers/http/sensors/__init__.py rename to providers/src/airflow/providers/http/triggers/__init__.py diff --git a/airflow/providers/http/triggers/http.py b/providers/src/airflow/providers/http/triggers/http.py similarity index 100% rename from airflow/providers/http/triggers/http.py rename to providers/src/airflow/providers/http/triggers/http.py diff --git a/airflow/providers/imap/.latest-doc-only-change.txt b/providers/src/airflow/providers/imap/.latest-doc-only-change.txt similarity index 100% rename from airflow/providers/imap/.latest-doc-only-change.txt rename to providers/src/airflow/providers/imap/.latest-doc-only-change.txt diff --git a/airflow/providers/imap/CHANGELOG.rst b/providers/src/airflow/providers/imap/CHANGELOG.rst similarity index 100% rename from airflow/providers/imap/CHANGELOG.rst rename to providers/src/airflow/providers/imap/CHANGELOG.rst diff --git a/airflow/providers/imap/__init__.py b/providers/src/airflow/providers/imap/__init__.py similarity index 100% rename from airflow/providers/imap/__init__.py rename to providers/src/airflow/providers/imap/__init__.py diff --git a/airflow/providers/http/triggers/__init__.py b/providers/src/airflow/providers/imap/hooks/__init__.py similarity index 100% rename from airflow/providers/http/triggers/__init__.py rename to providers/src/airflow/providers/imap/hooks/__init__.py diff --git a/airflow/providers/imap/hooks/imap.py b/providers/src/airflow/providers/imap/hooks/imap.py similarity index 100% rename from airflow/providers/imap/hooks/imap.py rename to providers/src/airflow/providers/imap/hooks/imap.py diff --git a/airflow/providers/imap/provider.yaml b/providers/src/airflow/providers/imap/provider.yaml similarity index 100% rename from airflow/providers/imap/provider.yaml rename to providers/src/airflow/providers/imap/provider.yaml diff --git a/airflow/providers/imap/hooks/__init__.py b/providers/src/airflow/providers/imap/sensors/__init__.py similarity index 100% rename from airflow/providers/imap/hooks/__init__.py rename to providers/src/airflow/providers/imap/sensors/__init__.py diff 
--git a/airflow/providers/imap/sensors/imap_attachment.py b/providers/src/airflow/providers/imap/sensors/imap_attachment.py similarity index 100% rename from airflow/providers/imap/sensors/imap_attachment.py rename to providers/src/airflow/providers/imap/sensors/imap_attachment.py diff --git a/airflow/providers/influxdb/.latest-doc-only-change.txt b/providers/src/airflow/providers/influxdb/.latest-doc-only-change.txt similarity index 100% rename from airflow/providers/influxdb/.latest-doc-only-change.txt rename to providers/src/airflow/providers/influxdb/.latest-doc-only-change.txt diff --git a/airflow/providers/influxdb/CHANGELOG.rst b/providers/src/airflow/providers/influxdb/CHANGELOG.rst similarity index 100% rename from airflow/providers/influxdb/CHANGELOG.rst rename to providers/src/airflow/providers/influxdb/CHANGELOG.rst diff --git a/airflow/providers/influxdb/__init__.py b/providers/src/airflow/providers/influxdb/__init__.py similarity index 100% rename from airflow/providers/influxdb/__init__.py rename to providers/src/airflow/providers/influxdb/__init__.py diff --git a/airflow/providers/influxdb/hooks/__init__.py b/providers/src/airflow/providers/influxdb/hooks/__init__.py similarity index 100% rename from airflow/providers/influxdb/hooks/__init__.py rename to providers/src/airflow/providers/influxdb/hooks/__init__.py diff --git a/airflow/providers/influxdb/hooks/influxdb.py b/providers/src/airflow/providers/influxdb/hooks/influxdb.py similarity index 100% rename from airflow/providers/influxdb/hooks/influxdb.py rename to providers/src/airflow/providers/influxdb/hooks/influxdb.py diff --git a/airflow/providers/influxdb/operators/__init__.py b/providers/src/airflow/providers/influxdb/operators/__init__.py similarity index 100% rename from airflow/providers/influxdb/operators/__init__.py rename to providers/src/airflow/providers/influxdb/operators/__init__.py diff --git a/airflow/providers/influxdb/operators/influxdb.py b/providers/src/airflow/providers/influxdb/operators/influxdb.py similarity index 100% rename from airflow/providers/influxdb/operators/influxdb.py rename to providers/src/airflow/providers/influxdb/operators/influxdb.py diff --git a/airflow/providers/influxdb/provider.yaml b/providers/src/airflow/providers/influxdb/provider.yaml similarity index 100% rename from airflow/providers/influxdb/provider.yaml rename to providers/src/airflow/providers/influxdb/provider.yaml diff --git a/airflow/providers/jdbc/.latest-doc-only-change.txt b/providers/src/airflow/providers/jdbc/.latest-doc-only-change.txt similarity index 100% rename from airflow/providers/jdbc/.latest-doc-only-change.txt rename to providers/src/airflow/providers/jdbc/.latest-doc-only-change.txt diff --git a/airflow/providers/jdbc/CHANGELOG.rst b/providers/src/airflow/providers/jdbc/CHANGELOG.rst similarity index 100% rename from airflow/providers/jdbc/CHANGELOG.rst rename to providers/src/airflow/providers/jdbc/CHANGELOG.rst diff --git a/airflow/providers/jdbc/__init__.py b/providers/src/airflow/providers/jdbc/__init__.py similarity index 100% rename from airflow/providers/jdbc/__init__.py rename to providers/src/airflow/providers/jdbc/__init__.py diff --git a/airflow/providers/imap/sensors/__init__.py b/providers/src/airflow/providers/jdbc/hooks/__init__.py similarity index 100% rename from airflow/providers/imap/sensors/__init__.py rename to providers/src/airflow/providers/jdbc/hooks/__init__.py diff --git a/airflow/providers/jdbc/hooks/jdbc.py b/providers/src/airflow/providers/jdbc/hooks/jdbc.py 
similarity index 100% rename from airflow/providers/jdbc/hooks/jdbc.py rename to providers/src/airflow/providers/jdbc/hooks/jdbc.py diff --git a/airflow/providers/jdbc/hooks/__init__.py b/providers/src/airflow/providers/jdbc/operators/__init__.py similarity index 100% rename from airflow/providers/jdbc/hooks/__init__.py rename to providers/src/airflow/providers/jdbc/operators/__init__.py diff --git a/airflow/providers/jdbc/operators/jdbc.py b/providers/src/airflow/providers/jdbc/operators/jdbc.py similarity index 100% rename from airflow/providers/jdbc/operators/jdbc.py rename to providers/src/airflow/providers/jdbc/operators/jdbc.py diff --git a/airflow/providers/jdbc/provider.yaml b/providers/src/airflow/providers/jdbc/provider.yaml similarity index 100% rename from airflow/providers/jdbc/provider.yaml rename to providers/src/airflow/providers/jdbc/provider.yaml diff --git a/airflow/providers/jenkins/.latest-doc-only-change.txt b/providers/src/airflow/providers/jenkins/.latest-doc-only-change.txt similarity index 100% rename from airflow/providers/jenkins/.latest-doc-only-change.txt rename to providers/src/airflow/providers/jenkins/.latest-doc-only-change.txt diff --git a/airflow/providers/jenkins/CHANGELOG.rst b/providers/src/airflow/providers/jenkins/CHANGELOG.rst similarity index 100% rename from airflow/providers/jenkins/CHANGELOG.rst rename to providers/src/airflow/providers/jenkins/CHANGELOG.rst diff --git a/airflow/providers/jenkins/__init__.py b/providers/src/airflow/providers/jenkins/__init__.py similarity index 100% rename from airflow/providers/jenkins/__init__.py rename to providers/src/airflow/providers/jenkins/__init__.py diff --git a/airflow/providers/jdbc/operators/__init__.py b/providers/src/airflow/providers/jenkins/hooks/__init__.py similarity index 100% rename from airflow/providers/jdbc/operators/__init__.py rename to providers/src/airflow/providers/jenkins/hooks/__init__.py diff --git a/airflow/providers/jenkins/hooks/jenkins.py b/providers/src/airflow/providers/jenkins/hooks/jenkins.py similarity index 100% rename from airflow/providers/jenkins/hooks/jenkins.py rename to providers/src/airflow/providers/jenkins/hooks/jenkins.py diff --git a/airflow/providers/jenkins/hooks/__init__.py b/providers/src/airflow/providers/jenkins/operators/__init__.py similarity index 100% rename from airflow/providers/jenkins/hooks/__init__.py rename to providers/src/airflow/providers/jenkins/operators/__init__.py diff --git a/airflow/providers/jenkins/operators/jenkins_job_trigger.py b/providers/src/airflow/providers/jenkins/operators/jenkins_job_trigger.py similarity index 100% rename from airflow/providers/jenkins/operators/jenkins_job_trigger.py rename to providers/src/airflow/providers/jenkins/operators/jenkins_job_trigger.py diff --git a/airflow/providers/jenkins/provider.yaml b/providers/src/airflow/providers/jenkins/provider.yaml similarity index 100% rename from airflow/providers/jenkins/provider.yaml rename to providers/src/airflow/providers/jenkins/provider.yaml diff --git a/airflow/providers/jenkins/operators/__init__.py b/providers/src/airflow/providers/jenkins/sensors/__init__.py similarity index 100% rename from airflow/providers/jenkins/operators/__init__.py rename to providers/src/airflow/providers/jenkins/sensors/__init__.py diff --git a/airflow/providers/jenkins/sensors/jenkins.py b/providers/src/airflow/providers/jenkins/sensors/jenkins.py similarity index 100% rename from airflow/providers/jenkins/sensors/jenkins.py rename to 
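Note on the rename records above: every `rename to` target keeps the full `airflow/providers/...` package path, just re-rooted under `providers/src/`, so the importable module paths should be unchanged (assuming the workspace packaging registers `providers/src` as a source root, which is configured outside the span shown here). The occasional cross-provider pairs (e.g. `airflow/providers/ftp/sensors/__init__.py` to `providers/src/airflow/providers/google/cloud/utils/__init__.py`) are an artifact of git's content-based rename detection matching byte-identical `__init__.py` stubs at 100% similarity, not actual moves of code between providers. A minimal sketch of the invariant, under the source-root assumption above:

    # Illustrative only (not part of this diff): the on-disk move does not
    # change the import path, because the trailing airflow/providers/...
    # package path is preserved under the new providers/src root.
    from airflow.providers.http.hooks.http import HttpHook

    # Standard HttpHook construction; the connection id is assumed to exist.
    hook = HttpHook(method="GET", http_conn_id="http_default")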
diff --git a/airflow/providers/microsoft/azure/.latest-doc-only-change.txt b/providers/src/airflow/providers/microsoft/azure/.latest-doc-only-change.txt
similarity index 100%
rename from airflow/providers/microsoft/azure/.latest-doc-only-change.txt
rename to providers/src/airflow/providers/microsoft/azure/.latest-doc-only-change.txt
diff --git a/airflow/providers/microsoft/azure/CHANGELOG.rst b/providers/src/airflow/providers/microsoft/azure/CHANGELOG.rst
similarity index 100%
rename from airflow/providers/microsoft/azure/CHANGELOG.rst
rename to providers/src/airflow/providers/microsoft/azure/CHANGELOG.rst
diff --git a/airflow/providers/microsoft/azure/__init__.py b/providers/src/airflow/providers/microsoft/azure/__init__.py
similarity index 100%
rename from airflow/providers/microsoft/azure/__init__.py
rename to providers/src/airflow/providers/microsoft/azure/__init__.py
diff --git a/airflow/providers/microsoft/azure/fs/__init__.py b/providers/src/airflow/providers/microsoft/azure/fs/__init__.py
similarity index 100%
rename from airflow/providers/microsoft/azure/fs/__init__.py
rename to providers/src/airflow/providers/microsoft/azure/fs/__init__.py
diff --git a/airflow/providers/microsoft/azure/fs/adls.py b/providers/src/airflow/providers/microsoft/azure/fs/adls.py
similarity index 100%
rename from airflow/providers/microsoft/azure/fs/adls.py
rename to providers/src/airflow/providers/microsoft/azure/fs/adls.py
diff --git a/airflow/providers/jenkins/sensors/__init__.py b/providers/src/airflow/providers/microsoft/azure/hooks/__init__.py
similarity index 100%
rename from airflow/providers/jenkins/sensors/__init__.py
rename to providers/src/airflow/providers/microsoft/azure/hooks/__init__.py
diff --git a/airflow/providers/microsoft/azure/hooks/adx.py b/providers/src/airflow/providers/microsoft/azure/hooks/adx.py
similarity index 100%
rename from airflow/providers/microsoft/azure/hooks/adx.py
rename to providers/src/airflow/providers/microsoft/azure/hooks/adx.py
diff --git a/airflow/providers/microsoft/azure/hooks/asb.py b/providers/src/airflow/providers/microsoft/azure/hooks/asb.py
similarity index 96%
rename from airflow/providers/microsoft/azure/hooks/asb.py
rename to providers/src/airflow/providers/microsoft/azure/hooks/asb.py
index c90833f52fea..317447d11170 100644
--- a/airflow/providers/microsoft/azure/hooks/asb.py
+++ b/providers/src/airflow/providers/microsoft/azure/hooks/asb.py
@@ -251,9 +251,11 @@ def send_message(self, queue_name: str, messages: str | list[str], batch_message
             raise TypeError("Queue name cannot be None.")
         if not messages:
             raise ValueError("Messages list cannot be empty.")
-        with self.get_conn() as service_bus_client, service_bus_client.get_queue_sender(
-            queue_name=queue_name
-        ) as sender, sender:
+        with (
+            self.get_conn() as service_bus_client,
+            service_bus_client.get_queue_sender(queue_name=queue_name) as sender,
+            sender,
+        ):
             if isinstance(messages, str):
                 if not batch_message_flag:
                     msg = ServiceBusMessage(messages)
@@ -298,9 +300,11 @@ def receive_message(
         if queue_name is None:
             raise TypeError("Queue name cannot be None.")
 
-        with self.get_conn() as service_bus_client, service_bus_client.get_queue_receiver(
-            queue_name=queue_name
-        ) as receiver, receiver:
+        with (
+            self.get_conn() as service_bus_client,
+            service_bus_client.get_queue_receiver(queue_name=queue_name) as receiver,
+            receiver,
+        ):
             received_msgs = receiver.receive_messages(
                 max_message_count=max_message_count, max_wait_time=max_wait_time
             )
@@ -335,9 +339,13 @@ def receive_subscription_message(
             raise TypeError("Subscription name cannot be None.")
         if topic_name is None:
             raise TypeError("Topic name cannot be None.")
-        with self.get_conn() as service_bus_client, service_bus_client.get_subscription_receiver(
-            topic_name, subscription_name
-        ) as subscription_receiver, subscription_receiver:
+        with (
+            self.get_conn() as service_bus_client,
+            service_bus_client.get_subscription_receiver(
+                topic_name, subscription_name
+            ) as subscription_receiver,
+            subscription_receiver,
+        ):
             received_msgs = subscription_receiver.receive_messages(
                 max_message_count=max_message_count, max_wait_time=max_wait_time
             )
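The asb.py hunks above are the only code change in this stretch of the move: the old tuple-style `with a, b, c:` headers, wrapped mid-call to fit the line limit, are rewritten using parenthesized context managers. The refactor is behavior-preserving, down to keeping the pre-existing bare `sender`/`receiver`/`subscription_receiver` third item that re-enters the already-entered object. A minimal standalone sketch of the two equivalent spellings (plain Python, not Airflow code; the parenthesized form is part of the grammar from Python 3.10):

    from contextlib import contextmanager

    @contextmanager
    def managed(name: str):
        # Toy context manager standing in for the Service Bus client/sender.
        print(f"enter {name}")
        try:
            yield name
        finally:
            print(f"exit {name}")

    # Before: all context managers share one logical line, wrapped wherever
    # the call happens to get too long.
    with managed("client") as client, managed("sender") as sender:
        print(client, sender)

    # After: parentheses allow one context manager per line plus a trailing
    # comma, which is the form the hunks above switch to.
    with (
        managed("client") as client,
        managed("sender") as sender,
    ):
        print(client, sender)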
similarity index 100%
rename from airflow/providers/microsoft/azure/hooks/msgraph.py
rename to providers/src/airflow/providers/microsoft/azure/hooks/msgraph.py
diff --git a/airflow/providers/microsoft/azure/hooks/powerbi.py b/providers/src/airflow/providers/microsoft/azure/hooks/powerbi.py
similarity index 100%
rename from airflow/providers/microsoft/azure/hooks/powerbi.py
rename to providers/src/airflow/providers/microsoft/azure/hooks/powerbi.py
diff --git a/airflow/providers/microsoft/azure/hooks/synapse.py b/providers/src/airflow/providers/microsoft/azure/hooks/synapse.py
similarity index 100%
rename from airflow/providers/microsoft/azure/hooks/synapse.py
rename to providers/src/airflow/providers/microsoft/azure/hooks/synapse.py
diff --git a/airflow/providers/microsoft/azure/hooks/wasb.py b/providers/src/airflow/providers/microsoft/azure/hooks/wasb.py
similarity index 100%
rename from airflow/providers/microsoft/azure/hooks/wasb.py
rename to providers/src/airflow/providers/microsoft/azure/hooks/wasb.py
diff --git a/airflow/providers/microsoft/azure/log/__init__.py b/providers/src/airflow/providers/microsoft/azure/log/__init__.py
similarity index 100%
rename from airflow/providers/microsoft/azure/log/__init__.py
rename to providers/src/airflow/providers/microsoft/azure/log/__init__.py
diff --git a/airflow/providers/microsoft/azure/log/wasb_task_handler.py b/providers/src/airflow/providers/microsoft/azure/log/wasb_task_handler.py
similarity index 100%
rename from airflow/providers/microsoft/azure/log/wasb_task_handler.py
rename to providers/src/airflow/providers/microsoft/azure/log/wasb_task_handler.py
diff --git a/airflow/providers/microsoft/__init__.py b/providers/src/airflow/providers/microsoft/azure/operators/__init__.py
similarity index 100%
rename from airflow/providers/microsoft/__init__.py
rename to providers/src/airflow/providers/microsoft/azure/operators/__init__.py
diff --git a/airflow/providers/microsoft/azure/operators/adls.py b/providers/src/airflow/providers/microsoft/azure/operators/adls.py
similarity index 100%
rename from airflow/providers/microsoft/azure/operators/adls.py
rename to providers/src/airflow/providers/microsoft/azure/operators/adls.py
diff --git a/airflow/providers/microsoft/azure/operators/adx.py b/providers/src/airflow/providers/microsoft/azure/operators/adx.py
similarity index 100%
rename from airflow/providers/microsoft/azure/operators/adx.py
rename to providers/src/airflow/providers/microsoft/azure/operators/adx.py
diff --git a/airflow/providers/microsoft/azure/operators/asb.py b/providers/src/airflow/providers/microsoft/azure/operators/asb.py
similarity index 100%
rename from airflow/providers/microsoft/azure/operators/asb.py
rename to providers/src/airflow/providers/microsoft/azure/operators/asb.py
diff --git a/airflow/providers/microsoft/azure/operators/batch.py b/providers/src/airflow/providers/microsoft/azure/operators/batch.py
similarity index 100%
rename from airflow/providers/microsoft/azure/operators/batch.py
rename to providers/src/airflow/providers/microsoft/azure/operators/batch.py
diff --git a/airflow/providers/microsoft/azure/operators/container_instances.py b/providers/src/airflow/providers/microsoft/azure/operators/container_instances.py
similarity index 100%
rename from airflow/providers/microsoft/azure/operators/container_instances.py
rename to providers/src/airflow/providers/microsoft/azure/operators/container_instances.py
diff --git a/airflow/providers/microsoft/azure/operators/cosmos.py b/providers/src/airflow/providers/microsoft/azure/operators/cosmos.py
similarity index 100%
rename from airflow/providers/microsoft/azure/operators/cosmos.py
rename to providers/src/airflow/providers/microsoft/azure/operators/cosmos.py
diff --git a/airflow/providers/microsoft/azure/operators/data_factory.py b/providers/src/airflow/providers/microsoft/azure/operators/data_factory.py
similarity index 100%
rename from airflow/providers/microsoft/azure/operators/data_factory.py
rename to providers/src/airflow/providers/microsoft/azure/operators/data_factory.py
diff --git a/airflow/providers/microsoft/azure/operators/msgraph.py b/providers/src/airflow/providers/microsoft/azure/operators/msgraph.py
similarity index 100%
rename from airflow/providers/microsoft/azure/operators/msgraph.py
rename to providers/src/airflow/providers/microsoft/azure/operators/msgraph.py
diff --git a/airflow/providers/microsoft/azure/operators/powerbi.py b/providers/src/airflow/providers/microsoft/azure/operators/powerbi.py
similarity index 100%
rename from airflow/providers/microsoft/azure/operators/powerbi.py
rename to providers/src/airflow/providers/microsoft/azure/operators/powerbi.py
diff --git a/airflow/providers/microsoft/azure/operators/synapse.py b/providers/src/airflow/providers/microsoft/azure/operators/synapse.py
similarity index 100%
rename from airflow/providers/microsoft/azure/operators/synapse.py
rename to providers/src/airflow/providers/microsoft/azure/operators/synapse.py
diff --git a/airflow/providers/microsoft/azure/operators/wasb_delete_blob.py b/providers/src/airflow/providers/microsoft/azure/operators/wasb_delete_blob.py
similarity index 100%
rename from airflow/providers/microsoft/azure/operators/wasb_delete_blob.py
rename to providers/src/airflow/providers/microsoft/azure/operators/wasb_delete_blob.py
diff --git a/airflow/providers/microsoft/azure/provider.yaml b/providers/src/airflow/providers/microsoft/azure/provider.yaml
similarity index 100%
rename from airflow/providers/microsoft/azure/provider.yaml
rename to providers/src/airflow/providers/microsoft/azure/provider.yaml
diff --git a/airflow/providers/microsoft/azure/secrets/__init__.py b/providers/src/airflow/providers/microsoft/azure/secrets/__init__.py
similarity index 100%
rename from airflow/providers/microsoft/azure/secrets/__init__.py
rename to providers/src/airflow/providers/microsoft/azure/secrets/__init__.py
diff --git a/airflow/providers/microsoft/azure/secrets/key_vault.py b/providers/src/airflow/providers/microsoft/azure/secrets/key_vault.py
similarity index 100%
rename from airflow/providers/microsoft/azure/secrets/key_vault.py
rename to providers/src/airflow/providers/microsoft/azure/secrets/key_vault.py
diff --git a/airflow/providers/microsoft/azure/hooks/__init__.py b/providers/src/airflow/providers/microsoft/azure/sensors/__init__.py
similarity index 100%
rename from airflow/providers/microsoft/azure/hooks/__init__.py
rename to providers/src/airflow/providers/microsoft/azure/sensors/__init__.py
diff --git a/airflow/providers/microsoft/azure/sensors/cosmos.py b/providers/src/airflow/providers/microsoft/azure/sensors/cosmos.py
similarity index 100%
rename from airflow/providers/microsoft/azure/sensors/cosmos.py
rename to providers/src/airflow/providers/microsoft/azure/sensors/cosmos.py
diff --git a/airflow/providers/microsoft/azure/sensors/data_factory.py b/providers/src/airflow/providers/microsoft/azure/sensors/data_factory.py
similarity index 100%
rename from airflow/providers/microsoft/azure/sensors/data_factory.py
rename to providers/src/airflow/providers/microsoft/azure/sensors/data_factory.py
diff --git a/airflow/providers/microsoft/azure/sensors/msgraph.py b/providers/src/airflow/providers/microsoft/azure/sensors/msgraph.py
similarity index 100%
rename from airflow/providers/microsoft/azure/sensors/msgraph.py
rename to providers/src/airflow/providers/microsoft/azure/sensors/msgraph.py
diff --git a/airflow/providers/microsoft/azure/sensors/wasb.py b/providers/src/airflow/providers/microsoft/azure/sensors/wasb.py
similarity index 100%
rename from airflow/providers/microsoft/azure/sensors/wasb.py
rename to providers/src/airflow/providers/microsoft/azure/sensors/wasb.py
diff --git a/airflow/providers/microsoft/azure/transfers/__init__.py b/providers/src/airflow/providers/microsoft/azure/transfers/__init__.py
similarity index 100%
rename from airflow/providers/microsoft/azure/transfers/__init__.py
rename to providers/src/airflow/providers/microsoft/azure/transfers/__init__.py
diff --git a/airflow/providers/microsoft/azure/transfers/azure_blob_to_gcs.py b/providers/src/airflow/providers/microsoft/azure/transfers/azure_blob_to_gcs.py
similarity index 100%
rename from airflow/providers/microsoft/azure/transfers/azure_blob_to_gcs.py
rename to providers/src/airflow/providers/microsoft/azure/transfers/azure_blob_to_gcs.py
diff --git a/airflow/providers/microsoft/azure/transfers/local_to_adls.py b/providers/src/airflow/providers/microsoft/azure/transfers/local_to_adls.py
similarity index 100%
rename from airflow/providers/microsoft/azure/transfers/local_to_adls.py
rename to providers/src/airflow/providers/microsoft/azure/transfers/local_to_adls.py
diff --git a/airflow/providers/microsoft/azure/transfers/local_to_wasb.py b/providers/src/airflow/providers/microsoft/azure/transfers/local_to_wasb.py
similarity index 100%
rename from airflow/providers/microsoft/azure/transfers/local_to_wasb.py
rename to providers/src/airflow/providers/microsoft/azure/transfers/local_to_wasb.py
diff --git a/airflow/providers/microsoft/azure/transfers/oracle_to_azure_data_lake.py b/providers/src/airflow/providers/microsoft/azure/transfers/oracle_to_azure_data_lake.py
similarity index 100%
rename from airflow/providers/microsoft/azure/transfers/oracle_to_azure_data_lake.py
rename to providers/src/airflow/providers/microsoft/azure/transfers/oracle_to_azure_data_lake.py
diff --git a/airflow/providers/microsoft/azure/transfers/s3_to_wasb.py b/providers/src/airflow/providers/microsoft/azure/transfers/s3_to_wasb.py
similarity index 100%
rename from airflow/providers/microsoft/azure/transfers/s3_to_wasb.py
rename to providers/src/airflow/providers/microsoft/azure/transfers/s3_to_wasb.py
diff --git a/airflow/providers/microsoft/azure/transfers/sftp_to_wasb.py b/providers/src/airflow/providers/microsoft/azure/transfers/sftp_to_wasb.py
similarity index 100%
rename from airflow/providers/microsoft/azure/transfers/sftp_to_wasb.py
rename to providers/src/airflow/providers/microsoft/azure/transfers/sftp_to_wasb.py
diff --git a/airflow/providers/microsoft/azure/triggers/__init__.py b/providers/src/airflow/providers/microsoft/azure/triggers/__init__.py
similarity index 100%
rename from airflow/providers/microsoft/azure/triggers/__init__.py
rename to providers/src/airflow/providers/microsoft/azure/triggers/__init__.py
diff --git a/airflow/providers/microsoft/azure/triggers/data_factory.py b/providers/src/airflow/providers/microsoft/azure/triggers/data_factory.py
similarity index 100%
rename from airflow/providers/microsoft/azure/triggers/data_factory.py
rename to providers/src/airflow/providers/microsoft/azure/triggers/data_factory.py
diff --git a/airflow/providers/microsoft/azure/triggers/msgraph.py b/providers/src/airflow/providers/microsoft/azure/triggers/msgraph.py
similarity index 100%
rename from airflow/providers/microsoft/azure/triggers/msgraph.py
rename to providers/src/airflow/providers/microsoft/azure/triggers/msgraph.py
diff --git a/airflow/providers/microsoft/azure/triggers/powerbi.py b/providers/src/airflow/providers/microsoft/azure/triggers/powerbi.py
similarity index 100%
rename from airflow/providers/microsoft/azure/triggers/powerbi.py
rename to providers/src/airflow/providers/microsoft/azure/triggers/powerbi.py
diff --git a/airflow/providers/microsoft/azure/triggers/wasb.py b/providers/src/airflow/providers/microsoft/azure/triggers/wasb.py
similarity index 100%
rename from airflow/providers/microsoft/azure/triggers/wasb.py
rename to providers/src/airflow/providers/microsoft/azure/triggers/wasb.py
diff --git a/airflow/providers/microsoft/azure/utils.py b/providers/src/airflow/providers/microsoft/azure/utils.py
similarity index 100%
rename from airflow/providers/microsoft/azure/utils.py
rename to providers/src/airflow/providers/microsoft/azure/utils.py
diff --git a/airflow/providers/microsoft/mssql/.latest-doc-only-change.txt b/providers/src/airflow/providers/microsoft/mssql/.latest-doc-only-change.txt
similarity index 100%
rename from airflow/providers/microsoft/mssql/.latest-doc-only-change.txt
rename to providers/src/airflow/providers/microsoft/mssql/.latest-doc-only-change.txt
diff --git a/airflow/providers/microsoft/mssql/CHANGELOG.rst b/providers/src/airflow/providers/microsoft/mssql/CHANGELOG.rst
similarity index 100%
rename from airflow/providers/microsoft/mssql/CHANGELOG.rst
rename to providers/src/airflow/providers/microsoft/mssql/CHANGELOG.rst
diff --git a/airflow/providers/microsoft/mssql/__init__.py b/providers/src/airflow/providers/microsoft/mssql/__init__.py
similarity index 100%
rename from airflow/providers/microsoft/mssql/__init__.py
rename to providers/src/airflow/providers/microsoft/mssql/__init__.py
diff --git a/airflow/providers/microsoft/azure/operators/__init__.py b/providers/src/airflow/providers/microsoft/mssql/hooks/__init__.py
similarity index 100%
rename from airflow/providers/microsoft/azure/operators/__init__.py
rename to providers/src/airflow/providers/microsoft/mssql/hooks/__init__.py
diff --git a/airflow/providers/microsoft/mssql/hooks/mssql.py b/providers/src/airflow/providers/microsoft/mssql/hooks/mssql.py
similarity index 100%
rename from airflow/providers/microsoft/mssql/hooks/mssql.py
rename to providers/src/airflow/providers/microsoft/mssql/hooks/mssql.py
diff --git a/airflow/providers/microsoft/azure/sensors/__init__.py b/providers/src/airflow/providers/microsoft/mssql/operators/__init__.py
similarity index 100%
rename from airflow/providers/microsoft/azure/sensors/__init__.py
rename to providers/src/airflow/providers/microsoft/mssql/operators/__init__.py
diff --git a/airflow/providers/microsoft/mssql/operators/mssql.py b/providers/src/airflow/providers/microsoft/mssql/operators/mssql.py
similarity index 100%
rename from airflow/providers/microsoft/mssql/operators/mssql.py
rename to providers/src/airflow/providers/microsoft/mssql/operators/mssql.py
diff --git a/airflow/providers/microsoft/mssql/provider.yaml b/providers/src/airflow/providers/microsoft/mssql/provider.yaml
similarity index 100%
rename from airflow/providers/microsoft/mssql/provider.yaml
rename to providers/src/airflow/providers/microsoft/mssql/provider.yaml
diff --git a/airflow/providers/microsoft/psrp/.latest-doc-only-change.txt b/providers/src/airflow/providers/microsoft/psrp/.latest-doc-only-change.txt
similarity index 100%
rename from airflow/providers/microsoft/psrp/.latest-doc-only-change.txt
rename to providers/src/airflow/providers/microsoft/psrp/.latest-doc-only-change.txt
diff --git a/airflow/providers/microsoft/psrp/CHANGELOG.rst b/providers/src/airflow/providers/microsoft/psrp/CHANGELOG.rst
similarity index 100%
rename from airflow/providers/microsoft/psrp/CHANGELOG.rst
rename to providers/src/airflow/providers/microsoft/psrp/CHANGELOG.rst
diff --git a/airflow/providers/microsoft/psrp/__init__.py b/providers/src/airflow/providers/microsoft/psrp/__init__.py
similarity index 100%
rename from airflow/providers/microsoft/psrp/__init__.py
rename to providers/src/airflow/providers/microsoft/psrp/__init__.py
diff --git a/airflow/providers/microsoft/mssql/hooks/__init__.py b/providers/src/airflow/providers/microsoft/psrp/hooks/__init__.py
similarity index 100%
rename from airflow/providers/microsoft/mssql/hooks/__init__.py
rename to providers/src/airflow/providers/microsoft/psrp/hooks/__init__.py
diff --git a/airflow/providers/microsoft/psrp/hooks/psrp.py b/providers/src/airflow/providers/microsoft/psrp/hooks/psrp.py
similarity index 100%
rename from airflow/providers/microsoft/psrp/hooks/psrp.py
rename to providers/src/airflow/providers/microsoft/psrp/hooks/psrp.py
diff --git a/airflow/providers/microsoft/mssql/operators/__init__.py b/providers/src/airflow/providers/microsoft/psrp/operators/__init__.py
similarity index 100%
rename from airflow/providers/microsoft/mssql/operators/__init__.py
rename to providers/src/airflow/providers/microsoft/psrp/operators/__init__.py
diff --git a/airflow/providers/microsoft/psrp/operators/psrp.py b/providers/src/airflow/providers/microsoft/psrp/operators/psrp.py
similarity index 93%
rename from airflow/providers/microsoft/psrp/operators/psrp.py
rename to providers/src/airflow/providers/microsoft/psrp/operators/psrp.py
index dd7da36a9e15..6927159a44ea 100644
--- a/airflow/providers/microsoft/psrp/operators/psrp.py
+++ b/providers/src/airflow/providers/microsoft/psrp/operators/psrp.py
@@ -17,8 +17,9 @@
 # under the License.
 from __future__ import annotations
 
+from collections.abc import Sequence
 from logging import DEBUG
-from typing import TYPE_CHECKING, Any, Sequence
+from typing import TYPE_CHECKING, Any
 
 from jinja2.nativetypes import NativeEnvironment
 from pypsrp.serializer import TaggedValue
@@ -128,13 +129,16 @@ def __init__(
         self.psrp_session_init = psrp_session_init
 
     def execute(self, context: Context) -> list[Any] | None:
-        with PsrpHook(
-            self.conn_id,
-            logging_level=self.logging_level,
-            runspace_options=self.runspace_options,
-            wsman_options=self.wsman_options,
-            on_output_callback=self.log.info if not self.do_xcom_push else None,
-        ) as hook, hook.invoke() as ps:
+        with (
+            PsrpHook(
+                self.conn_id,
+                logging_level=self.logging_level,
+                runspace_options=self.runspace_options,
+                wsman_options=self.wsman_options,
+                on_output_callback=self.log.info if not self.do_xcom_push else None,
+            ) as hook,
+            hook.invoke() as ps,
+        ):
             if self.psrp_session_init is not None:
                 ps.add_command(self.psrp_session_init)
             if self.command:
diff --git a/airflow/providers/microsoft/psrp/provider.yaml b/providers/src/airflow/providers/microsoft/psrp/provider.yaml
similarity index 100%
rename from airflow/providers/microsoft/psrp/provider.yaml
rename to providers/src/airflow/providers/microsoft/psrp/provider.yaml
diff --git a/airflow/providers/microsoft/winrm/.latest-doc-only-change.txt b/providers/src/airflow/providers/microsoft/winrm/.latest-doc-only-change.txt
similarity index 100%
rename from airflow/providers/microsoft/winrm/.latest-doc-only-change.txt
rename to providers/src/airflow/providers/microsoft/winrm/.latest-doc-only-change.txt
diff --git a/airflow/providers/microsoft/winrm/CHANGELOG.rst b/providers/src/airflow/providers/microsoft/winrm/CHANGELOG.rst
similarity index 100%
rename from airflow/providers/microsoft/winrm/CHANGELOG.rst
rename to providers/src/airflow/providers/microsoft/winrm/CHANGELOG.rst
diff --git a/airflow/providers/microsoft/winrm/__init__.py b/providers/src/airflow/providers/microsoft/winrm/__init__.py
similarity index 100%
rename from airflow/providers/microsoft/winrm/__init__.py
rename to providers/src/airflow/providers/microsoft/winrm/__init__.py
diff --git a/airflow/providers/microsoft/psrp/hooks/__init__.py b/providers/src/airflow/providers/microsoft/winrm/hooks/__init__.py
similarity index 100%
rename from airflow/providers/microsoft/psrp/hooks/__init__.py
rename to providers/src/airflow/providers/microsoft/winrm/hooks/__init__.py
diff --git a/airflow/providers/microsoft/winrm/hooks/winrm.py b/providers/src/airflow/providers/microsoft/winrm/hooks/winrm.py
similarity index 100%
rename from airflow/providers/microsoft/winrm/hooks/winrm.py
rename to providers/src/airflow/providers/microsoft/winrm/hooks/winrm.py
diff --git a/airflow/providers/microsoft/psrp/operators/__init__.py b/providers/src/airflow/providers/microsoft/winrm/operators/__init__.py
similarity index 100%
rename from airflow/providers/microsoft/psrp/operators/__init__.py
rename to providers/src/airflow/providers/microsoft/winrm/operators/__init__.py
diff --git a/airflow/providers/microsoft/winrm/operators/winrm.py b/providers/src/airflow/providers/microsoft/winrm/operators/winrm.py
similarity index 100%
rename from airflow/providers/microsoft/winrm/operators/winrm.py
rename to providers/src/airflow/providers/microsoft/winrm/operators/winrm.py
diff --git a/airflow/providers/microsoft/winrm/provider.yaml b/providers/src/airflow/providers/microsoft/winrm/provider.yaml
similarity index 100%
rename from airflow/providers/microsoft/winrm/provider.yaml
rename to providers/src/airflow/providers/microsoft/winrm/provider.yaml
diff --git a/airflow/providers/mongo/.latest-doc-only-change.txt b/providers/src/airflow/providers/mongo/.latest-doc-only-change.txt
similarity index 100%
rename from airflow/providers/mongo/.latest-doc-only-change.txt
rename to providers/src/airflow/providers/mongo/.latest-doc-only-change.txt
diff --git a/airflow/providers/mongo/CHANGELOG.rst b/providers/src/airflow/providers/mongo/CHANGELOG.rst
similarity index 100%
rename from airflow/providers/mongo/CHANGELOG.rst
rename to providers/src/airflow/providers/mongo/CHANGELOG.rst
diff --git a/airflow/providers/mongo/__init__.py b/providers/src/airflow/providers/mongo/__init__.py
similarity index 100%
rename from airflow/providers/mongo/__init__.py
rename to providers/src/airflow/providers/mongo/__init__.py
diff --git a/airflow/providers/microsoft/winrm/hooks/__init__.py b/providers/src/airflow/providers/mongo/hooks/__init__.py
similarity index 100%
rename from airflow/providers/microsoft/winrm/hooks/__init__.py
rename to providers/src/airflow/providers/mongo/hooks/__init__.py
diff --git a/airflow/providers/mongo/hooks/mongo.py b/providers/src/airflow/providers/mongo/hooks/mongo.py
similarity index 100%
rename from airflow/providers/mongo/hooks/mongo.py
rename to providers/src/airflow/providers/mongo/hooks/mongo.py
diff --git a/airflow/providers/mongo/provider.yaml b/providers/src/airflow/providers/mongo/provider.yaml
similarity index 100%
rename from airflow/providers/mongo/provider.yaml
rename to providers/src/airflow/providers/mongo/provider.yaml
diff --git a/airflow/providers/microsoft/winrm/operators/__init__.py b/providers/src/airflow/providers/mongo/sensors/__init__.py
similarity index 100%
rename from airflow/providers/microsoft/winrm/operators/__init__.py
rename to providers/src/airflow/providers/mongo/sensors/__init__.py
diff --git a/airflow/providers/mongo/sensors/mongo.py b/providers/src/airflow/providers/mongo/sensors/mongo.py
similarity index 100%
rename from airflow/providers/mongo/sensors/mongo.py
rename to providers/src/airflow/providers/mongo/sensors/mongo.py
diff --git a/airflow/providers/mysql/.latest-doc-only-change.txt b/providers/src/airflow/providers/mysql/.latest-doc-only-change.txt
similarity index 100%
rename from airflow/providers/mysql/.latest-doc-only-change.txt
rename to providers/src/airflow/providers/mysql/.latest-doc-only-change.txt
diff --git a/airflow/providers/mysql/CHANGELOG.rst b/providers/src/airflow/providers/mysql/CHANGELOG.rst
similarity index 100%
rename from airflow/providers/mysql/CHANGELOG.rst
rename to providers/src/airflow/providers/mysql/CHANGELOG.rst
diff --git a/airflow/providers/mysql/__init__.py b/providers/src/airflow/providers/mysql/__init__.py
similarity index 100%
rename from airflow/providers/mysql/__init__.py
rename to providers/src/airflow/providers/mysql/__init__.py
diff --git a/airflow/providers/mysql/assets/__init__.py b/providers/src/airflow/providers/mysql/assets/__init__.py
similarity index 100%
rename from airflow/providers/mysql/assets/__init__.py
rename to providers/src/airflow/providers/mysql/assets/__init__.py
diff --git a/airflow/providers/mysql/assets/mysql.py b/providers/src/airflow/providers/mysql/assets/mysql.py
similarity index 100%
rename from airflow/providers/mysql/assets/mysql.py
rename to providers/src/airflow/providers/mysql/assets/mysql.py
diff --git a/airflow/providers/mongo/hooks/__init__.py b/providers/src/airflow/providers/mysql/hooks/__init__.py
similarity index 100%
rename from airflow/providers/mongo/hooks/__init__.py
rename to providers/src/airflow/providers/mysql/hooks/__init__.py
diff --git a/airflow/providers/mysql/hooks/mysql.py b/providers/src/airflow/providers/mysql/hooks/mysql.py
similarity index 100%
rename from airflow/providers/mysql/hooks/mysql.py
rename to providers/src/airflow/providers/mysql/hooks/mysql.py
diff --git a/airflow/providers/mongo/sensors/__init__.py b/providers/src/airflow/providers/mysql/operators/__init__.py
similarity index 100%
rename from airflow/providers/mongo/sensors/__init__.py
rename to providers/src/airflow/providers/mysql/operators/__init__.py
diff --git a/airflow/providers/mysql/operators/mysql.py b/providers/src/airflow/providers/mysql/operators/mysql.py
similarity index 100%
rename from airflow/providers/mysql/operators/mysql.py
rename to providers/src/airflow/providers/mysql/operators/mysql.py
diff --git a/airflow/providers/mysql/provider.yaml b/providers/src/airflow/providers/mysql/provider.yaml
similarity index 100%
rename from airflow/providers/mysql/provider.yaml
rename to providers/src/airflow/providers/mysql/provider.yaml
diff --git a/airflow/providers/mysql/transfers/__init__.py b/providers/src/airflow/providers/mysql/transfers/__init__.py
similarity index 100%
rename from airflow/providers/mysql/transfers/__init__.py
rename to providers/src/airflow/providers/mysql/transfers/__init__.py
diff --git a/airflow/providers/mysql/transfers/presto_to_mysql.py b/providers/src/airflow/providers/mysql/transfers/presto_to_mysql.py
similarity index 100%
rename from airflow/providers/mysql/transfers/presto_to_mysql.py
rename to providers/src/airflow/providers/mysql/transfers/presto_to_mysql.py
diff --git a/airflow/providers/mysql/transfers/s3_to_mysql.py b/providers/src/airflow/providers/mysql/transfers/s3_to_mysql.py
similarity index 100%
rename from airflow/providers/mysql/transfers/s3_to_mysql.py
rename to providers/src/airflow/providers/mysql/transfers/s3_to_mysql.py
diff --git a/airflow/providers/mysql/transfers/trino_to_mysql.py b/providers/src/airflow/providers/mysql/transfers/trino_to_mysql.py
similarity index 100%
rename from airflow/providers/mysql/transfers/trino_to_mysql.py
rename to providers/src/airflow/providers/mysql/transfers/trino_to_mysql.py
diff --git a/airflow/providers/mysql/transfers/vertica_to_mysql.py b/providers/src/airflow/providers/mysql/transfers/vertica_to_mysql.py
similarity index 100%
rename from airflow/providers/mysql/transfers/vertica_to_mysql.py
rename to providers/src/airflow/providers/mysql/transfers/vertica_to_mysql.py
diff --git a/airflow/providers/neo4j/.latest-doc-only-change.txt b/providers/src/airflow/providers/neo4j/.latest-doc-only-change.txt
similarity index 100%
rename from airflow/providers/neo4j/.latest-doc-only-change.txt
rename to providers/src/airflow/providers/neo4j/.latest-doc-only-change.txt
diff --git a/airflow/providers/neo4j/CHANGELOG.rst b/providers/src/airflow/providers/neo4j/CHANGELOG.rst
similarity index 100%
rename from airflow/providers/neo4j/CHANGELOG.rst
rename to providers/src/airflow/providers/neo4j/CHANGELOG.rst
diff --git a/airflow/providers/neo4j/README.md b/providers/src/airflow/providers/neo4j/README.md
similarity index 100%
rename from airflow/providers/neo4j/README.md
rename to providers/src/airflow/providers/neo4j/README.md
diff --git a/airflow/providers/neo4j/__init__.py b/providers/src/airflow/providers/neo4j/__init__.py
similarity index 100%
rename from airflow/providers/neo4j/__init__.py
rename to providers/src/airflow/providers/neo4j/__init__.py
diff --git a/airflow/providers/mysql/hooks/__init__.py b/providers/src/airflow/providers/neo4j/hooks/__init__.py
similarity index 100%
rename from airflow/providers/mysql/hooks/__init__.py
rename to providers/src/airflow/providers/neo4j/hooks/__init__.py
diff --git a/airflow/providers/neo4j/hooks/neo4j.py b/providers/src/airflow/providers/neo4j/hooks/neo4j.py
similarity index 100%
rename from airflow/providers/neo4j/hooks/neo4j.py
rename to providers/src/airflow/providers/neo4j/hooks/neo4j.py
diff --git a/airflow/providers/mysql/operators/__init__.py b/providers/src/airflow/providers/neo4j/operators/__init__.py
similarity index 100%
rename from airflow/providers/mysql/operators/__init__.py
rename to providers/src/airflow/providers/neo4j/operators/__init__.py
diff --git a/airflow/providers/neo4j/operators/neo4j.py b/providers/src/airflow/providers/neo4j/operators/neo4j.py
similarity index 100%
rename from airflow/providers/neo4j/operators/neo4j.py
rename to providers/src/airflow/providers/neo4j/operators/neo4j.py
diff --git a/airflow/providers/neo4j/provider.yaml b/providers/src/airflow/providers/neo4j/provider.yaml
similarity index 100%
rename from airflow/providers/neo4j/provider.yaml
rename to providers/src/airflow/providers/neo4j/provider.yaml
diff --git a/airflow/providers/odbc/.latest-doc-only-change.txt b/providers/src/airflow/providers/odbc/.latest-doc-only-change.txt
similarity index 100%
rename from airflow/providers/odbc/.latest-doc-only-change.txt
rename to providers/src/airflow/providers/odbc/.latest-doc-only-change.txt
diff --git a/airflow/providers/odbc/CHANGELOG.rst b/providers/src/airflow/providers/odbc/CHANGELOG.rst
similarity index 100%
rename from airflow/providers/odbc/CHANGELOG.rst
rename to providers/src/airflow/providers/odbc/CHANGELOG.rst
diff --git a/airflow/providers/odbc/__init__.py b/providers/src/airflow/providers/odbc/__init__.py
similarity index 100%
rename from airflow/providers/odbc/__init__.py
rename to providers/src/airflow/providers/odbc/__init__.py
diff --git a/airflow/providers/odbc/hooks/__init__.py b/providers/src/airflow/providers/odbc/hooks/__init__.py
similarity index 100%
rename from airflow/providers/odbc/hooks/__init__.py
rename to providers/src/airflow/providers/odbc/hooks/__init__.py
diff --git a/airflow/providers/odbc/hooks/odbc.py b/providers/src/airflow/providers/odbc/hooks/odbc.py
similarity index 100%
rename from airflow/providers/odbc/hooks/odbc.py
rename to providers/src/airflow/providers/odbc/hooks/odbc.py
diff --git a/airflow/providers/odbc/provider.yaml b/providers/src/airflow/providers/odbc/provider.yaml
similarity index 100%
rename from airflow/providers/odbc/provider.yaml
rename to providers/src/airflow/providers/odbc/provider.yaml
diff --git a/airflow/providers/openai/.latest-doc-only-change.txt b/providers/src/airflow/providers/openai/.latest-doc-only-change.txt
similarity index 100%
rename from airflow/providers/openai/.latest-doc-only-change.txt
rename to providers/src/airflow/providers/openai/.latest-doc-only-change.txt
diff --git a/airflow/providers/openai/CHANGELOG.rst b/providers/src/airflow/providers/openai/CHANGELOG.rst
similarity index 100%
rename from airflow/providers/openai/CHANGELOG.rst
rename to providers/src/airflow/providers/openai/CHANGELOG.rst
diff --git a/airflow/providers/openai/__init__.py b/providers/src/airflow/providers/openai/__init__.py
similarity index 100%
rename from airflow/providers/openai/__init__.py
rename to providers/src/airflow/providers/openai/__init__.py
diff --git a/airflow/providers/openai/exceptions.py b/providers/src/airflow/providers/openai/exceptions.py
similarity index 100%
rename from airflow/providers/openai/exceptions.py
rename to providers/src/airflow/providers/openai/exceptions.py
diff --git a/airflow/providers/openai/hooks/__init__.py b/providers/src/airflow/providers/openai/hooks/__init__.py
similarity index 100%
rename from airflow/providers/openai/hooks/__init__.py
rename to providers/src/airflow/providers/openai/hooks/__init__.py
diff --git a/airflow/providers/openai/hooks/openai.py b/providers/src/airflow/providers/openai/hooks/openai.py
similarity index 100%
rename from airflow/providers/openai/hooks/openai.py
rename to providers/src/airflow/providers/openai/hooks/openai.py
diff --git a/airflow/providers/openai/operators/__init__.py b/providers/src/airflow/providers/openai/operators/__init__.py
similarity index 100%
rename from airflow/providers/openai/operators/__init__.py
rename to providers/src/airflow/providers/openai/operators/__init__.py
diff --git a/airflow/providers/openai/operators/openai.py b/providers/src/airflow/providers/openai/operators/openai.py
similarity index 100%
rename from airflow/providers/openai/operators/openai.py
rename to providers/src/airflow/providers/openai/operators/openai.py
diff --git a/airflow/providers/openai/provider.yaml b/providers/src/airflow/providers/openai/provider.yaml
similarity index 100%
rename from airflow/providers/openai/provider.yaml
rename to providers/src/airflow/providers/openai/provider.yaml
diff --git a/airflow/providers/openai/triggers/__init__.py b/providers/src/airflow/providers/openai/triggers/__init__.py
similarity index 100%
rename from airflow/providers/openai/triggers/__init__.py
rename to providers/src/airflow/providers/openai/triggers/__init__.py
diff --git a/airflow/providers/openai/triggers/openai.py b/providers/src/airflow/providers/openai/triggers/openai.py
similarity index 100%
rename from airflow/providers/openai/triggers/openai.py
rename to providers/src/airflow/providers/openai/triggers/openai.py
diff --git a/airflow/providers/openfaas/.latest-doc-only-change.txt b/providers/src/airflow/providers/openfaas/.latest-doc-only-change.txt
similarity index 100%
rename from airflow/providers/openfaas/.latest-doc-only-change.txt
rename to providers/src/airflow/providers/openfaas/.latest-doc-only-change.txt
diff --git a/airflow/providers/openfaas/CHANGELOG.rst b/providers/src/airflow/providers/openfaas/CHANGELOG.rst
similarity index 100%
rename from airflow/providers/openfaas/CHANGELOG.rst
rename to providers/src/airflow/providers/openfaas/CHANGELOG.rst
diff --git a/airflow/providers/openfaas/__init__.py b/providers/src/airflow/providers/openfaas/__init__.py
similarity index 100%
rename from airflow/providers/openfaas/__init__.py
rename to providers/src/airflow/providers/openfaas/__init__.py
diff --git a/airflow/providers/neo4j/hooks/__init__.py b/providers/src/airflow/providers/openfaas/hooks/__init__.py
similarity index 100%
rename from airflow/providers/neo4j/hooks/__init__.py
rename to providers/src/airflow/providers/openfaas/hooks/__init__.py
diff --git a/airflow/providers/openfaas/hooks/openfaas.py b/providers/src/airflow/providers/openfaas/hooks/openfaas.py
similarity index 100%
rename from airflow/providers/openfaas/hooks/openfaas.py
rename to providers/src/airflow/providers/openfaas/hooks/openfaas.py
diff --git a/airflow/providers/openfaas/provider.yaml b/providers/src/airflow/providers/openfaas/provider.yaml
similarity index 100%
rename from airflow/providers/openfaas/provider.yaml
rename to providers/src/airflow/providers/openfaas/provider.yaml
diff --git a/airflow/providers/openlineage/.latest-doc-only-change.txt b/providers/src/airflow/providers/openlineage/.latest-doc-only-change.txt
similarity index 100%
rename from airflow/providers/openlineage/.latest-doc-only-change.txt
rename to providers/src/airflow/providers/openlineage/.latest-doc-only-change.txt
diff --git a/airflow/providers/openlineage/CHANGELOG.rst b/providers/src/airflow/providers/openlineage/CHANGELOG.rst
similarity index 100%
rename from airflow/providers/openlineage/CHANGELOG.rst
rename to providers/src/airflow/providers/openlineage/CHANGELOG.rst
diff --git a/airflow/providers/openlineage/__init__.py b/providers/src/airflow/providers/openlineage/__init__.py
similarity index 100%
rename from airflow/providers/openlineage/__init__.py
rename to providers/src/airflow/providers/openlineage/__init__.py
diff --git a/airflow/providers/openlineage/conf.py b/providers/src/airflow/providers/openlineage/conf.py
similarity index 100%
rename from airflow/providers/openlineage/conf.py
rename to providers/src/airflow/providers/openlineage/conf.py
diff --git a/airflow/providers/openlineage/extractors/__init__.py b/providers/src/airflow/providers/openlineage/extractors/__init__.py
similarity index 100%
rename from airflow/providers/openlineage/extractors/__init__.py
rename to providers/src/airflow/providers/openlineage/extractors/__init__.py
diff --git a/airflow/providers/openlineage/extractors/base.py b/providers/src/airflow/providers/openlineage/extractors/base.py
similarity index 100%
rename from airflow/providers/openlineage/extractors/base.py
rename to providers/src/airflow/providers/openlineage/extractors/base.py
diff --git a/airflow/providers/openlineage/extractors/bash.py b/providers/src/airflow/providers/openlineage/extractors/bash.py
similarity index 100%
rename from airflow/providers/openlineage/extractors/bash.py
rename to providers/src/airflow/providers/openlineage/extractors/bash.py
diff --git a/airflow/providers/openlineage/extractors/manager.py b/providers/src/airflow/providers/openlineage/extractors/manager.py
similarity index 100%
rename from airflow/providers/openlineage/extractors/manager.py
rename to providers/src/airflow/providers/openlineage/extractors/manager.py
diff --git a/airflow/providers/openlineage/extractors/python.py b/providers/src/airflow/providers/openlineage/extractors/python.py
similarity index 100%
rename from airflow/providers/openlineage/extractors/python.py
rename to providers/src/airflow/providers/openlineage/extractors/python.py
diff --git a/airflow/providers/openlineage/facets/AirflowDagRunFacet.json b/providers/src/airflow/providers/openlineage/facets/AirflowDagRunFacet.json
similarity index 100%
rename from airflow/providers/openlineage/facets/AirflowDagRunFacet.json
rename to providers/src/airflow/providers/openlineage/facets/AirflowDagRunFacet.json
diff --git a/airflow/providers/openlineage/facets/AirflowDebugRunFacet.json b/providers/src/airflow/providers/openlineage/facets/AirflowDebugRunFacet.json
similarity index 100%
rename from airflow/providers/openlineage/facets/AirflowDebugRunFacet.json
rename to providers/src/airflow/providers/openlineage/facets/AirflowDebugRunFacet.json
diff --git a/airflow/providers/openlineage/facets/AirflowJobFacet.json b/providers/src/airflow/providers/openlineage/facets/AirflowJobFacet.json
similarity index 100%
rename from airflow/providers/openlineage/facets/AirflowJobFacet.json
rename to providers/src/airflow/providers/openlineage/facets/AirflowJobFacet.json
diff --git a/airflow/providers/openlineage/facets/AirflowRunFacet.json b/providers/src/airflow/providers/openlineage/facets/AirflowRunFacet.json
similarity index 100%
rename from airflow/providers/openlineage/facets/AirflowRunFacet.json
rename to providers/src/airflow/providers/openlineage/facets/AirflowRunFacet.json
diff --git a/airflow/providers/openlineage/facets/AirflowStateRunFacet.json b/providers/src/airflow/providers/openlineage/facets/AirflowStateRunFacet.json
similarity index 100%
rename from airflow/providers/openlineage/facets/AirflowStateRunFacet.json
rename to providers/src/airflow/providers/openlineage/facets/AirflowStateRunFacet.json
diff --git a/airflow/providers/openlineage/facets/__init__.py b/providers/src/airflow/providers/openlineage/facets/__init__.py
similarity index 100%
rename from airflow/providers/openlineage/facets/__init__.py
rename to providers/src/airflow/providers/openlineage/facets/__init__.py
diff --git a/airflow/providers/openlineage/plugins/__init__.py b/providers/src/airflow/providers/openlineage/plugins/__init__.py
similarity index 100%
rename from airflow/providers/openlineage/plugins/__init__.py
rename to providers/src/airflow/providers/openlineage/plugins/__init__.py
diff --git a/airflow/providers/openlineage/plugins/adapter.py b/providers/src/airflow/providers/openlineage/plugins/adapter.py
similarity index 100%
rename from airflow/providers/openlineage/plugins/adapter.py
rename to providers/src/airflow/providers/openlineage/plugins/adapter.py
diff --git a/airflow/providers/openlineage/plugins/facets.py b/providers/src/airflow/providers/openlineage/plugins/facets.py
similarity index 100%
rename from airflow/providers/openlineage/plugins/facets.py
rename to providers/src/airflow/providers/openlineage/plugins/facets.py
diff --git a/airflow/providers/openlineage/plugins/listener.py b/providers/src/airflow/providers/openlineage/plugins/listener.py
similarity index 100%
rename from airflow/providers/openlineage/plugins/listener.py
rename to providers/src/airflow/providers/openlineage/plugins/listener.py
diff --git a/airflow/providers/openlineage/plugins/macros.py b/providers/src/airflow/providers/openlineage/plugins/macros.py
similarity index 100%
rename from airflow/providers/openlineage/plugins/macros.py
rename to providers/src/airflow/providers/openlineage/plugins/macros.py
diff --git a/airflow/providers/openlineage/plugins/openlineage.py b/providers/src/airflow/providers/openlineage/plugins/openlineage.py
similarity index 100%
rename from airflow/providers/openlineage/plugins/openlineage.py
rename to providers/src/airflow/providers/openlineage/plugins/openlineage.py
diff --git a/airflow/providers/openlineage/provider.yaml b/providers/src/airflow/providers/openlineage/provider.yaml
similarity index 100%
rename from airflow/providers/openlineage/provider.yaml
rename to providers/src/airflow/providers/openlineage/provider.yaml
diff --git a/airflow/providers/openlineage/sqlparser.py b/providers/src/airflow/providers/openlineage/sqlparser.py
similarity index 100%
rename from airflow/providers/openlineage/sqlparser.py
rename to providers/src/airflow/providers/openlineage/sqlparser.py
diff --git a/airflow/providers/openlineage/utils/__init__.py b/providers/src/airflow/providers/openlineage/utils/__init__.py
similarity index 100%
rename from airflow/providers/openlineage/utils/__init__.py
rename to providers/src/airflow/providers/openlineage/utils/__init__.py
diff --git a/airflow/providers/openlineage/utils/asset_compat_lineage_collector.py b/providers/src/airflow/providers/openlineage/utils/asset_compat_lineage_collector.py
similarity index 100%
rename from airflow/providers/openlineage/utils/asset_compat_lineage_collector.py
rename to providers/src/airflow/providers/openlineage/utils/asset_compat_lineage_collector.py
diff --git a/airflow/providers/openlineage/utils/selective_enable.py b/providers/src/airflow/providers/openlineage/utils/selective_enable.py
similarity index 100%
rename from airflow/providers/openlineage/utils/selective_enable.py
rename to providers/src/airflow/providers/openlineage/utils/selective_enable.py
diff --git a/airflow/providers/openlineage/utils/sql.py b/providers/src/airflow/providers/openlineage/utils/sql.py
similarity index 100%
rename from airflow/providers/openlineage/utils/sql.py
rename to providers/src/airflow/providers/openlineage/utils/sql.py
diff --git a/airflow/providers/openlineage/utils/utils.py b/providers/src/airflow/providers/openlineage/utils/utils.py
similarity index 100%
rename from airflow/providers/openlineage/utils/utils.py
rename to providers/src/airflow/providers/openlineage/utils/utils.py
diff --git a/airflow/providers/opensearch/.latest-doc-only-change.txt b/providers/src/airflow/providers/opensearch/.latest-doc-only-change.txt
similarity index 100%
rename from airflow/providers/opensearch/.latest-doc-only-change.txt
rename to providers/src/airflow/providers/opensearch/.latest-doc-only-change.txt
diff --git a/airflow/providers/opensearch/CHANGELOG.rst b/providers/src/airflow/providers/opensearch/CHANGELOG.rst
similarity index 100%
rename from airflow/providers/opensearch/CHANGELOG.rst
rename to providers/src/airflow/providers/opensearch/CHANGELOG.rst
diff --git a/airflow/providers/opensearch/__init__.py b/providers/src/airflow/providers/opensearch/__init__.py
similarity index 100%
rename from airflow/providers/opensearch/__init__.py
rename to providers/src/airflow/providers/opensearch/__init__.py
diff --git a/airflow/providers/opensearch/hooks/__init__.py b/providers/src/airflow/providers/opensearch/hooks/__init__.py
similarity index 100%
rename from airflow/providers/opensearch/hooks/__init__.py
rename to providers/src/airflow/providers/opensearch/hooks/__init__.py
diff --git a/airflow/providers/opensearch/hooks/opensearch.py b/providers/src/airflow/providers/opensearch/hooks/opensearch.py
similarity index 100%
rename from airflow/providers/opensearch/hooks/opensearch.py
rename to providers/src/airflow/providers/opensearch/hooks/opensearch.py
diff --git a/airflow/providers/opensearch/log/__init__.py b/providers/src/airflow/providers/opensearch/log/__init__.py
similarity index 100%
rename from airflow/providers/opensearch/log/__init__.py
rename to providers/src/airflow/providers/opensearch/log/__init__.py
diff --git a/airflow/providers/opensearch/log/os_json_formatter.py b/providers/src/airflow/providers/opensearch/log/os_json_formatter.py
similarity index 100%
rename from airflow/providers/opensearch/log/os_json_formatter.py
rename to providers/src/airflow/providers/opensearch/log/os_json_formatter.py
diff --git a/airflow/providers/opensearch/log/os_response.py b/providers/src/airflow/providers/opensearch/log/os_response.py
similarity index 100%
rename from airflow/providers/opensearch/log/os_response.py
rename to providers/src/airflow/providers/opensearch/log/os_response.py
diff --git a/airflow/providers/opensearch/log/os_task_handler.py b/providers/src/airflow/providers/opensearch/log/os_task_handler.py
similarity index 100%
rename from airflow/providers/opensearch/log/os_task_handler.py
rename to providers/src/airflow/providers/opensearch/log/os_task_handler.py
diff --git a/airflow/providers/opensearch/operators/__init__.py b/providers/src/airflow/providers/opensearch/operators/__init__.py
similarity index 100%
rename from airflow/providers/opensearch/operators/__init__.py
rename to providers/src/airflow/providers/opensearch/operators/__init__.py
diff --git a/airflow/providers/opensearch/operators/opensearch.py b/providers/src/airflow/providers/opensearch/operators/opensearch.py
similarity index 100%
rename from airflow/providers/opensearch/operators/opensearch.py
rename to providers/src/airflow/providers/opensearch/operators/opensearch.py
diff --git a/airflow/providers/opensearch/provider.yaml b/providers/src/airflow/providers/opensearch/provider.yaml
similarity index 100%
rename from airflow/providers/opensearch/provider.yaml
rename to providers/src/airflow/providers/opensearch/provider.yaml
diff --git a/airflow/providers/opsgenie/.latest-doc-only-change.txt b/providers/src/airflow/providers/opsgenie/.latest-doc-only-change.txt
similarity index 100%
rename from airflow/providers/opsgenie/.latest-doc-only-change.txt
rename to providers/src/airflow/providers/opsgenie/.latest-doc-only-change.txt
diff --git a/airflow/providers/opsgenie/CHANGELOG.rst b/providers/src/airflow/providers/opsgenie/CHANGELOG.rst
similarity index 100%
rename from airflow/providers/opsgenie/CHANGELOG.rst
rename to providers/src/airflow/providers/opsgenie/CHANGELOG.rst
diff --git a/airflow/providers/opsgenie/__init__.py b/providers/src/airflow/providers/opsgenie/__init__.py
similarity index 100%
rename from airflow/providers/opsgenie/__init__.py
rename to providers/src/airflow/providers/opsgenie/__init__.py
diff --git a/airflow/providers/neo4j/operators/__init__.py b/providers/src/airflow/providers/opsgenie/hooks/__init__.py
similarity index 100%
rename from airflow/providers/neo4j/operators/__init__.py
rename to providers/src/airflow/providers/opsgenie/hooks/__init__.py
diff --git a/airflow/providers/opsgenie/hooks/opsgenie.py b/providers/src/airflow/providers/opsgenie/hooks/opsgenie.py
similarity index 100%
rename from airflow/providers/opsgenie/hooks/opsgenie.py
rename to providers/src/airflow/providers/opsgenie/hooks/opsgenie.py
diff --git a/airflow/providers/opsgenie/notifications/__init__.py b/providers/src/airflow/providers/opsgenie/notifications/__init__.py
similarity index 100%
rename from airflow/providers/opsgenie/notifications/__init__.py
rename to providers/src/airflow/providers/opsgenie/notifications/__init__.py
diff --git a/airflow/providers/opsgenie/notifications/opsgenie.py b/providers/src/airflow/providers/opsgenie/notifications/opsgenie.py
similarity index 100%
rename from airflow/providers/opsgenie/notifications/opsgenie.py
rename to providers/src/airflow/providers/opsgenie/notifications/opsgenie.py
diff --git a/airflow/providers/openfaas/hooks/__init__.py b/providers/src/airflow/providers/opsgenie/operators/__init__.py
similarity index 100%
rename from airflow/providers/openfaas/hooks/__init__.py
rename to providers/src/airflow/providers/opsgenie/operators/__init__.py
diff --git a/airflow/providers/opsgenie/operators/opsgenie.py b/providers/src/airflow/providers/opsgenie/operators/opsgenie.py
similarity index 100%
rename from airflow/providers/opsgenie/operators/opsgenie.py
rename to providers/src/airflow/providers/opsgenie/operators/opsgenie.py
diff --git a/airflow/providers/opsgenie/provider.yaml b/providers/src/airflow/providers/opsgenie/provider.yaml
similarity index 100%
rename from airflow/providers/opsgenie/provider.yaml
rename to providers/src/airflow/providers/opsgenie/provider.yaml
diff --git a/airflow/providers/opsgenie/typing/__init__.py b/providers/src/airflow/providers/opsgenie/typing/__init__.py
similarity index 100%
rename from airflow/providers/opsgenie/typing/__init__.py
rename to providers/src/airflow/providers/opsgenie/typing/__init__.py
diff --git a/airflow/providers/opsgenie/typing/opsgenie.py b/providers/src/airflow/providers/opsgenie/typing/opsgenie.py
similarity index 100%
rename from airflow/providers/opsgenie/typing/opsgenie.py
rename to providers/src/airflow/providers/opsgenie/typing/opsgenie.py
diff --git a/airflow/providers/oracle/.latest-doc-only-change.txt b/providers/src/airflow/providers/oracle/.latest-doc-only-change.txt
similarity index 100%
rename from airflow/providers/oracle/.latest-doc-only-change.txt
rename to providers/src/airflow/providers/oracle/.latest-doc-only-change.txt
diff --git a/airflow/providers/oracle/CHANGELOG.rst b/providers/src/airflow/providers/oracle/CHANGELOG.rst
similarity index 100%
rename from airflow/providers/oracle/CHANGELOG.rst
rename to providers/src/airflow/providers/oracle/CHANGELOG.rst
diff --git a/airflow/providers/oracle/__init__.py b/providers/src/airflow/providers/oracle/__init__.py
similarity index 100%
rename from airflow/providers/oracle/__init__.py
rename to providers/src/airflow/providers/oracle/__init__.py
diff --git a/airflow/providers/oracle/example_dags/__init__.py b/providers/src/airflow/providers/oracle/example_dags/__init__.py
similarity index 100%
rename from airflow/providers/oracle/example_dags/__init__.py
rename to providers/src/airflow/providers/oracle/example_dags/__init__.py
diff --git a/airflow/providers/oracle/example_dags/example_oracle.py b/providers/src/airflow/providers/oracle/example_dags/example_oracle.py
similarity index 100%
rename from airflow/providers/oracle/example_dags/example_oracle.py
rename to providers/src/airflow/providers/oracle/example_dags/example_oracle.py
diff --git a/airflow/providers/opsgenie/hooks/__init__.py b/providers/src/airflow/providers/oracle/hooks/__init__.py
similarity index 100%
rename from airflow/providers/opsgenie/hooks/__init__.py
rename to providers/src/airflow/providers/oracle/hooks/__init__.py
diff --git a/airflow/providers/oracle/hooks/oracle.py b/providers/src/airflow/providers/oracle/hooks/oracle.py
similarity index 100%
rename from airflow/providers/oracle/hooks/oracle.py
rename to providers/src/airflow/providers/oracle/hooks/oracle.py
diff --git a/airflow/providers/opsgenie/operators/__init__.py b/providers/src/airflow/providers/oracle/operators/__init__.py
similarity index 100%
rename from airflow/providers/opsgenie/operators/__init__.py
rename to providers/src/airflow/providers/oracle/operators/__init__.py
diff --git a/airflow/providers/oracle/operators/oracle.py b/providers/src/airflow/providers/oracle/operators/oracle.py
similarity index 100%
rename from airflow/providers/oracle/operators/oracle.py
rename to providers/src/airflow/providers/oracle/operators/oracle.py
diff --git a/airflow/providers/oracle/provider.yaml b/providers/src/airflow/providers/oracle/provider.yaml
similarity index 100%
rename from airflow/providers/oracle/provider.yaml
rename to providers/src/airflow/providers/oracle/provider.yaml
diff --git a/airflow/providers/oracle/transfers/__init__.py b/providers/src/airflow/providers/oracle/transfers/__init__.py
similarity index 100%
rename from airflow/providers/oracle/transfers/__init__.py
rename to providers/src/airflow/providers/oracle/transfers/__init__.py
diff --git a/airflow/providers/oracle/transfers/oracle_to_oracle.py b/providers/src/airflow/providers/oracle/transfers/oracle_to_oracle.py
similarity index 100%
rename from airflow/providers/oracle/transfers/oracle_to_oracle.py
rename to providers/src/airflow/providers/oracle/transfers/oracle_to_oracle.py
diff --git a/airflow/providers/pagerduty/.latest-doc-only-change.txt b/providers/src/airflow/providers/pagerduty/.latest-doc-only-change.txt
similarity index 100%
rename from airflow/providers/pagerduty/.latest-doc-only-change.txt
rename to providers/src/airflow/providers/pagerduty/.latest-doc-only-change.txt
diff --git a/airflow/providers/pagerduty/CHANGELOG.rst b/providers/src/airflow/providers/pagerduty/CHANGELOG.rst
similarity index 100%
rename from airflow/providers/pagerduty/CHANGELOG.rst
rename to providers/src/airflow/providers/pagerduty/CHANGELOG.rst
diff --git a/airflow/providers/pagerduty/__init__.py b/providers/src/airflow/providers/pagerduty/__init__.py
similarity index 100%
rename from airflow/providers/pagerduty/__init__.py
rename to providers/src/airflow/providers/pagerduty/__init__.py
diff --git a/airflow/providers/oracle/hooks/__init__.py b/providers/src/airflow/providers/pagerduty/hooks/__init__.py
similarity index 100%
rename from airflow/providers/oracle/hooks/__init__.py
rename to providers/src/airflow/providers/pagerduty/hooks/__init__.py
diff --git a/airflow/providers/pagerduty/hooks/pagerduty.py b/providers/src/airflow/providers/pagerduty/hooks/pagerduty.py
similarity index 100%
rename from airflow/providers/pagerduty/hooks/pagerduty.py
rename to providers/src/airflow/providers/pagerduty/hooks/pagerduty.py
diff --git a/airflow/providers/pagerduty/hooks/pagerduty_events.py b/providers/src/airflow/providers/pagerduty/hooks/pagerduty_events.py
similarity index 100%
rename from airflow/providers/pagerduty/hooks/pagerduty_events.py
rename to providers/src/airflow/providers/pagerduty/hooks/pagerduty_events.py
diff --git a/airflow/providers/pagerduty/notifications/__init__.py b/providers/src/airflow/providers/pagerduty/notifications/__init__.py
similarity index 100%
rename from airflow/providers/pagerduty/notifications/__init__.py
rename to providers/src/airflow/providers/pagerduty/notifications/__init__.py
diff --git a/airflow/providers/pagerduty/notifications/pagerduty.py b/providers/src/airflow/providers/pagerduty/notifications/pagerduty.py
similarity index 100%
rename from airflow/providers/pagerduty/notifications/pagerduty.py
rename to providers/src/airflow/providers/pagerduty/notifications/pagerduty.py
diff --git a/airflow/providers/pagerduty/provider.yaml b/providers/src/airflow/providers/pagerduty/provider.yaml
similarity index 100%
rename from airflow/providers/pagerduty/provider.yaml
rename to providers/src/airflow/providers/pagerduty/provider.yaml
diff --git a/airflow/providers/papermill/.latest-doc-only-change.txt b/providers/src/airflow/providers/papermill/.latest-doc-only-change.txt
similarity index 100%
rename from airflow/providers/papermill/.latest-doc-only-change.txt
rename to providers/src/airflow/providers/papermill/.latest-doc-only-change.txt
diff --git a/airflow/providers/papermill/CHANGELOG.rst b/providers/src/airflow/providers/papermill/CHANGELOG.rst
similarity index 100%
rename from airflow/providers/papermill/CHANGELOG.rst
rename to providers/src/airflow/providers/papermill/CHANGELOG.rst
diff --git a/airflow/providers/papermill/__init__.py b/providers/src/airflow/providers/papermill/__init__.py
similarity index 100%
rename from airflow/providers/papermill/__init__.py
rename to providers/src/airflow/providers/papermill/__init__.py
diff --git a/airflow/providers/oracle/operators/__init__.py b/providers/src/airflow/providers/papermill/hooks/__init__.py
similarity index 100%
rename from airflow/providers/oracle/operators/__init__.py
rename to providers/src/airflow/providers/papermill/hooks/__init__.py
diff --git a/airflow/providers/papermill/hooks/kernel.py b/providers/src/airflow/providers/papermill/hooks/kernel.py
similarity index 100%
rename from airflow/providers/papermill/hooks/kernel.py
rename to providers/src/airflow/providers/papermill/hooks/kernel.py
diff --git a/airflow/providers/pagerduty/hooks/__init__.py b/providers/src/airflow/providers/papermill/operators/__init__.py
similarity index 100%
rename from airflow/providers/pagerduty/hooks/__init__.py
rename to providers/src/airflow/providers/papermill/operators/__init__.py
diff --git a/airflow/providers/papermill/operators/papermill.py b/providers/src/airflow/providers/papermill/operators/papermill.py
similarity index 100%
rename from airflow/providers/papermill/operators/papermill.py
rename to providers/src/airflow/providers/papermill/operators/papermill.py
diff --git a/airflow/providers/papermill/provider.yaml b/providers/src/airflow/providers/papermill/provider.yaml
similarity index 100%
rename from airflow/providers/papermill/provider.yaml
rename to providers/src/airflow/providers/papermill/provider.yaml
diff --git a/airflow/providers/pgvector/.latest-doc-only-change.txt b/providers/src/airflow/providers/pgvector/.latest-doc-only-change.txt
similarity index 100%
rename from airflow/providers/pgvector/.latest-doc-only-change.txt
rename to providers/src/airflow/providers/pgvector/.latest-doc-only-change.txt
diff --git a/airflow/providers/pgvector/CHANGELOG.rst b/providers/src/airflow/providers/pgvector/CHANGELOG.rst
similarity index 100%
rename from airflow/providers/pgvector/CHANGELOG.rst
rename to providers/src/airflow/providers/pgvector/CHANGELOG.rst
diff --git a/airflow/providers/pgvector/__init__.py b/providers/src/airflow/providers/pgvector/__init__.py
similarity index 100%
rename from airflow/providers/pgvector/__init__.py
rename to providers/src/airflow/providers/pgvector/__init__.py
diff --git a/airflow/providers/pgvector/hooks/__init__.py b/providers/src/airflow/providers/pgvector/hooks/__init__.py
similarity index 100%
rename from airflow/providers/pgvector/hooks/__init__.py
rename to providers/src/airflow/providers/pgvector/hooks/__init__.py
diff --git a/airflow/providers/pgvector/hooks/pgvector.py b/providers/src/airflow/providers/pgvector/hooks/pgvector.py
similarity index 100%
rename from airflow/providers/pgvector/hooks/pgvector.py
rename to providers/src/airflow/providers/pgvector/hooks/pgvector.py
diff --git a/airflow/providers/pgvector/operators/__init__.py b/providers/src/airflow/providers/pgvector/operators/__init__.py
similarity index 100%
rename from airflow/providers/pgvector/operators/__init__.py
rename to providers/src/airflow/providers/pgvector/operators/__init__.py
diff --git a/airflow/providers/pgvector/operators/pgvector.py b/providers/src/airflow/providers/pgvector/operators/pgvector.py
similarity index 100%
rename from airflow/providers/pgvector/operators/pgvector.py
rename to providers/src/airflow/providers/pgvector/operators/pgvector.py
providers/src/airflow/providers/pgvector/operators/pgvector.py diff --git a/airflow/providers/pgvector/provider.yaml b/providers/src/airflow/providers/pgvector/provider.yaml similarity index 100% rename from airflow/providers/pgvector/provider.yaml rename to providers/src/airflow/providers/pgvector/provider.yaml diff --git a/airflow/providers/pinecone/.latest-doc-only-change.txt b/providers/src/airflow/providers/pinecone/.latest-doc-only-change.txt similarity index 100% rename from airflow/providers/pinecone/.latest-doc-only-change.txt rename to providers/src/airflow/providers/pinecone/.latest-doc-only-change.txt diff --git a/airflow/providers/pinecone/CHANGELOG.rst b/providers/src/airflow/providers/pinecone/CHANGELOG.rst similarity index 100% rename from airflow/providers/pinecone/CHANGELOG.rst rename to providers/src/airflow/providers/pinecone/CHANGELOG.rst diff --git a/airflow/providers/pinecone/__init__.py b/providers/src/airflow/providers/pinecone/__init__.py similarity index 100% rename from airflow/providers/pinecone/__init__.py rename to providers/src/airflow/providers/pinecone/__init__.py diff --git a/airflow/providers/pinecone/hooks/__init__.py b/providers/src/airflow/providers/pinecone/hooks/__init__.py similarity index 100% rename from airflow/providers/pinecone/hooks/__init__.py rename to providers/src/airflow/providers/pinecone/hooks/__init__.py diff --git a/airflow/providers/pinecone/hooks/pinecone.py b/providers/src/airflow/providers/pinecone/hooks/pinecone.py similarity index 100% rename from airflow/providers/pinecone/hooks/pinecone.py rename to providers/src/airflow/providers/pinecone/hooks/pinecone.py diff --git a/airflow/providers/pinecone/operators/__init__.py b/providers/src/airflow/providers/pinecone/operators/__init__.py similarity index 100% rename from airflow/providers/pinecone/operators/__init__.py rename to providers/src/airflow/providers/pinecone/operators/__init__.py diff --git a/airflow/providers/pinecone/operators/pinecone.py b/providers/src/airflow/providers/pinecone/operators/pinecone.py similarity index 100% rename from airflow/providers/pinecone/operators/pinecone.py rename to providers/src/airflow/providers/pinecone/operators/pinecone.py diff --git a/airflow/providers/pinecone/provider.yaml b/providers/src/airflow/providers/pinecone/provider.yaml similarity index 100% rename from airflow/providers/pinecone/provider.yaml rename to providers/src/airflow/providers/pinecone/provider.yaml diff --git a/airflow/providers/postgres/.latest-doc-only-change.txt b/providers/src/airflow/providers/postgres/.latest-doc-only-change.txt similarity index 100% rename from airflow/providers/postgres/.latest-doc-only-change.txt rename to providers/src/airflow/providers/postgres/.latest-doc-only-change.txt diff --git a/airflow/providers/postgres/CHANGELOG.rst b/providers/src/airflow/providers/postgres/CHANGELOG.rst similarity index 100% rename from airflow/providers/postgres/CHANGELOG.rst rename to providers/src/airflow/providers/postgres/CHANGELOG.rst diff --git a/airflow/providers/postgres/__init__.py b/providers/src/airflow/providers/postgres/__init__.py similarity index 100% rename from airflow/providers/postgres/__init__.py rename to providers/src/airflow/providers/postgres/__init__.py diff --git a/airflow/providers/postgres/assets/__init__.py b/providers/src/airflow/providers/postgres/assets/__init__.py similarity index 100% rename from airflow/providers/postgres/assets/__init__.py rename to providers/src/airflow/providers/postgres/assets/__init__.py diff --git 
a/airflow/providers/postgres/assets/postgres.py b/providers/src/airflow/providers/postgres/assets/postgres.py similarity index 100% rename from airflow/providers/postgres/assets/postgres.py rename to providers/src/airflow/providers/postgres/assets/postgres.py diff --git a/airflow/providers/papermill/hooks/__init__.py b/providers/src/airflow/providers/postgres/hooks/__init__.py similarity index 100% rename from airflow/providers/papermill/hooks/__init__.py rename to providers/src/airflow/providers/postgres/hooks/__init__.py diff --git a/airflow/providers/postgres/hooks/postgres.py b/providers/src/airflow/providers/postgres/hooks/postgres.py similarity index 100% rename from airflow/providers/postgres/hooks/postgres.py rename to providers/src/airflow/providers/postgres/hooks/postgres.py diff --git a/airflow/providers/papermill/operators/__init__.py b/providers/src/airflow/providers/postgres/operators/__init__.py similarity index 100% rename from airflow/providers/papermill/operators/__init__.py rename to providers/src/airflow/providers/postgres/operators/__init__.py diff --git a/airflow/providers/postgres/operators/postgres.py b/providers/src/airflow/providers/postgres/operators/postgres.py similarity index 100% rename from airflow/providers/postgres/operators/postgres.py rename to providers/src/airflow/providers/postgres/operators/postgres.py diff --git a/airflow/providers/postgres/provider.yaml b/providers/src/airflow/providers/postgres/provider.yaml similarity index 100% rename from airflow/providers/postgres/provider.yaml rename to providers/src/airflow/providers/postgres/provider.yaml diff --git a/airflow/providers/presto/.latest-doc-only-change.txt b/providers/src/airflow/providers/presto/.latest-doc-only-change.txt similarity index 100% rename from airflow/providers/presto/.latest-doc-only-change.txt rename to providers/src/airflow/providers/presto/.latest-doc-only-change.txt diff --git a/airflow/providers/presto/CHANGELOG.rst b/providers/src/airflow/providers/presto/CHANGELOG.rst similarity index 100% rename from airflow/providers/presto/CHANGELOG.rst rename to providers/src/airflow/providers/presto/CHANGELOG.rst diff --git a/airflow/providers/presto/__init__.py b/providers/src/airflow/providers/presto/__init__.py similarity index 100% rename from airflow/providers/presto/__init__.py rename to providers/src/airflow/providers/presto/__init__.py diff --git a/airflow/providers/postgres/hooks/__init__.py b/providers/src/airflow/providers/presto/hooks/__init__.py similarity index 100% rename from airflow/providers/postgres/hooks/__init__.py rename to providers/src/airflow/providers/presto/hooks/__init__.py diff --git a/airflow/providers/presto/hooks/presto.py b/providers/src/airflow/providers/presto/hooks/presto.py similarity index 100% rename from airflow/providers/presto/hooks/presto.py rename to providers/src/airflow/providers/presto/hooks/presto.py diff --git a/airflow/providers/presto/provider.yaml b/providers/src/airflow/providers/presto/provider.yaml similarity index 100% rename from airflow/providers/presto/provider.yaml rename to providers/src/airflow/providers/presto/provider.yaml diff --git a/airflow/providers/presto/transfers/__init__.py b/providers/src/airflow/providers/presto/transfers/__init__.py similarity index 100% rename from airflow/providers/presto/transfers/__init__.py rename to providers/src/airflow/providers/presto/transfers/__init__.py diff --git a/airflow/providers/presto/transfers/gcs_to_presto.py 
b/providers/src/airflow/providers/presto/transfers/gcs_to_presto.py similarity index 100% rename from airflow/providers/presto/transfers/gcs_to_presto.py rename to providers/src/airflow/providers/presto/transfers/gcs_to_presto.py diff --git a/airflow/providers/qdrant/.latest-doc-only-change.txt b/providers/src/airflow/providers/qdrant/.latest-doc-only-change.txt similarity index 100% rename from airflow/providers/qdrant/.latest-doc-only-change.txt rename to providers/src/airflow/providers/qdrant/.latest-doc-only-change.txt diff --git a/airflow/providers/qdrant/CHANGELOG.rst b/providers/src/airflow/providers/qdrant/CHANGELOG.rst similarity index 100% rename from airflow/providers/qdrant/CHANGELOG.rst rename to providers/src/airflow/providers/qdrant/CHANGELOG.rst diff --git a/airflow/providers/qdrant/__init__.py b/providers/src/airflow/providers/qdrant/__init__.py similarity index 100% rename from airflow/providers/qdrant/__init__.py rename to providers/src/airflow/providers/qdrant/__init__.py diff --git a/airflow/providers/qdrant/hooks/__init__.py b/providers/src/airflow/providers/qdrant/hooks/__init__.py similarity index 100% rename from airflow/providers/qdrant/hooks/__init__.py rename to providers/src/airflow/providers/qdrant/hooks/__init__.py diff --git a/airflow/providers/qdrant/hooks/qdrant.py b/providers/src/airflow/providers/qdrant/hooks/qdrant.py similarity index 100% rename from airflow/providers/qdrant/hooks/qdrant.py rename to providers/src/airflow/providers/qdrant/hooks/qdrant.py diff --git a/airflow/providers/qdrant/operators/__init__.py b/providers/src/airflow/providers/qdrant/operators/__init__.py similarity index 100% rename from airflow/providers/qdrant/operators/__init__.py rename to providers/src/airflow/providers/qdrant/operators/__init__.py diff --git a/airflow/providers/qdrant/operators/qdrant.py b/providers/src/airflow/providers/qdrant/operators/qdrant.py similarity index 100% rename from airflow/providers/qdrant/operators/qdrant.py rename to providers/src/airflow/providers/qdrant/operators/qdrant.py diff --git a/airflow/providers/qdrant/provider.yaml b/providers/src/airflow/providers/qdrant/provider.yaml similarity index 100% rename from airflow/providers/qdrant/provider.yaml rename to providers/src/airflow/providers/qdrant/provider.yaml diff --git a/airflow/providers/redis/.latest-doc-only-change.txt b/providers/src/airflow/providers/redis/.latest-doc-only-change.txt similarity index 100% rename from airflow/providers/redis/.latest-doc-only-change.txt rename to providers/src/airflow/providers/redis/.latest-doc-only-change.txt diff --git a/airflow/providers/redis/CHANGELOG.rst b/providers/src/airflow/providers/redis/CHANGELOG.rst similarity index 100% rename from airflow/providers/redis/CHANGELOG.rst rename to providers/src/airflow/providers/redis/CHANGELOG.rst diff --git a/airflow/providers/redis/__init__.py b/providers/src/airflow/providers/redis/__init__.py similarity index 100% rename from airflow/providers/redis/__init__.py rename to providers/src/airflow/providers/redis/__init__.py diff --git a/airflow/providers/postgres/operators/__init__.py b/providers/src/airflow/providers/redis/hooks/__init__.py similarity index 100% rename from airflow/providers/postgres/operators/__init__.py rename to providers/src/airflow/providers/redis/hooks/__init__.py diff --git a/airflow/providers/redis/hooks/redis.py b/providers/src/airflow/providers/redis/hooks/redis.py similarity index 100% rename from airflow/providers/redis/hooks/redis.py rename to 
providers/src/airflow/providers/redis/hooks/redis.py diff --git a/airflow/providers/presto/hooks/__init__.py b/providers/src/airflow/providers/redis/log/__init__.py similarity index 100% rename from airflow/providers/presto/hooks/__init__.py rename to providers/src/airflow/providers/redis/log/__init__.py diff --git a/airflow/providers/redis/log/redis_task_handler.py b/providers/src/airflow/providers/redis/log/redis_task_handler.py similarity index 100% rename from airflow/providers/redis/log/redis_task_handler.py rename to providers/src/airflow/providers/redis/log/redis_task_handler.py diff --git a/airflow/providers/redis/hooks/__init__.py b/providers/src/airflow/providers/redis/operators/__init__.py similarity index 100% rename from airflow/providers/redis/hooks/__init__.py rename to providers/src/airflow/providers/redis/operators/__init__.py diff --git a/airflow/providers/redis/operators/redis_publish.py b/providers/src/airflow/providers/redis/operators/redis_publish.py similarity index 100% rename from airflow/providers/redis/operators/redis_publish.py rename to providers/src/airflow/providers/redis/operators/redis_publish.py diff --git a/airflow/providers/redis/provider.yaml b/providers/src/airflow/providers/redis/provider.yaml similarity index 100% rename from airflow/providers/redis/provider.yaml rename to providers/src/airflow/providers/redis/provider.yaml diff --git a/airflow/providers/redis/log/__init__.py b/providers/src/airflow/providers/redis/sensors/__init__.py similarity index 100% rename from airflow/providers/redis/log/__init__.py rename to providers/src/airflow/providers/redis/sensors/__init__.py diff --git a/airflow/providers/redis/sensors/redis_key.py b/providers/src/airflow/providers/redis/sensors/redis_key.py similarity index 100% rename from airflow/providers/redis/sensors/redis_key.py rename to providers/src/airflow/providers/redis/sensors/redis_key.py diff --git a/airflow/providers/redis/sensors/redis_pub_sub.py b/providers/src/airflow/providers/redis/sensors/redis_pub_sub.py similarity index 100% rename from airflow/providers/redis/sensors/redis_pub_sub.py rename to providers/src/airflow/providers/redis/sensors/redis_pub_sub.py diff --git a/airflow/providers/salesforce/.latest-doc-only-change.txt b/providers/src/airflow/providers/salesforce/.latest-doc-only-change.txt similarity index 100% rename from airflow/providers/salesforce/.latest-doc-only-change.txt rename to providers/src/airflow/providers/salesforce/.latest-doc-only-change.txt diff --git a/airflow/providers/salesforce/CHANGELOG.rst b/providers/src/airflow/providers/salesforce/CHANGELOG.rst similarity index 100% rename from airflow/providers/salesforce/CHANGELOG.rst rename to providers/src/airflow/providers/salesforce/CHANGELOG.rst diff --git a/airflow/providers/salesforce/__init__.py b/providers/src/airflow/providers/salesforce/__init__.py similarity index 100% rename from airflow/providers/salesforce/__init__.py rename to providers/src/airflow/providers/salesforce/__init__.py diff --git a/airflow/providers/redis/operators/__init__.py b/providers/src/airflow/providers/salesforce/hooks/__init__.py similarity index 100% rename from airflow/providers/redis/operators/__init__.py rename to providers/src/airflow/providers/salesforce/hooks/__init__.py diff --git a/airflow/providers/salesforce/hooks/salesforce.py b/providers/src/airflow/providers/salesforce/hooks/salesforce.py similarity index 100% rename from airflow/providers/salesforce/hooks/salesforce.py rename to 
providers/src/airflow/providers/salesforce/hooks/salesforce.py diff --git a/airflow/providers/salesforce/operators/__init__.py b/providers/src/airflow/providers/salesforce/operators/__init__.py similarity index 100% rename from airflow/providers/salesforce/operators/__init__.py rename to providers/src/airflow/providers/salesforce/operators/__init__.py diff --git a/airflow/providers/salesforce/operators/bulk.py b/providers/src/airflow/providers/salesforce/operators/bulk.py similarity index 100% rename from airflow/providers/salesforce/operators/bulk.py rename to providers/src/airflow/providers/salesforce/operators/bulk.py diff --git a/airflow/providers/salesforce/operators/salesforce_apex_rest.py b/providers/src/airflow/providers/salesforce/operators/salesforce_apex_rest.py similarity index 100% rename from airflow/providers/salesforce/operators/salesforce_apex_rest.py rename to providers/src/airflow/providers/salesforce/operators/salesforce_apex_rest.py diff --git a/airflow/providers/salesforce/provider.yaml b/providers/src/airflow/providers/salesforce/provider.yaml similarity index 100% rename from airflow/providers/salesforce/provider.yaml rename to providers/src/airflow/providers/salesforce/provider.yaml diff --git a/airflow/providers/samba/.latest-doc-only-change.txt b/providers/src/airflow/providers/samba/.latest-doc-only-change.txt similarity index 100% rename from airflow/providers/samba/.latest-doc-only-change.txt rename to providers/src/airflow/providers/samba/.latest-doc-only-change.txt diff --git a/airflow/providers/samba/CHANGELOG.rst b/providers/src/airflow/providers/samba/CHANGELOG.rst similarity index 100% rename from airflow/providers/samba/CHANGELOG.rst rename to providers/src/airflow/providers/samba/CHANGELOG.rst diff --git a/airflow/providers/samba/__init__.py b/providers/src/airflow/providers/samba/__init__.py similarity index 100% rename from airflow/providers/samba/__init__.py rename to providers/src/airflow/providers/samba/__init__.py diff --git a/airflow/providers/redis/sensors/__init__.py b/providers/src/airflow/providers/samba/hooks/__init__.py similarity index 100% rename from airflow/providers/redis/sensors/__init__.py rename to providers/src/airflow/providers/samba/hooks/__init__.py diff --git a/airflow/providers/samba/hooks/samba.py b/providers/src/airflow/providers/samba/hooks/samba.py similarity index 100% rename from airflow/providers/samba/hooks/samba.py rename to providers/src/airflow/providers/samba/hooks/samba.py diff --git a/airflow/providers/samba/provider.yaml b/providers/src/airflow/providers/samba/provider.yaml similarity index 100% rename from airflow/providers/samba/provider.yaml rename to providers/src/airflow/providers/samba/provider.yaml diff --git a/airflow/providers/samba/transfers/__init__.py b/providers/src/airflow/providers/samba/transfers/__init__.py similarity index 100% rename from airflow/providers/samba/transfers/__init__.py rename to providers/src/airflow/providers/samba/transfers/__init__.py diff --git a/airflow/providers/samba/transfers/gcs_to_samba.py b/providers/src/airflow/providers/samba/transfers/gcs_to_samba.py similarity index 100% rename from airflow/providers/samba/transfers/gcs_to_samba.py rename to providers/src/airflow/providers/samba/transfers/gcs_to_samba.py diff --git a/airflow/providers/segment/.latest-doc-only-change.txt b/providers/src/airflow/providers/segment/.latest-doc-only-change.txt similarity index 100% rename from airflow/providers/segment/.latest-doc-only-change.txt rename to 
providers/src/airflow/providers/segment/.latest-doc-only-change.txt diff --git a/airflow/providers/segment/CHANGELOG.rst b/providers/src/airflow/providers/segment/CHANGELOG.rst similarity index 100% rename from airflow/providers/segment/CHANGELOG.rst rename to providers/src/airflow/providers/segment/CHANGELOG.rst diff --git a/airflow/providers/segment/__init__.py b/providers/src/airflow/providers/segment/__init__.py similarity index 100% rename from airflow/providers/segment/__init__.py rename to providers/src/airflow/providers/segment/__init__.py diff --git a/airflow/providers/salesforce/hooks/__init__.py b/providers/src/airflow/providers/segment/hooks/__init__.py similarity index 100% rename from airflow/providers/salesforce/hooks/__init__.py rename to providers/src/airflow/providers/segment/hooks/__init__.py diff --git a/airflow/providers/segment/hooks/segment.py b/providers/src/airflow/providers/segment/hooks/segment.py similarity index 100% rename from airflow/providers/segment/hooks/segment.py rename to providers/src/airflow/providers/segment/hooks/segment.py diff --git a/airflow/providers/samba/hooks/__init__.py b/providers/src/airflow/providers/segment/operators/__init__.py similarity index 100% rename from airflow/providers/samba/hooks/__init__.py rename to providers/src/airflow/providers/segment/operators/__init__.py diff --git a/airflow/providers/segment/operators/segment_track_event.py b/providers/src/airflow/providers/segment/operators/segment_track_event.py similarity index 100% rename from airflow/providers/segment/operators/segment_track_event.py rename to providers/src/airflow/providers/segment/operators/segment_track_event.py diff --git a/airflow/providers/segment/provider.yaml b/providers/src/airflow/providers/segment/provider.yaml similarity index 100% rename from airflow/providers/segment/provider.yaml rename to providers/src/airflow/providers/segment/provider.yaml diff --git a/airflow/providers/sendgrid/.latest-doc-only-change.txt b/providers/src/airflow/providers/sendgrid/.latest-doc-only-change.txt similarity index 100% rename from airflow/providers/sendgrid/.latest-doc-only-change.txt rename to providers/src/airflow/providers/sendgrid/.latest-doc-only-change.txt diff --git a/airflow/providers/sendgrid/CHANGELOG.rst b/providers/src/airflow/providers/sendgrid/CHANGELOG.rst similarity index 100% rename from airflow/providers/sendgrid/CHANGELOG.rst rename to providers/src/airflow/providers/sendgrid/CHANGELOG.rst diff --git a/airflow/providers/sendgrid/__init__.py b/providers/src/airflow/providers/sendgrid/__init__.py similarity index 100% rename from airflow/providers/sendgrid/__init__.py rename to providers/src/airflow/providers/sendgrid/__init__.py diff --git a/airflow/providers/sendgrid/provider.yaml b/providers/src/airflow/providers/sendgrid/provider.yaml similarity index 100% rename from airflow/providers/sendgrid/provider.yaml rename to providers/src/airflow/providers/sendgrid/provider.yaml diff --git a/airflow/providers/sendgrid/utils/__init__.py b/providers/src/airflow/providers/sendgrid/utils/__init__.py similarity index 100% rename from airflow/providers/sendgrid/utils/__init__.py rename to providers/src/airflow/providers/sendgrid/utils/__init__.py diff --git a/airflow/providers/sendgrid/utils/emailer.py b/providers/src/airflow/providers/sendgrid/utils/emailer.py similarity index 100% rename from airflow/providers/sendgrid/utils/emailer.py rename to providers/src/airflow/providers/sendgrid/utils/emailer.py diff --git 
a/airflow/providers/sftp/.latest-doc-only-change.txt b/providers/src/airflow/providers/sftp/.latest-doc-only-change.txt similarity index 100% rename from airflow/providers/sftp/.latest-doc-only-change.txt rename to providers/src/airflow/providers/sftp/.latest-doc-only-change.txt diff --git a/airflow/providers/sftp/CHANGELOG.rst b/providers/src/airflow/providers/sftp/CHANGELOG.rst similarity index 100% rename from airflow/providers/sftp/CHANGELOG.rst rename to providers/src/airflow/providers/sftp/CHANGELOG.rst diff --git a/airflow/providers/sftp/__init__.py b/providers/src/airflow/providers/sftp/__init__.py similarity index 100% rename from airflow/providers/sftp/__init__.py rename to providers/src/airflow/providers/sftp/__init__.py diff --git a/airflow/providers/sftp/decorators/__init__.py b/providers/src/airflow/providers/sftp/decorators/__init__.py similarity index 100% rename from airflow/providers/sftp/decorators/__init__.py rename to providers/src/airflow/providers/sftp/decorators/__init__.py diff --git a/airflow/providers/sftp/decorators/sensors/__init__.py b/providers/src/airflow/providers/sftp/decorators/sensors/__init__.py similarity index 100% rename from airflow/providers/sftp/decorators/sensors/__init__.py rename to providers/src/airflow/providers/sftp/decorators/sensors/__init__.py diff --git a/airflow/providers/sftp/decorators/sensors/sftp.py b/providers/src/airflow/providers/sftp/decorators/sensors/sftp.py similarity index 100% rename from airflow/providers/sftp/decorators/sensors/sftp.py rename to providers/src/airflow/providers/sftp/decorators/sensors/sftp.py diff --git a/airflow/providers/sftp/hooks/__init__.py b/providers/src/airflow/providers/sftp/hooks/__init__.py similarity index 100% rename from airflow/providers/sftp/hooks/__init__.py rename to providers/src/airflow/providers/sftp/hooks/__init__.py diff --git a/airflow/providers/sftp/hooks/sftp.py b/providers/src/airflow/providers/sftp/hooks/sftp.py similarity index 100% rename from airflow/providers/sftp/hooks/sftp.py rename to providers/src/airflow/providers/sftp/hooks/sftp.py diff --git a/airflow/providers/sftp/operators/__init__.py b/providers/src/airflow/providers/sftp/operators/__init__.py similarity index 100% rename from airflow/providers/sftp/operators/__init__.py rename to providers/src/airflow/providers/sftp/operators/__init__.py diff --git a/airflow/providers/sftp/operators/sftp.py b/providers/src/airflow/providers/sftp/operators/sftp.py similarity index 100% rename from airflow/providers/sftp/operators/sftp.py rename to providers/src/airflow/providers/sftp/operators/sftp.py diff --git a/airflow/providers/sftp/provider.yaml b/providers/src/airflow/providers/sftp/provider.yaml similarity index 100% rename from airflow/providers/sftp/provider.yaml rename to providers/src/airflow/providers/sftp/provider.yaml diff --git a/airflow/providers/sftp/sensors/__init__.py b/providers/src/airflow/providers/sftp/sensors/__init__.py similarity index 100% rename from airflow/providers/sftp/sensors/__init__.py rename to providers/src/airflow/providers/sftp/sensors/__init__.py diff --git a/airflow/providers/sftp/sensors/sftp.py b/providers/src/airflow/providers/sftp/sensors/sftp.py similarity index 100% rename from airflow/providers/sftp/sensors/sftp.py rename to providers/src/airflow/providers/sftp/sensors/sftp.py diff --git a/airflow/providers/sftp/triggers/__init__.py b/providers/src/airflow/providers/sftp/triggers/__init__.py similarity index 100% rename from airflow/providers/sftp/triggers/__init__.py rename to 
providers/src/airflow/providers/sftp/triggers/__init__.py diff --git a/airflow/providers/sftp/triggers/sftp.py b/providers/src/airflow/providers/sftp/triggers/sftp.py similarity index 100% rename from airflow/providers/sftp/triggers/sftp.py rename to providers/src/airflow/providers/sftp/triggers/sftp.py diff --git a/airflow/providers/singularity/.latest-doc-only-change.txt b/providers/src/airflow/providers/singularity/.latest-doc-only-change.txt similarity index 100% rename from airflow/providers/singularity/.latest-doc-only-change.txt rename to providers/src/airflow/providers/singularity/.latest-doc-only-change.txt diff --git a/airflow/providers/singularity/CHANGELOG.rst b/providers/src/airflow/providers/singularity/CHANGELOG.rst similarity index 100% rename from airflow/providers/singularity/CHANGELOG.rst rename to providers/src/airflow/providers/singularity/CHANGELOG.rst diff --git a/airflow/providers/singularity/__init__.py b/providers/src/airflow/providers/singularity/__init__.py similarity index 100% rename from airflow/providers/singularity/__init__.py rename to providers/src/airflow/providers/singularity/__init__.py diff --git a/airflow/providers/segment/hooks/__init__.py b/providers/src/airflow/providers/singularity/operators/__init__.py similarity index 100% rename from airflow/providers/segment/hooks/__init__.py rename to providers/src/airflow/providers/singularity/operators/__init__.py diff --git a/airflow/providers/singularity/operators/singularity.py b/providers/src/airflow/providers/singularity/operators/singularity.py similarity index 100% rename from airflow/providers/singularity/operators/singularity.py rename to providers/src/airflow/providers/singularity/operators/singularity.py diff --git a/airflow/providers/singularity/provider.yaml b/providers/src/airflow/providers/singularity/provider.yaml similarity index 100% rename from airflow/providers/singularity/provider.yaml rename to providers/src/airflow/providers/singularity/provider.yaml diff --git a/airflow/providers/slack/.latest-doc-only-change.txt b/providers/src/airflow/providers/slack/.latest-doc-only-change.txt similarity index 100% rename from airflow/providers/slack/.latest-doc-only-change.txt rename to providers/src/airflow/providers/slack/.latest-doc-only-change.txt diff --git a/airflow/providers/slack/CHANGELOG.rst b/providers/src/airflow/providers/slack/CHANGELOG.rst similarity index 100% rename from airflow/providers/slack/CHANGELOG.rst rename to providers/src/airflow/providers/slack/CHANGELOG.rst diff --git a/airflow/providers/slack/__init__.py b/providers/src/airflow/providers/slack/__init__.py similarity index 100% rename from airflow/providers/slack/__init__.py rename to providers/src/airflow/providers/slack/__init__.py diff --git a/airflow/providers/segment/operators/__init__.py b/providers/src/airflow/providers/slack/hooks/__init__.py similarity index 100% rename from airflow/providers/segment/operators/__init__.py rename to providers/src/airflow/providers/slack/hooks/__init__.py diff --git a/airflow/providers/slack/hooks/slack.py b/providers/src/airflow/providers/slack/hooks/slack.py similarity index 100% rename from airflow/providers/slack/hooks/slack.py rename to providers/src/airflow/providers/slack/hooks/slack.py diff --git a/airflow/providers/slack/hooks/slack_webhook.py b/providers/src/airflow/providers/slack/hooks/slack_webhook.py similarity index 100% rename from airflow/providers/slack/hooks/slack_webhook.py rename to providers/src/airflow/providers/slack/hooks/slack_webhook.py diff --git 
a/airflow/providers/slack/notifications/__init__.py b/providers/src/airflow/providers/slack/notifications/__init__.py similarity index 100% rename from airflow/providers/slack/notifications/__init__.py rename to providers/src/airflow/providers/slack/notifications/__init__.py diff --git a/airflow/providers/slack/notifications/slack.py b/providers/src/airflow/providers/slack/notifications/slack.py similarity index 100% rename from airflow/providers/slack/notifications/slack.py rename to providers/src/airflow/providers/slack/notifications/slack.py diff --git a/airflow/providers/slack/notifications/slack_notifier.py b/providers/src/airflow/providers/slack/notifications/slack_notifier.py similarity index 100% rename from airflow/providers/slack/notifications/slack_notifier.py rename to providers/src/airflow/providers/slack/notifications/slack_notifier.py diff --git a/airflow/providers/slack/notifications/slack_webhook.py b/providers/src/airflow/providers/slack/notifications/slack_webhook.py similarity index 100% rename from airflow/providers/slack/notifications/slack_webhook.py rename to providers/src/airflow/providers/slack/notifications/slack_webhook.py diff --git a/airflow/providers/singularity/operators/__init__.py b/providers/src/airflow/providers/slack/operators/__init__.py similarity index 100% rename from airflow/providers/singularity/operators/__init__.py rename to providers/src/airflow/providers/slack/operators/__init__.py diff --git a/airflow/providers/slack/operators/slack.py b/providers/src/airflow/providers/slack/operators/slack.py similarity index 100% rename from airflow/providers/slack/operators/slack.py rename to providers/src/airflow/providers/slack/operators/slack.py diff --git a/airflow/providers/slack/operators/slack_webhook.py b/providers/src/airflow/providers/slack/operators/slack_webhook.py similarity index 100% rename from airflow/providers/slack/operators/slack_webhook.py rename to providers/src/airflow/providers/slack/operators/slack_webhook.py diff --git a/airflow/providers/slack/provider.yaml b/providers/src/airflow/providers/slack/provider.yaml similarity index 100% rename from airflow/providers/slack/provider.yaml rename to providers/src/airflow/providers/slack/provider.yaml diff --git a/airflow/providers/slack/transfers/__init__.py b/providers/src/airflow/providers/slack/transfers/__init__.py similarity index 100% rename from airflow/providers/slack/transfers/__init__.py rename to providers/src/airflow/providers/slack/transfers/__init__.py diff --git a/airflow/providers/slack/transfers/base_sql_to_slack.py b/providers/src/airflow/providers/slack/transfers/base_sql_to_slack.py similarity index 100% rename from airflow/providers/slack/transfers/base_sql_to_slack.py rename to providers/src/airflow/providers/slack/transfers/base_sql_to_slack.py diff --git a/airflow/providers/slack/transfers/sql_to_slack.py b/providers/src/airflow/providers/slack/transfers/sql_to_slack.py similarity index 100% rename from airflow/providers/slack/transfers/sql_to_slack.py rename to providers/src/airflow/providers/slack/transfers/sql_to_slack.py diff --git a/airflow/providers/slack/transfers/sql_to_slack_webhook.py b/providers/src/airflow/providers/slack/transfers/sql_to_slack_webhook.py similarity index 100% rename from airflow/providers/slack/transfers/sql_to_slack_webhook.py rename to providers/src/airflow/providers/slack/transfers/sql_to_slack_webhook.py diff --git a/airflow/providers/slack/utils/__init__.py b/providers/src/airflow/providers/slack/utils/__init__.py similarity index 
100% rename from airflow/providers/slack/utils/__init__.py rename to providers/src/airflow/providers/slack/utils/__init__.py diff --git a/airflow/providers/smtp/.latest-doc-only-change.txt b/providers/src/airflow/providers/smtp/.latest-doc-only-change.txt similarity index 100% rename from airflow/providers/smtp/.latest-doc-only-change.txt rename to providers/src/airflow/providers/smtp/.latest-doc-only-change.txt diff --git a/airflow/providers/smtp/CHANGELOG.rst b/providers/src/airflow/providers/smtp/CHANGELOG.rst similarity index 100% rename from airflow/providers/smtp/CHANGELOG.rst rename to providers/src/airflow/providers/smtp/CHANGELOG.rst diff --git a/airflow/providers/smtp/__init__.py b/providers/src/airflow/providers/smtp/__init__.py similarity index 100% rename from airflow/providers/smtp/__init__.py rename to providers/src/airflow/providers/smtp/__init__.py diff --git a/airflow/providers/slack/hooks/__init__.py b/providers/src/airflow/providers/smtp/hooks/__init__.py similarity index 100% rename from airflow/providers/slack/hooks/__init__.py rename to providers/src/airflow/providers/smtp/hooks/__init__.py diff --git a/airflow/providers/smtp/hooks/smtp.py b/providers/src/airflow/providers/smtp/hooks/smtp.py similarity index 100% rename from airflow/providers/smtp/hooks/smtp.py rename to providers/src/airflow/providers/smtp/hooks/smtp.py diff --git a/airflow/providers/smtp/notifications/__init__.py b/providers/src/airflow/providers/smtp/notifications/__init__.py similarity index 100% rename from airflow/providers/smtp/notifications/__init__.py rename to providers/src/airflow/providers/smtp/notifications/__init__.py diff --git a/airflow/providers/smtp/notifications/smtp.py b/providers/src/airflow/providers/smtp/notifications/smtp.py similarity index 100% rename from airflow/providers/smtp/notifications/smtp.py rename to providers/src/airflow/providers/smtp/notifications/smtp.py diff --git a/airflow/providers/smtp/notifications/templates/__init__.py b/providers/src/airflow/providers/smtp/notifications/templates/__init__.py similarity index 100% rename from airflow/providers/smtp/notifications/templates/__init__.py rename to providers/src/airflow/providers/smtp/notifications/templates/__init__.py diff --git a/airflow/providers/smtp/notifications/templates/email.html b/providers/src/airflow/providers/smtp/notifications/templates/email.html similarity index 100% rename from airflow/providers/smtp/notifications/templates/email.html rename to providers/src/airflow/providers/smtp/notifications/templates/email.html diff --git a/airflow/providers/smtp/notifications/templates/email_subject.jinja2 b/providers/src/airflow/providers/smtp/notifications/templates/email_subject.jinja2 similarity index 100% rename from airflow/providers/smtp/notifications/templates/email_subject.jinja2 rename to providers/src/airflow/providers/smtp/notifications/templates/email_subject.jinja2 diff --git a/airflow/providers/slack/operators/__init__.py b/providers/src/airflow/providers/smtp/operators/__init__.py similarity index 100% rename from airflow/providers/slack/operators/__init__.py rename to providers/src/airflow/providers/smtp/operators/__init__.py diff --git a/airflow/providers/smtp/operators/smtp.py b/providers/src/airflow/providers/smtp/operators/smtp.py similarity index 100% rename from airflow/providers/smtp/operators/smtp.py rename to providers/src/airflow/providers/smtp/operators/smtp.py diff --git a/airflow/providers/smtp/provider.yaml b/providers/src/airflow/providers/smtp/provider.yaml similarity 
index 100% rename from airflow/providers/smtp/provider.yaml rename to providers/src/airflow/providers/smtp/provider.yaml diff --git a/airflow/providers/snowflake/.latest-doc-only-change.txt b/providers/src/airflow/providers/snowflake/.latest-doc-only-change.txt similarity index 100% rename from airflow/providers/snowflake/.latest-doc-only-change.txt rename to providers/src/airflow/providers/snowflake/.latest-doc-only-change.txt diff --git a/airflow/providers/snowflake/CHANGELOG.rst b/providers/src/airflow/providers/snowflake/CHANGELOG.rst similarity index 100% rename from airflow/providers/snowflake/CHANGELOG.rst rename to providers/src/airflow/providers/snowflake/CHANGELOG.rst diff --git a/airflow/providers/snowflake/__init__.py b/providers/src/airflow/providers/snowflake/__init__.py similarity index 100% rename from airflow/providers/snowflake/__init__.py rename to providers/src/airflow/providers/snowflake/__init__.py diff --git a/airflow/providers/snowflake/decorators/__init__.py b/providers/src/airflow/providers/snowflake/decorators/__init__.py similarity index 100% rename from airflow/providers/snowflake/decorators/__init__.py rename to providers/src/airflow/providers/snowflake/decorators/__init__.py diff --git a/airflow/providers/snowflake/decorators/snowpark.py b/providers/src/airflow/providers/snowflake/decorators/snowpark.py similarity index 100% rename from airflow/providers/snowflake/decorators/snowpark.py rename to providers/src/airflow/providers/snowflake/decorators/snowpark.py diff --git a/airflow/providers/snowflake/hooks/__init__.py b/providers/src/airflow/providers/snowflake/hooks/__init__.py similarity index 100% rename from airflow/providers/snowflake/hooks/__init__.py rename to providers/src/airflow/providers/snowflake/hooks/__init__.py diff --git a/airflow/providers/snowflake/hooks/snowflake.py b/providers/src/airflow/providers/snowflake/hooks/snowflake.py similarity index 100% rename from airflow/providers/snowflake/hooks/snowflake.py rename to providers/src/airflow/providers/snowflake/hooks/snowflake.py diff --git a/airflow/providers/snowflake/hooks/snowflake_sql_api.py b/providers/src/airflow/providers/snowflake/hooks/snowflake_sql_api.py similarity index 99% rename from airflow/providers/snowflake/hooks/snowflake_sql_api.py rename to providers/src/airflow/providers/snowflake/hooks/snowflake_sql_api.py index 5143f573c263..8770492d06ec 100644 --- a/airflow/providers/snowflake/hooks/snowflake_sql_api.py +++ b/providers/src/airflow/providers/snowflake/hooks/snowflake_sql_api.py @@ -327,9 +327,10 @@ async def get_sql_api_query_status_async(self, query_id: str) -> dict[str, str |
         """
         self.log.info("Retrieving status for query id %s", query_id)
         header, params, url = self.get_request_url_header_params(query_id)
-        async with aiohttp.ClientSession(headers=header) as session, session.get(
-            url, params=params
-        ) as response:
+        async with (
+            aiohttp.ClientSession(headers=header) as session,
+            session.get(url, params=params) as response,
+        ):
             status_code = response.status
             resp = await response.json()
             return self._process_response(status_code, resp)
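The snowflake_sql_api.py hunk above is the only non-rename change in this stretch of the diff: it regroups two chained async context managers into the parenthesized multi-context-manager form introduced in Python 3.10, with no change in behavior. A minimal, self-contained sketch of the two equivalent spellings (illustrative names, not the hook's API; assumes aiohttp is installed):

import aiohttp  # third-party HTTP client, as used by the Snowflake hook


async def fetch_status_legacy(url: str, headers: dict, params: dict) -> int:
    # Pre-3.10 spelling: the context managers are chained after
    # "async with", so a long header has to wrap inside call parentheses.
    async with aiohttp.ClientSession(headers=headers) as session, session.get(
        url, params=params
    ) as response:
        return response.status


async def fetch_status_py310(url: str, headers: dict, params: dict) -> int:
    # Python 3.10+ spelling: the same two context managers, grouped in
    # parentheses so each sits on its own line with a trailing comma.
    async with (
        aiohttp.ClientSession(headers=headers) as session,
        session.get(url, params=params) as response,
    ):
        return response.status

Both coroutines acquire and release the session and response identically; the parenthesized form only changes how the statement is laid out, which is why the hunk replaces three lines with four.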
diff --git a/airflow/providers/snowflake/operators/__init__.py b/providers/src/airflow/providers/snowflake/operators/__init__.py similarity index 100% rename from airflow/providers/snowflake/operators/__init__.py rename to providers/src/airflow/providers/snowflake/operators/__init__.py diff --git a/airflow/providers/snowflake/operators/snowflake.py b/providers/src/airflow/providers/snowflake/operators/snowflake.py similarity index 100% rename from airflow/providers/snowflake/operators/snowflake.py rename to providers/src/airflow/providers/snowflake/operators/snowflake.py diff --git a/airflow/providers/snowflake/operators/snowpark.py b/providers/src/airflow/providers/snowflake/operators/snowpark.py similarity index 100% rename from airflow/providers/snowflake/operators/snowpark.py rename to providers/src/airflow/providers/snowflake/operators/snowpark.py diff --git a/airflow/providers/snowflake/provider.yaml b/providers/src/airflow/providers/snowflake/provider.yaml similarity index 100% rename from airflow/providers/snowflake/provider.yaml rename to providers/src/airflow/providers/snowflake/provider.yaml diff --git a/airflow/providers/snowflake/transfers/__init__.py b/providers/src/airflow/providers/snowflake/transfers/__init__.py similarity index 100% rename from airflow/providers/snowflake/transfers/__init__.py rename to providers/src/airflow/providers/snowflake/transfers/__init__.py diff --git a/airflow/providers/snowflake/transfers/copy_into_snowflake.py b/providers/src/airflow/providers/snowflake/transfers/copy_into_snowflake.py similarity index 100% rename from airflow/providers/snowflake/transfers/copy_into_snowflake.py rename to providers/src/airflow/providers/snowflake/transfers/copy_into_snowflake.py diff --git a/airflow/providers/snowflake/triggers/__init__.py b/providers/src/airflow/providers/snowflake/triggers/__init__.py similarity index 100% rename from airflow/providers/snowflake/triggers/__init__.py rename to providers/src/airflow/providers/snowflake/triggers/__init__.py diff --git a/airflow/providers/snowflake/triggers/snowflake_trigger.py b/providers/src/airflow/providers/snowflake/triggers/snowflake_trigger.py similarity index 100% rename from airflow/providers/snowflake/triggers/snowflake_trigger.py rename to providers/src/airflow/providers/snowflake/triggers/snowflake_trigger.py diff --git a/airflow/providers/snowflake/utils/__init__.py b/providers/src/airflow/providers/snowflake/utils/__init__.py similarity index 100% rename from airflow/providers/snowflake/utils/__init__.py rename to providers/src/airflow/providers/snowflake/utils/__init__.py diff --git a/airflow/providers/snowflake/utils/common.py b/providers/src/airflow/providers/snowflake/utils/common.py similarity index 100% rename from airflow/providers/snowflake/utils/common.py rename to providers/src/airflow/providers/snowflake/utils/common.py diff --git a/airflow/providers/snowflake/utils/openlineage.py b/providers/src/airflow/providers/snowflake/utils/openlineage.py similarity index 100% rename from airflow/providers/snowflake/utils/openlineage.py rename to providers/src/airflow/providers/snowflake/utils/openlineage.py diff --git a/airflow/providers/snowflake/utils/snowpark.py b/providers/src/airflow/providers/snowflake/utils/snowpark.py similarity index 100% rename from airflow/providers/snowflake/utils/snowpark.py rename to providers/src/airflow/providers/snowflake/utils/snowpark.py diff --git a/airflow/providers/snowflake/utils/sql_api_generate_jwt.py b/providers/src/airflow/providers/snowflake/utils/sql_api_generate_jwt.py similarity index 100% rename from airflow/providers/snowflake/utils/sql_api_generate_jwt.py rename to providers/src/airflow/providers/snowflake/utils/sql_api_generate_jwt.py diff --git a/airflow/providers/sqlite/.latest-doc-only-change.txt b/providers/src/airflow/providers/sqlite/.latest-doc-only-change.txt similarity index 100% rename from airflow/providers/sqlite/.latest-doc-only-change.txt rename to
providers/src/airflow/providers/sqlite/.latest-doc-only-change.txt diff --git a/airflow/providers/sqlite/CHANGELOG.rst b/providers/src/airflow/providers/sqlite/CHANGELOG.rst similarity index 100% rename from airflow/providers/sqlite/CHANGELOG.rst rename to providers/src/airflow/providers/sqlite/CHANGELOG.rst diff --git a/airflow/providers/sqlite/__init__.py b/providers/src/airflow/providers/sqlite/__init__.py similarity index 100% rename from airflow/providers/sqlite/__init__.py rename to providers/src/airflow/providers/sqlite/__init__.py diff --git a/airflow/providers/smtp/hooks/__init__.py b/providers/src/airflow/providers/sqlite/hooks/__init__.py similarity index 100% rename from airflow/providers/smtp/hooks/__init__.py rename to providers/src/airflow/providers/sqlite/hooks/__init__.py diff --git a/airflow/providers/sqlite/hooks/sqlite.py b/providers/src/airflow/providers/sqlite/hooks/sqlite.py similarity index 100% rename from airflow/providers/sqlite/hooks/sqlite.py rename to providers/src/airflow/providers/sqlite/hooks/sqlite.py diff --git a/airflow/providers/smtp/operators/__init__.py b/providers/src/airflow/providers/sqlite/operators/__init__.py similarity index 100% rename from airflow/providers/smtp/operators/__init__.py rename to providers/src/airflow/providers/sqlite/operators/__init__.py diff --git a/airflow/providers/sqlite/operators/sqlite.py b/providers/src/airflow/providers/sqlite/operators/sqlite.py similarity index 100% rename from airflow/providers/sqlite/operators/sqlite.py rename to providers/src/airflow/providers/sqlite/operators/sqlite.py diff --git a/airflow/providers/sqlite/provider.yaml b/providers/src/airflow/providers/sqlite/provider.yaml similarity index 100% rename from airflow/providers/sqlite/provider.yaml rename to providers/src/airflow/providers/sqlite/provider.yaml diff --git a/airflow/providers/ssh/.latest-doc-only-change.txt b/providers/src/airflow/providers/ssh/.latest-doc-only-change.txt similarity index 100% rename from airflow/providers/ssh/.latest-doc-only-change.txt rename to providers/src/airflow/providers/ssh/.latest-doc-only-change.txt diff --git a/airflow/providers/ssh/CHANGELOG.rst b/providers/src/airflow/providers/ssh/CHANGELOG.rst similarity index 100% rename from airflow/providers/ssh/CHANGELOG.rst rename to providers/src/airflow/providers/ssh/CHANGELOG.rst diff --git a/airflow/providers/ssh/__init__.py b/providers/src/airflow/providers/ssh/__init__.py similarity index 100% rename from airflow/providers/ssh/__init__.py rename to providers/src/airflow/providers/ssh/__init__.py diff --git a/airflow/providers/sqlite/hooks/__init__.py b/providers/src/airflow/providers/ssh/hooks/__init__.py similarity index 100% rename from airflow/providers/sqlite/hooks/__init__.py rename to providers/src/airflow/providers/ssh/hooks/__init__.py diff --git a/airflow/providers/ssh/hooks/ssh.py b/providers/src/airflow/providers/ssh/hooks/ssh.py similarity index 100% rename from airflow/providers/ssh/hooks/ssh.py rename to providers/src/airflow/providers/ssh/hooks/ssh.py diff --git a/airflow/providers/sqlite/operators/__init__.py b/providers/src/airflow/providers/ssh/operators/__init__.py similarity index 100% rename from airflow/providers/sqlite/operators/__init__.py rename to providers/src/airflow/providers/ssh/operators/__init__.py diff --git a/airflow/providers/ssh/operators/ssh.py b/providers/src/airflow/providers/ssh/operators/ssh.py similarity index 100% rename from airflow/providers/ssh/operators/ssh.py rename to 
providers/src/airflow/providers/ssh/operators/ssh.py diff --git a/airflow/providers/ssh/provider.yaml b/providers/src/airflow/providers/ssh/provider.yaml similarity index 100% rename from airflow/providers/ssh/provider.yaml rename to providers/src/airflow/providers/ssh/provider.yaml diff --git a/airflow/providers/standard/CHANGELOG.rst b/providers/src/airflow/providers/standard/CHANGELOG.rst similarity index 100% rename from airflow/providers/standard/CHANGELOG.rst rename to providers/src/airflow/providers/standard/CHANGELOG.rst diff --git a/airflow/providers/ssh/hooks/__init__.py b/providers/src/airflow/providers/standard/__init__.py similarity index 100% rename from airflow/providers/ssh/hooks/__init__.py rename to providers/src/airflow/providers/standard/__init__.py diff --git a/airflow/providers/standard/operators/__init__.py b/providers/src/airflow/providers/standard/operators/__init__.py similarity index 100% rename from airflow/providers/standard/operators/__init__.py rename to providers/src/airflow/providers/standard/operators/__init__.py diff --git a/airflow/providers/standard/operators/bash.py b/providers/src/airflow/providers/standard/operators/bash.py similarity index 100% rename from airflow/providers/standard/operators/bash.py rename to providers/src/airflow/providers/standard/operators/bash.py diff --git a/airflow/providers/standard/operators/datetime.py b/providers/src/airflow/providers/standard/operators/datetime.py similarity index 100% rename from airflow/providers/standard/operators/datetime.py rename to providers/src/airflow/providers/standard/operators/datetime.py diff --git a/airflow/providers/standard/operators/weekday.py b/providers/src/airflow/providers/standard/operators/weekday.py similarity index 100% rename from airflow/providers/standard/operators/weekday.py rename to providers/src/airflow/providers/standard/operators/weekday.py diff --git a/airflow/providers/standard/provider.yaml b/providers/src/airflow/providers/standard/provider.yaml similarity index 100% rename from airflow/providers/standard/provider.yaml rename to providers/src/airflow/providers/standard/provider.yaml diff --git a/airflow/providers/standard/sensors/__init__.py b/providers/src/airflow/providers/standard/sensors/__init__.py similarity index 100% rename from airflow/providers/standard/sensors/__init__.py rename to providers/src/airflow/providers/standard/sensors/__init__.py diff --git a/airflow/providers/standard/sensors/bash.py b/providers/src/airflow/providers/standard/sensors/bash.py similarity index 100% rename from airflow/providers/standard/sensors/bash.py rename to providers/src/airflow/providers/standard/sensors/bash.py diff --git a/airflow/providers/standard/sensors/date_time.py b/providers/src/airflow/providers/standard/sensors/date_time.py similarity index 100% rename from airflow/providers/standard/sensors/date_time.py rename to providers/src/airflow/providers/standard/sensors/date_time.py diff --git a/airflow/providers/standard/sensors/time.py b/providers/src/airflow/providers/standard/sensors/time.py similarity index 100% rename from airflow/providers/standard/sensors/time.py rename to providers/src/airflow/providers/standard/sensors/time.py diff --git a/airflow/providers/standard/sensors/time_delta.py b/providers/src/airflow/providers/standard/sensors/time_delta.py similarity index 100% rename from airflow/providers/standard/sensors/time_delta.py rename to providers/src/airflow/providers/standard/sensors/time_delta.py diff --git 
a/airflow/providers/standard/sensors/weekday.py b/providers/src/airflow/providers/standard/sensors/weekday.py similarity index 100% rename from airflow/providers/standard/sensors/weekday.py rename to providers/src/airflow/providers/standard/sensors/weekday.py diff --git a/airflow/providers/tableau/.latest-doc-only-change.txt b/providers/src/airflow/providers/tableau/.latest-doc-only-change.txt similarity index 100% rename from airflow/providers/tableau/.latest-doc-only-change.txt rename to providers/src/airflow/providers/tableau/.latest-doc-only-change.txt diff --git a/airflow/providers/tableau/CHANGELOG.rst b/providers/src/airflow/providers/tableau/CHANGELOG.rst similarity index 100% rename from airflow/providers/tableau/CHANGELOG.rst rename to providers/src/airflow/providers/tableau/CHANGELOG.rst diff --git a/airflow/providers/tableau/__init__.py b/providers/src/airflow/providers/tableau/__init__.py similarity index 100% rename from airflow/providers/tableau/__init__.py rename to providers/src/airflow/providers/tableau/__init__.py diff --git a/airflow/providers/ssh/operators/__init__.py b/providers/src/airflow/providers/tableau/hooks/__init__.py similarity index 100% rename from airflow/providers/ssh/operators/__init__.py rename to providers/src/airflow/providers/tableau/hooks/__init__.py diff --git a/airflow/providers/tableau/hooks/tableau.py b/providers/src/airflow/providers/tableau/hooks/tableau.py similarity index 100% rename from airflow/providers/tableau/hooks/tableau.py rename to providers/src/airflow/providers/tableau/hooks/tableau.py diff --git a/airflow/providers/tableau/operators/__init__.py b/providers/src/airflow/providers/tableau/operators/__init__.py similarity index 100% rename from airflow/providers/tableau/operators/__init__.py rename to providers/src/airflow/providers/tableau/operators/__init__.py diff --git a/airflow/providers/tableau/operators/tableau.py b/providers/src/airflow/providers/tableau/operators/tableau.py similarity index 100% rename from airflow/providers/tableau/operators/tableau.py rename to providers/src/airflow/providers/tableau/operators/tableau.py diff --git a/airflow/providers/tableau/provider.yaml b/providers/src/airflow/providers/tableau/provider.yaml similarity index 100% rename from airflow/providers/tableau/provider.yaml rename to providers/src/airflow/providers/tableau/provider.yaml diff --git a/airflow/providers/tableau/sensors/__init__.py b/providers/src/airflow/providers/tableau/sensors/__init__.py similarity index 100% rename from airflow/providers/tableau/sensors/__init__.py rename to providers/src/airflow/providers/tableau/sensors/__init__.py diff --git a/airflow/providers/tableau/sensors/tableau.py b/providers/src/airflow/providers/tableau/sensors/tableau.py similarity index 100% rename from airflow/providers/tableau/sensors/tableau.py rename to providers/src/airflow/providers/tableau/sensors/tableau.py diff --git a/airflow/providers/telegram/.latest-doc-only-change.txt b/providers/src/airflow/providers/telegram/.latest-doc-only-change.txt similarity index 100% rename from airflow/providers/telegram/.latest-doc-only-change.txt rename to providers/src/airflow/providers/telegram/.latest-doc-only-change.txt diff --git a/airflow/providers/telegram/CHANGELOG.rst b/providers/src/airflow/providers/telegram/CHANGELOG.rst similarity index 100% rename from airflow/providers/telegram/CHANGELOG.rst rename to providers/src/airflow/providers/telegram/CHANGELOG.rst diff --git a/airflow/providers/telegram/__init__.py 
b/providers/src/airflow/providers/telegram/__init__.py similarity index 100% rename from airflow/providers/telegram/__init__.py rename to providers/src/airflow/providers/telegram/__init__.py diff --git a/airflow/providers/standard/__init__.py b/providers/src/airflow/providers/telegram/hooks/__init__.py similarity index 100% rename from airflow/providers/standard/__init__.py rename to providers/src/airflow/providers/telegram/hooks/__init__.py diff --git a/airflow/providers/telegram/hooks/telegram.py b/providers/src/airflow/providers/telegram/hooks/telegram.py similarity index 100% rename from airflow/providers/telegram/hooks/telegram.py rename to providers/src/airflow/providers/telegram/hooks/telegram.py diff --git a/airflow/providers/tableau/hooks/__init__.py b/providers/src/airflow/providers/telegram/operators/__init__.py similarity index 100% rename from airflow/providers/tableau/hooks/__init__.py rename to providers/src/airflow/providers/telegram/operators/__init__.py diff --git a/airflow/providers/telegram/operators/telegram.py b/providers/src/airflow/providers/telegram/operators/telegram.py similarity index 100% rename from airflow/providers/telegram/operators/telegram.py rename to providers/src/airflow/providers/telegram/operators/telegram.py diff --git a/airflow/providers/telegram/provider.yaml b/providers/src/airflow/providers/telegram/provider.yaml similarity index 100% rename from airflow/providers/telegram/provider.yaml rename to providers/src/airflow/providers/telegram/provider.yaml diff --git a/airflow/providers/teradata/.latest-doc-only-change.txt b/providers/src/airflow/providers/teradata/.latest-doc-only-change.txt similarity index 100% rename from airflow/providers/teradata/.latest-doc-only-change.txt rename to providers/src/airflow/providers/teradata/.latest-doc-only-change.txt diff --git a/airflow/providers/teradata/CHANGELOG.rst b/providers/src/airflow/providers/teradata/CHANGELOG.rst similarity index 100% rename from airflow/providers/teradata/CHANGELOG.rst rename to providers/src/airflow/providers/teradata/CHANGELOG.rst diff --git a/airflow/providers/teradata/__init__.py b/providers/src/airflow/providers/teradata/__init__.py similarity index 100% rename from airflow/providers/teradata/__init__.py rename to providers/src/airflow/providers/teradata/__init__.py diff --git a/airflow/providers/telegram/hooks/__init__.py b/providers/src/airflow/providers/teradata/hooks/__init__.py similarity index 100% rename from airflow/providers/telegram/hooks/__init__.py rename to providers/src/airflow/providers/teradata/hooks/__init__.py diff --git a/airflow/providers/teradata/hooks/teradata.py b/providers/src/airflow/providers/teradata/hooks/teradata.py similarity index 100% rename from airflow/providers/teradata/hooks/teradata.py rename to providers/src/airflow/providers/teradata/hooks/teradata.py diff --git a/airflow/providers/telegram/operators/__init__.py b/providers/src/airflow/providers/teradata/operators/__init__.py similarity index 100% rename from airflow/providers/telegram/operators/__init__.py rename to providers/src/airflow/providers/teradata/operators/__init__.py diff --git a/airflow/providers/teradata/operators/teradata.py b/providers/src/airflow/providers/teradata/operators/teradata.py similarity index 100% rename from airflow/providers/teradata/operators/teradata.py rename to providers/src/airflow/providers/teradata/operators/teradata.py diff --git a/airflow/providers/teradata/operators/teradata_compute_cluster.py 
b/providers/src/airflow/providers/teradata/operators/teradata_compute_cluster.py similarity index 100% rename from airflow/providers/teradata/operators/teradata_compute_cluster.py rename to providers/src/airflow/providers/teradata/operators/teradata_compute_cluster.py diff --git a/airflow/providers/teradata/provider.yaml b/providers/src/airflow/providers/teradata/provider.yaml similarity index 100% rename from airflow/providers/teradata/provider.yaml rename to providers/src/airflow/providers/teradata/provider.yaml diff --git a/airflow/providers/teradata/transfers/__init__.py b/providers/src/airflow/providers/teradata/transfers/__init__.py similarity index 100% rename from airflow/providers/teradata/transfers/__init__.py rename to providers/src/airflow/providers/teradata/transfers/__init__.py diff --git a/airflow/providers/teradata/transfers/azure_blob_to_teradata.py b/providers/src/airflow/providers/teradata/transfers/azure_blob_to_teradata.py similarity index 100% rename from airflow/providers/teradata/transfers/azure_blob_to_teradata.py rename to providers/src/airflow/providers/teradata/transfers/azure_blob_to_teradata.py diff --git a/airflow/providers/teradata/transfers/s3_to_teradata.py b/providers/src/airflow/providers/teradata/transfers/s3_to_teradata.py similarity index 100% rename from airflow/providers/teradata/transfers/s3_to_teradata.py rename to providers/src/airflow/providers/teradata/transfers/s3_to_teradata.py diff --git a/airflow/providers/teradata/transfers/teradata_to_teradata.py b/providers/src/airflow/providers/teradata/transfers/teradata_to_teradata.py similarity index 100% rename from airflow/providers/teradata/transfers/teradata_to_teradata.py rename to providers/src/airflow/providers/teradata/transfers/teradata_to_teradata.py diff --git a/airflow/providers/teradata/triggers/__init__.py b/providers/src/airflow/providers/teradata/triggers/__init__.py similarity index 100% rename from airflow/providers/teradata/triggers/__init__.py rename to providers/src/airflow/providers/teradata/triggers/__init__.py diff --git a/airflow/providers/teradata/triggers/teradata_compute_cluster.py b/providers/src/airflow/providers/teradata/triggers/teradata_compute_cluster.py similarity index 100% rename from airflow/providers/teradata/triggers/teradata_compute_cluster.py rename to providers/src/airflow/providers/teradata/triggers/teradata_compute_cluster.py diff --git a/airflow/providers/teradata/utils/__init__.py b/providers/src/airflow/providers/teradata/utils/__init__.py similarity index 100% rename from airflow/providers/teradata/utils/__init__.py rename to providers/src/airflow/providers/teradata/utils/__init__.py diff --git a/airflow/providers/teradata/utils/constants.py b/providers/src/airflow/providers/teradata/utils/constants.py similarity index 100% rename from airflow/providers/teradata/utils/constants.py rename to providers/src/airflow/providers/teradata/utils/constants.py diff --git a/airflow/providers/trino/.latest-doc-only-change.txt b/providers/src/airflow/providers/trino/.latest-doc-only-change.txt similarity index 100% rename from airflow/providers/trino/.latest-doc-only-change.txt rename to providers/src/airflow/providers/trino/.latest-doc-only-change.txt diff --git a/airflow/providers/trino/CHANGELOG.rst b/providers/src/airflow/providers/trino/CHANGELOG.rst similarity index 100% rename from airflow/providers/trino/CHANGELOG.rst rename to providers/src/airflow/providers/trino/CHANGELOG.rst diff --git a/airflow/providers/trino/__init__.py 
b/providers/src/airflow/providers/trino/__init__.py similarity index 100% rename from airflow/providers/trino/__init__.py rename to providers/src/airflow/providers/trino/__init__.py diff --git a/airflow/providers/trino/assets/__init__.py b/providers/src/airflow/providers/trino/assets/__init__.py similarity index 100% rename from airflow/providers/trino/assets/__init__.py rename to providers/src/airflow/providers/trino/assets/__init__.py diff --git a/airflow/providers/trino/assets/trino.py b/providers/src/airflow/providers/trino/assets/trino.py similarity index 100% rename from airflow/providers/trino/assets/trino.py rename to providers/src/airflow/providers/trino/assets/trino.py diff --git a/airflow/providers/teradata/hooks/__init__.py b/providers/src/airflow/providers/trino/hooks/__init__.py similarity index 100% rename from airflow/providers/teradata/hooks/__init__.py rename to providers/src/airflow/providers/trino/hooks/__init__.py diff --git a/airflow/providers/trino/hooks/trino.py b/providers/src/airflow/providers/trino/hooks/trino.py similarity index 100% rename from airflow/providers/trino/hooks/trino.py rename to providers/src/airflow/providers/trino/hooks/trino.py diff --git a/airflow/providers/trino/operators/__init__.py b/providers/src/airflow/providers/trino/operators/__init__.py similarity index 100% rename from airflow/providers/trino/operators/__init__.py rename to providers/src/airflow/providers/trino/operators/__init__.py diff --git a/airflow/providers/trino/operators/trino.py b/providers/src/airflow/providers/trino/operators/trino.py similarity index 100% rename from airflow/providers/trino/operators/trino.py rename to providers/src/airflow/providers/trino/operators/trino.py diff --git a/airflow/providers/trino/provider.yaml b/providers/src/airflow/providers/trino/provider.yaml similarity index 100% rename from airflow/providers/trino/provider.yaml rename to providers/src/airflow/providers/trino/provider.yaml diff --git a/airflow/providers/teradata/operators/__init__.py b/providers/src/airflow/providers/trino/transfers/__init__.py similarity index 100% rename from airflow/providers/teradata/operators/__init__.py rename to providers/src/airflow/providers/trino/transfers/__init__.py diff --git a/airflow/providers/trino/transfers/gcs_to_trino.py b/providers/src/airflow/providers/trino/transfers/gcs_to_trino.py similarity index 100% rename from airflow/providers/trino/transfers/gcs_to_trino.py rename to providers/src/airflow/providers/trino/transfers/gcs_to_trino.py diff --git a/airflow/providers/vertica/.latest-doc-only-change.txt b/providers/src/airflow/providers/vertica/.latest-doc-only-change.txt similarity index 100% rename from airflow/providers/vertica/.latest-doc-only-change.txt rename to providers/src/airflow/providers/vertica/.latest-doc-only-change.txt diff --git a/airflow/providers/vertica/CHANGELOG.rst b/providers/src/airflow/providers/vertica/CHANGELOG.rst similarity index 100% rename from airflow/providers/vertica/CHANGELOG.rst rename to providers/src/airflow/providers/vertica/CHANGELOG.rst diff --git a/airflow/providers/vertica/__init__.py b/providers/src/airflow/providers/vertica/__init__.py similarity index 100% rename from airflow/providers/vertica/__init__.py rename to providers/src/airflow/providers/vertica/__init__.py diff --git a/airflow/providers/trino/hooks/__init__.py b/providers/src/airflow/providers/vertica/hooks/__init__.py similarity index 100% rename from airflow/providers/trino/hooks/__init__.py rename to 
providers/src/airflow/providers/vertica/hooks/__init__.py diff --git a/airflow/providers/vertica/hooks/vertica.py b/providers/src/airflow/providers/vertica/hooks/vertica.py similarity index 100% rename from airflow/providers/vertica/hooks/vertica.py rename to providers/src/airflow/providers/vertica/hooks/vertica.py diff --git a/airflow/providers/trino/transfers/__init__.py b/providers/src/airflow/providers/vertica/operators/__init__.py similarity index 100% rename from airflow/providers/trino/transfers/__init__.py rename to providers/src/airflow/providers/vertica/operators/__init__.py diff --git a/airflow/providers/vertica/operators/vertica.py b/providers/src/airflow/providers/vertica/operators/vertica.py similarity index 100% rename from airflow/providers/vertica/operators/vertica.py rename to providers/src/airflow/providers/vertica/operators/vertica.py diff --git a/airflow/providers/vertica/provider.yaml b/providers/src/airflow/providers/vertica/provider.yaml similarity index 100% rename from airflow/providers/vertica/provider.yaml rename to providers/src/airflow/providers/vertica/provider.yaml diff --git a/airflow/providers/weaviate/.latest-doc-only-change.txt b/providers/src/airflow/providers/weaviate/.latest-doc-only-change.txt similarity index 100% rename from airflow/providers/weaviate/.latest-doc-only-change.txt rename to providers/src/airflow/providers/weaviate/.latest-doc-only-change.txt diff --git a/airflow/providers/weaviate/CHANGELOG.rst b/providers/src/airflow/providers/weaviate/CHANGELOG.rst similarity index 100% rename from airflow/providers/weaviate/CHANGELOG.rst rename to providers/src/airflow/providers/weaviate/CHANGELOG.rst diff --git a/airflow/providers/weaviate/__init__.py b/providers/src/airflow/providers/weaviate/__init__.py similarity index 100% rename from airflow/providers/weaviate/__init__.py rename to providers/src/airflow/providers/weaviate/__init__.py diff --git a/airflow/providers/weaviate/hooks/__init__.py b/providers/src/airflow/providers/weaviate/hooks/__init__.py similarity index 100% rename from airflow/providers/weaviate/hooks/__init__.py rename to providers/src/airflow/providers/weaviate/hooks/__init__.py diff --git a/airflow/providers/weaviate/hooks/weaviate.py b/providers/src/airflow/providers/weaviate/hooks/weaviate.py similarity index 100% rename from airflow/providers/weaviate/hooks/weaviate.py rename to providers/src/airflow/providers/weaviate/hooks/weaviate.py diff --git a/airflow/providers/weaviate/operators/__init__.py b/providers/src/airflow/providers/weaviate/operators/__init__.py similarity index 100% rename from airflow/providers/weaviate/operators/__init__.py rename to providers/src/airflow/providers/weaviate/operators/__init__.py diff --git a/airflow/providers/weaviate/operators/weaviate.py b/providers/src/airflow/providers/weaviate/operators/weaviate.py similarity index 100% rename from airflow/providers/weaviate/operators/weaviate.py rename to providers/src/airflow/providers/weaviate/operators/weaviate.py diff --git a/airflow/providers/weaviate/provider.yaml b/providers/src/airflow/providers/weaviate/provider.yaml similarity index 100% rename from airflow/providers/weaviate/provider.yaml rename to providers/src/airflow/providers/weaviate/provider.yaml diff --git a/airflow/providers/yandex/.latest-doc-only-change.txt b/providers/src/airflow/providers/yandex/.latest-doc-only-change.txt similarity index 100% rename from airflow/providers/yandex/.latest-doc-only-change.txt rename to 
providers/src/airflow/providers/yandex/.latest-doc-only-change.txt diff --git a/airflow/providers/yandex/CHANGELOG.rst b/providers/src/airflow/providers/yandex/CHANGELOG.rst similarity index 100% rename from airflow/providers/yandex/CHANGELOG.rst rename to providers/src/airflow/providers/yandex/CHANGELOG.rst diff --git a/airflow/providers/yandex/__init__.py b/providers/src/airflow/providers/yandex/__init__.py similarity index 100% rename from airflow/providers/yandex/__init__.py rename to providers/src/airflow/providers/yandex/__init__.py diff --git a/airflow/providers/yandex/hooks/__init__.py b/providers/src/airflow/providers/yandex/hooks/__init__.py similarity index 100% rename from airflow/providers/yandex/hooks/__init__.py rename to providers/src/airflow/providers/yandex/hooks/__init__.py diff --git a/airflow/providers/yandex/hooks/dataproc.py b/providers/src/airflow/providers/yandex/hooks/dataproc.py similarity index 100% rename from airflow/providers/yandex/hooks/dataproc.py rename to providers/src/airflow/providers/yandex/hooks/dataproc.py diff --git a/airflow/providers/yandex/hooks/yandex.py b/providers/src/airflow/providers/yandex/hooks/yandex.py similarity index 100% rename from airflow/providers/yandex/hooks/yandex.py rename to providers/src/airflow/providers/yandex/hooks/yandex.py diff --git a/airflow/providers/yandex/hooks/yandexcloud_dataproc.py b/providers/src/airflow/providers/yandex/hooks/yandexcloud_dataproc.py similarity index 100% rename from airflow/providers/yandex/hooks/yandexcloud_dataproc.py rename to providers/src/airflow/providers/yandex/hooks/yandexcloud_dataproc.py diff --git a/airflow/providers/yandex/hooks/yq.py b/providers/src/airflow/providers/yandex/hooks/yq.py similarity index 100% rename from airflow/providers/yandex/hooks/yq.py rename to providers/src/airflow/providers/yandex/hooks/yq.py diff --git a/airflow/providers/yandex/links/__init__.py b/providers/src/airflow/providers/yandex/links/__init__.py similarity index 100% rename from airflow/providers/yandex/links/__init__.py rename to providers/src/airflow/providers/yandex/links/__init__.py diff --git a/airflow/providers/yandex/links/yq.py b/providers/src/airflow/providers/yandex/links/yq.py similarity index 100% rename from airflow/providers/yandex/links/yq.py rename to providers/src/airflow/providers/yandex/links/yq.py diff --git a/airflow/providers/yandex/operators/__init__.py b/providers/src/airflow/providers/yandex/operators/__init__.py similarity index 100% rename from airflow/providers/yandex/operators/__init__.py rename to providers/src/airflow/providers/yandex/operators/__init__.py diff --git a/airflow/providers/yandex/operators/dataproc.py b/providers/src/airflow/providers/yandex/operators/dataproc.py similarity index 100% rename from airflow/providers/yandex/operators/dataproc.py rename to providers/src/airflow/providers/yandex/operators/dataproc.py diff --git a/airflow/providers/yandex/operators/yandexcloud_dataproc.py b/providers/src/airflow/providers/yandex/operators/yandexcloud_dataproc.py similarity index 100% rename from airflow/providers/yandex/operators/yandexcloud_dataproc.py rename to providers/src/airflow/providers/yandex/operators/yandexcloud_dataproc.py diff --git a/airflow/providers/yandex/operators/yq.py b/providers/src/airflow/providers/yandex/operators/yq.py similarity index 100% rename from airflow/providers/yandex/operators/yq.py rename to providers/src/airflow/providers/yandex/operators/yq.py diff --git a/airflow/providers/yandex/provider.yaml 
b/providers/src/airflow/providers/yandex/provider.yaml similarity index 100% rename from airflow/providers/yandex/provider.yaml rename to providers/src/airflow/providers/yandex/provider.yaml diff --git a/airflow/providers/yandex/secrets/__init__.py b/providers/src/airflow/providers/yandex/secrets/__init__.py similarity index 100% rename from airflow/providers/yandex/secrets/__init__.py rename to providers/src/airflow/providers/yandex/secrets/__init__.py diff --git a/airflow/providers/yandex/secrets/lockbox.py b/providers/src/airflow/providers/yandex/secrets/lockbox.py similarity index 100% rename from airflow/providers/yandex/secrets/lockbox.py rename to providers/src/airflow/providers/yandex/secrets/lockbox.py diff --git a/airflow/providers/yandex/utils/__init__.py b/providers/src/airflow/providers/yandex/utils/__init__.py similarity index 100% rename from airflow/providers/yandex/utils/__init__.py rename to providers/src/airflow/providers/yandex/utils/__init__.py diff --git a/airflow/providers/yandex/utils/credentials.py b/providers/src/airflow/providers/yandex/utils/credentials.py similarity index 100% rename from airflow/providers/yandex/utils/credentials.py rename to providers/src/airflow/providers/yandex/utils/credentials.py diff --git a/airflow/providers/yandex/utils/defaults.py b/providers/src/airflow/providers/yandex/utils/defaults.py similarity index 100% rename from airflow/providers/yandex/utils/defaults.py rename to providers/src/airflow/providers/yandex/utils/defaults.py diff --git a/airflow/providers/yandex/utils/fields.py b/providers/src/airflow/providers/yandex/utils/fields.py similarity index 100% rename from airflow/providers/yandex/utils/fields.py rename to providers/src/airflow/providers/yandex/utils/fields.py diff --git a/airflow/providers/yandex/utils/user_agent.py b/providers/src/airflow/providers/yandex/utils/user_agent.py similarity index 100% rename from airflow/providers/yandex/utils/user_agent.py rename to providers/src/airflow/providers/yandex/utils/user_agent.py diff --git a/airflow/providers/ydb/CHANGELOG.rst b/providers/src/airflow/providers/ydb/CHANGELOG.rst similarity index 100% rename from airflow/providers/ydb/CHANGELOG.rst rename to providers/src/airflow/providers/ydb/CHANGELOG.rst diff --git a/airflow/providers/ydb/__init__.py b/providers/src/airflow/providers/ydb/__init__.py similarity index 100% rename from airflow/providers/ydb/__init__.py rename to providers/src/airflow/providers/ydb/__init__.py diff --git a/airflow/providers/ydb/hooks/__init__.py b/providers/src/airflow/providers/ydb/hooks/__init__.py similarity index 100% rename from airflow/providers/ydb/hooks/__init__.py rename to providers/src/airflow/providers/ydb/hooks/__init__.py diff --git a/airflow/providers/ydb/hooks/_vendor/__init__.py b/providers/src/airflow/providers/ydb/hooks/_vendor/__init__.py similarity index 100% rename from airflow/providers/ydb/hooks/_vendor/__init__.py rename to providers/src/airflow/providers/ydb/hooks/_vendor/__init__.py diff --git a/airflow/providers/ydb/hooks/_vendor/dbapi/__init__.py b/providers/src/airflow/providers/ydb/hooks/_vendor/dbapi/__init__.py similarity index 100% rename from airflow/providers/ydb/hooks/_vendor/dbapi/__init__.py rename to providers/src/airflow/providers/ydb/hooks/_vendor/dbapi/__init__.py diff --git a/airflow/providers/ydb/hooks/_vendor/dbapi/connection.py b/providers/src/airflow/providers/ydb/hooks/_vendor/dbapi/connection.py similarity index 100% rename from airflow/providers/ydb/hooks/_vendor/dbapi/connection.py rename to 
providers/src/airflow/providers/ydb/hooks/_vendor/dbapi/connection.py diff --git a/airflow/providers/ydb/hooks/_vendor/dbapi/constants.py b/providers/src/airflow/providers/ydb/hooks/_vendor/dbapi/constants.py similarity index 100% rename from airflow/providers/ydb/hooks/_vendor/dbapi/constants.py rename to providers/src/airflow/providers/ydb/hooks/_vendor/dbapi/constants.py diff --git a/airflow/providers/ydb/hooks/_vendor/dbapi/cursor.py b/providers/src/airflow/providers/ydb/hooks/_vendor/dbapi/cursor.py similarity index 100% rename from airflow/providers/ydb/hooks/_vendor/dbapi/cursor.py rename to providers/src/airflow/providers/ydb/hooks/_vendor/dbapi/cursor.py diff --git a/airflow/providers/ydb/hooks/_vendor/dbapi/errors.py b/providers/src/airflow/providers/ydb/hooks/_vendor/dbapi/errors.py similarity index 100% rename from airflow/providers/ydb/hooks/_vendor/dbapi/errors.py rename to providers/src/airflow/providers/ydb/hooks/_vendor/dbapi/errors.py diff --git a/airflow/providers/ydb/hooks/_vendor/readme.md b/providers/src/airflow/providers/ydb/hooks/_vendor/readme.md similarity index 100% rename from airflow/providers/ydb/hooks/_vendor/readme.md rename to providers/src/airflow/providers/ydb/hooks/_vendor/readme.md diff --git a/airflow/providers/ydb/hooks/ydb.py b/providers/src/airflow/providers/ydb/hooks/ydb.py similarity index 100% rename from airflow/providers/ydb/hooks/ydb.py rename to providers/src/airflow/providers/ydb/hooks/ydb.py diff --git a/airflow/providers/ydb/operators/__init__.py b/providers/src/airflow/providers/ydb/operators/__init__.py similarity index 100% rename from airflow/providers/ydb/operators/__init__.py rename to providers/src/airflow/providers/ydb/operators/__init__.py diff --git a/airflow/providers/ydb/operators/ydb.py b/providers/src/airflow/providers/ydb/operators/ydb.py similarity index 100% rename from airflow/providers/ydb/operators/ydb.py rename to providers/src/airflow/providers/ydb/operators/ydb.py diff --git a/airflow/providers/ydb/provider.yaml b/providers/src/airflow/providers/ydb/provider.yaml similarity index 100% rename from airflow/providers/ydb/provider.yaml rename to providers/src/airflow/providers/ydb/provider.yaml diff --git a/airflow/providers/ydb/utils/__init__.py b/providers/src/airflow/providers/ydb/utils/__init__.py similarity index 100% rename from airflow/providers/ydb/utils/__init__.py rename to providers/src/airflow/providers/ydb/utils/__init__.py diff --git a/airflow/providers/ydb/utils/credentials.py b/providers/src/airflow/providers/ydb/utils/credentials.py similarity index 100% rename from airflow/providers/ydb/utils/credentials.py rename to providers/src/airflow/providers/ydb/utils/credentials.py diff --git a/airflow/providers/ydb/utils/defaults.py b/providers/src/airflow/providers/ydb/utils/defaults.py similarity index 100% rename from airflow/providers/ydb/utils/defaults.py rename to providers/src/airflow/providers/ydb/utils/defaults.py diff --git a/airflow/providers/zendesk/.latest-doc-only-change.txt b/providers/src/airflow/providers/zendesk/.latest-doc-only-change.txt similarity index 100% rename from airflow/providers/zendesk/.latest-doc-only-change.txt rename to providers/src/airflow/providers/zendesk/.latest-doc-only-change.txt diff --git a/airflow/providers/zendesk/CHANGELOG.rst b/providers/src/airflow/providers/zendesk/CHANGELOG.rst similarity index 100% rename from airflow/providers/zendesk/CHANGELOG.rst rename to providers/src/airflow/providers/zendesk/CHANGELOG.rst diff --git 
a/airflow/providers/zendesk/__init__.py b/providers/src/airflow/providers/zendesk/__init__.py similarity index 100% rename from airflow/providers/zendesk/__init__.py rename to providers/src/airflow/providers/zendesk/__init__.py diff --git a/airflow/providers/vertica/hooks/__init__.py b/providers/src/airflow/providers/zendesk/hooks/__init__.py similarity index 100% rename from airflow/providers/vertica/hooks/__init__.py rename to providers/src/airflow/providers/zendesk/hooks/__init__.py diff --git a/airflow/providers/zendesk/hooks/zendesk.py b/providers/src/airflow/providers/zendesk/hooks/zendesk.py similarity index 100% rename from airflow/providers/zendesk/hooks/zendesk.py rename to providers/src/airflow/providers/zendesk/hooks/zendesk.py diff --git a/airflow/providers/zendesk/provider.yaml b/providers/src/airflow/providers/zendesk/provider.yaml similarity index 100% rename from airflow/providers/zendesk/provider.yaml rename to providers/src/airflow/providers/zendesk/provider.yaml diff --git a/tests/_internals/__init__.py b/providers/tests/__init__.py similarity index 100% rename from tests/_internals/__init__.py rename to providers/tests/__init__.py diff --git a/tests/integration/providers/__init__.py b/providers/tests/airbyte/__init__.py similarity index 100% rename from tests/integration/providers/__init__.py rename to providers/tests/airbyte/__init__.py diff --git a/tests/integration/providers/apache/__init__.py b/providers/tests/airbyte/hooks/__init__.py similarity index 100% rename from tests/integration/providers/apache/__init__.py rename to providers/tests/airbyte/hooks/__init__.py diff --git a/tests/providers/airbyte/hooks/test_airbyte.py b/providers/tests/airbyte/hooks/test_airbyte.py similarity index 100% rename from tests/providers/airbyte/hooks/test_airbyte.py rename to providers/tests/airbyte/hooks/test_airbyte.py diff --git a/tests/integration/providers/apache/cassandra/__init__.py b/providers/tests/airbyte/operators/__init__.py similarity index 100% rename from tests/integration/providers/apache/cassandra/__init__.py rename to providers/tests/airbyte/operators/__init__.py diff --git a/tests/providers/airbyte/operators/test_airbyte.py b/providers/tests/airbyte/operators/test_airbyte.py similarity index 100% rename from tests/providers/airbyte/operators/test_airbyte.py rename to providers/tests/airbyte/operators/test_airbyte.py diff --git a/tests/integration/providers/apache/cassandra/hooks/__init__.py b/providers/tests/airbyte/sensors/__init__.py similarity index 100% rename from tests/integration/providers/apache/cassandra/hooks/__init__.py rename to providers/tests/airbyte/sensors/__init__.py diff --git a/tests/providers/airbyte/sensors/test_airbyte.py b/providers/tests/airbyte/sensors/test_airbyte.py similarity index 100% rename from tests/providers/airbyte/sensors/test_airbyte.py rename to providers/tests/airbyte/sensors/test_airbyte.py diff --git a/tests/integration/providers/apache/drill/__init__.py b/providers/tests/airbyte/triggers/__init__.py similarity index 100% rename from tests/integration/providers/apache/drill/__init__.py rename to providers/tests/airbyte/triggers/__init__.py diff --git a/tests/providers/airbyte/triggers/test_airbyte.py b/providers/tests/airbyte/triggers/test_airbyte.py similarity index 100% rename from tests/providers/airbyte/triggers/test_airbyte.py rename to providers/tests/airbyte/triggers/test_airbyte.py diff --git a/tests/integration/providers/apache/drill/hooks/__init__.py b/providers/tests/alibaba/__init__.py similarity index 100% 
rename from tests/integration/providers/apache/drill/hooks/__init__.py
rename to providers/tests/alibaba/__init__.py
diff --git a/tests/integration/providers/apache/drill/operators/__init__.py b/providers/tests/alibaba/cloud/__init__.py
similarity index 100%
rename from tests/integration/providers/apache/drill/operators/__init__.py
rename to providers/tests/alibaba/cloud/__init__.py
diff --git a/tests/integration/providers/apache/hive/__init__.py b/providers/tests/alibaba/cloud/hooks/__init__.py
similarity index 100%
rename from tests/integration/providers/apache/hive/__init__.py
rename to providers/tests/alibaba/cloud/hooks/__init__.py
diff --git a/tests/providers/alibaba/cloud/hooks/test_analyticdb_spark.py b/providers/tests/alibaba/cloud/hooks/test_analyticdb_spark.py
similarity index 99%
rename from tests/providers/alibaba/cloud/hooks/test_analyticdb_spark.py
rename to providers/tests/alibaba/cloud/hooks/test_analyticdb_spark.py
index bf38a3f7ca66..b3e9100e2e59 100644
--- a/tests/providers/alibaba/cloud/hooks/test_analyticdb_spark.py
+++ b/providers/tests/alibaba/cloud/hooks/test_analyticdb_spark.py
@@ -34,7 +34,8 @@
 )
 from airflow.providers.alibaba.cloud.hooks.analyticdb_spark import AnalyticDBSparkHook
-from tests.providers.alibaba.cloud.utils.analyticdb_spark_mock import mock_adb_spark_hook_default_project_id
+
+from providers.tests.alibaba.cloud.utils.analyticdb_spark_mock import mock_adb_spark_hook_default_project_id
 
 ADB_SPARK_STRING = "airflow.providers.alibaba.cloud.hooks.analyticdb_spark.{}"
 MOCK_ADB_SPARK_CONN_ID = "mock_id"
diff --git a/tests/providers/alibaba/cloud/hooks/test_oss.py b/providers/tests/alibaba/cloud/hooks/test_oss.py
similarity index 99%
rename from tests/providers/alibaba/cloud/hooks/test_oss.py
rename to providers/tests/alibaba/cloud/hooks/test_oss.py
index 1c47aa10c974..2bb53dddece5 100644
--- a/tests/providers/alibaba/cloud/hooks/test_oss.py
+++ b/providers/tests/alibaba/cloud/hooks/test_oss.py
@@ -20,7 +20,8 @@
 from unittest import mock
 
 from airflow.providers.alibaba.cloud.hooks.oss import OSSHook
-from tests.providers.alibaba.cloud.utils.oss_mock import mock_oss_hook_default_project_id
+
+from providers.tests.alibaba.cloud.utils.oss_mock import mock_oss_hook_default_project_id
 
 OSS_STRING = "airflow.providers.alibaba.cloud.hooks.oss.{}"
 MOCK_OSS_CONN_ID = "mock_id"
diff --git a/tests/integration/providers/apache/hive/transfers/__init__.py b/providers/tests/alibaba/cloud/log/__init__.py
similarity index 100%
rename from tests/integration/providers/apache/hive/transfers/__init__.py
rename to providers/tests/alibaba/cloud/log/__init__.py
diff --git a/tests/providers/alibaba/cloud/log/test_oss_task_handler.py b/providers/tests/alibaba/cloud/log/test_oss_task_handler.py
similarity index 98%
rename from tests/providers/alibaba/cloud/log/test_oss_task_handler.py
rename to providers/tests/alibaba/cloud/log/test_oss_task_handler.py
index b17d95341011..18abe57aa09b 100644
--- a/tests/providers/alibaba/cloud/log/test_oss_task_handler.py
+++ b/providers/tests/alibaba/cloud/log/test_oss_task_handler.py
@@ -26,8 +26,9 @@
 from airflow.providers.alibaba.cloud.log.oss_task_handler import OSSTaskHandler
 from airflow.utils.state import TaskInstanceState
 from airflow.utils.timezone import datetime
-from tests.test_utils.config import conf_vars
-from tests.test_utils.db import clear_db_dags, clear_db_runs
+
+from dev.tests_common.test_utils.config import conf_vars
+from dev.tests_common.test_utils.db import clear_db_dags, clear_db_runs
 
 pytestmark = pytest.mark.db_test
diff --git a/tests/integration/providers/apache/kafka/__init__.py b/providers/tests/alibaba/cloud/operators/__init__.py
similarity index 100%
rename from tests/integration/providers/apache/kafka/__init__.py
rename to providers/tests/alibaba/cloud/operators/__init__.py
diff --git a/tests/providers/alibaba/cloud/operators/test_analyticdb_spark.py b/providers/tests/alibaba/cloud/operators/test_analyticdb_spark.py
similarity index 100%
rename from tests/providers/alibaba/cloud/operators/test_analyticdb_spark.py
rename to providers/tests/alibaba/cloud/operators/test_analyticdb_spark.py
diff --git a/tests/providers/alibaba/cloud/operators/test_oss.py b/providers/tests/alibaba/cloud/operators/test_oss.py
similarity index 100%
rename from tests/providers/alibaba/cloud/operators/test_oss.py
rename to providers/tests/alibaba/cloud/operators/test_oss.py
diff --git a/tests/integration/providers/apache/kafka/hooks/__init__.py b/providers/tests/alibaba/cloud/sensors/__init__.py
similarity index 100%
rename from tests/integration/providers/apache/kafka/hooks/__init__.py
rename to providers/tests/alibaba/cloud/sensors/__init__.py
diff --git a/tests/providers/alibaba/cloud/sensors/test_analyticdb_spark.py b/providers/tests/alibaba/cloud/sensors/test_analyticdb_spark.py
similarity index 100%
rename from tests/providers/alibaba/cloud/sensors/test_analyticdb_spark.py
rename to providers/tests/alibaba/cloud/sensors/test_analyticdb_spark.py
diff --git a/tests/providers/alibaba/cloud/sensors/test_oss_key.py b/providers/tests/alibaba/cloud/sensors/test_oss_key.py
similarity index 100%
rename from tests/providers/alibaba/cloud/sensors/test_oss_key.py
rename to providers/tests/alibaba/cloud/sensors/test_oss_key.py
diff --git a/tests/integration/providers/apache/kafka/operators/__init__.py b/providers/tests/alibaba/cloud/utils/__init__.py
similarity index 100%
rename from tests/integration/providers/apache/kafka/operators/__init__.py
rename to providers/tests/alibaba/cloud/utils/__init__.py
diff --git a/tests/providers/alibaba/cloud/utils/analyticdb_spark_mock.py b/providers/tests/alibaba/cloud/utils/analyticdb_spark_mock.py
similarity index 100%
rename from tests/providers/alibaba/cloud/utils/analyticdb_spark_mock.py
rename to providers/tests/alibaba/cloud/utils/analyticdb_spark_mock.py
diff --git a/tests/providers/alibaba/cloud/utils/oss_mock.py b/providers/tests/alibaba/cloud/utils/oss_mock.py
similarity index 100%
rename from tests/providers/alibaba/cloud/utils/oss_mock.py
rename to providers/tests/alibaba/cloud/utils/oss_mock.py
diff --git a/tests/providers/alibaba/cloud/utils/test_utils.py b/providers/tests/alibaba/cloud/utils/test_utils.py
similarity index 100%
rename from tests/providers/alibaba/cloud/utils/test_utils.py
rename to providers/tests/alibaba/cloud/utils/test_utils.py
diff --git a/tests/integration/providers/apache/kafka/sensors/__init__.py b/providers/tests/amazon/__init__.py
similarity index 100%
rename from tests/integration/providers/apache/kafka/sensors/__init__.py
rename to providers/tests/amazon/__init__.py
diff --git a/tests/providers/amazon/aws/.gitignore b/providers/tests/amazon/aws/.gitignore
similarity index 100%
rename from tests/providers/amazon/aws/.gitignore
rename to providers/tests/amazon/aws/.gitignore
diff --git a/tests/integration/providers/apache/kafka/triggers/__init__.py b/providers/tests/amazon/aws/__init__.py
similarity index 100%
rename from tests/integration/providers/apache/kafka/triggers/__init__.py
rename to providers/tests/amazon/aws/__init__.py
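The hunks above show the only source change most relocated tests need: helper imports are rewritten from the old `tests.*` roots to the new ones. A minimal before/after sketch, with the paths taken from the hunks above:

    # Before the move:
    #   from tests.test_utils.config import conf_vars
    #   from tests.providers.alibaba.cloud.utils.oss_mock import mock_oss_hook_default_project_id

    # After the move: shared helpers now come from dev.tests_common, and
    # imports between test modules use the providers.tests package root.
    from dev.tests_common.test_utils.config import conf_vars
    from providers.tests.alibaba.cloud.utils.oss_mock import mock_oss_hook_default_project_id
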
diff --git a/tests/integration/providers/apache/pinot/__init__.py b/providers/tests/amazon/aws/assets/__init__.py
similarity index 100%
rename from tests/integration/providers/apache/pinot/__init__.py
rename to providers/tests/amazon/aws/assets/__init__.py
diff --git a/tests/providers/amazon/aws/assets/test_s3.py b/providers/tests/amazon/aws/assets/test_s3.py
similarity index 100%
rename from tests/providers/amazon/aws/assets/test_s3.py
rename to providers/tests/amazon/aws/assets/test_s3.py
diff --git a/tests/integration/providers/apache/pinot/hooks/__init__.py b/providers/tests/amazon/aws/auth_manager/__init__.py
similarity index 100%
rename from tests/integration/providers/apache/pinot/hooks/__init__.py
rename to providers/tests/amazon/aws/auth_manager/__init__.py
diff --git a/tests/integration/providers/google/__init__.py b/providers/tests/amazon/aws/auth_manager/avp/__init__.py
similarity index 100%
rename from tests/integration/providers/google/__init__.py
rename to providers/tests/amazon/aws/auth_manager/avp/__init__.py
diff --git a/tests/providers/amazon/aws/auth_manager/avp/test_entities.py b/providers/tests/amazon/aws/auth_manager/avp/test_entities.py
similarity index 100%
rename from tests/providers/amazon/aws/auth_manager/avp/test_entities.py
rename to providers/tests/amazon/aws/auth_manager/avp/test_entities.py
diff --git a/tests/providers/amazon/aws/auth_manager/avp/test_facade.py b/providers/tests/amazon/aws/auth_manager/avp/test_facade.py
similarity index 98%
rename from tests/providers/amazon/aws/auth_manager/avp/test_facade.py
rename to providers/tests/amazon/aws/auth_manager/avp/test_facade.py
index 5c632ac1ba8e..3d2a0195039d 100644
--- a/tests/providers/amazon/aws/auth_manager/avp/test_facade.py
+++ b/providers/tests/amazon/aws/auth_manager/avp/test_facade.py
@@ -27,7 +27,8 @@
 from airflow.providers.amazon.aws.auth_manager.avp.facade import AwsAuthManagerAmazonVerifiedPermissionsFacade
 from airflow.providers.amazon.aws.auth_manager.user import AwsAuthManagerUser
 from airflow.utils.helpers import prune_dict
-from tests.test_utils.config import conf_vars
+
+from dev.tests_common.test_utils.config import conf_vars
 
 if TYPE_CHECKING:
     from airflow.auth.managers.base_auth_manager import ResourceMethod
@@ -311,8 +312,8 @@ def test_get_batch_is_authorized_single_result_unsuccessful(self, facade):
             user=test_user,
         )
 
-    def test_is_policy_store_schema_up_to_date_when_schema_up_to_date(self, facade, airflow_root_path):
-        schema_path = airflow_root_path.joinpath(
+    def test_is_policy_store_schema_up_to_date_when_schema_up_to_date(self, facade, providers_src_folder):
+        schema_path = providers_src_folder.joinpath(
             "airflow", "providers", "amazon", "aws", "auth_manager", "avp", "schema.json"
         ).resolve()
         with open(schema_path) as schema_file:
@@ -322,8 +323,8 @@ def test_is_policy_store_schema_up_to_date_when_schema_up_to_date(self, facade,
 
         assert facade.is_policy_store_schema_up_to_date()
 
-    def test_is_policy_store_schema_up_to_date_when_schema_is_modified(self, facade, airflow_root_path):
-        schema_path = airflow_root_path.joinpath(
+    def test_is_policy_store_schema_up_to_date_when_schema_is_modified(self, facade, providers_src_folder):
+        schema_path = providers_src_folder.joinpath(
             "airflow", "providers", "amazon", "aws", "auth_manager", "avp", "schema.json"
         ).resolve()
         with open(schema_path) as schema_file:
diff --git a/tests/integration/providers/google/cloud/__init__.py b/providers/tests/amazon/aws/auth_manager/cli/__init__.py
similarity index 100%
rename from tests/integration/providers/google/cloud/__init__.py
rename to providers/tests/amazon/aws/auth_manager/cli/__init__.py
diff --git a/tests/providers/amazon/aws/auth_manager/cli/test_avp_commands.py b/providers/tests/amazon/aws/auth_manager/cli/test_avp_commands.py
similarity index 97%
rename from tests/providers/amazon/aws/auth_manager/cli/test_avp_commands.py
rename to providers/tests/amazon/aws/auth_manager/cli/test_avp_commands.py
index f285beca8af1..6122079fee4c 100644
--- a/tests/providers/amazon/aws/auth_manager/cli/test_avp_commands.py
+++ b/providers/tests/amazon/aws/auth_manager/cli/test_avp_commands.py
@@ -23,8 +23,9 @@
 from airflow.cli import cli_parser
 from airflow.providers.amazon.aws.auth_manager.cli.avp_commands import init_avp, update_schema
-from tests.test_utils.compat import AIRFLOW_V_2_8_PLUS
-from tests.test_utils.config import conf_vars
+
+from dev.tests_common.test_utils.compat import AIRFLOW_V_2_8_PLUS
+from dev.tests_common.test_utils.config import conf_vars
 
 mock_boto3 = Mock()
diff --git a/tests/providers/amazon/aws/auth_manager/cli/test_definition.py b/providers/tests/amazon/aws/auth_manager/cli/test_definition.py
similarity index 100%
rename from tests/providers/amazon/aws/auth_manager/cli/test_definition.py
rename to providers/tests/amazon/aws/auth_manager/cli/test_definition.py
diff --git a/tests/integration/providers/google/cloud/transfers/__init__.py b/providers/tests/amazon/aws/auth_manager/security_manager/__init__.py
similarity index 100%
rename from tests/integration/providers/google/cloud/transfers/__init__.py
rename to providers/tests/amazon/aws/auth_manager/security_manager/__init__.py
diff --git a/tests/providers/amazon/aws/auth_manager/security_manager/test_aws_security_manager_override.py b/providers/tests/amazon/aws/auth_manager/security_manager/test_aws_security_manager_override.py
similarity index 96%
rename from tests/providers/amazon/aws/auth_manager/security_manager/test_aws_security_manager_override.py
rename to providers/tests/amazon/aws/auth_manager/security_manager/test_aws_security_manager_override.py
index ebb452fb1afb..b6071aac955c 100644
--- a/tests/providers/amazon/aws/auth_manager/security_manager/test_aws_security_manager_override.py
+++ b/providers/tests/amazon/aws/auth_manager/security_manager/test_aws_security_manager_override.py
@@ -21,7 +21,7 @@
 import pytest
 from flask import Flask
-from tests.test_utils.compat import ignore_provider_compatibility_error
+from dev.tests_common.test_utils.compat import ignore_provider_compatibility_error
 
 python3_saml = pytest.importorskip("python3-saml")
diff --git a/tests/providers/amazon/aws/auth_manager/test_aws_auth_manager.py b/providers/tests/amazon/aws/auth_manager/test_aws_auth_manager.py
similarity index 97%
rename from tests/providers/amazon/aws/auth_manager/test_aws_auth_manager.py
rename to providers/tests/amazon/aws/auth_manager/test_aws_auth_manager.py
index d827ba3ff0e6..47e8a4cbcb26 100644
--- a/tests/providers/amazon/aws/auth_manager/test_aws_auth_manager.py
+++ b/providers/tests/amazon/aws/auth_manager/test_aws_auth_manager.py
@@ -38,9 +38,10 @@
 )
 from airflow.www import app as application
 from airflow.www.extensions.init_appbuilder import init_appbuilder
-from tests.test_utils.compat import AIRFLOW_V_2_8_PLUS, AIRFLOW_V_2_9_PLUS
-from tests.test_utils.config import conf_vars
-from tests.test_utils.www import check_content_in_response
+
+from dev.tests_common.test_utils.compat import AIRFLOW_V_2_8_PLUS, AIRFLOW_V_2_9_PLUS
+from dev.tests_common.test_utils.config import conf_vars
+from dev.tests_common.test_utils.www import check_content_in_response
 
 try:
     from airflow.auth.managers.models.resource_details import (
@@ -61,6 +62,7 @@
 else:
     raise
 
+
 if TYPE_CHECKING:
     from airflow.auth.managers.base_auth_manager import ResourceMethod
     from airflow.auth.managers.models.resource_details import AssetDetails
@@ -146,13 +148,17 @@ def client_admin():
             ("aws_auth_manager", "avp_policy_store_id"): "avp_policy_store_id",
         }
     ):
-        with patch(
-            "airflow.providers.amazon.aws.auth_manager.views.auth.OneLogin_Saml2_IdPMetadataParser"
-        ) as mock_parser, patch(
-            "airflow.providers.amazon.aws.auth_manager.views.auth.AwsAuthManagerAuthenticationViews._init_saml_auth"
-        ) as mock_init_saml_auth, patch(
-            "airflow.providers.amazon.aws.auth_manager.avp.facade.AwsAuthManagerAmazonVerifiedPermissionsFacade.is_policy_store_schema_up_to_date"
-        ) as mock_is_policy_store_schema_up_to_date:
+        with (
+            patch(
+                "airflow.providers.amazon.aws.auth_manager.views.auth.OneLogin_Saml2_IdPMetadataParser"
+            ) as mock_parser,
+            patch(
+                "airflow.providers.amazon.aws.auth_manager.views.auth.AwsAuthManagerAuthenticationViews._init_saml_auth"
+            ) as mock_init_saml_auth,
+            patch(
+                "airflow.providers.amazon.aws.auth_manager.avp.facade.AwsAuthManagerAmazonVerifiedPermissionsFacade.is_policy_store_schema_up_to_date"
+            ) as mock_is_policy_store_schema_up_to_date,
+        ):
             mock_parser.parse_remote.return_value = SAML_METADATA_PARSED
             mock_is_policy_store_schema_up_to_date.return_value = True
diff --git a/tests/providers/amazon/aws/auth_manager/test_constants.py b/providers/tests/amazon/aws/auth_manager/test_constants.py
similarity index 100%
rename from tests/providers/amazon/aws/auth_manager/test_constants.py
rename to providers/tests/amazon/aws/auth_manager/test_constants.py
diff --git a/tests/providers/amazon/aws/auth_manager/test_user.py b/providers/tests/amazon/aws/auth_manager/test_user.py
similarity index 100%
rename from tests/providers/amazon/aws/auth_manager/test_user.py
rename to providers/tests/amazon/aws/auth_manager/test_user.py
diff --git a/tests/integration/providers/microsoft/__init__.py b/providers/tests/amazon/aws/auth_manager/views/__init__.py
similarity index 100%
rename from tests/integration/providers/microsoft/__init__.py
rename to providers/tests/amazon/aws/auth_manager/views/__init__.py
diff --git a/tests/providers/amazon/aws/auth_manager/views/test_auth.py b/providers/tests/amazon/aws/auth_manager/views/test_auth.py
similarity index 80%
rename from tests/providers/amazon/aws/auth_manager/views/test_auth.py
rename to providers/tests/amazon/aws/auth_manager/views/test_auth.py
index 05d2fb84b51c..9b2eec69188b 100644
--- a/tests/providers/amazon/aws/auth_manager/views/test_auth.py
+++ b/providers/tests/amazon/aws/auth_manager/views/test_auth.py
@@ -23,8 +23,9 @@
 from airflow.exceptions import AirflowException
 from airflow.www import app as application
-from tests.test_utils.compat import AIRFLOW_V_2_9_PLUS
-from tests.test_utils.config import conf_vars
+
+from dev.tests_common.test_utils.compat import AIRFLOW_V_2_9_PLUS
+from dev.tests_common.test_utils.config import conf_vars
 
 pytestmark = [
     pytest.mark.skipif(not AIRFLOW_V_2_9_PLUS, reason="Test requires Airflow 2.9+"),
@@ -62,11 +63,14 @@ def aws_app():
             ("aws_auth_manager", "saml_metadata_url"): SAML_METADATA_URL,
         }
     ):
-        with patch(
-            "airflow.providers.amazon.aws.auth_manager.views.auth.OneLogin_Saml2_IdPMetadataParser"
-        ) as mock_parser, patch(
-            "airflow.providers.amazon.aws.auth_manager.avp.facade.AwsAuthManagerAmazonVerifiedPermissionsFacade.is_policy_store_schema_up_to_date"
-        ) as mock_is_policy_store_schema_up_to_date:
+        with (
+            patch(
+                "airflow.providers.amazon.aws.auth_manager.views.auth.OneLogin_Saml2_IdPMetadataParser"
+            ) as mock_parser,
+            patch(
+                "airflow.providers.amazon.aws.auth_manager.avp.facade.AwsAuthManagerAmazonVerifiedPermissionsFacade.is_policy_store_schema_up_to_date"
+            ) as mock_is_policy_store_schema_up_to_date,
+        ):
             mock_is_policy_store_schema_up_to_date.return_value = True
             mock_parser.parse_remote.return_value = SAML_METADATA_PARSED
             return application.create_app(testing=True, config={"WTF_CSRF_ENABLED": False})
@@ -102,13 +106,17 @@ def test_login_callback_set_user_in_session(self):
                 ("aws_auth_manager", "saml_metadata_url"): SAML_METADATA_URL,
             }
         ):
-            with patch(
-                "airflow.providers.amazon.aws.auth_manager.views.auth.OneLogin_Saml2_IdPMetadataParser"
-            ) as mock_parser, patch(
-                "airflow.providers.amazon.aws.auth_manager.views.auth.AwsAuthManagerAuthenticationViews._init_saml_auth"
-            ) as mock_init_saml_auth, patch(
-                "airflow.providers.amazon.aws.auth_manager.avp.facade.AwsAuthManagerAmazonVerifiedPermissionsFacade.is_policy_store_schema_up_to_date"
-            ) as mock_is_policy_store_schema_up_to_date:
+            with (
+                patch(
+                    "airflow.providers.amazon.aws.auth_manager.views.auth.OneLogin_Saml2_IdPMetadataParser"
+                ) as mock_parser,
+                patch(
+                    "airflow.providers.amazon.aws.auth_manager.views.auth.AwsAuthManagerAuthenticationViews._init_saml_auth"
+                ) as mock_init_saml_auth,
+                patch(
+                    "airflow.providers.amazon.aws.auth_manager.avp.facade.AwsAuthManagerAmazonVerifiedPermissionsFacade.is_policy_store_schema_up_to_date"
+                ) as mock_is_policy_store_schema_up_to_date,
+            ):
                 mock_is_policy_store_schema_up_to_date.return_value = True
                 mock_parser.parse_remote.return_value = SAML_METADATA_PARSED
@@ -140,13 +148,17 @@ def test_login_callback_raise_exception_if_errors(self):
                 ("aws_auth_manager", "saml_metadata_url"): SAML_METADATA_URL,
             }
         ):
-            with patch(
-                "airflow.providers.amazon.aws.auth_manager.views.auth.OneLogin_Saml2_IdPMetadataParser"
-            ) as mock_parser, patch(
-                "airflow.providers.amazon.aws.auth_manager.views.auth.AwsAuthManagerAuthenticationViews._init_saml_auth"
-            ) as mock_init_saml_auth, patch(
-                "airflow.providers.amazon.aws.auth_manager.avp.facade.AwsAuthManagerAmazonVerifiedPermissionsFacade.is_policy_store_schema_up_to_date"
-            ) as mock_is_policy_store_schema_up_to_date:
+            with (
+                patch(
+                    "airflow.providers.amazon.aws.auth_manager.views.auth.OneLogin_Saml2_IdPMetadataParser"
+                ) as mock_parser,
+                patch(
+                    "airflow.providers.amazon.aws.auth_manager.views.auth.AwsAuthManagerAuthenticationViews._init_saml_auth"
+                ) as mock_init_saml_auth,
+                patch(
+                    "airflow.providers.amazon.aws.auth_manager.avp.facade.AwsAuthManagerAmazonVerifiedPermissionsFacade.is_policy_store_schema_up_to_date"
+                ) as mock_is_policy_store_schema_up_to_date,
+            ):
                 mock_is_policy_store_schema_up_to_date.return_value = True
                 mock_parser.parse_remote.return_value = SAML_METADATA_PARSED
diff --git a/tests/integration/providers/microsoft/mssql/__init__.py b/providers/tests/amazon/aws/config_templates/__init__.py
similarity index 100%
rename from tests/integration/providers/microsoft/mssql/__init__.py
rename to providers/tests/amazon/aws/config_templates/__init__.py
diff --git a/tests/providers/amazon/aws/config_templates/args.json b/providers/tests/amazon/aws/config_templates/args.json
similarity index 100%
rename from tests/providers/amazon/aws/config_templates/args.json
rename to providers/tests/amazon/aws/config_templates/args.json
diff --git a/tests/providers/amazon/aws/config_templates/job.j2.json b/providers/tests/amazon/aws/config_templates/job.j2.json
similarity index 100%
rename from tests/providers/amazon/aws/config_templates/job.j2.json
rename to providers/tests/amazon/aws/config_templates/job.j2.json
diff --git a/tests/providers/amazon/aws/config_templates/steps.j2.json b/providers/tests/amazon/aws/config_templates/steps.j2.json
similarity index 100%
rename from tests/providers/amazon/aws/config_templates/steps.j2.json
rename to providers/tests/amazon/aws/config_templates/steps.j2.json
diff --git a/tests/providers/amazon/aws/config_templates/steps.json b/providers/tests/amazon/aws/config_templates/steps.json
similarity index 100%
rename from tests/providers/amazon/aws/config_templates/steps.json
rename to providers/tests/amazon/aws/config_templates/steps.json
diff --git a/tests/integration/providers/microsoft/mssql/hooks/__init__.py b/providers/tests/amazon/aws/deferrable/__init__.py
similarity index 100%
rename from tests/integration/providers/microsoft/mssql/hooks/__init__.py
rename to providers/tests/amazon/aws/deferrable/__init__.py
diff --git a/tests/integration/providers/mongo/__init__.py b/providers/tests/amazon/aws/deferrable/hooks/__init__.py
similarity index 100%
rename from tests/integration/providers/mongo/__init__.py
rename to providers/tests/amazon/aws/deferrable/hooks/__init__.py
diff --git a/tests/integration/providers/mongo/sensors/__init__.py b/providers/tests/amazon/aws/executors/__init__.py
similarity index 100%
rename from tests/integration/providers/mongo/sensors/__init__.py
rename to providers/tests/amazon/aws/executors/__init__.py
diff --git a/tests/integration/providers/openlineage/__init__.py b/providers/tests/amazon/aws/executors/batch/__init__.py
similarity index 100%
rename from tests/integration/providers/openlineage/__init__.py
rename to providers/tests/amazon/aws/executors/batch/__init__.py
diff --git a/tests/providers/amazon/aws/executors/batch/test_batch_executor.py b/providers/tests/amazon/aws/executors/batch/test_batch_executor.py
similarity index 99%
rename from tests/providers/amazon/aws/executors/batch/test_batch_executor.py
rename to providers/tests/amazon/aws/executors/batch/test_batch_executor.py
index 8b81e4a966a4..e7037bd16c85 100644
--- a/tests/providers/amazon/aws/executors/batch/test_batch_executor.py
+++ b/providers/tests/amazon/aws/executors/batch/test_batch_executor.py
@@ -45,8 +45,9 @@
 from airflow.utils.helpers import convert_camel_to_snake
 from airflow.utils.state import State
 from airflow.version import version as airflow_version_str
-from tests.conftest import RUNNING_TESTS_AGAINST_AIRFLOW_PACKAGES
-from tests.test_utils.config import conf_vars
+
+from dev.tests_common import RUNNING_TESTS_AGAINST_AIRFLOW_PACKAGES
+from dev.tests_common.test_utils.config import conf_vars
 
 airflow_version = VersionInfo(*map(int, airflow_version_str.split(".")[:3]))
 
 ARN1 = "arn1"
@@ -652,9 +653,9 @@
     )
     def test_validate_config_defaults(self):
         """Assert that the defaults stated in the config.yml file match those in utils.CONFIG_DEFAULTS."""
-        curr_dir = os.path.dirname(os.path.abspath(__file__))
-        executor_path = "aws/executors/batch"
-        config_filename = curr_dir.replace("tests", "airflow").replace(executor_path, "provider.yaml")
+        from airflow.providers.amazon import __file__ as provider_path
+
+        config_filename = os.path.join(os.path.dirname(provider_path), "provider.yaml")
 
         with open(config_filename) as config:
             options = yaml.safe_load(config)["config"][CONFIG_GROUP_NAME]["options"]
diff --git a/tests/integration/providers/openlineage/operators/__init__.py b/providers/tests/amazon/aws/executors/ecs/__init__.py
similarity index 100%
rename from tests/integration/providers/openlineage/operators/__init__.py
rename to providers/tests/amazon/aws/executors/ecs/__init__.py
diff --git a/tests/providers/amazon/aws/executors/ecs/test_ecs_executor.py b/providers/tests/amazon/aws/executors/ecs/test_ecs_executor.py
similarity index 99%
rename from tests/providers/amazon/aws/executors/ecs/test_ecs_executor.py
rename to providers/tests/amazon/aws/executors/ecs/test_ecs_executor.py
index 51e3c1793490..50cdb580382f 100644
--- a/tests/providers/amazon/aws/executors/ecs/test_ecs_executor.py
+++ b/providers/tests/amazon/aws/executors/ecs/test_ecs_executor.py
@@ -56,9 +56,10 @@
 from airflow.utils.state import State, TaskInstanceState
 from airflow.utils.timezone import utcnow
 from airflow.version import version as airflow_version_str
-from tests.conftest import RUNNING_TESTS_AGAINST_AIRFLOW_PACKAGES
-from tests.test_utils.compat import AIRFLOW_V_2_10_PLUS
-from tests.test_utils.config import conf_vars
+
+from dev.tests_common import RUNNING_TESTS_AGAINST_AIRFLOW_PACKAGES
+from dev.tests_common.test_utils.compat import AIRFLOW_V_2_10_PLUS
+from dev.tests_common.test_utils.config import conf_vars
 
 pytestmark = pytest.mark.db_test
@@ -1219,9 +1220,9 @@ def test_validate_config_defaults(self):
         airflow sources, not when airflow is installed from packages, because airflow installed
         from packages will not have the provider.yml file.
         """
-        curr_dir = os.path.dirname(os.path.abspath(__file__))
-        executor_path = "aws/executors/ecs"
-        config_filename = curr_dir.replace("tests", "airflow").replace(executor_path, "provider.yaml")
+        from airflow.providers.amazon import __file__ as provider_path
+
+        config_filename = os.path.join(os.path.dirname(provider_path), "provider.yaml")
 
         with open(config_filename) as config:
             options = yaml.safe_load(config)["config"][CONFIG_GROUP_NAME]["options"]
diff --git a/tests/integration/providers/qdrant/__init__.py b/providers/tests/amazon/aws/executors/utils/__init__.py
similarity index 100%
rename from tests/integration/providers/qdrant/__init__.py
rename to providers/tests/amazon/aws/executors/utils/__init__.py
diff --git a/tests/providers/amazon/aws/executors/utils/test_exponential_backoff_retry.py b/providers/tests/amazon/aws/executors/utils/test_exponential_backoff_retry.py
similarity index 100%
rename from tests/providers/amazon/aws/executors/utils/test_exponential_backoff_retry.py
rename to providers/tests/amazon/aws/executors/utils/test_exponential_backoff_retry.py
diff --git a/tests/integration/providers/qdrant/hooks/__init__.py b/providers/tests/amazon/aws/fs/__init__.py
similarity index 100%
rename from tests/integration/providers/qdrant/hooks/__init__.py
rename to providers/tests/amazon/aws/fs/__init__.py
diff --git a/tests/providers/amazon/aws/fs/test_s3.py b/providers/tests/amazon/aws/fs/test_s3.py
similarity index 100%
rename from tests/providers/amazon/aws/fs/test_s3.py
rename to providers/tests/amazon/aws/fs/test_s3.py
diff --git a/tests/integration/providers/qdrant/operators/__init__.py b/providers/tests/amazon/aws/hooks/__init__.py
similarity index 100%
rename from tests/integration/providers/qdrant/operators/__init__.py
rename to providers/tests/amazon/aws/hooks/__init__.py
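The `test_validate_config_defaults` hunks above stop deriving the provider.yaml location from the test file's own path (string-replacing "tests" with "airflow" breaks once the tests leave the airflow/ tree) and instead resolve it from the imported package. The pattern in isolation, assuming the amazon provider package is importable:

    import os

    from airflow.providers.amazon import __file__ as provider_path

    # provider.yaml ships next to the provider package's __init__.py, so
    # resolving it from the package works regardless of where the test
    # files themselves live.
    config_filename = os.path.join(os.path.dirname(provider_path), "provider.yaml")
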
diff --git a/tests/providers/amazon/aws/hooks/test_appflow.py b/providers/tests/amazon/aws/hooks/test_appflow.py
similarity index 100%
rename from tests/providers/amazon/aws/hooks/test_appflow.py
rename to providers/tests/amazon/aws/hooks/test_appflow.py
diff --git a/tests/providers/amazon/aws/hooks/test_athena.py b/providers/tests/amazon/aws/hooks/test_athena.py
similarity index 100%
rename from tests/providers/amazon/aws/hooks/test_athena.py
rename to providers/tests/amazon/aws/hooks/test_athena.py
diff --git a/tests/providers/amazon/aws/hooks/test_athena_sql.py b/providers/tests/amazon/aws/hooks/test_athena_sql.py
similarity index 100%
rename from tests/providers/amazon/aws/hooks/test_athena_sql.py
rename to providers/tests/amazon/aws/hooks/test_athena_sql.py
diff --git a/tests/providers/amazon/aws/hooks/test_base_aws.py b/providers/tests/amazon/aws/hooks/test_base_aws.py
similarity index 98%
rename from tests/providers/amazon/aws/hooks/test_base_aws.py
rename to providers/tests/amazon/aws/hooks/test_base_aws.py
index 0957e6a928aa..c58993d748f2 100644
--- a/tests/providers/amazon/aws/hooks/test_base_aws.py
+++ b/providers/tests/amazon/aws/hooks/test_base_aws.py
@@ -49,7 +49,8 @@
     resolve_session_factory,
 )
 from airflow.providers.amazon.aws.utils.connection_wrapper import AwsConnectionWrapper
-from tests.test_utils.config import conf_vars
+
+from dev.tests_common.test_utils.config import conf_vars
 
 pytest.importorskip("aiobotocore")
@@ -140,7 +141,7 @@ def mock_conn(request):
 class TestSessionFactory:
     @conf_vars(
-        {("aws", "session_factory"): "tests.providers.amazon.aws.hooks.test_base_aws.CustomSessionFactory"}
+        {("aws", "session_factory"): "providers.tests.amazon.aws.hooks.test_base_aws.CustomSessionFactory"}
     )
     def test_resolve_session_factory_class(self):
         cls = resolve_session_factory()
@@ -545,13 +546,13 @@ def import_mock(name, *args):
                 return mock_id_token_credentials
             return orig_import(name, *args)
 
-        with mock.patch("builtins.__import__", side_effect=import_mock), mock.patch.dict(
-            "os.environ", AIRFLOW_CONN_AWS_DEFAULT=mock_connection.get_uri()
-        ), mock.patch("airflow.providers.amazon.aws.hooks.base_aws.boto3") as mock_boto3, mock.patch(
-            "airflow.providers.amazon.aws.hooks.base_aws.botocore"
-        ) as mock_botocore, mock.patch(
-            "airflow.providers.amazon.aws.hooks.base_aws.botocore.session"
-        ) as mock_session:
+        with (
+            mock.patch("builtins.__import__", side_effect=import_mock),
+            mock.patch.dict("os.environ", AIRFLOW_CONN_AWS_DEFAULT=mock_connection.get_uri()),
+            mock.patch("airflow.providers.amazon.aws.hooks.base_aws.boto3") as mock_boto3,
+            mock.patch("airflow.providers.amazon.aws.hooks.base_aws.botocore") as mock_botocore,
+            mock.patch("airflow.providers.amazon.aws.hooks.base_aws.botocore.session") as mock_session,
+        ):
             hook = AwsBaseHook(aws_conn_id="aws_default", client_type="airflow_test")
 
             credentials_from_hook = hook.get_credentials()
@@ -701,12 +702,14 @@ def mock_assume_role_with_saml(**kwargs):
             }
             return sts_response
 
-        with mock.patch("builtins.__import__", side_effect=import_mock), mock.patch(
-            "airflow.providers.amazon.aws.hooks.base_aws.requests.Session.get"
-        ) as mock_get, mock.patch(
-            "airflow.providers.amazon.aws.hooks.base_aws.BaseSessionFactory._create_basic_session",
-            spec=boto3.session.Session,
-        ) as mocked_basic_session:
+        with (
+            mock.patch("builtins.__import__", side_effect=import_mock),
+            mock.patch("airflow.providers.amazon.aws.hooks.base_aws.requests.Session.get") as mock_get,
+            mock.patch(
+                "airflow.providers.amazon.aws.hooks.base_aws.BaseSessionFactory._create_basic_session",
+                spec=boto3.session.Session,
+            ) as mocked_basic_session,
+        ):
             mocked_basic_session.return_value.region_name = "us-east-2"
             mock_client = mocked_basic_session.return_value.client
             mock_client.return_value.assume_role_with_saml.side_effect = mock_assume_role_with_saml
diff --git a/tests/providers/amazon/aws/hooks/test_batch_client.py b/providers/tests/amazon/aws/hooks/test_batch_client.py
similarity index 98%
rename from tests/providers/amazon/aws/hooks/test_batch_client.py
rename to providers/tests/amazon/aws/hooks/test_batch_client.py
index 83385878b57f..98b89de0a60b 100644
--- a/tests/providers/amazon/aws/hooks/test_batch_client.py
+++ b/providers/tests/amazon/aws/hooks/test_batch_client.py
@@ -126,13 +126,11 @@ def test_wait_for_job_with_logs(self):
         thread_stop = mock.Mock(side_effect=lambda: time.sleep(2))
         thread_join = mock.Mock(side_effect=lambda: time.sleep(2))
 
-        with mock.patch.object(
-            batch_log_fetcher, "start", thread_start
-        ) as mock_fetcher_start, mock.patch.object(
-            batch_log_fetcher, "stop", thread_stop
-        ) as mock_fetcher_stop, mock.patch.object(
-            batch_log_fetcher, "join", thread_join
-        ) as mock_fetcher_join:
+        with (
+            mock.patch.object(batch_log_fetcher, "start", thread_start) as mock_fetcher_start,
+            mock.patch.object(batch_log_fetcher, "stop", thread_stop) as mock_fetcher_stop,
+            mock.patch.object(batch_log_fetcher, "join", thread_join) as mock_fetcher_join,
+        ):
             self.batch_client.wait_for_job(JOB_ID, get_batch_log_fetcher=mock_get_batch_log_fetcher)
             mock_get_batch_log_fetcher.assert_called_with(JOB_ID)
             mock_fetcher_start.assert_called_once()
diff --git a/tests/providers/amazon/aws/hooks/test_batch_waiters.py b/providers/tests/amazon/aws/hooks/test_batch_waiters.py
similarity index 97%
rename from tests/providers/amazon/aws/hooks/test_batch_waiters.py
rename to providers/tests/amazon/aws/hooks/test_batch_waiters.py
index 72f2061b902c..d67b25d07f2a 100644
--- a/tests/providers/amazon/aws/hooks/test_batch_waiters.py
+++ b/providers/tests/amazon/aws/hooks/test_batch_waiters.py
@@ -157,13 +157,12 @@ def test_wait_for_job_with_cloudwatch_logs(self):
         thread_stop = mock.Mock(side_effect=lambda: time.sleep(2))
         thread_join = mock.Mock(side_effect=lambda: time.sleep(2))
 
-        with mock.patch.object(self.batch_waiters, "get_waiter") as mock_get_waiter, mock.patch.object(
-            batch_log_fetcher, "start", thread_start
-        ) as mock_fetcher_start, mock.patch.object(
-            batch_log_fetcher, "stop", thread_stop
-        ) as mock_fetcher_stop, mock.patch.object(
-            batch_log_fetcher, "join", thread_join
-        ) as mock_fetcher_join:
+        with (
+            mock.patch.object(self.batch_waiters, "get_waiter") as mock_get_waiter,
+            mock.patch.object(batch_log_fetcher, "start", thread_start) as mock_fetcher_start,
+            mock.patch.object(batch_log_fetcher, "stop", thread_stop) as mock_fetcher_stop,
+            mock.patch.object(batch_log_fetcher, "join", thread_join) as mock_fetcher_join,
+        ):
             # Run the wait_for_job method
             self.batch_waiters.wait_for_job(self.job_id, get_batch_log_fetcher=mock_get_batch_log_fetcher)
diff --git a/tests/providers/amazon/aws/hooks/test_bedrock.py b/providers/tests/amazon/aws/hooks/test_bedrock.py
similarity index 100%
rename from tests/providers/amazon/aws/hooks/test_bedrock.py
rename to providers/tests/amazon/aws/hooks/test_bedrock.py
tests/providers/amazon/aws/hooks/test_chime.py rename to providers/tests/amazon/aws/hooks/test_chime.py diff --git a/tests/providers/amazon/aws/hooks/test_cloud_formation.py b/providers/tests/amazon/aws/hooks/test_cloud_formation.py similarity index 100% rename from tests/providers/amazon/aws/hooks/test_cloud_formation.py rename to providers/tests/amazon/aws/hooks/test_cloud_formation.py diff --git a/tests/providers/amazon/aws/hooks/test_comprehend.py b/providers/tests/amazon/aws/hooks/test_comprehend.py similarity index 100% rename from tests/providers/amazon/aws/hooks/test_comprehend.py rename to providers/tests/amazon/aws/hooks/test_comprehend.py diff --git a/tests/providers/amazon/aws/hooks/test_datasync.py b/providers/tests/amazon/aws/hooks/test_datasync.py similarity index 100% rename from tests/providers/amazon/aws/hooks/test_datasync.py rename to providers/tests/amazon/aws/hooks/test_datasync.py diff --git a/tests/providers/amazon/aws/hooks/test_dms.py b/providers/tests/amazon/aws/hooks/test_dms.py similarity index 100% rename from tests/providers/amazon/aws/hooks/test_dms.py rename to providers/tests/amazon/aws/hooks/test_dms.py diff --git a/tests/providers/amazon/aws/hooks/test_dynamodb.py b/providers/tests/amazon/aws/hooks/test_dynamodb.py similarity index 100% rename from tests/providers/amazon/aws/hooks/test_dynamodb.py rename to providers/tests/amazon/aws/hooks/test_dynamodb.py diff --git a/tests/providers/amazon/aws/hooks/test_ec2.py b/providers/tests/amazon/aws/hooks/test_ec2.py similarity index 100% rename from tests/providers/amazon/aws/hooks/test_ec2.py rename to providers/tests/amazon/aws/hooks/test_ec2.py diff --git a/tests/providers/amazon/aws/hooks/test_ecr.py b/providers/tests/amazon/aws/hooks/test_ecr.py similarity index 100% rename from tests/providers/amazon/aws/hooks/test_ecr.py rename to providers/tests/amazon/aws/hooks/test_ecr.py diff --git a/tests/providers/amazon/aws/hooks/test_ecs.py b/providers/tests/amazon/aws/hooks/test_ecs.py similarity index 100% rename from tests/providers/amazon/aws/hooks/test_ecs.py rename to providers/tests/amazon/aws/hooks/test_ecs.py diff --git a/tests/providers/amazon/aws/hooks/test_eks.py b/providers/tests/amazon/aws/hooks/test_eks.py similarity index 99% rename from tests/providers/amazon/aws/hooks/test_eks.py rename to providers/tests/amazon/aws/hooks/test_eks.py index ae0f0e19949f..cf3638d30d37 100644 --- a/tests/providers/amazon/aws/hooks/test_eks.py +++ b/providers/tests/amazon/aws/hooks/test_eks.py @@ -53,7 +53,8 @@ ) from airflow.providers.amazon.aws.hooks.eks import COMMAND, EksHook -from tests.providers.amazon.aws.utils.eks_test_constants import ( + +from providers.tests.amazon.aws.utils.eks_test_constants import ( DEFAULT_CONN_ID, DEFAULT_NAMESPACE, DISK_SIZE, @@ -82,7 +83,7 @@ RegExTemplates, ResponseAttributes, ) -from tests.providers.amazon.aws.utils.eks_test_utils import ( +from providers.tests.amazon.aws.utils.eks_test_utils import ( attributes_to_test, generate_clusters, generate_dict, diff --git a/tests/providers/amazon/aws/hooks/test_elasticache_replication_group.py b/providers/tests/amazon/aws/hooks/test_elasticache_replication_group.py similarity index 100% rename from tests/providers/amazon/aws/hooks/test_elasticache_replication_group.py rename to providers/tests/amazon/aws/hooks/test_elasticache_replication_group.py diff --git a/tests/providers/amazon/aws/hooks/test_emr.py b/providers/tests/amazon/aws/hooks/test_emr.py similarity index 100% rename from tests/providers/amazon/aws/hooks/test_emr.py rename 
diff --git a/tests/providers/amazon/aws/hooks/test_emr_containers.py b/providers/tests/amazon/aws/hooks/test_emr_containers.py
similarity index 100%
rename from tests/providers/amazon/aws/hooks/test_emr_containers.py
rename to providers/tests/amazon/aws/hooks/test_emr_containers.py
diff --git a/tests/providers/amazon/aws/hooks/test_emr_serverless.py b/providers/tests/amazon/aws/hooks/test_emr_serverless.py
similarity index 100%
rename from tests/providers/amazon/aws/hooks/test_emr_serverless.py
rename to providers/tests/amazon/aws/hooks/test_emr_serverless.py
diff --git a/tests/providers/amazon/aws/hooks/test_eventbridge.py b/providers/tests/amazon/aws/hooks/test_eventbridge.py
similarity index 100%
rename from tests/providers/amazon/aws/hooks/test_eventbridge.py
rename to providers/tests/amazon/aws/hooks/test_eventbridge.py
diff --git a/tests/providers/amazon/aws/hooks/test_glacier.py b/providers/tests/amazon/aws/hooks/test_glacier.py
similarity index 100%
rename from tests/providers/amazon/aws/hooks/test_glacier.py
rename to providers/tests/amazon/aws/hooks/test_glacier.py
diff --git a/tests/providers/amazon/aws/hooks/test_glue.py b/providers/tests/amazon/aws/hooks/test_glue.py
similarity index 100%
rename from tests/providers/amazon/aws/hooks/test_glue.py
rename to providers/tests/amazon/aws/hooks/test_glue.py
diff --git a/tests/providers/amazon/aws/hooks/test_glue_catalog.py b/providers/tests/amazon/aws/hooks/test_glue_catalog.py
similarity index 100%
rename from tests/providers/amazon/aws/hooks/test_glue_catalog.py
rename to providers/tests/amazon/aws/hooks/test_glue_catalog.py
diff --git a/tests/providers/amazon/aws/hooks/test_glue_crawler.py b/providers/tests/amazon/aws/hooks/test_glue_crawler.py
similarity index 100%
rename from tests/providers/amazon/aws/hooks/test_glue_crawler.py
rename to providers/tests/amazon/aws/hooks/test_glue_crawler.py
diff --git a/tests/providers/amazon/aws/hooks/test_glue_databrew.py b/providers/tests/amazon/aws/hooks/test_glue_databrew.py
similarity index 100%
rename from tests/providers/amazon/aws/hooks/test_glue_databrew.py
rename to providers/tests/amazon/aws/hooks/test_glue_databrew.py
diff --git a/tests/providers/amazon/aws/hooks/test_hooks_signature.py b/providers/tests/amazon/aws/hooks/test_hooks_signature.py
similarity index 100%
rename from tests/providers/amazon/aws/hooks/test_hooks_signature.py
rename to providers/tests/amazon/aws/hooks/test_hooks_signature.py
diff --git a/tests/providers/amazon/aws/hooks/test_kinesis.py b/providers/tests/amazon/aws/hooks/test_kinesis.py
similarity index 100%
rename from tests/providers/amazon/aws/hooks/test_kinesis.py
rename to providers/tests/amazon/aws/hooks/test_kinesis.py
diff --git a/tests/providers/amazon/aws/hooks/test_kinesis_analytics.py b/providers/tests/amazon/aws/hooks/test_kinesis_analytics.py
similarity index 100%
rename from tests/providers/amazon/aws/hooks/test_kinesis_analytics.py
rename to providers/tests/amazon/aws/hooks/test_kinesis_analytics.py
diff --git a/tests/providers/amazon/aws/hooks/test_lambda_function.py b/providers/tests/amazon/aws/hooks/test_lambda_function.py
similarity index 100%
rename from tests/providers/amazon/aws/hooks/test_lambda_function.py
rename to providers/tests/amazon/aws/hooks/test_lambda_function.py
diff --git a/tests/providers/amazon/aws/hooks/test_logs.py b/providers/tests/amazon/aws/hooks/test_logs.py
similarity index 100%
rename from tests/providers/amazon/aws/hooks/test_logs.py
rename to providers/tests/amazon/aws/hooks/test_logs.py
diff --git a/tests/providers/amazon/aws/hooks/test_neptune.py b/providers/tests/amazon/aws/hooks/test_neptune.py
similarity index 100%
rename from tests/providers/amazon/aws/hooks/test_neptune.py
rename to providers/tests/amazon/aws/hooks/test_neptune.py
diff --git a/tests/providers/amazon/aws/hooks/test_opensearch_serverless.py b/providers/tests/amazon/aws/hooks/test_opensearch_serverless.py
similarity index 100%
rename from tests/providers/amazon/aws/hooks/test_opensearch_serverless.py
rename to providers/tests/amazon/aws/hooks/test_opensearch_serverless.py
diff --git a/tests/providers/amazon/aws/hooks/test_quicksight.py b/providers/tests/amazon/aws/hooks/test_quicksight.py
similarity index 100%
rename from tests/providers/amazon/aws/hooks/test_quicksight.py
rename to providers/tests/amazon/aws/hooks/test_quicksight.py
diff --git a/tests/providers/amazon/aws/hooks/test_rds.py b/providers/tests/amazon/aws/hooks/test_rds.py
similarity index 100%
rename from tests/providers/amazon/aws/hooks/test_rds.py
rename to providers/tests/amazon/aws/hooks/test_rds.py
diff --git a/tests/providers/amazon/aws/hooks/test_redshift_cluster.py b/providers/tests/amazon/aws/hooks/test_redshift_cluster.py
similarity index 100%
rename from tests/providers/amazon/aws/hooks/test_redshift_cluster.py
rename to providers/tests/amazon/aws/hooks/test_redshift_cluster.py
diff --git a/tests/providers/amazon/aws/hooks/test_redshift_data.py b/providers/tests/amazon/aws/hooks/test_redshift_data.py
similarity index 100%
rename from tests/providers/amazon/aws/hooks/test_redshift_data.py
rename to providers/tests/amazon/aws/hooks/test_redshift_data.py
diff --git a/tests/providers/amazon/aws/hooks/test_redshift_sql.py b/providers/tests/amazon/aws/hooks/test_redshift_sql.py
similarity index 100%
rename from tests/providers/amazon/aws/hooks/test_redshift_sql.py
rename to providers/tests/amazon/aws/hooks/test_redshift_sql.py
diff --git a/tests/providers/amazon/aws/hooks/test_s3.py b/providers/tests/amazon/aws/hooks/test_s3.py
similarity index 99%
rename from tests/providers/amazon/aws/hooks/test_s3.py
rename to providers/tests/amazon/aws/hooks/test_s3.py
index 43c4b94445b6..2e9a010006ba 100644
--- a/tests/providers/amazon/aws/hooks/test_s3.py
+++ b/providers/tests/amazon/aws/hooks/test_s3.py
@@ -42,7 +42,8 @@
     unify_bucket_name_and_key,
 )
 from airflow.utils.timezone import datetime
-from tests.test_utils.compat import AIRFLOW_V_2_10_PLUS
+
+from dev.tests_common.test_utils.compat import AIRFLOW_V_2_10_PLUS


 @pytest.fixture
diff --git a/tests/providers/amazon/aws/hooks/test_sagemaker.py b/providers/tests/amazon/aws/hooks/test_sagemaker.py
similarity index 100%
rename from tests/providers/amazon/aws/hooks/test_sagemaker.py
rename to providers/tests/amazon/aws/hooks/test_sagemaker.py
diff --git a/tests/providers/amazon/aws/hooks/test_secrets_manager.py b/providers/tests/amazon/aws/hooks/test_secrets_manager.py
similarity index 100%
rename from tests/providers/amazon/aws/hooks/test_secrets_manager.py
rename to providers/tests/amazon/aws/hooks/test_secrets_manager.py
diff --git a/tests/providers/amazon/aws/hooks/test_ses.py b/providers/tests/amazon/aws/hooks/test_ses.py
similarity index 100%
rename from tests/providers/amazon/aws/hooks/test_ses.py
rename to providers/tests/amazon/aws/hooks/test_ses.py
diff --git a/tests/providers/amazon/aws/hooks/test_sns.py b/providers/tests/amazon/aws/hooks/test_sns.py
similarity index 100%
rename from tests/providers/amazon/aws/hooks/test_sns.py
rename to providers/tests/amazon/aws/hooks/test_sns.py
diff --git a/tests/providers/amazon/aws/hooks/test_sqs.py b/providers/tests/amazon/aws/hooks/test_sqs.py
similarity index 100%
rename from tests/providers/amazon/aws/hooks/test_sqs.py
rename to providers/tests/amazon/aws/hooks/test_sqs.py
diff --git a/tests/providers/amazon/aws/hooks/test_ssm.py b/providers/tests/amazon/aws/hooks/test_ssm.py
similarity index 100%
rename from tests/providers/amazon/aws/hooks/test_ssm.py
rename to providers/tests/amazon/aws/hooks/test_ssm.py
diff --git a/tests/providers/amazon/aws/hooks/test_step_function.py b/providers/tests/amazon/aws/hooks/test_step_function.py
similarity index 100%
rename from tests/providers/amazon/aws/hooks/test_step_function.py
rename to providers/tests/amazon/aws/hooks/test_step_function.py
diff --git a/tests/providers/amazon/aws/hooks/test_sts.py b/providers/tests/amazon/aws/hooks/test_sts.py
similarity index 100%
rename from tests/providers/amazon/aws/hooks/test_sts.py
rename to providers/tests/amazon/aws/hooks/test_sts.py
diff --git a/tests/providers/amazon/aws/hooks/test_verified_permissions.py b/providers/tests/amazon/aws/hooks/test_verified_permissions.py
similarity index 100%
rename from tests/providers/amazon/aws/hooks/test_verified_permissions.py
rename to providers/tests/amazon/aws/hooks/test_verified_permissions.py
diff --git a/tests/integration/providers/redis/__init__.py b/providers/tests/amazon/aws/infrastructure/__init__.py
similarity index 100%
rename from tests/integration/providers/redis/__init__.py
rename to providers/tests/amazon/aws/infrastructure/__init__.py
diff --git a/tests/integration/providers/redis/hooks/__init__.py b/providers/tests/amazon/aws/infrastructure/example_s3_to_redshift/__init__.py
similarity index 100%
rename from tests/integration/providers/redis/hooks/__init__.py
rename to providers/tests/amazon/aws/infrastructure/example_s3_to_redshift/__init__.py
diff --git a/tests/providers/amazon/aws/infrastructure/example_s3_to_redshift/outputs.tf b/providers/tests/amazon/aws/infrastructure/example_s3_to_redshift/outputs.tf
similarity index 100%
rename from tests/providers/amazon/aws/infrastructure/example_s3_to_redshift/outputs.tf
rename to providers/tests/amazon/aws/infrastructure/example_s3_to_redshift/outputs.tf
diff --git a/tests/providers/amazon/aws/infrastructure/example_s3_to_redshift/resources.tf b/providers/tests/amazon/aws/infrastructure/example_s3_to_redshift/resources.tf
similarity index 100%
rename from tests/providers/amazon/aws/infrastructure/example_s3_to_redshift/resources.tf
rename to providers/tests/amazon/aws/infrastructure/example_s3_to_redshift/resources.tf
diff --git a/tests/providers/amazon/aws/infrastructure/example_s3_to_redshift/variables.tf b/providers/tests/amazon/aws/infrastructure/example_s3_to_redshift/variables.tf
similarity index 100%
rename from tests/providers/amazon/aws/infrastructure/example_s3_to_redshift/variables.tf
rename to providers/tests/amazon/aws/infrastructure/example_s3_to_redshift/variables.tf
diff --git a/tests/integration/providers/redis/operators/__init__.py b/providers/tests/amazon/aws/links/__init__.py
similarity index 100%
rename from tests/integration/providers/redis/operators/__init__.py
rename to providers/tests/amazon/aws/links/__init__.py
diff --git a/tests/providers/amazon/aws/links/test_athena.py b/providers/tests/amazon/aws/links/test_athena.py
similarity index 95%
rename from tests/providers/amazon/aws/links/test_athena.py
rename to providers/tests/amazon/aws/links/test_athena.py
index 1729fdf4e5c2..2da3f3fa441d 100644
--- a/tests/providers/amazon/aws/links/test_athena.py
+++ b/providers/tests/amazon/aws/links/test_athena.py
@@ -17,7 +17,8 @@
 from __future__ import annotations

 from airflow.providers.amazon.aws.links.athena import AthenaQueryResultsLink
-from tests.providers.amazon.aws.links.test_base_aws import BaseAwsLinksTestCase
+
+from providers.tests.amazon.aws.links.test_base_aws import BaseAwsLinksTestCase


 class TestAthenaQueryResultsLink(BaseAwsLinksTestCase):
diff --git a/tests/providers/amazon/aws/links/test_base_aws.py b/providers/tests/amazon/aws/links/test_base_aws.py
similarity index 98%
rename from tests/providers/amazon/aws/links/test_base_aws.py
rename to providers/tests/amazon/aws/links/test_base_aws.py
index 1afcfea0a826..78622870806f 100644
--- a/tests/providers/amazon/aws/links/test_base_aws.py
+++ b/providers/tests/amazon/aws/links/test_base_aws.py
@@ -25,8 +25,9 @@
 from airflow.models.xcom import XCom
 from airflow.providers.amazon.aws.links.base_aws import BaseAwsLink
 from airflow.serialization.serialized_objects import SerializedDAG
-from tests.test_utils.compat import AIRFLOW_V_3_0_PLUS
-from tests.test_utils.mock_operators import MockOperator
+
+from dev.tests_common.test_utils.compat import AIRFLOW_V_3_0_PLUS
+from dev.tests_common.test_utils.mock_operators import MockOperator

 if TYPE_CHECKING:
     from airflow.models.taskinstance import TaskInstance
diff --git a/tests/providers/amazon/aws/links/test_batch.py b/providers/tests/amazon/aws/links/test_batch.py
similarity index 97%
rename from tests/providers/amazon/aws/links/test_batch.py
rename to providers/tests/amazon/aws/links/test_batch.py
index 2c8c9b59cb83..eafe49260c9b 100644
--- a/tests/providers/amazon/aws/links/test_batch.py
+++ b/providers/tests/amazon/aws/links/test_batch.py
@@ -21,7 +21,8 @@
     BatchJobDetailsLink,
     BatchJobQueueLink,
 )
-from tests.providers.amazon.aws.links.test_base_aws import BaseAwsLinksTestCase
+
+from providers.tests.amazon.aws.links.test_base_aws import BaseAwsLinksTestCase


 class TestBatchJobDefinitionLink(BaseAwsLinksTestCase):
diff --git a/tests/providers/amazon/aws/links/test_emr.py b/providers/tests/amazon/aws/links/test_emr.py
similarity index 99%
rename from tests/providers/amazon/aws/links/test_emr.py
rename to providers/tests/amazon/aws/links/test_emr.py
index 50db654d4914..7510b2a2f50d 100644
--- a/tests/providers/amazon/aws/links/test_emr.py
+++ b/providers/tests/amazon/aws/links/test_emr.py
@@ -32,7 +32,8 @@
     get_log_uri,
     get_serverless_dashboard_url,
 )
-from tests.providers.amazon.aws.links.test_base_aws import BaseAwsLinksTestCase
+
+from providers.tests.amazon.aws.links.test_base_aws import BaseAwsLinksTestCase


 class TestEmrClusterLink(BaseAwsLinksTestCase):
diff --git a/tests/providers/amazon/aws/links/test_glue.py b/providers/tests/amazon/aws/links/test_glue.py
similarity index 95%
rename from tests/providers/amazon/aws/links/test_glue.py
rename to providers/tests/amazon/aws/links/test_glue.py
index 5f929cd3e950..772ac5ee9c77 100644
--- a/tests/providers/amazon/aws/links/test_glue.py
+++ b/providers/tests/amazon/aws/links/test_glue.py
@@ -17,7 +17,8 @@
 from __future__ import annotations

 from airflow.providers.amazon.aws.links.glue import GlueJobRunDetailsLink
-from tests.providers.amazon.aws.links.test_base_aws import BaseAwsLinksTestCase
+
+from providers.tests.amazon.aws.links.test_base_aws import BaseAwsLinksTestCase


 class TestGlueJobRunDetailsLink(BaseAwsLinksTestCase):
diff --git a/tests/providers/amazon/aws/links/test_logs.py b/providers/tests/amazon/aws/links/test_logs.py
similarity index 96%
rename from tests/providers/amazon/aws/links/test_logs.py
rename to providers/tests/amazon/aws/links/test_logs.py
index 991a8bc6f0c1..b596e7517eaf 100644
--- a/tests/providers/amazon/aws/links/test_logs.py
+++ b/providers/tests/amazon/aws/links/test_logs.py
@@ -17,7 +17,8 @@
 from __future__ import annotations

 from airflow.providers.amazon.aws.links.logs import CloudWatchEventsLink
-from tests.providers.amazon.aws.links.test_base_aws import BaseAwsLinksTestCase
+
+from providers.tests.amazon.aws.links.test_base_aws import BaseAwsLinksTestCase


 class TestCloudWatchEventsLink(BaseAwsLinksTestCase):
diff --git a/tests/providers/amazon/aws/links/test_step_function.py b/providers/tests/amazon/aws/links/test_step_function.py
similarity index 97%
rename from tests/providers/amazon/aws/links/test_step_function.py
rename to providers/tests/amazon/aws/links/test_step_function.py
index 3c6c9cc7cdc2..fab1205689dc 100644
--- a/tests/providers/amazon/aws/links/test_step_function.py
+++ b/providers/tests/amazon/aws/links/test_step_function.py
@@ -22,7 +22,8 @@
     StateMachineDetailsLink,
     StateMachineExecutionsDetailsLink,
 )
-from tests.providers.amazon.aws.links.test_base_aws import BaseAwsLinksTestCase
+
+from providers.tests.amazon.aws.links.test_base_aws import BaseAwsLinksTestCase


 class TestStateMachineDetailsLink(BaseAwsLinksTestCase):
diff --git a/tests/integration/providers/redis/sensors/__init__.py b/providers/tests/amazon/aws/log/__init__.py
similarity index 100%
rename from tests/integration/providers/redis/sensors/__init__.py
rename to providers/tests/amazon/aws/log/__init__.py
diff --git a/tests/providers/amazon/aws/log/test_cloudwatch_task_handler.py b/providers/tests/amazon/aws/log/test_cloudwatch_task_handler.py
similarity index 99%
rename from tests/providers/amazon/aws/log/test_cloudwatch_task_handler.py
rename to providers/tests/amazon/aws/log/test_cloudwatch_task_handler.py
index 801935a5496a..c78fab89e197 100644
--- a/tests/providers/amazon/aws/log/test_cloudwatch_task_handler.py
+++ b/providers/tests/amazon/aws/log/test_cloudwatch_task_handler.py
@@ -35,7 +35,8 @@
 from airflow.providers.amazon.aws.utils import datetime_to_epoch_utc_ms
 from airflow.utils.state import State
 from airflow.utils.timezone import datetime
-from tests.test_utils.config import conf_vars
+
+from dev.tests_common.test_utils.config import conf_vars


 def get_time_str(time_in_milliseconds):
diff --git a/tests/providers/amazon/aws/log/test_s3_task_handler.py b/providers/tests/amazon/aws/log/test_s3_task_handler.py
similarity index 99%
rename from tests/providers/amazon/aws/log/test_s3_task_handler.py
rename to providers/tests/amazon/aws/log/test_s3_task_handler.py
index fcb5d3c7833b..9819cf95e9cc 100644
--- a/tests/providers/amazon/aws/log/test_s3_task_handler.py
+++ b/providers/tests/amazon/aws/log/test_s3_task_handler.py
@@ -33,7 +33,8 @@
 from airflow.providers.amazon.aws.log.s3_task_handler import S3TaskHandler
 from airflow.utils.state import State, TaskInstanceState
 from airflow.utils.timezone import datetime
-from tests.test_utils.config import conf_vars
+
+from dev.tests_common.test_utils.config import conf_vars


 @pytest.fixture(autouse=True)
diff --git a/tests/integration/providers/trino/__init__.py b/providers/tests/amazon/aws/notifications/__init__.py
similarity index 100%
rename from tests/integration/providers/trino/__init__.py
rename to providers/tests/amazon/aws/notifications/__init__.py
diff --git a/tests/providers/amazon/aws/notifications/test_chime.py b/providers/tests/amazon/aws/notifications/test_chime.py
similarity index 100%
rename from tests/providers/amazon/aws/notifications/test_chime.py
rename to providers/tests/amazon/aws/notifications/test_chime.py
diff --git a/tests/providers/amazon/aws/notifications/test_sns.py b/providers/tests/amazon/aws/notifications/test_sns.py
similarity index 100%
rename from tests/providers/amazon/aws/notifications/test_sns.py
rename to providers/tests/amazon/aws/notifications/test_sns.py
diff --git a/tests/providers/amazon/aws/notifications/test_sqs.py b/providers/tests/amazon/aws/notifications/test_sqs.py
similarity index 100%
rename from tests/providers/amazon/aws/notifications/test_sqs.py
rename to providers/tests/amazon/aws/notifications/test_sqs.py
diff --git a/tests/integration/providers/trino/hooks/__init__.py b/providers/tests/amazon/aws/operators/__init__.py
similarity index 100%
rename from tests/integration/providers/trino/hooks/__init__.py
rename to providers/tests/amazon/aws/operators/__init__.py
diff --git a/tests/providers/amazon/aws/operators/athena_metadata.json b/providers/tests/amazon/aws/operators/athena_metadata.json
similarity index 100%
rename from tests/providers/amazon/aws/operators/athena_metadata.json
rename to providers/tests/amazon/aws/operators/athena_metadata.json
diff --git a/tests/providers/amazon/aws/operators/test_appflow.py b/providers/tests/amazon/aws/operators/test_appflow.py
similarity index 100%
rename from tests/providers/amazon/aws/operators/test_appflow.py
rename to providers/tests/amazon/aws/operators/test_appflow.py
diff --git a/tests/providers/amazon/aws/operators/test_athena.py b/providers/tests/amazon/aws/operators/test_athena.py
similarity index 99%
rename from tests/providers/amazon/aws/operators/test_athena.py
rename to providers/tests/amazon/aws/operators/test_athena.py
index 102d1fe31e5c..e4d7d6622a09 100644
--- a/tests/providers/amazon/aws/operators/test_athena.py
+++ b/providers/tests/amazon/aws/operators/test_athena.py
@@ -17,6 +17,7 @@
 from __future__ import annotations

 import json
+import os
 from unittest import mock

 import pytest
@@ -39,7 +40,8 @@
 from airflow.utils import timezone
 from airflow.utils.timezone import datetime
 from airflow.utils.types import DagRunType
-from tests.providers.amazon.aws.utils.test_template_fields import validate_template_fields
+
+from providers.tests.amazon.aws.utils.test_template_fields import validate_template_fields

 TEST_DAG_ID = "unit_tests"
 DEFAULT_DATE = datetime(2018, 1, 1)
@@ -299,7 +301,7 @@ def test_operator_openlineage_data(self, mock_conn, mock_region_name):
         mock_region_name.return_value = "eu-west-1"

         def mock_get_table_metadata(CatalogName, DatabaseName, TableName):
-            with open("tests/providers/amazon/aws/operators/athena_metadata.json") as f:
+            with open(os.path.dirname(__file__) + "/athena_metadata.json") as f:
                 return json.load(f)[TableName]

         mock_conn.return_value.get_table_metadata = mock_get_table_metadata
diff --git a/tests/providers/amazon/aws/operators/test_base_aws.py b/providers/tests/amazon/aws/operators/test_base_aws.py
similarity index 100%
rename from tests/providers/amazon/aws/operators/test_base_aws.py
rename to providers/tests/amazon/aws/operators/test_base_aws.py
diff --git a/tests/providers/amazon/aws/operators/test_batch.py b/providers/tests/amazon/aws/operators/test_batch.py
similarity index 100%
rename from tests/providers/amazon/aws/operators/test_batch.py
rename to providers/tests/amazon/aws/operators/test_batch.py
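One behavioral fix worth noting in the test_athena.py hunk above: because the test file moved, the JSON fixture is now opened relative to the test module via `os.path.dirname(__file__)` rather than via a path relative to the repository root, so the test no longer depends on the working directory pytest is launched from. A small sketch of the idiom, with an illustrative file name rather than one from the diff:

# Sketch: load a JSON fixture that sits next to the current module,
# independent of the process's working directory.
import json
import os


def load_fixture():
    fixture_path = os.path.join(os.path.dirname(__file__), "fixture.json")  # hypothetical file
    with open(fixture_path) as f:
        return json.load(f)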
diff --git a/tests/providers/amazon/aws/operators/test_bedrock.py b/providers/tests/amazon/aws/operators/test_bedrock.py
similarity index 99%
rename from tests/providers/amazon/aws/operators/test_bedrock.py
rename to providers/tests/amazon/aws/operators/test_bedrock.py
index 8cbb67d6f50d..5e8cd32e1a4d 100644
--- a/tests/providers/amazon/aws/operators/test_bedrock.py
+++ b/providers/tests/amazon/aws/operators/test_bedrock.py
@@ -35,7 +35,8 @@
     BedrockInvokeModelOperator,
     BedrockRaGOperator,
 )
-from tests.providers.amazon.aws.utils.test_template_fields import validate_template_fields
+
+from providers.tests.amazon.aws.utils.test_template_fields import validate_template_fields

 if TYPE_CHECKING:
     from airflow.providers.amazon.aws.hooks.base_aws import BaseAwsConnection
diff --git a/tests/providers/amazon/aws/operators/test_cloud_formation.py b/providers/tests/amazon/aws/operators/test_cloud_formation.py
similarity index 98%
rename from tests/providers/amazon/aws/operators/test_cloud_formation.py
rename to providers/tests/amazon/aws/operators/test_cloud_formation.py
index 4d8fb4d12bd3..47b1659b426f 100644
--- a/tests/providers/amazon/aws/operators/test_cloud_formation.py
+++ b/providers/tests/amazon/aws/operators/test_cloud_formation.py
@@ -28,7 +28,8 @@
     CloudFormationDeleteStackOperator,
 )
 from airflow.utils import timezone
-from tests.providers.amazon.aws.utils.test_template_fields import validate_template_fields
+
+from providers.tests.amazon.aws.utils.test_template_fields import validate_template_fields

 DEFAULT_DATE = timezone.datetime(2019, 1, 1)
 DEFAULT_ARGS = {"owner": "airflow", "start_date": DEFAULT_DATE}
diff --git a/tests/providers/amazon/aws/operators/test_comprehend.py b/providers/tests/amazon/aws/operators/test_comprehend.py
similarity index 99%
rename from tests/providers/amazon/aws/operators/test_comprehend.py
rename to providers/tests/amazon/aws/operators/test_comprehend.py
index a86b779b1d50..170f7b676082 100644
--- a/tests/providers/amazon/aws/operators/test_comprehend.py
+++ b/providers/tests/amazon/aws/operators/test_comprehend.py
@@ -29,7 +29,8 @@
     ComprehendStartPiiEntitiesDetectionJobOperator,
 )
 from airflow.utils.types import NOTSET
-from tests.providers.amazon.aws.utils.test_template_fields import validate_template_fields
+
+from providers.tests.amazon.aws.utils.test_template_fields import validate_template_fields

 if TYPE_CHECKING:
     from airflow.providers.amazon.aws.hooks.base_aws import BaseAwsConnection
diff --git a/tests/providers/amazon/aws/operators/test_datasync.py b/providers/tests/amazon/aws/operators/test_datasync.py
similarity index 99%
rename from tests/providers/amazon/aws/operators/test_datasync.py
rename to providers/tests/amazon/aws/operators/test_datasync.py
index 18b0e86103c0..52c5863b8aa7 100644
--- a/tests/providers/amazon/aws/operators/test_datasync.py
+++ b/providers/tests/amazon/aws/operators/test_datasync.py
@@ -29,7 +29,8 @@
 from airflow.utils import timezone
 from airflow.utils.timezone import datetime
 from airflow.utils.types import DagRunType
-from tests.providers.amazon.aws.utils.test_template_fields import validate_template_fields
+
+from providers.tests.amazon.aws.utils.test_template_fields import validate_template_fields

 TEST_DAG_ID = "unit_tests"
 DEFAULT_DATE = datetime(2018, 1, 1)
diff --git a/tests/providers/amazon/aws/operators/test_dms.py b/providers/tests/amazon/aws/operators/test_dms.py
similarity index 99%
rename from tests/providers/amazon/aws/operators/test_dms.py
rename to providers/tests/amazon/aws/operators/test_dms.py
index 2528edaef9e0..0c99dc2cd933 100644
--- a/tests/providers/amazon/aws/operators/test_dms.py
+++ b/providers/tests/amazon/aws/operators/test_dms.py
@@ -34,7 +34,8 @@
 )
 from airflow.utils import timezone
 from airflow.utils.types import DagRunType
-from tests.providers.amazon.aws.utils.test_template_fields import validate_template_fields
+
+from providers.tests.amazon.aws.utils.test_template_fields import validate_template_fields

 TASK_ARN = "test_arn"
diff --git a/tests/providers/amazon/aws/operators/test_ec2.py b/providers/tests/amazon/aws/operators/test_ec2.py
similarity index 99%
rename from tests/providers/amazon/aws/operators/test_ec2.py
rename to providers/tests/amazon/aws/operators/test_ec2.py
index a5ea81ff6ae8..b4b576c6567f 100644
--- a/tests/providers/amazon/aws/operators/test_ec2.py
+++ b/providers/tests/amazon/aws/operators/test_ec2.py
@@ -30,7 +30,8 @@
     EC2StopInstanceOperator,
     EC2TerminateInstanceOperator,
 )
-from tests.providers.amazon.aws.utils.test_template_fields import validate_template_fields
+
+from providers.tests.amazon.aws.utils.test_template_fields import validate_template_fields


 class BaseEc2TestClass:
diff --git a/tests/providers/amazon/aws/operators/test_ecs.py b/providers/tests/amazon/aws/operators/test_ecs.py
similarity index 99%
rename from tests/providers/amazon/aws/operators/test_ecs.py
rename to providers/tests/amazon/aws/operators/test_ecs.py
index d7b6c0d4e871..ed900acb7364 100644
--- a/tests/providers/amazon/aws/operators/test_ecs.py
+++ b/providers/tests/amazon/aws/operators/test_ecs.py
@@ -38,7 +38,8 @@
 from airflow.providers.amazon.aws.triggers.ecs import TaskDoneTrigger
 from airflow.providers.amazon.aws.utils.task_log_fetcher import AwsTaskLogFetcher
 from airflow.utils.types import NOTSET
-from tests.providers.amazon.aws.utils.test_template_fields import validate_template_fields
+
+from providers.tests.amazon.aws.utils.test_template_fields import validate_template_fields

 CLUSTER_NAME = "test_cluster"
 CONTAINER_NAME = "e1ed7aac-d9b2-4315-8726-d2432bf11868"
diff --git a/tests/providers/amazon/aws/operators/test_eks.py b/providers/tests/amazon/aws/operators/test_eks.py
similarity index 99%
rename from tests/providers/amazon/aws/operators/test_eks.py
rename to providers/tests/amazon/aws/operators/test_eks.py
index 2daa48462687..8105614849a3 100644
--- a/tests/providers/amazon/aws/operators/test_eks.py
+++ b/providers/tests/amazon/aws/operators/test_eks.py
@@ -41,7 +41,8 @@
 )
 from airflow.providers.cncf.kubernetes.utils.pod_manager import OnFinishAction
 from airflow.typing_compat import TypedDict
-from tests.providers.amazon.aws.utils.eks_test_constants import (
+
+from providers.tests.amazon.aws.utils.eks_test_constants import (
     NODEROLE_ARN,
     POD_EXECUTION_ROLE_ARN,
     RESOURCES_VPC_CONFIG,
@@ -50,9 +51,9 @@
     SUBNET_IDS,
     TASK_ID,
 )
-from tests.providers.amazon.aws.utils.eks_test_utils import convert_keys
-from tests.providers.amazon.aws.utils.test_template_fields import validate_template_fields
-from tests.providers.amazon.aws.utils.test_waiter import assert_expected_waiter_type
+from providers.tests.amazon.aws.utils.eks_test_utils import convert_keys
+from providers.tests.amazon.aws.utils.test_template_fields import validate_template_fields
+from providers.tests.amazon.aws.utils.test_waiter import assert_expected_waiter_type

 CLUSTER_NAME = "cluster1"
 NODEGROUP_NAME = "nodegroup1"
diff --git a/tests/providers/amazon/aws/operators/test_emr_add_steps.py b/providers/tests/amazon/aws/operators/test_emr_add_steps.py
similarity index 97%
rename from tests/providers/amazon/aws/operators/test_emr_add_steps.py
rename to providers/tests/amazon/aws/operators/test_emr_add_steps.py
index d5a999349aa5..4414ae432707 100644
--- a/tests/providers/amazon/aws/operators/test_emr_add_steps.py
+++ b/providers/tests/amazon/aws/operators/test_emr_add_steps.py
@@ -18,8 +18,8 @@
 from __future__ import annotations

 import json
-import os
 from datetime import timedelta
+from pathlib import Path
 from unittest.mock import MagicMock, call, patch

 import pytest
@@ -31,16 +31,14 @@
 from airflow.providers.amazon.aws.triggers.emr import EmrAddStepsTrigger
 from airflow.utils import timezone
 from airflow.utils.types import DagRunType
-from tests.providers.amazon.aws.utils.test_template_fields import validate_template_fields
-from tests.test_utils import AIRFLOW_MAIN_FOLDER
+
+from providers.tests.amazon.aws.utils.test_template_fields import validate_template_fields

 DEFAULT_DATE = timezone.datetime(2017, 1, 1)

 ADD_STEPS_SUCCESS_RETURN = {"ResponseMetadata": {"HTTPStatusCode": 200}, "StepIds": ["s-2LH3R5GW3A53T"]}

-TEMPLATE_SEARCHPATH = os.path.join(
-    AIRFLOW_MAIN_FOLDER, "tests", "providers", "amazon", "aws", "config_templates"
-)
+TEMPLATE_SEARCHPATH = Path(__file__).parents[1].joinpath("config_templates").as_posix()


 @pytest.fixture
diff --git a/tests/providers/amazon/aws/operators/test_emr_containers.py b/providers/tests/amazon/aws/operators/test_emr_containers.py
similarity index 99%
rename from tests/providers/amazon/aws/operators/test_emr_containers.py
rename to providers/tests/amazon/aws/operators/test_emr_containers.py
index 52306864f359..b31364bb02f5 100644
--- a/tests/providers/amazon/aws/operators/test_emr_containers.py
+++ b/providers/tests/amazon/aws/operators/test_emr_containers.py
@@ -25,7 +25,8 @@
 from airflow.providers.amazon.aws.hooks.emr import EmrContainerHook
 from airflow.providers.amazon.aws.operators.emr import EmrContainerOperator, EmrEksCreateClusterOperator
 from airflow.providers.amazon.aws.triggers.emr import EmrContainerTrigger
-from tests.providers.amazon.aws.utils.test_template_fields import validate_template_fields
+
+from providers.tests.amazon.aws.utils.test_template_fields import validate_template_fields

 SUBMIT_JOB_SUCCESS_RETURN = {
     "ResponseMetadata": {"HTTPStatusCode": 200},
diff --git a/tests/providers/amazon/aws/operators/test_emr_create_job_flow.py b/providers/tests/amazon/aws/operators/test_emr_create_job_flow.py
similarity index 95%
rename from tests/providers/amazon/aws/operators/test_emr_create_job_flow.py
rename to providers/tests/amazon/aws/operators/test_emr_create_job_flow.py
index 860df8c7219a..b2f7c8eb48f7 100644
--- a/tests/providers/amazon/aws/operators/test_emr_create_job_flow.py
+++ b/providers/tests/amazon/aws/operators/test_emr_create_job_flow.py
@@ -17,8 +17,8 @@
 # under the License.
 from __future__ import annotations

-import os
 from datetime import timedelta
+from pathlib import Path
 from unittest import mock
 from unittest.mock import MagicMock, patch
@@ -32,9 +32,9 @@
 from airflow.providers.amazon.aws.triggers.emr import EmrCreateJobFlowTrigger
 from airflow.utils import timezone
 from airflow.utils.types import DagRunType
-from tests.providers.amazon.aws.utils.test_template_fields import validate_template_fields
-from tests.providers.amazon.aws.utils.test_waiter import assert_expected_waiter_type
-from tests.test_utils import AIRFLOW_MAIN_FOLDER
+
+from providers.tests.amazon.aws.utils.test_template_fields import validate_template_fields
+from providers.tests.amazon.aws.utils.test_waiter import assert_expected_waiter_type

 TASK_ID = "test_task"
@@ -45,9 +45,7 @@
 JOB_FLOW_ID = "j-8989898989"

 RUN_JOB_FLOW_SUCCESS_RETURN = {"ResponseMetadata": {"HTTPStatusCode": 200}, "JobFlowId": JOB_FLOW_ID}

-TEMPLATE_SEARCHPATH = os.path.join(
-    AIRFLOW_MAIN_FOLDER, "tests", "providers", "amazon", "aws", "config_templates"
-)
+TEMPLATE_SEARCHPATH = Path(__file__).parents[1].joinpath("config_templates").as_posix()


 @pytest.fixture
diff --git a/tests/providers/amazon/aws/operators/test_emr_modify_cluster.py b/providers/tests/amazon/aws/operators/test_emr_modify_cluster.py
similarity index 97%
rename from tests/providers/amazon/aws/operators/test_emr_modify_cluster.py
rename to providers/tests/amazon/aws/operators/test_emr_modify_cluster.py
index 6f257288760c..4c7aae9b4c27 100644
--- a/tests/providers/amazon/aws/operators/test_emr_modify_cluster.py
+++ b/providers/tests/amazon/aws/operators/test_emr_modify_cluster.py
@@ -25,7 +25,8 @@
 from airflow.models.dag import DAG
 from airflow.providers.amazon.aws.operators.emr import EmrModifyClusterOperator
 from airflow.utils import timezone
-from tests.providers.amazon.aws.utils.test_template_fields import validate_template_fields
+
+from providers.tests.amazon.aws.utils.test_template_fields import validate_template_fields

 DEFAULT_DATE = timezone.datetime(2017, 1, 1)

 MODIFY_CLUSTER_SUCCESS_RETURN = {"ResponseMetadata": {"HTTPStatusCode": 200}, "StepConcurrencyLevel": 1}
diff --git a/tests/providers/amazon/aws/operators/test_emr_notebook_execution.py b/providers/tests/amazon/aws/operators/test_emr_notebook_execution.py
similarity index 99%
rename from tests/providers/amazon/aws/operators/test_emr_notebook_execution.py
rename to providers/tests/amazon/aws/operators/test_emr_notebook_execution.py
index 6fcd4eeb7462..42ce47ea6b69 100644
--- a/tests/providers/amazon/aws/operators/test_emr_notebook_execution.py
+++ b/providers/tests/amazon/aws/operators/test_emr_notebook_execution.py
@@ -28,8 +28,9 @@
     EmrStartNotebookExecutionOperator,
     EmrStopNotebookExecutionOperator,
 )
-from tests.providers.amazon.aws.utils.test_template_fields import validate_template_fields
-from tests.providers.amazon.aws.utils.test_waiter import assert_expected_waiter_type
+
+from providers.tests.amazon.aws.utils.test_template_fields import validate_template_fields
+from providers.tests.amazon.aws.utils.test_waiter import assert_expected_waiter_type

 PARAMS = {
     "EditorId": "test_editor",
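Both test_emr_add_steps.py and test_emr_create_job_flow.py above swap an `AIRFLOW_MAIN_FOLDER`-anchored path for one derived from the test file itself: `Path(__file__).parents[1]` walks up from `.../aws/operators/test_*.py` to `.../aws/`, so `config_templates` resolves wherever the `providers/tests` tree is checked out. A small sketch of the resolution; the directory layout in the comments is illustrative:

# Sketch: resolve a sibling directory relative to this file with pathlib.
# parents[0] is the containing directory, parents[1] its parent, and so on.
from pathlib import Path

# e.g. for .../amazon/aws/operators/test_emr_add_steps.py:
#   Path(__file__).parents[0] -> .../amazon/aws/operators
#   Path(__file__).parents[1] -> .../amazon/aws
TEMPLATE_SEARCHPATH = Path(__file__).parents[1].joinpath("config_templates").as_posix()

In these tests the resulting string is, as far as the surrounding code suggests, handed to the DAG's template search path so Jinja templates load from the moved location.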
diff --git a/tests/providers/amazon/aws/operators/test_emr_serverless.py b/providers/tests/amazon/aws/operators/test_emr_serverless.py
similarity index 99%
rename from tests/providers/amazon/aws/operators/test_emr_serverless.py
rename to providers/tests/amazon/aws/operators/test_emr_serverless.py
index c84d1032bcf7..bde6e9895f5c 100644
--- a/tests/providers/amazon/aws/operators/test_emr_serverless.py
+++ b/providers/tests/amazon/aws/operators/test_emr_serverless.py
@@ -32,7 +32,8 @@
     EmrServerlessStopApplicationOperator,
 )
 from airflow.utils.types import NOTSET
-from tests.providers.amazon.aws.utils.test_template_fields import validate_template_fields
+
+from providers.tests.amazon.aws.utils.test_template_fields import validate_template_fields

 if TYPE_CHECKING:
     from unittest.mock import MagicMock
diff --git a/tests/providers/amazon/aws/operators/test_emr_terminate_job_flow.py b/providers/tests/amazon/aws/operators/test_emr_terminate_job_flow.py
similarity index 97%
rename from tests/providers/amazon/aws/operators/test_emr_terminate_job_flow.py
rename to providers/tests/amazon/aws/operators/test_emr_terminate_job_flow.py
index 06ab35e4510b..6ce3fb29cd6b 100644
--- a/tests/providers/amazon/aws/operators/test_emr_terminate_job_flow.py
+++ b/providers/tests/amazon/aws/operators/test_emr_terminate_job_flow.py
@@ -24,7 +24,8 @@
 from airflow.exceptions import TaskDeferred
 from airflow.providers.amazon.aws.operators.emr import EmrTerminateJobFlowOperator
 from airflow.providers.amazon.aws.triggers.emr import EmrTerminateJobFlowTrigger
-from tests.providers.amazon.aws.utils.test_template_fields import validate_template_fields
+
+from providers.tests.amazon.aws.utils.test_template_fields import validate_template_fields

 TERMINATE_SUCCESS_RETURN = {"ResponseMetadata": {"HTTPStatusCode": 200}}
diff --git a/tests/providers/amazon/aws/operators/test_eventbridge.py b/providers/tests/amazon/aws/operators/test_eventbridge.py
similarity index 99%
rename from tests/providers/amazon/aws/operators/test_eventbridge.py
rename to providers/tests/amazon/aws/operators/test_eventbridge.py
index 3c682f1477e0..129edf9ff624 100644
--- a/tests/providers/amazon/aws/operators/test_eventbridge.py
+++ b/providers/tests/amazon/aws/operators/test_eventbridge.py
@@ -29,7 +29,8 @@
     EventBridgePutEventsOperator,
     EventBridgePutRuleOperator,
 )
-from tests.providers.amazon.aws.utils.test_template_fields import validate_template_fields
+
+from providers.tests.amazon.aws.utils.test_template_fields import validate_template_fields

 if TYPE_CHECKING:
     from unittest.mock import MagicMock
diff --git a/tests/providers/amazon/aws/operators/test_glacier.py b/providers/tests/amazon/aws/operators/test_glacier.py
similarity index 98%
rename from tests/providers/amazon/aws/operators/test_glacier.py
rename to providers/tests/amazon/aws/operators/test_glacier.py
index f46b0bc929fe..ff4d220076f8 100644
--- a/tests/providers/amazon/aws/operators/test_glacier.py
+++ b/providers/tests/amazon/aws/operators/test_glacier.py
@@ -26,7 +26,8 @@
     GlacierCreateJobOperator,
     GlacierUploadArchiveOperator,
 )
-from tests.providers.amazon.aws.utils.test_template_fields import validate_template_fields
+
+from providers.tests.amazon.aws.utils.test_template_fields import validate_template_fields

 if TYPE_CHECKING:
     from airflow.providers.amazon.aws.operators.base_aws import AwsBaseOperator
diff --git a/tests/providers/amazon/aws/operators/test_glue.py b/providers/tests/amazon/aws/operators/test_glue.py
similarity index 99%
rename from tests/providers/amazon/aws/operators/test_glue.py
rename to providers/tests/amazon/aws/operators/test_glue.py
index e1adcee7d639..8243940e695f 100644
--- a/tests/providers/amazon/aws/operators/test_glue.py
+++ b/providers/tests/amazon/aws/operators/test_glue.py
@@ -34,7 +34,8 @@
     GlueDataQualityRuleSetEvaluationRunOperator,
     GlueJobOperator,
 )
-from tests.providers.amazon.aws.utils.test_template_fields import validate_template_fields
+
+from providers.tests.amazon.aws.utils.test_template_fields import validate_template_fields

 if TYPE_CHECKING:
     from airflow.models import TaskInstance
diff --git a/tests/providers/amazon/aws/operators/test_glue_crawler.py b/providers/tests/amazon/aws/operators/test_glue_crawler.py
similarity index 99%
rename from tests/providers/amazon/aws/operators/test_glue_crawler.py
rename to providers/tests/amazon/aws/operators/test_glue_crawler.py
index 1e5a3f2177b5..1df63a4e2ad0 100644
--- a/tests/providers/amazon/aws/operators/test_glue_crawler.py
+++ b/providers/tests/amazon/aws/operators/test_glue_crawler.py
@@ -25,7 +25,8 @@
 from airflow.providers.amazon.aws.hooks.glue_crawler import GlueCrawlerHook
 from airflow.providers.amazon.aws.hooks.sts import StsHook
 from airflow.providers.amazon.aws.operators.glue_crawler import GlueCrawlerOperator
-from tests.providers.amazon.aws.utils.test_template_fields import validate_template_fields
+
+from providers.tests.amazon.aws.utils.test_template_fields import validate_template_fields

 if TYPE_CHECKING:
     from airflow.providers.amazon.aws.hooks.base_aws import BaseAwsConnection
diff --git a/tests/providers/amazon/aws/operators/test_glue_databrew.py b/providers/tests/amazon/aws/operators/test_glue_databrew.py
similarity index 98%
rename from tests/providers/amazon/aws/operators/test_glue_databrew.py
rename to providers/tests/amazon/aws/operators/test_glue_databrew.py
index 698b206acfb1..1b46549df454 100644
--- a/tests/providers/amazon/aws/operators/test_glue_databrew.py
+++ b/providers/tests/amazon/aws/operators/test_glue_databrew.py
@@ -25,7 +25,8 @@
 from airflow.providers.amazon.aws.hooks.glue_databrew import GlueDataBrewHook
 from airflow.providers.amazon.aws.operators.glue_databrew import GlueDataBrewStartJobOperator
-from tests.providers.amazon.aws.utils.test_template_fields import validate_template_fields
+
+from providers.tests.amazon.aws.utils.test_template_fields import validate_template_fields

 JOB_NAME = "test_job"
diff --git a/tests/providers/amazon/aws/operators/test_kinesis_analytics.py b/providers/tests/amazon/aws/operators/test_kinesis_analytics.py
similarity index 99%
rename from tests/providers/amazon/aws/operators/test_kinesis_analytics.py
rename to providers/tests/amazon/aws/operators/test_kinesis_analytics.py
index ab8bb3123008..b6b92e4c0b13 100644
--- a/tests/providers/amazon/aws/operators/test_kinesis_analytics.py
+++ b/providers/tests/amazon/aws/operators/test_kinesis_analytics.py
@@ -30,7 +30,8 @@
     KinesisAnalyticsV2StartApplicationOperator,
     KinesisAnalyticsV2StopApplicationOperator,
 )
-from tests.providers.amazon.aws.utils.test_template_fields import validate_template_fields
+
+from providers.tests.amazon.aws.utils.test_template_fields import validate_template_fields

 if TYPE_CHECKING:
     from airflow.providers.amazon.aws.hooks.base_aws import BaseAwsConnection
diff --git a/tests/providers/amazon/aws/operators/test_lambda_function.py b/providers/tests/amazon/aws/operators/test_lambda_function.py
similarity index 99%
rename from tests/providers/amazon/aws/operators/test_lambda_function.py
rename to providers/tests/amazon/aws/operators/test_lambda_function.py
index e3a5b8cad620..7ec081969103 100644
--- a/tests/providers/amazon/aws/operators/test_lambda_function.py
+++ b/providers/tests/amazon/aws/operators/test_lambda_function.py
@@ -29,7 +29,8 @@
     LambdaCreateFunctionOperator,
     LambdaInvokeFunctionOperator,
 )
-from tests.providers.amazon.aws.utils.test_template_fields import validate_template_fields
+
+from providers.tests.amazon.aws.utils.test_template_fields import validate_template_fields

 FUNCTION_NAME = "function_name"
 PAYLOADS = [
diff --git a/tests/providers/amazon/aws/operators/test_neptune.py b/providers/tests/amazon/aws/operators/test_neptune.py
similarity index 99%
rename from tests/providers/amazon/aws/operators/test_neptune.py
rename to providers/tests/amazon/aws/operators/test_neptune.py
index 146effaaa20f..0606f06e053a 100644
--- a/tests/providers/amazon/aws/operators/test_neptune.py
+++ b/providers/tests/amazon/aws/operators/test_neptune.py
@@ -30,7 +30,8 @@
     NeptuneStartDbClusterOperator,
     NeptuneStopDbClusterOperator,
 )
-from tests.providers.amazon.aws.utils.test_template_fields import validate_template_fields
+
+from providers.tests.amazon.aws.utils.test_template_fields import validate_template_fields

 CLUSTER_ID = "test_cluster"
diff --git a/tests/providers/amazon/aws/operators/test_quicksight.py b/providers/tests/amazon/aws/operators/test_quicksight.py
similarity index 98%
rename from tests/providers/amazon/aws/operators/test_quicksight.py
rename to providers/tests/amazon/aws/operators/test_quicksight.py
index f2d23c7b8179..063c5fc44a46 100644
--- a/tests/providers/amazon/aws/operators/test_quicksight.py
+++ b/providers/tests/amazon/aws/operators/test_quicksight.py
@@ -21,7 +21,8 @@
 from airflow.providers.amazon.aws.hooks.quicksight import QuickSightHook
 from airflow.providers.amazon.aws.operators.quicksight import QuickSightCreateIngestionOperator
-from tests.providers.amazon.aws.utils.test_template_fields import validate_template_fields
+
+from providers.tests.amazon.aws.utils.test_template_fields import validate_template_fields

 DATA_SET_ID = "DemoDataSet"
 INGESTION_ID = "DemoDataSet_Ingestion"
diff --git a/tests/providers/amazon/aws/operators/test_rds.py b/providers/tests/amazon/aws/operators/test_rds.py
similarity index 99%
rename from tests/providers/amazon/aws/operators/test_rds.py
rename to providers/tests/amazon/aws/operators/test_rds.py
index 0574d4b553b4..3d0fbcb84096 100644
--- a/tests/providers/amazon/aws/operators/test_rds.py
+++ b/providers/tests/amazon/aws/operators/test_rds.py
@@ -44,7 +44,8 @@
 )
 from airflow.providers.amazon.aws.triggers.rds import RdsDbAvailableTrigger, RdsDbStoppedTrigger
 from airflow.utils import timezone
-from tests.providers.amazon.aws.utils.test_template_fields import validate_template_fields
+
+from providers.tests.amazon.aws.utils.test_template_fields import validate_template_fields

 if TYPE_CHECKING:
     from airflow.providers.amazon.aws.hooks.base_aws import AwsGenericHook
diff --git a/tests/providers/amazon/aws/operators/test_redshift_cluster.py b/providers/tests/amazon/aws/operators/test_redshift_cluster.py
similarity index 99%
rename from tests/providers/amazon/aws/operators/test_redshift_cluster.py
rename to providers/tests/amazon/aws/operators/test_redshift_cluster.py
index e48f7b2ed96e..e67bff3390c0 100644
--- a/tests/providers/amazon/aws/operators/test_redshift_cluster.py
+++ b/providers/tests/amazon/aws/operators/test_redshift_cluster.py
@@ -38,7 +38,8 @@
     RedshiftPauseClusterTrigger,
     RedshiftResumeClusterTrigger,
 )
-from tests.providers.amazon.aws.utils.test_template_fields import validate_template_fields
+
+from providers.tests.amazon.aws.utils.test_template_fields import validate_template_fields


 class TestRedshiftCreateClusterOperator:
diff --git a/tests/providers/amazon/aws/operators/test_redshift_data.py b/providers/tests/amazon/aws/operators/test_redshift_data.py
similarity index 99%
rename from tests/providers/amazon/aws/operators/test_redshift_data.py
rename to providers/tests/amazon/aws/operators/test_redshift_data.py
index c4972e4c42e7..d367fc3ca959 100644
--- a/tests/providers/amazon/aws/operators/test_redshift_data.py
+++ b/providers/tests/amazon/aws/operators/test_redshift_data.py
@@ -25,7 +25,8 @@
 from airflow.providers.amazon.aws.hooks.redshift_data import QueryExecutionOutput
 from airflow.providers.amazon.aws.operators.redshift_data import RedshiftDataOperator
 from airflow.providers.amazon.aws.triggers.redshift_data import RedshiftDataTrigger
-from tests.providers.amazon.aws.utils.test_template_fields import validate_template_fields
+
+from providers.tests.amazon.aws.utils.test_template_fields import validate_template_fields

 CONN_ID = "aws_conn_test"
 TASK_ID = "task_id"
diff --git a/tests/providers/amazon/aws/operators/test_redshift_sql.py b/providers/tests/amazon/aws/operators/test_redshift_sql.py
similarity index 100%
rename from tests/providers/amazon/aws/operators/test_redshift_sql.py
rename to providers/tests/amazon/aws/operators/test_redshift_sql.py
diff --git a/tests/providers/amazon/aws/operators/test_s3.py b/providers/tests/amazon/aws/operators/test_s3.py
similarity index 99%
rename from tests/providers/amazon/aws/operators/test_s3.py
rename to providers/tests/amazon/aws/operators/test_s3.py
index 937baefde59e..8c6dbd2df50f 100644
--- a/tests/providers/amazon/aws/operators/test_s3.py
+++ b/providers/tests/amazon/aws/operators/test_s3.py
@@ -57,7 +57,8 @@
 from airflow.providers.openlineage.extractors import OperatorLineage
 from airflow.utils.timezone import datetime, utcnow
 from airflow.utils.types import DagRunType
-from tests.providers.amazon.aws.utils.test_template_fields import validate_template_fields
+
+from providers.tests.amazon.aws.utils.test_template_fields import validate_template_fields

 BUCKET_NAME = os.environ.get("BUCKET_NAME", "test-airflow-bucket")
 S3_KEY = "test-airflow-key"
diff --git a/tests/providers/amazon/aws/operators/test_sagemaker_base.py b/providers/tests/amazon/aws/operators/test_sagemaker_base.py
similarity index 99%
rename from tests/providers/amazon/aws/operators/test_sagemaker_base.py
rename to providers/tests/amazon/aws/operators/test_sagemaker_base.py
index 5de40708d515..e6e1fcd5bd90 100644
--- a/tests/providers/amazon/aws/operators/test_sagemaker_base.py
+++ b/providers/tests/amazon/aws/operators/test_sagemaker_base.py
@@ -32,7 +32,8 @@
 )
 from airflow.utils import timezone
 from airflow.utils.types import DagRunType
-from tests.providers.amazon.aws.utils.test_template_fields import validate_template_fields
+
+from providers.tests.amazon.aws.utils.test_template_fields import validate_template_fields

 CONFIG: dict = {
     "key1": "1",
diff --git a/tests/providers/amazon/aws/operators/test_sagemaker_endpoint.py b/providers/tests/amazon/aws/operators/test_sagemaker_endpoint.py
similarity index 99%
rename from tests/providers/amazon/aws/operators/test_sagemaker_endpoint.py
rename to providers/tests/amazon/aws/operators/test_sagemaker_endpoint.py
index 24cf944f8db6..c24cf39e79c3 100644
--- a/tests/providers/amazon/aws/operators/test_sagemaker_endpoint.py
+++ b/providers/tests/amazon/aws/operators/test_sagemaker_endpoint.py
@@ -27,7 +27,8 @@
 from airflow.providers.amazon.aws.operators import sagemaker
 from airflow.providers.amazon.aws.operators.sagemaker import SageMakerEndpointOperator
 from airflow.providers.amazon.aws.triggers.sagemaker import SageMakerTrigger
-from tests.providers.amazon.aws.utils.test_template_fields import validate_template_fields
+
+from providers.tests.amazon.aws.utils.test_template_fields import validate_template_fields

 CREATE_MODEL_PARAMS: dict = {
     "ModelName": "model_name",
diff --git a/tests/providers/amazon/aws/operators/test_sagemaker_endpoint_config.py b/providers/tests/amazon/aws/operators/test_sagemaker_endpoint_config.py
similarity index 98%
rename from tests/providers/amazon/aws/operators/test_sagemaker_endpoint_config.py
rename to providers/tests/amazon/aws/operators/test_sagemaker_endpoint_config.py
index 1169f09d9141..c62721fcf3fe 100644
--- a/tests/providers/amazon/aws/operators/test_sagemaker_endpoint_config.py
+++ b/providers/tests/amazon/aws/operators/test_sagemaker_endpoint_config.py
@@ -25,7 +25,8 @@
 from airflow.providers.amazon.aws.hooks.sagemaker import SageMakerHook
 from airflow.providers.amazon.aws.operators import sagemaker
 from airflow.providers.amazon.aws.operators.sagemaker import SageMakerEndpointConfigOperator
-from tests.providers.amazon.aws.utils.test_template_fields import validate_template_fields
+
+from providers.tests.amazon.aws.utils.test_template_fields import validate_template_fields

 CREATE_ENDPOINT_CONFIG_PARAMS: dict = {
     "EndpointConfigName": "config_name",
diff --git a/tests/providers/amazon/aws/operators/test_sagemaker_model.py b/providers/tests/amazon/aws/operators/test_sagemaker_model.py
similarity index 99%
rename from tests/providers/amazon/aws/operators/test_sagemaker_model.py
rename to providers/tests/amazon/aws/operators/test_sagemaker_model.py
index 33d1f5b4d1f6..095c6990ce51 100644
--- a/tests/providers/amazon/aws/operators/test_sagemaker_model.py
+++ b/providers/tests/amazon/aws/operators/test_sagemaker_model.py
@@ -31,7 +31,8 @@
     SageMakerModelOperator,
     SageMakerRegisterModelVersionOperator,
 )
-from tests.providers.amazon.aws.utils.test_template_fields import validate_template_fields
+
+from providers.tests.amazon.aws.utils.test_template_fields import validate_template_fields

 CREATE_MODEL_PARAMS: dict = {
     "ModelName": "model_name",
diff --git a/tests/providers/amazon/aws/operators/test_sagemaker_notebook.py b/providers/tests/amazon/aws/operators/test_sagemaker_notebook.py
similarity index 99%
rename from tests/providers/amazon/aws/operators/test_sagemaker_notebook.py
rename to providers/tests/amazon/aws/operators/test_sagemaker_notebook.py
index 093e264a3e45..0593b0f79842 100644
--- a/tests/providers/amazon/aws/operators/test_sagemaker_notebook.py
+++ b/providers/tests/amazon/aws/operators/test_sagemaker_notebook.py
@@ -30,7 +30,8 @@
     SageMakerStartNoteBookOperator,
     SageMakerStopNotebookOperator,
 )
-from tests.providers.amazon.aws.utils.test_template_fields import validate_template_fields
+
+from providers.tests.amazon.aws.utils.test_template_fields import validate_template_fields

 INSTANCE_NAME = "notebook"
 INSTANCE_TYPE = "ml.t3.medium"
diff --git a/tests/providers/amazon/aws/operators/test_sagemaker_pipeline.py b/providers/tests/amazon/aws/operators/test_sagemaker_pipeline.py
similarity index 98%
rename from tests/providers/amazon/aws/operators/test_sagemaker_pipeline.py
rename to providers/tests/amazon/aws/operators/test_sagemaker_pipeline.py
index e7334de98df0..0a2d4d00ad85 100644
--- a/tests/providers/amazon/aws/operators/test_sagemaker_pipeline.py
+++ b/providers/tests/amazon/aws/operators/test_sagemaker_pipeline.py
@@ -29,7 +29,8 @@
     SageMakerStopPipelineOperator,
 )
 from airflow.providers.amazon.aws.triggers.sagemaker import SageMakerPipelineTrigger
-from tests.providers.amazon.aws.utils.test_template_fields import validate_template_fields
+
+from providers.tests.amazon.aws.utils.test_template_fields import validate_template_fields

 if TYPE_CHECKING:
     from unittest.mock import MagicMock
diff --git a/tests/providers/amazon/aws/operators/test_sagemaker_processing.py b/providers/tests/amazon/aws/operators/test_sagemaker_processing.py
similarity index 99%
rename from tests/providers/amazon/aws/operators/test_sagemaker_processing.py
rename to providers/tests/amazon/aws/operators/test_sagemaker_processing.py
index b1ca2b62adbb..898aa68a6510 100644
--- a/tests/providers/amazon/aws/operators/test_sagemaker_processing.py
+++ b/providers/tests/amazon/aws/operators/test_sagemaker_processing.py
@@ -31,7 +31,8 @@
 from airflow.providers.amazon.aws.triggers.sagemaker import SageMakerTrigger
 from airflow.providers.common.compat.openlineage.facet import Dataset
 from airflow.providers.openlineage.extractors import OperatorLineage
-from tests.providers.amazon.aws.utils.test_template_fields import validate_template_fields
+
+from providers.tests.amazon.aws.utils.test_template_fields import validate_template_fields

 CREATE_PROCESSING_PARAMS: dict = {
     "AppSpecification": {
diff --git a/tests/providers/amazon/aws/operators/test_sagemaker_training.py b/providers/tests/amazon/aws/operators/test_sagemaker_training.py
similarity index 99%
rename from tests/providers/amazon/aws/operators/test_sagemaker_training.py
rename to providers/tests/amazon/aws/operators/test_sagemaker_training.py
index 85c6954ac1ac..f37a41967b9c 100644
--- a/tests/providers/amazon/aws/operators/test_sagemaker_training.py
+++ b/providers/tests/amazon/aws/operators/test_sagemaker_training.py
@@ -31,7 +31,8 @@
 )
 from airflow.providers.common.compat.openlineage.facet import Dataset
 from airflow.providers.openlineage.extractors import OperatorLineage
-from tests.providers.amazon.aws.utils.test_template_fields import validate_template_fields
+
+from providers.tests.amazon.aws.utils.test_template_fields import validate_template_fields

 EXPECTED_INTEGER_FIELDS: list[list[str]] = [
     ["ResourceConfig", "InstanceCount"],
diff --git a/tests/providers/amazon/aws/operators/test_sagemaker_transform.py b/providers/tests/amazon/aws/operators/test_sagemaker_transform.py
similarity index 99%
rename from tests/providers/amazon/aws/operators/test_sagemaker_transform.py
rename to providers/tests/amazon/aws/operators/test_sagemaker_transform.py
index 7804ddde2040..2452558ec422 100644
--- a/tests/providers/amazon/aws/operators/test_sagemaker_transform.py
+++ b/providers/tests/amazon/aws/operators/test_sagemaker_transform.py
@@ -30,7 +30,8 @@
 from airflow.providers.amazon.aws.triggers.sagemaker import SageMakerTrigger
 from airflow.providers.common.compat.openlineage.facet import Dataset
 from airflow.providers.openlineage.extractors import OperatorLineage
-from tests.providers.amazon.aws.utils.test_template_fields import validate_template_fields
+
+from providers.tests.amazon.aws.utils.test_template_fields import validate_template_fields

 EXPECTED_INTEGER_FIELDS: list[list[str]] = [
     ["Transform", "TransformResources", "InstanceCount"],
diff --git a/tests/providers/amazon/aws/operators/test_sagemaker_tuning.py b/providers/tests/amazon/aws/operators/test_sagemaker_tuning.py
similarity index 98%
rename from tests/providers/amazon/aws/operators/test_sagemaker_tuning.py
rename to providers/tests/amazon/aws/operators/test_sagemaker_tuning.py
index 78058c771a28..9048b19b0ec7 100644
--- a/tests/providers/amazon/aws/operators/test_sagemaker_tuning.py
+++ b/providers/tests/amazon/aws/operators/test_sagemaker_tuning.py
@@ -26,7 +26,8 @@
 from airflow.providers.amazon.aws.operators import sagemaker
 from airflow.providers.amazon.aws.operators.sagemaker import SageMakerTuningOperator
 from airflow.providers.amazon.aws.triggers.sagemaker import SageMakerTrigger
-from tests.providers.amazon.aws.utils.test_template_fields import validate_template_fields
+
+from providers.tests.amazon.aws.utils.test_template_fields import validate_template_fields

 EXPECTED_INTEGER_FIELDS: list[list[str]] = [
     ["HyperParameterTuningJobConfig", "ResourceLimits", "MaxNumberOfTrainingJobs"],
diff --git a/tests/providers/amazon/aws/operators/test_sns.py b/providers/tests/amazon/aws/operators/test_sns.py
similarity index 97%
rename from tests/providers/amazon/aws/operators/test_sns.py
rename to providers/tests/amazon/aws/operators/test_sns.py
index 6c5de06822d3..d2571f6cc186 100644
--- a/tests/providers/amazon/aws/operators/test_sns.py
+++ b/providers/tests/amazon/aws/operators/test_sns.py
@@ -22,7 +22,8 @@
 import pytest

 from airflow.providers.amazon.aws.operators.sns import SnsPublishOperator
-from tests.providers.amazon.aws.utils.test_template_fields import validate_template_fields
+
+from providers.tests.amazon.aws.utils.test_template_fields import validate_template_fields

 TASK_ID = "sns_publish_job"
 AWS_CONN_ID = "custom_aws_conn"
diff --git a/tests/providers/amazon/aws/operators/test_sqs.py b/providers/tests/amazon/aws/operators/test_sqs.py
similarity index 98%
rename from tests/providers/amazon/aws/operators/test_sqs.py
rename to providers/tests/amazon/aws/operators/test_sqs.py
index 2187262fe7c3..4534a16bff7c 100644
--- a/tests/providers/amazon/aws/operators/test_sqs.py
+++ b/providers/tests/amazon/aws/operators/test_sqs.py
@@ -25,7 +25,8 @@
 from airflow.providers.amazon.aws.hooks.sqs import SqsHook
 from airflow.providers.amazon.aws.operators.sqs import SqsPublishOperator
-from tests.providers.amazon.aws.utils.test_template_fields import validate_template_fields
+
+from providers.tests.amazon.aws.utils.test_template_fields import validate_template_fields

 REGION_NAME = "eu-west-1"
 QUEUE_NAME = "test-queue"
diff --git a/tests/providers/amazon/aws/operators/test_step_function.py b/providers/tests/amazon/aws/operators/test_step_function.py
similarity index 95%
rename from tests/providers/amazon/aws/operators/test_step_function.py
rename to providers/tests/amazon/aws/operators/test_step_function.py
index 29d743996af4..cac611381ee7 100644
--- a/tests/providers/amazon/aws/operators/test_step_function.py
+++ b/providers/tests/amazon/aws/operators/test_step_function.py
@@ -26,7 +26,8 @@
     StepFunctionGetExecutionOutputOperator,
     StepFunctionStartExecutionOperator,
 )
-from tests.providers.amazon.aws.utils.test_template_fields import validate_template_fields
+
+from providers.tests.amazon.aws.utils.test_template_fields import validate_template_fields

 EXECUTION_ARN = (
     "arn:aws:states:us-east-1:123456789012:execution:"
@@ -117,11 +118,14 @@ class TestStepFunctionStartExecutionOperator:
     @pytest.fixture(autouse=True)
     def setup_test_cases(self):
-        with mock.patch(
-            "airflow.providers.amazon.aws.links.step_function.StateMachineExecutionsDetailsLink.persist"
-        ) as executions_details_link, mock.patch(
-            "airflow.providers.amazon.aws.links.step_function.StateMachineDetailsLink.persist"
-        ) as details_link:
+        with (
+            mock.patch(
"airflow.providers.amazon.aws.links.step_function.StateMachineExecutionsDetailsLink.persist" + ) as executions_details_link, + mock.patch( + "airflow.providers.amazon.aws.links.step_function.StateMachineDetailsLink.persist" + ) as details_link, + ): self.mocked_executions_details_link = executions_details_link self.mocked_details_link = details_link yield diff --git a/tests/integration/providers/ydb/__init__.py b/providers/tests/amazon/aws/secrets/__init__.py similarity index 100% rename from tests/integration/providers/ydb/__init__.py rename to providers/tests/amazon/aws/secrets/__init__.py diff --git a/tests/providers/amazon/aws/secrets/test_secrets_manager.py b/providers/tests/amazon/aws/secrets/test_secrets_manager.py similarity index 100% rename from tests/providers/amazon/aws/secrets/test_secrets_manager.py rename to providers/tests/amazon/aws/secrets/test_secrets_manager.py diff --git a/tests/providers/amazon/aws/secrets/test_systems_manager.py b/providers/tests/amazon/aws/secrets/test_systems_manager.py similarity index 99% rename from tests/providers/amazon/aws/secrets/test_systems_manager.py rename to providers/tests/amazon/aws/secrets/test_systems_manager.py index d4c9c0f9bc13..c1b35a799d3c 100644 --- a/tests/providers/amazon/aws/secrets/test_systems_manager.py +++ b/providers/tests/amazon/aws/secrets/test_systems_manager.py @@ -24,7 +24,8 @@ from airflow.configuration import initialize_secrets_backends from airflow.providers.amazon.aws.secrets.systems_manager import SystemsManagerParameterStoreBackend -from tests.test_utils.config import conf_vars + +from dev.tests_common.test_utils.config import conf_vars URI_CONNECTION = pytest.param( "postgres://my-login:my-pass@my-host:5432/my-schema?param1=val1¶m2=val2", id="uri-connection" diff --git a/tests/integration/providers/ydb/hooks/__init__.py b/providers/tests/amazon/aws/sensors/__init__.py similarity index 100% rename from tests/integration/providers/ydb/hooks/__init__.py rename to providers/tests/amazon/aws/sensors/__init__.py diff --git a/tests/providers/amazon/aws/sensors/test_athena.py b/providers/tests/amazon/aws/sensors/test_athena.py similarity index 100% rename from tests/providers/amazon/aws/sensors/test_athena.py rename to providers/tests/amazon/aws/sensors/test_athena.py diff --git a/tests/providers/amazon/aws/sensors/test_base_aws.py b/providers/tests/amazon/aws/sensors/test_base_aws.py similarity index 100% rename from tests/providers/amazon/aws/sensors/test_base_aws.py rename to providers/tests/amazon/aws/sensors/test_base_aws.py diff --git a/tests/providers/amazon/aws/sensors/test_batch.py b/providers/tests/amazon/aws/sensors/test_batch.py similarity index 100% rename from tests/providers/amazon/aws/sensors/test_batch.py rename to providers/tests/amazon/aws/sensors/test_batch.py diff --git a/tests/providers/amazon/aws/sensors/test_bedrock.py b/providers/tests/amazon/aws/sensors/test_bedrock.py similarity index 100% rename from tests/providers/amazon/aws/sensors/test_bedrock.py rename to providers/tests/amazon/aws/sensors/test_bedrock.py diff --git a/tests/providers/amazon/aws/sensors/test_cloud_formation.py b/providers/tests/amazon/aws/sensors/test_cloud_formation.py similarity index 100% rename from tests/providers/amazon/aws/sensors/test_cloud_formation.py rename to providers/tests/amazon/aws/sensors/test_cloud_formation.py diff --git a/tests/providers/amazon/aws/sensors/test_comprehend.py b/providers/tests/amazon/aws/sensors/test_comprehend.py similarity index 100% rename from 
tests/providers/amazon/aws/sensors/test_comprehend.py rename to providers/tests/amazon/aws/sensors/test_comprehend.py diff --git a/tests/providers/amazon/aws/sensors/test_dms.py b/providers/tests/amazon/aws/sensors/test_dms.py similarity index 100% rename from tests/providers/amazon/aws/sensors/test_dms.py rename to providers/tests/amazon/aws/sensors/test_dms.py diff --git a/tests/providers/amazon/aws/sensors/test_dynamodb.py b/providers/tests/amazon/aws/sensors/test_dynamodb.py similarity index 100% rename from tests/providers/amazon/aws/sensors/test_dynamodb.py rename to providers/tests/amazon/aws/sensors/test_dynamodb.py diff --git a/tests/providers/amazon/aws/sensors/test_ec2.py b/providers/tests/amazon/aws/sensors/test_ec2.py similarity index 100% rename from tests/providers/amazon/aws/sensors/test_ec2.py rename to providers/tests/amazon/aws/sensors/test_ec2.py diff --git a/tests/providers/amazon/aws/sensors/test_ecs.py b/providers/tests/amazon/aws/sensors/test_ecs.py similarity index 100% rename from tests/providers/amazon/aws/sensors/test_ecs.py rename to providers/tests/amazon/aws/sensors/test_ecs.py diff --git a/tests/providers/amazon/aws/sensors/test_eks.py b/providers/tests/amazon/aws/sensors/test_eks.py similarity index 100% rename from tests/providers/amazon/aws/sensors/test_eks.py rename to providers/tests/amazon/aws/sensors/test_eks.py diff --git a/tests/providers/amazon/aws/sensors/test_emr_base.py b/providers/tests/amazon/aws/sensors/test_emr_base.py similarity index 100% rename from tests/providers/amazon/aws/sensors/test_emr_base.py rename to providers/tests/amazon/aws/sensors/test_emr_base.py diff --git a/tests/providers/amazon/aws/sensors/test_emr_containers.py b/providers/tests/amazon/aws/sensors/test_emr_containers.py similarity index 100% rename from tests/providers/amazon/aws/sensors/test_emr_containers.py rename to providers/tests/amazon/aws/sensors/test_emr_containers.py diff --git a/tests/providers/amazon/aws/sensors/test_emr_job_flow.py b/providers/tests/amazon/aws/sensors/test_emr_job_flow.py similarity index 100% rename from tests/providers/amazon/aws/sensors/test_emr_job_flow.py rename to providers/tests/amazon/aws/sensors/test_emr_job_flow.py diff --git a/tests/providers/amazon/aws/sensors/test_emr_notebook_execution.py b/providers/tests/amazon/aws/sensors/test_emr_notebook_execution.py similarity index 100% rename from tests/providers/amazon/aws/sensors/test_emr_notebook_execution.py rename to providers/tests/amazon/aws/sensors/test_emr_notebook_execution.py diff --git a/tests/providers/amazon/aws/sensors/test_emr_serverless_application.py b/providers/tests/amazon/aws/sensors/test_emr_serverless_application.py similarity index 100% rename from tests/providers/amazon/aws/sensors/test_emr_serverless_application.py rename to providers/tests/amazon/aws/sensors/test_emr_serverless_application.py diff --git a/tests/providers/amazon/aws/sensors/test_emr_serverless_job.py b/providers/tests/amazon/aws/sensors/test_emr_serverless_job.py similarity index 100% rename from tests/providers/amazon/aws/sensors/test_emr_serverless_job.py rename to providers/tests/amazon/aws/sensors/test_emr_serverless_job.py diff --git a/tests/providers/amazon/aws/sensors/test_emr_step.py b/providers/tests/amazon/aws/sensors/test_emr_step.py similarity index 100% rename from tests/providers/amazon/aws/sensors/test_emr_step.py rename to providers/tests/amazon/aws/sensors/test_emr_step.py diff --git a/tests/providers/amazon/aws/sensors/test_glacier.py 
b/providers/tests/amazon/aws/sensors/test_glacier.py similarity index 100% rename from tests/providers/amazon/aws/sensors/test_glacier.py rename to providers/tests/amazon/aws/sensors/test_glacier.py diff --git a/tests/providers/amazon/aws/sensors/test_glue.py b/providers/tests/amazon/aws/sensors/test_glue.py similarity index 100% rename from tests/providers/amazon/aws/sensors/test_glue.py rename to providers/tests/amazon/aws/sensors/test_glue.py diff --git a/tests/providers/amazon/aws/sensors/test_glue_catalog_partition.py b/providers/tests/amazon/aws/sensors/test_glue_catalog_partition.py similarity index 100% rename from tests/providers/amazon/aws/sensors/test_glue_catalog_partition.py rename to providers/tests/amazon/aws/sensors/test_glue_catalog_partition.py diff --git a/tests/providers/amazon/aws/sensors/test_glue_crawler.py b/providers/tests/amazon/aws/sensors/test_glue_crawler.py similarity index 100% rename from tests/providers/amazon/aws/sensors/test_glue_crawler.py rename to providers/tests/amazon/aws/sensors/test_glue_crawler.py diff --git a/tests/providers/amazon/aws/sensors/test_glue_data_quality.py b/providers/tests/amazon/aws/sensors/test_glue_data_quality.py similarity index 100% rename from tests/providers/amazon/aws/sensors/test_glue_data_quality.py rename to providers/tests/amazon/aws/sensors/test_glue_data_quality.py diff --git a/tests/providers/amazon/aws/sensors/test_kinesis_analytics.py b/providers/tests/amazon/aws/sensors/test_kinesis_analytics.py similarity index 100% rename from tests/providers/amazon/aws/sensors/test_kinesis_analytics.py rename to providers/tests/amazon/aws/sensors/test_kinesis_analytics.py diff --git a/tests/providers/amazon/aws/sensors/test_lambda_function.py b/providers/tests/amazon/aws/sensors/test_lambda_function.py similarity index 100% rename from tests/providers/amazon/aws/sensors/test_lambda_function.py rename to providers/tests/amazon/aws/sensors/test_lambda_function.py diff --git a/tests/providers/amazon/aws/sensors/test_opensearch_serverless.py b/providers/tests/amazon/aws/sensors/test_opensearch_serverless.py similarity index 100% rename from tests/providers/amazon/aws/sensors/test_opensearch_serverless.py rename to providers/tests/amazon/aws/sensors/test_opensearch_serverless.py diff --git a/tests/providers/amazon/aws/sensors/test_quicksight.py b/providers/tests/amazon/aws/sensors/test_quicksight.py similarity index 100% rename from tests/providers/amazon/aws/sensors/test_quicksight.py rename to providers/tests/amazon/aws/sensors/test_quicksight.py diff --git a/tests/providers/amazon/aws/sensors/test_rds.py b/providers/tests/amazon/aws/sensors/test_rds.py similarity index 100% rename from tests/providers/amazon/aws/sensors/test_rds.py rename to providers/tests/amazon/aws/sensors/test_rds.py diff --git a/tests/providers/amazon/aws/sensors/test_redshift_cluster.py b/providers/tests/amazon/aws/sensors/test_redshift_cluster.py similarity index 100% rename from tests/providers/amazon/aws/sensors/test_redshift_cluster.py rename to providers/tests/amazon/aws/sensors/test_redshift_cluster.py diff --git a/tests/providers/amazon/aws/sensors/test_s3.py b/providers/tests/amazon/aws/sensors/test_s3.py similarity index 100% rename from tests/providers/amazon/aws/sensors/test_s3.py rename to providers/tests/amazon/aws/sensors/test_s3.py diff --git a/tests/providers/amazon/aws/sensors/test_sagemaker_automl.py b/providers/tests/amazon/aws/sensors/test_sagemaker_automl.py similarity index 100% rename from 
tests/providers/amazon/aws/sensors/test_sagemaker_automl.py rename to providers/tests/amazon/aws/sensors/test_sagemaker_automl.py diff --git a/tests/providers/amazon/aws/sensors/test_sagemaker_base.py b/providers/tests/amazon/aws/sensors/test_sagemaker_base.py similarity index 100% rename from tests/providers/amazon/aws/sensors/test_sagemaker_base.py rename to providers/tests/amazon/aws/sensors/test_sagemaker_base.py diff --git a/tests/providers/amazon/aws/sensors/test_sagemaker_endpoint.py b/providers/tests/amazon/aws/sensors/test_sagemaker_endpoint.py similarity index 100% rename from tests/providers/amazon/aws/sensors/test_sagemaker_endpoint.py rename to providers/tests/amazon/aws/sensors/test_sagemaker_endpoint.py diff --git a/tests/providers/amazon/aws/sensors/test_sagemaker_pipeline.py b/providers/tests/amazon/aws/sensors/test_sagemaker_pipeline.py similarity index 100% rename from tests/providers/amazon/aws/sensors/test_sagemaker_pipeline.py rename to providers/tests/amazon/aws/sensors/test_sagemaker_pipeline.py diff --git a/tests/providers/amazon/aws/sensors/test_sagemaker_training.py b/providers/tests/amazon/aws/sensors/test_sagemaker_training.py similarity index 100% rename from tests/providers/amazon/aws/sensors/test_sagemaker_training.py rename to providers/tests/amazon/aws/sensors/test_sagemaker_training.py diff --git a/tests/providers/amazon/aws/sensors/test_sagemaker_transform.py b/providers/tests/amazon/aws/sensors/test_sagemaker_transform.py similarity index 100% rename from tests/providers/amazon/aws/sensors/test_sagemaker_transform.py rename to providers/tests/amazon/aws/sensors/test_sagemaker_transform.py diff --git a/tests/providers/amazon/aws/sensors/test_sagemaker_tuning.py b/providers/tests/amazon/aws/sensors/test_sagemaker_tuning.py similarity index 100% rename from tests/providers/amazon/aws/sensors/test_sagemaker_tuning.py rename to providers/tests/amazon/aws/sensors/test_sagemaker_tuning.py diff --git a/tests/providers/amazon/aws/sensors/test_sqs.py b/providers/tests/amazon/aws/sensors/test_sqs.py similarity index 100% rename from tests/providers/amazon/aws/sensors/test_sqs.py rename to providers/tests/amazon/aws/sensors/test_sqs.py diff --git a/tests/providers/amazon/aws/sensors/test_step_function.py b/providers/tests/amazon/aws/sensors/test_step_function.py similarity index 100% rename from tests/providers/amazon/aws/sensors/test_step_function.py rename to providers/tests/amazon/aws/sensors/test_step_function.py diff --git a/tests/integration/providers/ydb/operators/__init__.py b/providers/tests/amazon/aws/system/__init__.py similarity index 100% rename from tests/integration/providers/ydb/operators/__init__.py rename to providers/tests/amazon/aws/system/__init__.py diff --git a/tests/providers/__init__.py b/providers/tests/amazon/aws/system/utils/__init__.py similarity index 100% rename from tests/providers/__init__.py rename to providers/tests/amazon/aws/system/utils/__init__.py diff --git a/tests/providers/amazon/aws/system/utils/test_helpers.py b/providers/tests/amazon/aws/system/utils/test_helpers.py similarity index 96% rename from tests/providers/amazon/aws/system/utils/test_helpers.py rename to providers/tests/amazon/aws/system/utils/test_helpers.py index f48de1788b74..20324ebca945 100644 --- a/tests/providers/amazon/aws/system/utils/test_helpers.py +++ b/providers/tests/amazon/aws/system/utils/test_helpers.py @@ -16,7 +16,7 @@ # under the License. 
""" This module contains the unit tests for the helper methods included in the Amazon System Tests found at -tests/system/providers/amazon/aws/utils/__init__.py +providers/tests/system/amazon/aws/utils/__init__.py """ from __future__ import annotations @@ -29,8 +29,8 @@ import pytest from moto import mock_aws -from tests.system.providers.amazon.aws import utils -from tests.system.providers.amazon.aws.utils import ( +from providers.tests.system.amazon.aws import utils +from providers.tests.system.amazon.aws.utils import ( DEFAULT_ENV_ID_LEN, DEFAULT_ENV_ID_PREFIX, ENV_ID_ENVIRON_KEY, diff --git a/tests/providers/airbyte/__init__.py b/providers/tests/amazon/aws/transfers/__init__.py similarity index 100% rename from tests/providers/airbyte/__init__.py rename to providers/tests/amazon/aws/transfers/__init__.py diff --git a/tests/providers/amazon/aws/transfers/test_azure_blob_to_s3.py b/providers/tests/amazon/aws/transfers/test_azure_blob_to_s3.py similarity index 100% rename from tests/providers/amazon/aws/transfers/test_azure_blob_to_s3.py rename to providers/tests/amazon/aws/transfers/test_azure_blob_to_s3.py diff --git a/tests/providers/amazon/aws/transfers/test_base.py b/providers/tests/amazon/aws/transfers/test_base.py similarity index 100% rename from tests/providers/amazon/aws/transfers/test_base.py rename to providers/tests/amazon/aws/transfers/test_base.py diff --git a/tests/providers/amazon/aws/transfers/test_dynamodb_to_s3.py b/providers/tests/amazon/aws/transfers/test_dynamodb_to_s3.py similarity index 100% rename from tests/providers/amazon/aws/transfers/test_dynamodb_to_s3.py rename to providers/tests/amazon/aws/transfers/test_dynamodb_to_s3.py diff --git a/tests/providers/amazon/aws/transfers/test_exasol_to_s3.py b/providers/tests/amazon/aws/transfers/test_exasol_to_s3.py similarity index 100% rename from tests/providers/amazon/aws/transfers/test_exasol_to_s3.py rename to providers/tests/amazon/aws/transfers/test_exasol_to_s3.py diff --git a/tests/providers/amazon/aws/transfers/test_ftp_to_s3.py b/providers/tests/amazon/aws/transfers/test_ftp_to_s3.py similarity index 100% rename from tests/providers/amazon/aws/transfers/test_ftp_to_s3.py rename to providers/tests/amazon/aws/transfers/test_ftp_to_s3.py diff --git a/tests/providers/amazon/aws/transfers/test_gcs_to_s3.py b/providers/tests/amazon/aws/transfers/test_gcs_to_s3.py similarity index 100% rename from tests/providers/amazon/aws/transfers/test_gcs_to_s3.py rename to providers/tests/amazon/aws/transfers/test_gcs_to_s3.py diff --git a/tests/providers/amazon/aws/transfers/test_glacier_to_gcs.py b/providers/tests/amazon/aws/transfers/test_glacier_to_gcs.py similarity index 100% rename from tests/providers/amazon/aws/transfers/test_glacier_to_gcs.py rename to providers/tests/amazon/aws/transfers/test_glacier_to_gcs.py diff --git a/tests/providers/amazon/aws/transfers/test_google_api_to_s3.py b/providers/tests/amazon/aws/transfers/test_google_api_to_s3.py similarity index 100% rename from tests/providers/amazon/aws/transfers/test_google_api_to_s3.py rename to providers/tests/amazon/aws/transfers/test_google_api_to_s3.py diff --git a/tests/providers/amazon/aws/transfers/test_hive_to_dynamodb.py b/providers/tests/amazon/aws/transfers/test_hive_to_dynamodb.py similarity index 100% rename from tests/providers/amazon/aws/transfers/test_hive_to_dynamodb.py rename to providers/tests/amazon/aws/transfers/test_hive_to_dynamodb.py diff --git a/tests/providers/amazon/aws/transfers/test_http_to_s3.py 
b/providers/tests/amazon/aws/transfers/test_http_to_s3.py similarity index 100% rename from tests/providers/amazon/aws/transfers/test_http_to_s3.py rename to providers/tests/amazon/aws/transfers/test_http_to_s3.py diff --git a/tests/providers/amazon/aws/transfers/test_imap_attachment_to_s3.py b/providers/tests/amazon/aws/transfers/test_imap_attachment_to_s3.py similarity index 100% rename from tests/providers/amazon/aws/transfers/test_imap_attachment_to_s3.py rename to providers/tests/amazon/aws/transfers/test_imap_attachment_to_s3.py diff --git a/tests/providers/amazon/aws/transfers/test_local_to_s3.py b/providers/tests/amazon/aws/transfers/test_local_to_s3.py similarity index 100% rename from tests/providers/amazon/aws/transfers/test_local_to_s3.py rename to providers/tests/amazon/aws/transfers/test_local_to_s3.py diff --git a/tests/providers/amazon/aws/transfers/test_mongo_to_s3.py b/providers/tests/amazon/aws/transfers/test_mongo_to_s3.py similarity index 100% rename from tests/providers/amazon/aws/transfers/test_mongo_to_s3.py rename to providers/tests/amazon/aws/transfers/test_mongo_to_s3.py diff --git a/tests/providers/amazon/aws/transfers/test_redshift_to_s3.py b/providers/tests/amazon/aws/transfers/test_redshift_to_s3.py similarity index 99% rename from tests/providers/amazon/aws/transfers/test_redshift_to_s3.py rename to providers/tests/amazon/aws/transfers/test_redshift_to_s3.py index 2d28acd22e7e..e27ff2d0aa56 100644 --- a/tests/providers/amazon/aws/transfers/test_redshift_to_s3.py +++ b/providers/tests/amazon/aws/transfers/test_redshift_to_s3.py @@ -27,7 +27,8 @@ from airflow.models.connection import Connection from airflow.providers.amazon.aws.transfers.redshift_to_s3 import RedshiftToS3Operator from airflow.providers.amazon.aws.utils.redshift import build_credentials_block -from tests.test_utils.asserts import assert_equal_ignore_multiple_spaces + +from dev.tests_common.test_utils.asserts import assert_equal_ignore_multiple_spaces class TestRedshiftToS3Transfer: diff --git a/tests/providers/amazon/aws/transfers/test_s3_to_dynamodb.py b/providers/tests/amazon/aws/transfers/test_s3_to_dynamodb.py similarity index 100% rename from tests/providers/amazon/aws/transfers/test_s3_to_dynamodb.py rename to providers/tests/amazon/aws/transfers/test_s3_to_dynamodb.py diff --git a/tests/providers/amazon/aws/transfers/test_s3_to_ftp.py b/providers/tests/amazon/aws/transfers/test_s3_to_ftp.py similarity index 100% rename from tests/providers/amazon/aws/transfers/test_s3_to_ftp.py rename to providers/tests/amazon/aws/transfers/test_s3_to_ftp.py diff --git a/tests/providers/amazon/aws/transfers/test_s3_to_redshift.py b/providers/tests/amazon/aws/transfers/test_s3_to_redshift.py similarity index 99% rename from tests/providers/amazon/aws/transfers/test_s3_to_redshift.py rename to providers/tests/amazon/aws/transfers/test_s3_to_redshift.py index b80c5991626c..6e300791e093 100644 --- a/tests/providers/amazon/aws/transfers/test_s3_to_redshift.py +++ b/providers/tests/amazon/aws/transfers/test_s3_to_redshift.py @@ -33,7 +33,8 @@ SchemaDatasetFacet, SchemaDatasetFacetFields, ) -from tests.test_utils.asserts import assert_equal_ignore_multiple_spaces + +from dev.tests_common.test_utils.asserts import assert_equal_ignore_multiple_spaces class TestS3ToRedshiftTransfer: diff --git a/tests/providers/amazon/aws/transfers/test_s3_to_sftp.py b/providers/tests/amazon/aws/transfers/test_s3_to_sftp.py similarity index 98% rename from tests/providers/amazon/aws/transfers/test_s3_to_sftp.py rename to 
providers/tests/amazon/aws/transfers/test_s3_to_sftp.py index 58f8a2d6f0f7..545398d9666f 100644 --- a/tests/providers/amazon/aws/transfers/test_s3_to_sftp.py +++ b/providers/tests/amazon/aws/transfers/test_s3_to_sftp.py @@ -27,7 +27,8 @@ from airflow.providers.ssh.hooks.ssh import SSHHook from airflow.providers.ssh.operators.ssh import SSHOperator from airflow.utils.timezone import datetime -from tests.test_utils.config import conf_vars + +from dev.tests_common.test_utils.config import conf_vars pytestmark = pytest.mark.db_test diff --git a/tests/providers/amazon/aws/transfers/test_s3_to_sql.py b/providers/tests/amazon/aws/transfers/test_s3_to_sql.py similarity index 100% rename from tests/providers/amazon/aws/transfers/test_s3_to_sql.py rename to providers/tests/amazon/aws/transfers/test_s3_to_sql.py diff --git a/tests/providers/amazon/aws/transfers/test_salesforce_to_s3.py b/providers/tests/amazon/aws/transfers/test_salesforce_to_s3.py similarity index 100% rename from tests/providers/amazon/aws/transfers/test_salesforce_to_s3.py rename to providers/tests/amazon/aws/transfers/test_salesforce_to_s3.py diff --git a/tests/providers/amazon/aws/transfers/test_sftp_to_s3.py b/providers/tests/amazon/aws/transfers/test_sftp_to_s3.py similarity index 98% rename from tests/providers/amazon/aws/transfers/test_sftp_to_s3.py rename to providers/tests/amazon/aws/transfers/test_sftp_to_s3.py index be438a85a495..e4afe5c8c1ef 100644 --- a/tests/providers/amazon/aws/transfers/test_sftp_to_s3.py +++ b/providers/tests/amazon/aws/transfers/test_sftp_to_s3.py @@ -27,7 +27,8 @@ from airflow.providers.ssh.hooks.ssh import SSHHook from airflow.providers.ssh.operators.ssh import SSHOperator from airflow.utils.timezone import datetime -from tests.test_utils.config import conf_vars + +from dev.tests_common.test_utils.config import conf_vars pytestmark = pytest.mark.db_test diff --git a/tests/providers/amazon/aws/transfers/test_sql_to_s3.py b/providers/tests/amazon/aws/transfers/test_sql_to_s3.py similarity index 100% rename from tests/providers/amazon/aws/transfers/test_sql_to_s3.py rename to providers/tests/amazon/aws/transfers/test_sql_to_s3.py diff --git a/tests/providers/airbyte/hooks/__init__.py b/providers/tests/amazon/aws/triggers/__init__.py similarity index 100% rename from tests/providers/airbyte/hooks/__init__.py rename to providers/tests/amazon/aws/triggers/__init__.py diff --git a/tests/providers/amazon/aws/triggers/test_athena.py b/providers/tests/amazon/aws/triggers/test_athena.py similarity index 100% rename from tests/providers/amazon/aws/triggers/test_athena.py rename to providers/tests/amazon/aws/triggers/test_athena.py diff --git a/tests/providers/amazon/aws/triggers/test_base.py b/providers/tests/amazon/aws/triggers/test_base.py similarity index 100% rename from tests/providers/amazon/aws/triggers/test_base.py rename to providers/tests/amazon/aws/triggers/test_base.py diff --git a/tests/providers/amazon/aws/triggers/test_batch.py b/providers/tests/amazon/aws/triggers/test_batch.py similarity index 100% rename from tests/providers/amazon/aws/triggers/test_batch.py rename to providers/tests/amazon/aws/triggers/test_batch.py diff --git a/tests/providers/amazon/aws/triggers/test_bedrock.py b/providers/tests/amazon/aws/triggers/test_bedrock.py similarity index 99% rename from tests/providers/amazon/aws/triggers/test_bedrock.py rename to providers/tests/amazon/aws/triggers/test_bedrock.py index 90112d8c1dc1..a619d39b1307 100644 --- a/tests/providers/amazon/aws/triggers/test_bedrock.py +++ 
b/providers/tests/amazon/aws/triggers/test_bedrock.py @@ -29,7 +29,8 @@ BedrockProvisionModelThroughputCompletedTrigger, ) from airflow.triggers.base import TriggerEvent -from tests.providers.amazon.aws.utils.test_waiter import assert_expected_waiter_type + +from providers.tests.amazon.aws.utils.test_waiter import assert_expected_waiter_type BASE_TRIGGER_CLASSPATH = "airflow.providers.amazon.aws.triggers.bedrock." diff --git a/tests/providers/amazon/aws/triggers/test_comprehend.py b/providers/tests/amazon/aws/triggers/test_comprehend.py similarity index 98% rename from tests/providers/amazon/aws/triggers/test_comprehend.py rename to providers/tests/amazon/aws/triggers/test_comprehend.py index f70d9fc0d696..c7a53e2d70fc 100644 --- a/tests/providers/amazon/aws/triggers/test_comprehend.py +++ b/providers/tests/amazon/aws/triggers/test_comprehend.py @@ -27,7 +27,8 @@ ComprehendPiiEntitiesDetectionJobCompletedTrigger, ) from airflow.triggers.base import TriggerEvent -from tests.providers.amazon.aws.utils.test_waiter import assert_expected_waiter_type + +from providers.tests.amazon.aws.utils.test_waiter import assert_expected_waiter_type BASE_TRIGGER_CLASSPATH = "airflow.providers.amazon.aws.triggers.comprehend." diff --git a/tests/providers/amazon/aws/triggers/test_ec2.py b/providers/tests/amazon/aws/triggers/test_ec2.py similarity index 100% rename from tests/providers/amazon/aws/triggers/test_ec2.py rename to providers/tests/amazon/aws/triggers/test_ec2.py diff --git a/tests/providers/amazon/aws/triggers/test_ecs.py b/providers/tests/amazon/aws/triggers/test_ecs.py similarity index 100% rename from tests/providers/amazon/aws/triggers/test_ecs.py rename to providers/tests/amazon/aws/triggers/test_ecs.py diff --git a/tests/providers/amazon/aws/triggers/test_eks.py b/providers/tests/amazon/aws/triggers/test_eks.py similarity index 100% rename from tests/providers/amazon/aws/triggers/test_eks.py rename to providers/tests/amazon/aws/triggers/test_eks.py diff --git a/tests/providers/amazon/aws/triggers/test_emr.py b/providers/tests/amazon/aws/triggers/test_emr.py similarity index 100% rename from tests/providers/amazon/aws/triggers/test_emr.py rename to providers/tests/amazon/aws/triggers/test_emr.py diff --git a/tests/providers/amazon/aws/triggers/test_glue.py b/providers/tests/amazon/aws/triggers/test_glue.py similarity index 99% rename from tests/providers/amazon/aws/triggers/test_glue.py rename to providers/tests/amazon/aws/triggers/test_glue.py index 2ae3e57084ea..e39e38d8b760 100644 --- a/tests/providers/amazon/aws/triggers/test_glue.py +++ b/providers/tests/amazon/aws/triggers/test_glue.py @@ -32,7 +32,8 @@ GlueJobCompleteTrigger, ) from airflow.triggers.base import TriggerEvent -from tests.providers.amazon.aws.utils.test_waiter import assert_expected_waiter_type + +from providers.tests.amazon.aws.utils.test_waiter import assert_expected_waiter_type BASE_TRIGGER_CLASSPATH = "airflow.providers.amazon.aws.triggers.glue." 
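The recurring rewrite in the hunks above and below moves test-helper imports from the old top-level tests package to the relocated providers/tests package, while shared utilities such as conf_vars move under dev.tests_common. A minimal sketch of the before/after import pairs, copied verbatim from these hunks; it assumes the post-move Airflow source tree, so the imports only resolve inside that repository:

# Import migration applied throughout this patch.
# Before the move, provider test helpers lived under the top-level tests package:
#   from tests.providers.amazon.aws.utils.test_waiter import assert_expected_waiter_type
#   from tests.test_utils.config import conf_vars
# After the move, they live under providers/tests and dev/tests_common:
from providers.tests.amazon.aws.utils.test_waiter import assert_expected_waiter_type
from dev.tests_common.test_utils.config import conf_vars
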
diff --git a/tests/providers/amazon/aws/triggers/test_glue_crawler.py b/providers/tests/amazon/aws/triggers/test_glue_crawler.py similarity index 97% rename from tests/providers/amazon/aws/triggers/test_glue_crawler.py rename to providers/tests/amazon/aws/triggers/test_glue_crawler.py index 8975aa1aff50..fadc14fa0f27 100644 --- a/tests/providers/amazon/aws/triggers/test_glue_crawler.py +++ b/providers/tests/amazon/aws/triggers/test_glue_crawler.py @@ -24,7 +24,8 @@ from airflow.providers.amazon.aws.hooks.glue_crawler import GlueCrawlerHook from airflow.providers.amazon.aws.triggers.glue_crawler import GlueCrawlerCompleteTrigger from airflow.triggers.base import TriggerEvent -from tests.providers.amazon.aws.utils.test_waiter import assert_expected_waiter_type + +from providers.tests.amazon.aws.utils.test_waiter import assert_expected_waiter_type class TestGlueCrawlerCompleteTrigger: diff --git a/tests/providers/amazon/aws/triggers/test_glue_databrew.py b/providers/tests/amazon/aws/triggers/test_glue_databrew.py similarity index 100% rename from tests/providers/amazon/aws/triggers/test_glue_databrew.py rename to providers/tests/amazon/aws/triggers/test_glue_databrew.py diff --git a/tests/providers/amazon/aws/triggers/test_kinesis_analytics.py b/providers/tests/amazon/aws/triggers/test_kinesis_analytics.py similarity index 98% rename from tests/providers/amazon/aws/triggers/test_kinesis_analytics.py rename to providers/tests/amazon/aws/triggers/test_kinesis_analytics.py index 3692905f2299..6a0f32154506 100644 --- a/tests/providers/amazon/aws/triggers/test_kinesis_analytics.py +++ b/providers/tests/amazon/aws/triggers/test_kinesis_analytics.py @@ -26,7 +26,8 @@ KinesisAnalyticsV2ApplicationOperationCompleteTrigger, ) from airflow.triggers.base import TriggerEvent -from tests.providers.amazon.aws.utils.test_waiter import assert_expected_waiter_type + +from providers.tests.amazon.aws.utils.test_waiter import assert_expected_waiter_type BASE_TRIGGER_CLASSPATH = "airflow.providers.amazon.aws.triggers.kinesis_analytics." diff --git a/tests/providers/amazon/aws/triggers/test_lambda_function.py b/providers/tests/amazon/aws/triggers/test_lambda_function.py similarity index 100% rename from tests/providers/amazon/aws/triggers/test_lambda_function.py rename to providers/tests/amazon/aws/triggers/test_lambda_function.py diff --git a/tests/providers/amazon/aws/triggers/test_neptune.py b/providers/tests/amazon/aws/triggers/test_neptune.py similarity index 100% rename from tests/providers/amazon/aws/triggers/test_neptune.py rename to providers/tests/amazon/aws/triggers/test_neptune.py diff --git a/tests/providers/amazon/aws/triggers/test_opensearch_serverless.py b/providers/tests/amazon/aws/triggers/test_opensearch_serverless.py similarity index 98% rename from tests/providers/amazon/aws/triggers/test_opensearch_serverless.py rename to providers/tests/amazon/aws/triggers/test_opensearch_serverless.py index c992d6a50da6..429b406072c5 100644 --- a/tests/providers/amazon/aws/triggers/test_opensearch_serverless.py +++ b/providers/tests/amazon/aws/triggers/test_opensearch_serverless.py @@ -27,7 +27,8 @@ ) from airflow.triggers.base import TriggerEvent from airflow.utils.helpers import prune_dict -from tests.providers.amazon.aws.triggers.test_base import TestAwsBaseWaiterTrigger + +from providers.tests.amazon.aws.triggers.test_base import TestAwsBaseWaiterTrigger BASE_TRIGGER_CLASSPATH = "airflow.providers.amazon.aws.triggers.opensearch_serverless." 
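Several hunks in this patch (test_step_function.py above, and test_drill.py and test_hive.py further down) also reformat stacked mock.patch context managers into the parenthesized with-statement, one manager per line with a trailing comma. A minimal self-contained sketch of the same refactor; the json.dumps/json.loads patch targets are placeholders chosen so the snippet runs on its own, not targets taken from the diff, and the parenthesized form requires Python 3.10+:

from unittest import mock

# Before: context managers chained with commas on one long `with` line,
# which becomes hard to read when the patch targets are long dotted paths.
with mock.patch("json.dumps") as dumps_mock, mock.patch("json.loads") as loads_mock:
    dumps_mock.return_value = "{}"
    loads_mock.return_value = {}

# After: parenthesized context managers (enabled by the PEG parser in
# Python 3.10+), the layout these hunks switch to.
with (
    mock.patch("json.dumps") as dumps_mock,
    mock.patch("json.loads") as loads_mock,
):
    dumps_mock.return_value = "{}"
    loads_mock.return_value = {}
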
diff --git a/tests/providers/amazon/aws/triggers/test_rds.py b/providers/tests/amazon/aws/triggers/test_rds.py similarity index 100% rename from tests/providers/amazon/aws/triggers/test_rds.py rename to providers/tests/amazon/aws/triggers/test_rds.py diff --git a/tests/providers/amazon/aws/triggers/test_redshift_cluster.py b/providers/tests/amazon/aws/triggers/test_redshift_cluster.py similarity index 100% rename from tests/providers/amazon/aws/triggers/test_redshift_cluster.py rename to providers/tests/amazon/aws/triggers/test_redshift_cluster.py diff --git a/tests/providers/amazon/aws/triggers/test_redshift_data.py b/providers/tests/amazon/aws/triggers/test_redshift_data.py similarity index 100% rename from tests/providers/amazon/aws/triggers/test_redshift_data.py rename to providers/tests/amazon/aws/triggers/test_redshift_data.py diff --git a/tests/providers/amazon/aws/triggers/test_s3.py b/providers/tests/amazon/aws/triggers/test_s3.py similarity index 100% rename from tests/providers/amazon/aws/triggers/test_s3.py rename to providers/tests/amazon/aws/triggers/test_s3.py diff --git a/tests/providers/amazon/aws/triggers/test_sagemaker.py b/providers/tests/amazon/aws/triggers/test_sagemaker.py similarity index 100% rename from tests/providers/amazon/aws/triggers/test_sagemaker.py rename to providers/tests/amazon/aws/triggers/test_sagemaker.py diff --git a/tests/providers/amazon/aws/triggers/test_serialization.py b/providers/tests/amazon/aws/triggers/test_serialization.py similarity index 100% rename from tests/providers/amazon/aws/triggers/test_serialization.py rename to providers/tests/amazon/aws/triggers/test_serialization.py diff --git a/tests/providers/amazon/aws/triggers/test_sqs.py b/providers/tests/amazon/aws/triggers/test_sqs.py similarity index 100% rename from tests/providers/amazon/aws/triggers/test_sqs.py rename to providers/tests/amazon/aws/triggers/test_sqs.py diff --git a/tests/providers/airbyte/operators/__init__.py b/providers/tests/amazon/aws/utils/__init__.py similarity index 100% rename from tests/providers/airbyte/operators/__init__.py rename to providers/tests/amazon/aws/utils/__init__.py diff --git a/tests/providers/amazon/aws/utils/eks_test_constants.py b/providers/tests/amazon/aws/utils/eks_test_constants.py similarity index 100% rename from tests/providers/amazon/aws/utils/eks_test_constants.py rename to providers/tests/amazon/aws/utils/eks_test_constants.py diff --git a/tests/providers/amazon/aws/utils/eks_test_utils.py b/providers/tests/amazon/aws/utils/eks_test_utils.py similarity index 99% rename from tests/providers/amazon/aws/utils/eks_test_utils.py rename to providers/tests/amazon/aws/utils/eks_test_utils.py index fc75880f21f4..8bc9bd9e2622 100644 --- a/tests/providers/amazon/aws/utils/eks_test_utils.py +++ b/providers/tests/amazon/aws/utils/eks_test_utils.py @@ -21,7 +21,7 @@ from copy import deepcopy from typing import TYPE_CHECKING, Pattern, Type, Union -from tests.providers.amazon.aws.utils.eks_test_constants import ( +from providers.tests.amazon.aws.utils.eks_test_constants import ( STATUS, ClusterAttributes, ClusterInputs, diff --git a/tests/providers/amazon/aws/utils/test_connection_wrapper.py b/providers/tests/amazon/aws/utils/test_connection_wrapper.py similarity index 100% rename from tests/providers/amazon/aws/utils/test_connection_wrapper.py rename to providers/tests/amazon/aws/utils/test_connection_wrapper.py diff --git a/tests/providers/amazon/aws/utils/test_eks_get_token.py b/providers/tests/amazon/aws/utils/test_eks_get_token.py similarity 
index 97% rename from tests/providers/amazon/aws/utils/test_eks_get_token.py rename to providers/tests/amazon/aws/utils/test_eks_get_token.py index 672ccfc9b3fd..1d6300e45851 100644 --- a/tests/providers/amazon/aws/utils/test_eks_get_token.py +++ b/providers/tests/amazon/aws/utils/test_eks_get_token.py @@ -63,13 +63,13 @@ class TestGetEksToken: ], ], ) - def test_run(self, mock_eks_hook, args, expected_aws_conn_id, expected_region_name, airflow_root_path): + def test_run(self, mock_eks_hook, args, expected_aws_conn_id, expected_region_name, providers_src_folder): ( mock_eks_hook.return_value.fetch_access_token_for_cluster.return_value ) = "k8s-aws-v1.aHR0cDovL2V4YW1wbGUuY29t" with mock.patch("sys.argv", args), contextlib.redirect_stdout(StringIO()) as temp_stdout: - os.chdir(airflow_root_path) + os.chdir(providers_src_folder) # We are not using run_module because of https://github.com/pytest-dev/pytest/issues/9007 runpy.run_path("airflow/providers/amazon/aws/utils/eks_get_token.py", run_name="__main__") output = temp_stdout.getvalue() diff --git a/tests/providers/amazon/aws/utils/test_emailer.py b/providers/tests/amazon/aws/utils/test_emailer.py similarity index 100% rename from tests/providers/amazon/aws/utils/test_emailer.py rename to providers/tests/amazon/aws/utils/test_emailer.py diff --git a/tests/providers/amazon/aws/utils/test_identifiers.py b/providers/tests/amazon/aws/utils/test_identifiers.py similarity index 100% rename from tests/providers/amazon/aws/utils/test_identifiers.py rename to providers/tests/amazon/aws/utils/test_identifiers.py diff --git a/tests/providers/amazon/aws/utils/test_mixins.py b/providers/tests/amazon/aws/utils/test_mixins.py similarity index 100% rename from tests/providers/amazon/aws/utils/test_mixins.py rename to providers/tests/amazon/aws/utils/test_mixins.py diff --git a/tests/providers/amazon/aws/utils/test_openlineage.py b/providers/tests/amazon/aws/utils/test_openlineage.py similarity index 100% rename from tests/providers/amazon/aws/utils/test_openlineage.py rename to providers/tests/amazon/aws/utils/test_openlineage.py diff --git a/tests/providers/amazon/aws/utils/test_redshift.py b/providers/tests/amazon/aws/utils/test_redshift.py similarity index 100% rename from tests/providers/amazon/aws/utils/test_redshift.py rename to providers/tests/amazon/aws/utils/test_redshift.py diff --git a/tests/providers/amazon/aws/utils/test_sqs.py b/providers/tests/amazon/aws/utils/test_sqs.py similarity index 100% rename from tests/providers/amazon/aws/utils/test_sqs.py rename to providers/tests/amazon/aws/utils/test_sqs.py diff --git a/tests/providers/amazon/aws/utils/test_suppress.py b/providers/tests/amazon/aws/utils/test_suppress.py similarity index 100% rename from tests/providers/amazon/aws/utils/test_suppress.py rename to providers/tests/amazon/aws/utils/test_suppress.py diff --git a/tests/providers/amazon/aws/utils/test_tags.py b/providers/tests/amazon/aws/utils/test_tags.py similarity index 100% rename from tests/providers/amazon/aws/utils/test_tags.py rename to providers/tests/amazon/aws/utils/test_tags.py diff --git a/tests/providers/amazon/aws/utils/test_task_log_fetcher.py b/providers/tests/amazon/aws/utils/test_task_log_fetcher.py similarity index 100% rename from tests/providers/amazon/aws/utils/test_task_log_fetcher.py rename to providers/tests/amazon/aws/utils/test_task_log_fetcher.py diff --git a/tests/providers/amazon/aws/utils/test_template_fields.py b/providers/tests/amazon/aws/utils/test_template_fields.py similarity index 100% rename from 
tests/providers/amazon/aws/utils/test_template_fields.py rename to providers/tests/amazon/aws/utils/test_template_fields.py diff --git a/tests/providers/amazon/aws/utils/test_utils.py b/providers/tests/amazon/aws/utils/test_utils.py similarity index 100% rename from tests/providers/amazon/aws/utils/test_utils.py rename to providers/tests/amazon/aws/utils/test_utils.py diff --git a/tests/providers/amazon/aws/utils/test_waiter.py b/providers/tests/amazon/aws/utils/test_waiter.py similarity index 100% rename from tests/providers/amazon/aws/utils/test_waiter.py rename to providers/tests/amazon/aws/utils/test_waiter.py diff --git a/tests/providers/amazon/aws/utils/test_waiter_with_logging.py b/providers/tests/amazon/aws/utils/test_waiter_with_logging.py similarity index 100% rename from tests/providers/amazon/aws/utils/test_waiter_with_logging.py rename to providers/tests/amazon/aws/utils/test_waiter_with_logging.py diff --git a/tests/providers/airbyte/sensors/__init__.py b/providers/tests/amazon/aws/waiters/__init__.py similarity index 100% rename from tests/providers/airbyte/sensors/__init__.py rename to providers/tests/amazon/aws/waiters/__init__.py diff --git a/tests/providers/amazon/aws/waiters/test.json b/providers/tests/amazon/aws/waiters/test.json similarity index 100% rename from tests/providers/amazon/aws/waiters/test.json rename to providers/tests/amazon/aws/waiters/test.json diff --git a/tests/providers/amazon/aws/waiters/test_batch.py b/providers/tests/amazon/aws/waiters/test_batch.py similarity index 100% rename from tests/providers/amazon/aws/waiters/test_batch.py rename to providers/tests/amazon/aws/waiters/test_batch.py diff --git a/tests/providers/amazon/aws/waiters/test_bedrock.py b/providers/tests/amazon/aws/waiters/test_bedrock.py similarity index 100% rename from tests/providers/amazon/aws/waiters/test_bedrock.py rename to providers/tests/amazon/aws/waiters/test_bedrock.py diff --git a/tests/providers/amazon/aws/waiters/test_bedrock_agent.py b/providers/tests/amazon/aws/waiters/test_bedrock_agent.py similarity index 100% rename from tests/providers/amazon/aws/waiters/test_bedrock_agent.py rename to providers/tests/amazon/aws/waiters/test_bedrock_agent.py diff --git a/tests/providers/amazon/aws/waiters/test_comprehend.py b/providers/tests/amazon/aws/waiters/test_comprehend.py similarity index 100% rename from tests/providers/amazon/aws/waiters/test_comprehend.py rename to providers/tests/amazon/aws/waiters/test_comprehend.py diff --git a/tests/providers/amazon/aws/waiters/test_custom_waiters.py b/providers/tests/amazon/aws/waiters/test_custom_waiters.py similarity index 100% rename from tests/providers/amazon/aws/waiters/test_custom_waiters.py rename to providers/tests/amazon/aws/waiters/test_custom_waiters.py diff --git a/tests/providers/amazon/aws/waiters/test_dynamo.py b/providers/tests/amazon/aws/waiters/test_dynamo.py similarity index 100% rename from tests/providers/amazon/aws/waiters/test_dynamo.py rename to providers/tests/amazon/aws/waiters/test_dynamo.py diff --git a/tests/providers/amazon/aws/waiters/test_ecs.py b/providers/tests/amazon/aws/waiters/test_ecs.py similarity index 100% rename from tests/providers/amazon/aws/waiters/test_ecs.py rename to providers/tests/amazon/aws/waiters/test_ecs.py diff --git a/tests/providers/amazon/aws/waiters/test_eks.py b/providers/tests/amazon/aws/waiters/test_eks.py similarity index 97% rename from tests/providers/amazon/aws/waiters/test_eks.py rename to providers/tests/amazon/aws/waiters/test_eks.py index 
9013c8b7c6fc..6a3aa8cf0273 100644 --- a/tests/providers/amazon/aws/waiters/test_eks.py +++ b/providers/tests/amazon/aws/waiters/test_eks.py @@ -22,7 +22,8 @@ from moto import mock_aws from airflow.providers.amazon.aws.hooks.eks import EksHook -from tests.providers.amazon.aws.waiters.test_custom_waiters import assert_all_match + +from providers.tests.amazon.aws.waiters.test_custom_waiters import assert_all_match class TestCustomEKSServiceWaiters: diff --git a/tests/providers/amazon/aws/waiters/test_emr.py b/providers/tests/amazon/aws/waiters/test_emr.py similarity index 100% rename from tests/providers/amazon/aws/waiters/test_emr.py rename to providers/tests/amazon/aws/waiters/test_emr.py diff --git a/tests/providers/amazon/aws/waiters/test_glue.py b/providers/tests/amazon/aws/waiters/test_glue.py similarity index 100% rename from tests/providers/amazon/aws/waiters/test_glue.py rename to providers/tests/amazon/aws/waiters/test_glue.py diff --git a/tests/providers/amazon/aws/waiters/test_glue_databrew.py b/providers/tests/amazon/aws/waiters/test_glue_databrew.py similarity index 100% rename from tests/providers/amazon/aws/waiters/test_glue_databrew.py rename to providers/tests/amazon/aws/waiters/test_glue_databrew.py diff --git a/tests/providers/amazon/aws/waiters/test_kinesis_analytics.py b/providers/tests/amazon/aws/waiters/test_kinesis_analytics.py similarity index 100% rename from tests/providers/amazon/aws/waiters/test_kinesis_analytics.py rename to providers/tests/amazon/aws/waiters/test_kinesis_analytics.py diff --git a/tests/providers/amazon/aws/waiters/test_neptune.py b/providers/tests/amazon/aws/waiters/test_neptune.py similarity index 100% rename from tests/providers/amazon/aws/waiters/test_neptune.py rename to providers/tests/amazon/aws/waiters/test_neptune.py diff --git a/tests/providers/amazon/aws/waiters/test_opensearch_serverless.py b/providers/tests/amazon/aws/waiters/test_opensearch_serverless.py similarity index 100% rename from tests/providers/amazon/aws/waiters/test_opensearch_serverless.py rename to providers/tests/amazon/aws/waiters/test_opensearch_serverless.py diff --git a/tests/providers/amazon/conftest.py b/providers/tests/amazon/conftest.py similarity index 100% rename from tests/providers/amazon/conftest.py rename to providers/tests/amazon/conftest.py diff --git a/tests/providers/airbyte/triggers/__init__.py b/providers/tests/apache/__init__.py similarity index 100% rename from tests/providers/airbyte/triggers/__init__.py rename to providers/tests/apache/__init__.py diff --git a/tests/providers/alibaba/__init__.py b/providers/tests/apache/beam/__init__.py similarity index 100% rename from tests/providers/alibaba/__init__.py rename to providers/tests/apache/beam/__init__.py diff --git a/tests/providers/alibaba/cloud/__init__.py b/providers/tests/apache/beam/hooks/__init__.py similarity index 100% rename from tests/providers/alibaba/cloud/__init__.py rename to providers/tests/apache/beam/hooks/__init__.py diff --git a/tests/providers/apache/beam/hooks/test_beam.py b/providers/tests/apache/beam/hooks/test_beam.py similarity index 100% rename from tests/providers/apache/beam/hooks/test_beam.py rename to providers/tests/apache/beam/hooks/test_beam.py diff --git a/tests/providers/alibaba/cloud/hooks/__init__.py b/providers/tests/apache/beam/operators/__init__.py similarity index 100% rename from tests/providers/alibaba/cloud/hooks/__init__.py rename to providers/tests/apache/beam/operators/__init__.py diff --git a/tests/providers/apache/beam/operators/test_beam.py 
b/providers/tests/apache/beam/operators/test_beam.py similarity index 100% rename from tests/providers/apache/beam/operators/test_beam.py rename to providers/tests/apache/beam/operators/test_beam.py diff --git a/tests/providers/alibaba/cloud/log/__init__.py b/providers/tests/apache/beam/triggers/__init__.py similarity index 100% rename from tests/providers/alibaba/cloud/log/__init__.py rename to providers/tests/apache/beam/triggers/__init__.py diff --git a/tests/providers/apache/beam/triggers/test_beam.py b/providers/tests/apache/beam/triggers/test_beam.py similarity index 100% rename from tests/providers/apache/beam/triggers/test_beam.py rename to providers/tests/apache/beam/triggers/test_beam.py diff --git a/tests/providers/alibaba/cloud/operators/__init__.py b/providers/tests/apache/cassandra/__init__.py similarity index 100% rename from tests/providers/alibaba/cloud/operators/__init__.py rename to providers/tests/apache/cassandra/__init__.py diff --git a/tests/providers/alibaba/cloud/sensors/__init__.py b/providers/tests/apache/cassandra/sensors/__init__.py similarity index 100% rename from tests/providers/alibaba/cloud/sensors/__init__.py rename to providers/tests/apache/cassandra/sensors/__init__.py diff --git a/tests/providers/apache/cassandra/sensors/test_record.py b/providers/tests/apache/cassandra/sensors/test_record.py similarity index 100% rename from tests/providers/apache/cassandra/sensors/test_record.py rename to providers/tests/apache/cassandra/sensors/test_record.py diff --git a/tests/providers/apache/cassandra/sensors/test_table.py b/providers/tests/apache/cassandra/sensors/test_table.py similarity index 100% rename from tests/providers/apache/cassandra/sensors/test_table.py rename to providers/tests/apache/cassandra/sensors/test_table.py diff --git a/airflow/providers/vertica/operators/__init__.py b/providers/tests/apache/drill/__init__.py similarity index 100% rename from airflow/providers/vertica/operators/__init__.py rename to providers/tests/apache/drill/__init__.py diff --git a/airflow/providers/zendesk/hooks/__init__.py b/providers/tests/apache/drill/hooks/__init__.py similarity index 100% rename from airflow/providers/zendesk/hooks/__init__.py rename to providers/tests/apache/drill/hooks/__init__.py diff --git a/tests/providers/apache/drill/hooks/test_drill.py b/providers/tests/apache/drill/hooks/test_drill.py similarity index 94% rename from tests/providers/apache/drill/hooks/test_drill.py rename to providers/tests/apache/drill/hooks/test_drill.py index bfedffd3d798..a02b74545531 100644 --- a/tests/providers/apache/drill/hooks/test_drill.py +++ b/providers/tests/apache/drill/hooks/test_drill.py @@ -26,9 +26,10 @@ @pytest.mark.parametrize("host, expect_error", [("host_with?", True), ("good_host", False)]) def test_get_host(host, expect_error): - with patch( - "airflow.providers.apache.drill.hooks.drill.DrillHook.get_connection" - ) as mock_get_connection, patch("sqlalchemy.engine.base.Engine.raw_connection") as raw_connection: + with ( + patch("airflow.providers.apache.drill.hooks.drill.DrillHook.get_connection") as mock_get_connection, + patch("sqlalchemy.engine.base.Engine.raw_connection") as raw_connection, + ): raw_connection.return_value = MagicMock() mock_get_connection.return_value = MagicMock( host=host, port=80, login="drill_user", password="secret" diff --git a/tests/providers/apache/drill/__init__.py b/providers/tests/apache/druid/__init__.py similarity index 100% rename from tests/providers/apache/drill/__init__.py rename to 
providers/tests/apache/druid/__init__.py diff --git a/tests/providers/apache/drill/hooks/__init__.py b/providers/tests/apache/druid/hooks/__init__.py similarity index 100% rename from tests/providers/apache/drill/hooks/__init__.py rename to providers/tests/apache/druid/hooks/__init__.py diff --git a/tests/providers/apache/druid/hooks/test_druid.py b/providers/tests/apache/druid/hooks/test_druid.py similarity index 100% rename from tests/providers/apache/druid/hooks/test_druid.py rename to providers/tests/apache/druid/hooks/test_druid.py diff --git a/tests/providers/apache/druid/__init__.py b/providers/tests/apache/druid/operators/__init__.py similarity index 100% rename from tests/providers/apache/druid/__init__.py rename to providers/tests/apache/druid/operators/__init__.py diff --git a/tests/providers/apache/druid/operators/test_druid.py b/providers/tests/apache/druid/operators/test_druid.py similarity index 100% rename from tests/providers/apache/druid/operators/test_druid.py rename to providers/tests/apache/druid/operators/test_druid.py diff --git a/tests/providers/alibaba/cloud/utils/__init__.py b/providers/tests/apache/druid/transfers/__init__.py similarity index 100% rename from tests/providers/alibaba/cloud/utils/__init__.py rename to providers/tests/apache/druid/transfers/__init__.py diff --git a/tests/providers/apache/druid/transfers/test_hive_to_druid.py b/providers/tests/apache/druid/transfers/test_hive_to_druid.py similarity index 100% rename from tests/providers/apache/druid/transfers/test_hive_to_druid.py rename to providers/tests/apache/druid/transfers/test_hive_to_druid.py diff --git a/tests/providers/apache/druid/hooks/__init__.py b/providers/tests/apache/flink/__init__.py similarity index 100% rename from tests/providers/apache/druid/hooks/__init__.py rename to providers/tests/apache/flink/__init__.py diff --git a/tests/providers/apache/druid/operators/__init__.py b/providers/tests/apache/flink/operators/__init__.py similarity index 100% rename from tests/providers/apache/druid/operators/__init__.py rename to providers/tests/apache/flink/operators/__init__.py diff --git a/tests/providers/apache/flink/operators/test_flink_kubernetes.py b/providers/tests/apache/flink/operators/test_flink_kubernetes.py similarity index 100% rename from tests/providers/apache/flink/operators/test_flink_kubernetes.py rename to providers/tests/apache/flink/operators/test_flink_kubernetes.py diff --git a/tests/providers/apache/flink/__init__.py b/providers/tests/apache/flink/sensors/__init__.py similarity index 100% rename from tests/providers/apache/flink/__init__.py rename to providers/tests/apache/flink/sensors/__init__.py diff --git a/tests/providers/apache/flink/sensors/test_flink_kubernetes.py b/providers/tests/apache/flink/sensors/test_flink_kubernetes.py similarity index 100% rename from tests/providers/apache/flink/sensors/test_flink_kubernetes.py rename to providers/tests/apache/flink/sensors/test_flink_kubernetes.py diff --git a/tests/providers/apache/flink/operators/__init__.py b/providers/tests/apache/hdfs/__init__.py similarity index 100% rename from tests/providers/apache/flink/operators/__init__.py rename to providers/tests/apache/hdfs/__init__.py diff --git a/tests/providers/apache/flink/sensors/__init__.py b/providers/tests/apache/hdfs/hooks/__init__.py similarity index 100% rename from tests/providers/apache/flink/sensors/__init__.py rename to providers/tests/apache/hdfs/hooks/__init__.py diff --git a/tests/providers/apache/hdfs/hooks/test_webhdfs.py 
b/providers/tests/apache/hdfs/hooks/test_webhdfs.py similarity index 100% rename from tests/providers/apache/hdfs/hooks/test_webhdfs.py rename to providers/tests/apache/hdfs/hooks/test_webhdfs.py diff --git a/tests/providers/apache/hdfs/__init__.py b/providers/tests/apache/hdfs/sensors/__init__.py similarity index 100% rename from tests/providers/apache/hdfs/__init__.py rename to providers/tests/apache/hdfs/sensors/__init__.py diff --git a/tests/providers/apache/hdfs/sensors/test_web_hdfs.py b/providers/tests/apache/hdfs/sensors/test_web_hdfs.py similarity index 100% rename from tests/providers/apache/hdfs/sensors/test_web_hdfs.py rename to providers/tests/apache/hdfs/sensors/test_web_hdfs.py diff --git a/tests/providers/apache/hive/__init__.py b/providers/tests/apache/hive/__init__.py similarity index 100% rename from tests/providers/apache/hive/__init__.py rename to providers/tests/apache/hive/__init__.py diff --git a/tests/providers/apache/hdfs/hooks/__init__.py b/providers/tests/apache/hive/hooks/__init__.py similarity index 100% rename from tests/providers/apache/hdfs/hooks/__init__.py rename to providers/tests/apache/hive/hooks/__init__.py diff --git a/tests/providers/apache/hive/hooks/query_results.csv b/providers/tests/apache/hive/hooks/query_results.csv similarity index 100% rename from tests/providers/apache/hive/hooks/query_results.csv rename to providers/tests/apache/hive/hooks/query_results.csv diff --git a/tests/providers/apache/hive/hooks/test_hive.py b/providers/tests/apache/hive/hooks/test_hive.py similarity index 98% rename from tests/providers/apache/hive/hooks/test_hive.py rename to providers/tests/apache/hive/hooks/test_hive.py index 93494876175a..aee09db28088 100644 --- a/tests/providers/apache/hive/hooks/test_hive.py +++ b/providers/tests/apache/hive/hooks/test_hive.py @@ -33,14 +33,15 @@ from airflow.secrets.environment_variables import CONN_ENV_PREFIX from airflow.utils import timezone from airflow.utils.operator_helpers import AIRFLOW_VAR_NAME_FORMAT_MAPPING -from tests.providers.apache.hive import ( + +from dev.tests_common.test_utils.asserts import assert_equal_ignore_multiple_spaces +from providers.tests.apache.hive import ( BaseMockConnectionCursor, InvalidHiveCliHook, MockHiveCliHook, MockHiveServer2Hook, MockSubProcess, ) -from tests.test_utils.asserts import assert_equal_ignore_multiple_spaces DEFAULT_DATE = timezone.datetime(2015, 1, 1) DEFAULT_DATE_ISO = DEFAULT_DATE.isoformat() @@ -333,10 +334,11 @@ def setup_method(self): self.database = "airflow" self.partition_by = "ds" self.table = "static_babynames_partitioned" - with mock.patch( - "airflow.providers.apache.hive.hooks.hive.HiveMetastoreHook.get_metastore_client" - ) as get_metastore_mock, mock.patch( - "airflow.providers.apache.hive.hooks.hive.HiveMetastoreHook.get_connection" + with ( + mock.patch( + "airflow.providers.apache.hive.hooks.hive.HiveMetastoreHook.get_metastore_client" + ) as get_metastore_mock, + mock.patch("airflow.providers.apache.hive.hooks.hive.HiveMetastoreHook.get_connection"), ): get_metastore_mock.return_value = mock.MagicMock() @@ -419,11 +421,14 @@ def test_ha_hosts(self, socket_mock): assert socket_mock.socket.call_count == 2 def test_get_conn(self): - with mock.patch( - "airflow.providers.apache.hive.hooks.hive.HiveMetastoreHook._find_valid_host" - ) as find_valid_host, mock.patch( - "airflow.providers.apache.hive.hooks.hive.HiveMetastoreHook.get_connection" - ) as get_connection: + with ( + mock.patch( + 
"airflow.providers.apache.hive.hooks.hive.HiveMetastoreHook._find_valid_host" + ) as find_valid_host, + mock.patch( + "airflow.providers.apache.hive.hooks.hive.HiveMetastoreHook.get_connection" + ) as get_connection, + ): find_valid_host.return_value = mock.MagicMock(return_value="") get_connection.return_value = mock.MagicMock(return_value="") metastore_hook = HiveMetastoreHook() diff --git a/tests/providers/apache/hdfs/sensors/__init__.py b/providers/tests/apache/hive/macros/__init__.py similarity index 100% rename from tests/providers/apache/hdfs/sensors/__init__.py rename to providers/tests/apache/hive/macros/__init__.py diff --git a/tests/providers/apache/hive/macros/test_hive.py b/providers/tests/apache/hive/macros/test_hive.py similarity index 100% rename from tests/providers/apache/hive/macros/test_hive.py rename to providers/tests/apache/hive/macros/test_hive.py diff --git a/tests/providers/apache/hive/hooks/__init__.py b/providers/tests/apache/hive/operators/__init__.py similarity index 100% rename from tests/providers/apache/hive/hooks/__init__.py rename to providers/tests/apache/hive/operators/__init__.py diff --git a/tests/providers/apache/hive/operators/test_hive.py b/providers/tests/apache/hive/operators/test_hive.py similarity index 99% rename from tests/providers/apache/hive/operators/test_hive.py rename to providers/tests/apache/hive/operators/test_hive.py index f02f69c2a482..cda19f9c2a05 100644 --- a/tests/providers/apache/hive/operators/test_hive.py +++ b/providers/tests/apache/hive/operators/test_hive.py @@ -26,7 +26,8 @@ from airflow.models import DagRun, TaskInstance from airflow.providers.apache.hive.operators.hive import HiveOperator from airflow.utils import timezone -from tests.providers.apache.hive import DEFAULT_DATE, MockSubProcess, TestHiveEnvironment + +from providers.tests.apache.hive import DEFAULT_DATE, MockSubProcess, TestHiveEnvironment class HiveOperatorConfigTest(TestHiveEnvironment): diff --git a/tests/providers/apache/hive/operators/test_hive_stats.py b/providers/tests/apache/hive/operators/test_hive_stats.py similarity index 99% rename from tests/providers/apache/hive/operators/test_hive_stats.py rename to providers/tests/apache/hive/operators/test_hive_stats.py index e419d2da00b9..cce576515cb6 100644 --- a/tests/providers/apache/hive/operators/test_hive_stats.py +++ b/providers/tests/apache/hive/operators/test_hive_stats.py @@ -26,7 +26,8 @@ from airflow.exceptions import AirflowException from airflow.providers.apache.hive.operators.hive_stats import HiveStatsCollectionOperator from airflow.providers.presto.hooks.presto import PrestoHook -from tests.providers.apache.hive import ( + +from providers.tests.apache.hive import ( DEFAULT_DATE, DEFAULT_DATE_DS, MockConnectionCursor, diff --git a/tests/providers/apache/hive/macros/__init__.py b/providers/tests/apache/hive/sensors/__init__.py similarity index 100% rename from tests/providers/apache/hive/macros/__init__.py rename to providers/tests/apache/hive/sensors/__init__.py diff --git a/tests/providers/apache/hive/sensors/test_hive_partition.py b/providers/tests/apache/hive/sensors/test_hive_partition.py similarity index 96% rename from tests/providers/apache/hive/sensors/test_hive_partition.py rename to providers/tests/apache/hive/sensors/test_hive_partition.py index 45e10783d81e..4df701bfe697 100644 --- a/tests/providers/apache/hive/sensors/test_hive_partition.py +++ b/providers/tests/apache/hive/sensors/test_hive_partition.py @@ -23,7 +23,8 @@ import pytest from 
airflow.providers.apache.hive.sensors.hive_partition import HivePartitionSensor -from tests.providers.apache.hive import DEFAULT_DATE, MockHiveMetastoreHook, TestHiveEnvironment + +from providers.tests.apache.hive import DEFAULT_DATE, MockHiveMetastoreHook, TestHiveEnvironment @pytest.mark.skipif( diff --git a/tests/providers/apache/hive/sensors/test_metastore_partition.py b/providers/tests/apache/hive/sensors/test_metastore_partition.py similarity index 96% rename from tests/providers/apache/hive/sensors/test_metastore_partition.py rename to providers/tests/apache/hive/sensors/test_metastore_partition.py index 04a251339dc3..3acb19678a5d 100644 --- a/tests/providers/apache/hive/sensors/test_metastore_partition.py +++ b/providers/tests/apache/hive/sensors/test_metastore_partition.py @@ -23,7 +23,8 @@ import pytest from airflow.providers.apache.hive.sensors.metastore_partition import MetastorePartitionSensor -from tests.providers.apache.hive import DEFAULT_DATE, DEFAULT_DATE_DS, MockDBConnection, TestHiveEnvironment + +from providers.tests.apache.hive import DEFAULT_DATE, DEFAULT_DATE_DS, MockDBConnection, TestHiveEnvironment @pytest.mark.skipif( diff --git a/tests/providers/apache/hive/sensors/test_named_hive_partition.py b/providers/tests/apache/hive/sensors/test_named_hive_partition.py similarity index 99% rename from tests/providers/apache/hive/sensors/test_named_hive_partition.py rename to providers/tests/apache/hive/sensors/test_named_hive_partition.py index 01827692273a..ff565c733d8b 100644 --- a/tests/providers/apache/hive/sensors/test_named_hive_partition.py +++ b/providers/tests/apache/hive/sensors/test_named_hive_partition.py @@ -27,7 +27,8 @@ from airflow.models.dag import DAG from airflow.providers.apache.hive.sensors.named_hive_partition import NamedHivePartitionSensor from airflow.utils.timezone import datetime -from tests.providers.apache.hive import MockHiveMetastoreHook, TestHiveEnvironment + +from providers.tests.apache.hive import MockHiveMetastoreHook, TestHiveEnvironment DEFAULT_DATE = datetime(2015, 1, 1) DEFAULT_DATE_ISO = DEFAULT_DATE.isoformat() diff --git a/tests/providers/amazon/__init__.py b/providers/tests/apache/hive/transfers/__init__.py similarity index 100% rename from tests/providers/amazon/__init__.py rename to providers/tests/apache/hive/transfers/__init__.py diff --git a/tests/providers/apache/hive/transfers/test_hive_to_mysql.py b/providers/tests/apache/hive/transfers/test_hive_to_mysql.py similarity index 99% rename from tests/providers/apache/hive/transfers/test_hive_to_mysql.py rename to providers/tests/apache/hive/transfers/test_hive_to_mysql.py index a7e2cbe0e0d6..f7a4e9c01301 100644 --- a/tests/providers/apache/hive/transfers/test_hive_to_mysql.py +++ b/providers/tests/apache/hive/transfers/test_hive_to_mysql.py @@ -26,7 +26,8 @@ from airflow.providers.apache.hive.transfers.hive_to_mysql import HiveToMySqlOperator from airflow.utils import timezone from airflow.utils.operator_helpers import context_to_airflow_vars -from tests.providers.apache.hive import MockHiveServer2Hook, MockMySqlHook, TestHiveEnvironment + +from providers.tests.apache.hive import MockHiveServer2Hook, MockMySqlHook, TestHiveEnvironment DEFAULT_DATE = timezone.datetime(2015, 1, 1) diff --git a/tests/providers/apache/hive/transfers/test_hive_to_samba.py b/providers/tests/apache/hive/transfers/test_hive_to_samba.py similarity index 99% rename from tests/providers/apache/hive/transfers/test_hive_to_samba.py rename to providers/tests/apache/hive/transfers/test_hive_to_samba.py 
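[Editor's note] Every content hunk in this region repeats the same two mechanical substitutions: tests.providers.* becomes providers.tests.*, and tests.test_utils.* becomes dev.tests_common.test_utils.*. A hypothetical one-off helper along these lines could apply the rewrite; it is not part of the commit, and the root path is an assumption:

    import pathlib
    import re

    # The two import rewrites that repeat throughout these hunks.
    REWRITES = [
        (re.compile(r"\bfrom tests\.providers\b"), "from providers.tests"),
        (re.compile(r"\bfrom tests\.test_utils\b"), "from dev.tests_common.test_utils"),
    ]

    def migrate_file(path: pathlib.Path) -> None:
        text = path.read_text()
        for pattern, replacement in REWRITES:
            text = pattern.sub(replacement, text)
        path.write_text(text)

    for test_file in pathlib.Path("providers/tests").rglob("*.py"):
        migrate_file(test_file)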
diff --git a/tests/providers/apache/hive/transfers/test_mssql_to_hive.py b/providers/tests/apache/hive/transfers/test_mssql_to_hive.py
similarity index 100%
rename from tests/providers/apache/hive/transfers/test_mssql_to_hive.py
rename to providers/tests/apache/hive/transfers/test_mssql_to_hive.py
diff --git a/tests/providers/apache/hive/transfers/test_mysql_to_hive.py b/providers/tests/apache/hive/transfers/test_mysql_to_hive.py
similarity index 100%
rename from tests/providers/apache/hive/transfers/test_mysql_to_hive.py
rename to providers/tests/apache/hive/transfers/test_mysql_to_hive.py
diff --git a/tests/providers/apache/hive/transfers/test_s3_to_hive.py b/providers/tests/apache/hive/transfers/test_s3_to_hive.py
similarity index 100%
rename from tests/providers/apache/hive/transfers/test_s3_to_hive.py
rename to providers/tests/apache/hive/transfers/test_s3_to_hive.py
diff --git a/tests/providers/apache/hive/transfers/test_vertica_to_hive.py b/providers/tests/apache/hive/transfers/test_vertica_to_hive.py
similarity index 100%
rename from tests/providers/apache/hive/transfers/test_vertica_to_hive.py
rename to providers/tests/apache/hive/transfers/test_vertica_to_hive.py
diff --git a/tests/providers/amazon/aws/__init__.py b/providers/tests/apache/iceberg/__init__.py
similarity index 100%
rename from tests/providers/amazon/aws/__init__.py
rename to providers/tests/apache/iceberg/__init__.py
diff --git a/tests/providers/amazon/aws/assets/__init__.py b/providers/tests/apache/iceberg/hooks/__init__.py
similarity index 100%
rename from tests/providers/amazon/aws/assets/__init__.py
rename to providers/tests/apache/iceberg/hooks/__init__.py
diff --git a/tests/providers/apache/iceberg/hooks/test_iceberg.py b/providers/tests/apache/iceberg/hooks/test_iceberg.py
similarity index 100%
rename from tests/providers/apache/iceberg/hooks/test_iceberg.py
rename to providers/tests/apache/iceberg/hooks/test_iceberg.py
diff --git a/tests/providers/amazon/aws/auth_manager/__init__.py b/providers/tests/apache/impala/__init__.py
similarity index 100%
rename from tests/providers/amazon/aws/auth_manager/__init__.py
rename to providers/tests/apache/impala/__init__.py
diff --git a/tests/providers/amazon/aws/auth_manager/avp/__init__.py b/providers/tests/apache/impala/hooks/__init__.py
similarity index 100%
rename from tests/providers/amazon/aws/auth_manager/avp/__init__.py
rename to providers/tests/apache/impala/hooks/__init__.py
diff --git a/tests/providers/apache/impala/hooks/test_impala.py b/providers/tests/apache/impala/hooks/test_impala.py
similarity index 100%
rename from tests/providers/apache/impala/hooks/test_impala.py
rename to providers/tests/apache/impala/hooks/test_impala.py
diff --git a/tests/providers/amazon/aws/auth_manager/cli/__init__.py b/providers/tests/apache/kafka/__init__.py
similarity index 100%
rename from tests/providers/amazon/aws/auth_manager/cli/__init__.py
rename to providers/tests/apache/kafka/__init__.py
diff --git a/tests/providers/amazon/aws/auth_manager/security_manager/__init__.py b/providers/tests/apache/kafka/hooks/__init__.py
similarity index 100%
rename from tests/providers/amazon/aws/auth_manager/security_manager/__init__.py
rename to providers/tests/apache/kafka/hooks/__init__.py
diff --git a/tests/providers/apache/kafka/hooks/test_base.py b/providers/tests/apache/kafka/hooks/test_base.py
similarity index 100%
rename from tests/providers/apache/kafka/hooks/test_base.py
rename to providers/tests/apache/kafka/hooks/test_base.py
diff --git a/tests/providers/apache/kafka/hooks/test_client.py b/providers/tests/apache/kafka/hooks/test_client.py
similarity index 100%
rename from tests/providers/apache/kafka/hooks/test_client.py
rename to providers/tests/apache/kafka/hooks/test_client.py
diff --git a/tests/providers/apache/kafka/hooks/test_consume.py b/providers/tests/apache/kafka/hooks/test_consume.py
similarity index 100%
rename from tests/providers/apache/kafka/hooks/test_consume.py
rename to providers/tests/apache/kafka/hooks/test_consume.py
diff --git a/tests/providers/apache/kafka/hooks/test_produce.py b/providers/tests/apache/kafka/hooks/test_produce.py
similarity index 100%
rename from tests/providers/apache/kafka/hooks/test_produce.py
rename to providers/tests/apache/kafka/hooks/test_produce.py
diff --git a/tests/providers/amazon/aws/auth_manager/views/__init__.py b/providers/tests/apache/kafka/operators/__init__.py
similarity index 100%
rename from tests/providers/amazon/aws/auth_manager/views/__init__.py
rename to providers/tests/apache/kafka/operators/__init__.py
diff --git a/tests/providers/apache/kafka/operators/test_consume.py b/providers/tests/apache/kafka/operators/test_consume.py
similarity index 98%
rename from tests/providers/apache/kafka/operators/test_consume.py
rename to providers/tests/apache/kafka/operators/test_consume.py
index 699e0abe0643..7e02a273894b 100644
--- a/tests/providers/apache/kafka/operators/test_consume.py
+++ b/providers/tests/apache/kafka/operators/test_consume.py
@@ -64,7 +64,7 @@ def test_operator(self):
         operator = ConsumeFromTopicOperator(
             kafka_config_id="kafka_d",
             topics=["test"],
-            apply_function="tests.providers.apache.kafka.operators.test_consume._no_op",
+            apply_function="providers.tests.apache.kafka.operators.test_consume._no_op",
             task_id="test",
             poll_timeout=0.0001,
         )
diff --git a/tests/providers/apache/kafka/operators/test_produce.py b/providers/tests/apache/kafka/operators/test_produce.py
similarity index 97%
rename from tests/providers/apache/kafka/operators/test_produce.py
rename to providers/tests/apache/kafka/operators/test_produce.py
index 49559984f1d0..6c1b853ecef9 100644
--- a/tests/providers/apache/kafka/operators/test_produce.py
+++ b/providers/tests/apache/kafka/operators/test_produce.py
@@ -69,7 +69,7 @@ def test_operator_string(self):
         operator = ProduceToTopicOperator(
             kafka_config_id="kafka_d",
             topic="test_1",
-            producer_function="tests.providers.apache.kafka.operators.test_produce._simple_producer",
+            producer_function="providers.tests.apache.kafka.operators.test_produce._simple_producer",
             producer_function_args=(b"test", b"test"),
             task_id="test",
             synchronous=False,
diff --git a/tests/providers/amazon/aws/config_templates/__init__.py b/providers/tests/apache/kafka/sensors/__init__.py
similarity index 100%
rename from tests/providers/amazon/aws/config_templates/__init__.py
rename to providers/tests/apache/kafka/sensors/__init__.py
diff --git a/tests/providers/apache/kafka/sensors/test_kafka.py b/providers/tests/apache/kafka/sensors/test_kafka.py
similarity index 100%
rename from tests/providers/apache/kafka/sensors/test_kafka.py
rename to providers/tests/apache/kafka/sensors/test_kafka.py
diff --git a/tests/providers/amazon/aws/executors/__init__.py b/providers/tests/apache/kafka/triggers/__init__.py
similarity index 100%
rename from tests/providers/amazon/aws/executors/__init__.py
rename to providers/tests/apache/kafka/triggers/__init__.py
diff --git a/tests/providers/apache/kafka/triggers/test_await_message.py b/providers/tests/apache/kafka/triggers/test_await_message.py
similarity index 96%
rename from tests/providers/apache/kafka/triggers/test_await_message.py
rename to providers/tests/apache/kafka/triggers/test_await_message.py
index db17d5e4e758..0f2b3f6f6b07 100644
--- a/tests/providers/apache/kafka/triggers/test_await_message.py
+++ b/providers/tests/apache/kafka/triggers/test_await_message.py
@@ -100,7 +100,7 @@ async def test_trigger_run_good(self, mocker):
 
         trigger = AwaitMessageTrigger(
             kafka_config_id="kafka_d",
-            apply_function="tests.providers.apache.kafka.triggers.test_await_message.apply_function_true",
+            apply_function="providers.tests.apache.kafka.triggers.test_await_message.apply_function_true",
             topics=["noop"],
             poll_timeout=0.0001,
             poll_interval=5,
@@ -117,7 +117,7 @@ async def test_trigger_run_bad(self, mocker):
 
         trigger = AwaitMessageTrigger(
             kafka_config_id="kafka_d",
-            apply_function="tests.providers.apache.kafka.triggers.test_await_message.apply_function_false",
+            apply_function="providers.tests.apache.kafka.triggers.test_await_message.apply_function_false",
             topics=["noop"],
             poll_timeout=0.0001,
             poll_interval=5,
diff --git a/tests/providers/apache/hive/operators/__init__.py b/providers/tests/apache/kylin/__init__.py
similarity index 100%
rename from tests/providers/apache/hive/operators/__init__.py
rename to providers/tests/apache/kylin/__init__.py
diff --git a/tests/providers/apache/hive/sensors/__init__.py b/providers/tests/apache/kylin/hooks/__init__.py
similarity index 100%
rename from tests/providers/apache/hive/sensors/__init__.py
rename to providers/tests/apache/kylin/hooks/__init__.py
diff --git a/tests/providers/apache/kylin/hooks/test_kylin.py b/providers/tests/apache/kylin/hooks/test_kylin.py
similarity index 100%
rename from tests/providers/apache/kylin/hooks/test_kylin.py
rename to providers/tests/apache/kylin/hooks/test_kylin.py
diff --git a/tests/providers/apache/kylin/__init__.py b/providers/tests/apache/kylin/operators/__init__.py
similarity index 100%
rename from tests/providers/apache/kylin/__init__.py
rename to providers/tests/apache/kylin/operators/__init__.py
diff --git a/tests/providers/apache/kylin/operators/test_kylin_cube.py b/providers/tests/apache/kylin/operators/test_kylin_cube.py
similarity index 100%
rename from tests/providers/apache/kylin/operators/test_kylin_cube.py
rename to providers/tests/apache/kylin/operators/test_kylin_cube.py
diff --git a/tests/providers/amazon/aws/executors/batch/__init__.py b/providers/tests/apache/livy/__init__.py
similarity index 100%
rename from tests/providers/amazon/aws/executors/batch/__init__.py
rename to providers/tests/apache/livy/__init__.py
diff --git a/tests/providers/amazon/aws/executors/ecs/__init__.py b/providers/tests/apache/livy/hooks/__init__.py
similarity index 100%
rename from tests/providers/amazon/aws/executors/ecs/__init__.py
rename to providers/tests/apache/livy/hooks/__init__.py
diff --git a/tests/providers/apache/livy/hooks/test_livy.py b/providers/tests/apache/livy/hooks/test_livy.py
similarity index 99%
rename from tests/providers/apache/livy/hooks/test_livy.py
rename to providers/tests/apache/livy/hooks/test_livy.py
index d3c110e68b4b..0fca347b06a9 100644
--- a/tests/providers/apache/livy/hooks/test_livy.py
+++ b/providers/tests/apache/livy/hooks/test_livy.py
@@ -30,7 +30,8 @@
 from airflow.models import Connection
 from airflow.providers.apache.livy.hooks.livy import BatchState, LivyAsyncHook, LivyHook
 from airflow.utils import db
-from tests.test_utils.db import clear_db_connections
+
+from dev.tests_common.test_utils.db import clear_db_connections
 
 pytestmark = pytest.mark.skip_if_database_isolation_mode
 
diff --git a/tests/providers/amazon/aws/executors/utils/__init__.py b/providers/tests/apache/livy/operators/__init__.py
similarity index 100%
rename from tests/providers/amazon/aws/executors/utils/__init__.py
rename to providers/tests/apache/livy/operators/__init__.py
diff --git a/tests/providers/apache/livy/operators/test_livy.py b/providers/tests/apache/livy/operators/test_livy.py
similarity index 100%
rename from tests/providers/apache/livy/operators/test_livy.py
rename to providers/tests/apache/livy/operators/test_livy.py
diff --git a/tests/providers/amazon/aws/fs/__init__.py b/providers/tests/apache/livy/sensors/__init__.py
similarity index 100%
rename from tests/providers/amazon/aws/fs/__init__.py
rename to providers/tests/apache/livy/sensors/__init__.py
diff --git a/tests/providers/apache/livy/sensors/test_livy.py b/providers/tests/apache/livy/sensors/test_livy.py
similarity index 100%
rename from tests/providers/apache/livy/sensors/test_livy.py
rename to providers/tests/apache/livy/sensors/test_livy.py
diff --git a/tests/providers/amazon/aws/hooks/__init__.py b/providers/tests/apache/livy/triggers/__init__.py
similarity index 100%
rename from tests/providers/amazon/aws/hooks/__init__.py
rename to providers/tests/apache/livy/triggers/__init__.py
diff --git a/tests/providers/apache/livy/triggers/test_livy.py b/providers/tests/apache/livy/triggers/test_livy.py
similarity index 100%
rename from tests/providers/apache/livy/triggers/test_livy.py
rename to providers/tests/apache/livy/triggers/test_livy.py
diff --git a/tests/providers/apache/kylin/hooks/__init__.py b/providers/tests/apache/pig/__init__.py
similarity index 100%
rename from tests/providers/apache/kylin/hooks/__init__.py
rename to providers/tests/apache/pig/__init__.py
diff --git a/tests/providers/apache/kylin/operators/__init__.py b/providers/tests/apache/pig/hooks/__init__.py
similarity index 100%
rename from tests/providers/apache/kylin/operators/__init__.py
rename to providers/tests/apache/pig/hooks/__init__.py
diff --git a/tests/providers/apache/pig/hooks/test_pig.py b/providers/tests/apache/pig/hooks/test_pig.py
similarity index 100%
rename from tests/providers/apache/pig/hooks/test_pig.py
rename to providers/tests/apache/pig/hooks/test_pig.py
diff --git a/tests/providers/amazon/aws/infrastructure/__init__.py b/providers/tests/apache/pig/operators/__init__.py
similarity index 100%
rename from tests/providers/amazon/aws/infrastructure/__init__.py
rename to providers/tests/apache/pig/operators/__init__.py
diff --git a/tests/providers/apache/pig/operators/test_pig.py b/providers/tests/apache/pig/operators/test_pig.py
similarity index 100%
rename from tests/providers/apache/pig/operators/test_pig.py
rename to providers/tests/apache/pig/operators/test_pig.py
diff --git a/tests/providers/apache/pig/__init__.py b/providers/tests/apache/pinot/__init__.py
similarity index 100%
rename from tests/providers/apache/pig/__init__.py
rename to providers/tests/apache/pinot/__init__.py
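[Editor's note] The Kafka hunks above change quoted strings rather than import statements: apply_function and producer_function are passed as dotted paths that the operator imports at run time, so the strings must track the package move even though no import line names them. The mechanism is roughly the following, shown as a simplified sketch rather than Airflow's exact code:

    from importlib import import_module

    def resolve_dotted_path(dotted_path: str):
        # "pkg.module.attr" -> import pkg.module, then fetch attr from it.
        module_name, _, attribute = dotted_path.rpartition(".")
        return getattr(import_module(module_name), attribute)

    # Works for any importable path; stdlib example:
    sqrt = resolve_dotted_path("math.sqrt")
    assert sqrt(9) == 3.0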
diff --git a/tests/providers/apache/pig/hooks/__init__.py b/providers/tests/apache/pinot/hooks/__init__.py
similarity index 100%
rename from tests/providers/apache/pig/hooks/__init__.py
rename to providers/tests/apache/pinot/hooks/__init__.py
diff --git a/tests/providers/apache/pinot/hooks/test_pinot.py b/providers/tests/apache/pinot/hooks/test_pinot.py
similarity index 100%
rename from tests/providers/apache/pinot/hooks/test_pinot.py
rename to providers/tests/apache/pinot/hooks/test_pinot.py
diff --git a/tests/providers/apache/pinot/__init__.py b/providers/tests/apache/spark/__init__.py
similarity index 100%
rename from tests/providers/apache/pinot/__init__.py
rename to providers/tests/apache/spark/__init__.py
diff --git a/tests/providers/amazon/aws/infrastructure/example_s3_to_redshift/__init__.py b/providers/tests/apache/spark/decorators/__init__.py
similarity index 100%
rename from tests/providers/amazon/aws/infrastructure/example_s3_to_redshift/__init__.py
rename to providers/tests/apache/spark/decorators/__init__.py
diff --git a/tests/providers/apache/spark/decorators/test_pyspark.py b/providers/tests/apache/spark/decorators/test_pyspark.py
similarity index 100%
rename from tests/providers/apache/spark/decorators/test_pyspark.py
rename to providers/tests/apache/spark/decorators/test_pyspark.py
diff --git a/tests/providers/apache/pinot/hooks/__init__.py b/providers/tests/apache/spark/hooks/__init__.py
similarity index 100%
rename from tests/providers/apache/pinot/hooks/__init__.py
rename to providers/tests/apache/spark/hooks/__init__.py
diff --git a/tests/providers/apache/spark/hooks/test_spark_connect.py b/providers/tests/apache/spark/hooks/test_spark_connect.py
similarity index 100%
rename from tests/providers/apache/spark/hooks/test_spark_connect.py
rename to providers/tests/apache/spark/hooks/test_spark_connect.py
diff --git a/tests/providers/apache/spark/hooks/test_spark_jdbc.py b/providers/tests/apache/spark/hooks/test_spark_jdbc.py
similarity index 100%
rename from tests/providers/apache/spark/hooks/test_spark_jdbc.py
rename to providers/tests/apache/spark/hooks/test_spark_jdbc.py
diff --git a/tests/providers/apache/spark/hooks/test_spark_jdbc_script.py b/providers/tests/apache/spark/hooks/test_spark_jdbc_script.py
similarity index 100%
rename from tests/providers/apache/spark/hooks/test_spark_jdbc_script.py
rename to providers/tests/apache/spark/hooks/test_spark_jdbc_script.py
diff --git a/tests/providers/apache/spark/hooks/test_spark_sql.py b/providers/tests/apache/spark/hooks/test_spark_sql.py
similarity index 99%
rename from tests/providers/apache/spark/hooks/test_spark_sql.py
rename to providers/tests/apache/spark/hooks/test_spark_sql.py
index 9a33bfc2923d..10f63cce9932 100644
--- a/tests/providers/apache/spark/hooks/test_spark_sql.py
+++ b/providers/tests/apache/spark/hooks/test_spark_sql.py
@@ -27,7 +27,8 @@
 from airflow.models import Connection
 from airflow.providers.apache.spark.hooks.spark_sql import SparkSqlHook
 from airflow.utils import db
-from tests.test_utils.db import clear_db_connections
+
+from dev.tests_common.test_utils.db import clear_db_connections
 
 pytestmark = pytest.mark.db_test
 
diff --git a/tests/providers/apache/spark/hooks/test_spark_submit.py b/providers/tests/apache/spark/hooks/test_spark_submit.py
similarity index 100%
rename from tests/providers/apache/spark/hooks/test_spark_submit.py
rename to providers/tests/apache/spark/hooks/test_spark_submit.py
diff --git a/tests/providers/apache/spark/__init__.py b/providers/tests/apache/spark/operators/__init__.py
similarity index 100%
rename from tests/providers/apache/spark/__init__.py
rename to providers/tests/apache/spark/operators/__init__.py
diff --git a/tests/providers/apache/spark/operators/test_spark_jdbc.py b/providers/tests/apache/spark/operators/test_spark_jdbc.py
similarity index 100%
rename from tests/providers/apache/spark/operators/test_spark_jdbc.py
rename to providers/tests/apache/spark/operators/test_spark_jdbc.py
diff --git a/tests/providers/apache/spark/operators/test_spark_sql.py b/providers/tests/apache/spark/operators/test_spark_sql.py
similarity index 100%
rename from tests/providers/apache/spark/operators/test_spark_sql.py
rename to providers/tests/apache/spark/operators/test_spark_sql.py
diff --git a/tests/providers/apache/spark/operators/test_spark_submit.py b/providers/tests/apache/spark/operators/test_spark_submit.py
similarity index 100%
rename from tests/providers/apache/spark/operators/test_spark_submit.py
rename to providers/tests/apache/spark/operators/test_spark_submit.py
diff --git a/tests/providers/amazon/aws/links/__init__.py b/providers/tests/apprise/__init__.py
similarity index 100%
rename from tests/providers/amazon/aws/links/__init__.py
rename to providers/tests/apprise/__init__.py
diff --git a/tests/providers/apache/spark/hooks/__init__.py b/providers/tests/apprise/hooks/__init__.py
similarity index 100%
rename from tests/providers/apache/spark/hooks/__init__.py
rename to providers/tests/apprise/hooks/__init__.py
diff --git a/tests/providers/apprise/hooks/test_apprise.py b/providers/tests/apprise/hooks/test_apprise.py
similarity index 100%
rename from tests/providers/apprise/hooks/test_apprise.py
rename to providers/tests/apprise/hooks/test_apprise.py
diff --git a/tests/providers/apache/spark/operators/__init__.py b/providers/tests/apprise/notifications/__init__.py
similarity index 100%
rename from tests/providers/apache/spark/operators/__init__.py
rename to providers/tests/apprise/notifications/__init__.py
diff --git a/tests/providers/apprise/notifications/test_apprise.py b/providers/tests/apprise/notifications/test_apprise.py
similarity index 100%
rename from tests/providers/apprise/notifications/test_apprise.py
rename to providers/tests/apprise/notifications/test_apprise.py
diff --git a/tests/providers/amazon/aws/log/__init__.py b/providers/tests/arangodb/__init__.py
similarity index 100%
rename from tests/providers/amazon/aws/log/__init__.py
rename to providers/tests/arangodb/__init__.py
diff --git a/tests/providers/amazon/aws/notifications/__init__.py b/providers/tests/arangodb/hooks/__init__.py
similarity index 100%
rename from tests/providers/amazon/aws/notifications/__init__.py
rename to providers/tests/arangodb/hooks/__init__.py
diff --git a/tests/providers/arangodb/hooks/test_arangodb.py b/providers/tests/arangodb/hooks/test_arangodb.py
similarity index 100%
rename from tests/providers/arangodb/hooks/test_arangodb.py
rename to providers/tests/arangodb/hooks/test_arangodb.py
diff --git a/tests/providers/amazon/aws/operators/__init__.py b/providers/tests/arangodb/operators/__init__.py
similarity index 100%
rename from tests/providers/amazon/aws/operators/__init__.py
rename to providers/tests/arangodb/operators/__init__.py
diff --git a/tests/providers/arangodb/operators/test_arangodb.py b/providers/tests/arangodb/operators/test_arangodb.py
similarity index 100%
rename from tests/providers/arangodb/operators/test_arangodb.py
rename to providers/tests/arangodb/operators/test_arangodb.py
diff --git a/tests/providers/amazon/aws/secrets/__init__.py b/providers/tests/arangodb/sensors/__init__.py
similarity index 100%
rename from tests/providers/amazon/aws/secrets/__init__.py
rename to providers/tests/arangodb/sensors/__init__.py
diff --git a/tests/providers/arangodb/sensors/test_arangodb.py b/providers/tests/arangodb/sensors/test_arangodb.py
similarity index 100%
rename from tests/providers/arangodb/sensors/test_arangodb.py
rename to providers/tests/arangodb/sensors/test_arangodb.py
diff --git a/tests/providers/apprise/hooks/__init__.py b/providers/tests/asana/__init__.py
similarity index 100%
rename from tests/providers/apprise/hooks/__init__.py
rename to providers/tests/asana/__init__.py
diff --git a/tests/providers/apprise/notifications/__init__.py b/providers/tests/asana/hooks/__init__.py
similarity index 100%
rename from tests/providers/apprise/notifications/__init__.py
rename to providers/tests/asana/hooks/__init__.py
diff --git a/tests/providers/asana/hooks/test_asana.py b/providers/tests/asana/hooks/test_asana.py
similarity index 100%
rename from tests/providers/asana/hooks/test_asana.py
rename to providers/tests/asana/hooks/test_asana.py
diff --git a/tests/providers/asana/__init__.py b/providers/tests/asana/operators/__init__.py
similarity index 100%
rename from tests/providers/asana/__init__.py
rename to providers/tests/asana/operators/__init__.py
diff --git a/tests/providers/asana/operators/test_asana_tasks.py b/providers/tests/asana/operators/test_asana_tasks.py
similarity index 100%
rename from tests/providers/asana/operators/test_asana_tasks.py
rename to providers/tests/asana/operators/test_asana_tasks.py
diff --git a/tests/providers/amazon/aws/sensors/__init__.py b/providers/tests/atlassian/__init__.py
similarity index 100%
rename from tests/providers/amazon/aws/sensors/__init__.py
rename to providers/tests/atlassian/__init__.py
diff --git a/tests/providers/amazon/aws/system/__init__.py b/providers/tests/atlassian/jira/__init__.py
similarity index 100%
rename from tests/providers/amazon/aws/system/__init__.py
rename to providers/tests/atlassian/jira/__init__.py
diff --git a/tests/providers/amazon/aws/system/utils/__init__.py b/providers/tests/atlassian/jira/hooks/__init__.py
similarity index 100%
rename from tests/providers/amazon/aws/system/utils/__init__.py
rename to providers/tests/atlassian/jira/hooks/__init__.py
diff --git a/tests/providers/atlassian/jira/hooks/test_jira.py b/providers/tests/atlassian/jira/hooks/test_jira.py
similarity index 98%
rename from tests/providers/atlassian/jira/hooks/test_jira.py
rename to providers/tests/atlassian/jira/hooks/test_jira.py
index 339274e317c5..e2cf9389471e 100644
--- a/tests/providers/atlassian/jira/hooks/test_jira.py
+++ b/providers/tests/atlassian/jira/hooks/test_jira.py
@@ -24,7 +24,8 @@
 from airflow.exceptions import AirflowProviderDeprecationWarning
 from airflow.models import Connection
 from airflow.providers.atlassian.jira.hooks.jira import JiraHook
-from tests.test_utils.compat import connection_as_json
+
+from dev.tests_common.test_utils.compat import connection_as_json
 
 
 @pytest.fixture
diff --git a/tests/providers/amazon/aws/transfers/__init__.py b/providers/tests/atlassian/jira/notifications/__init__.py
similarity index 100%
rename from tests/providers/amazon/aws/transfers/__init__.py
rename to providers/tests/atlassian/jira/notifications/__init__.py
diff --git a/tests/providers/atlassian/jira/notifications/test_jira.py b/providers/tests/atlassian/jira/notifications/test_jira.py
similarity index 100%
rename from tests/providers/atlassian/jira/notifications/test_jira.py
rename to providers/tests/atlassian/jira/notifications/test_jira.py
diff --git a/tests/providers/amazon/aws/triggers/__init__.py b/providers/tests/atlassian/jira/operators/__init__.py
similarity index 100%
rename from tests/providers/amazon/aws/triggers/__init__.py
rename to providers/tests/atlassian/jira/operators/__init__.py
diff --git a/tests/providers/atlassian/jira/operators/test_jira.py b/providers/tests/atlassian/jira/operators/test_jira.py
similarity index 98%
rename from tests/providers/atlassian/jira/operators/test_jira.py
rename to providers/tests/atlassian/jira/operators/test_jira.py
index 9282089bae56..614c1a4939a9 100644
--- a/tests/providers/atlassian/jira/operators/test_jira.py
+++ b/providers/tests/atlassian/jira/operators/test_jira.py
@@ -24,7 +24,8 @@
 from airflow.models import Connection
 from airflow.providers.atlassian.jira.operators.jira import JiraOperator
 from airflow.utils import timezone
-from tests.test_utils.compat import connection_as_json
+
+from dev.tests_common.test_utils.compat import connection_as_json
 
 DEFAULT_DATE = timezone.datetime(2017, 1, 1)
 MINIMAL_TEST_TICKET = {
diff --git a/tests/providers/amazon/aws/utils/__init__.py b/providers/tests/atlassian/jira/sensors/__init__.py
similarity index 100%
rename from tests/providers/amazon/aws/utils/__init__.py
rename to providers/tests/atlassian/jira/sensors/__init__.py
diff --git a/tests/providers/atlassian/jira/sensors/test_jira.py b/providers/tests/atlassian/jira/sensors/test_jira.py
similarity index 97%
rename from tests/providers/atlassian/jira/sensors/test_jira.py
rename to providers/tests/atlassian/jira/sensors/test_jira.py
index 5e2ce896b6c6..9cb2572969ad 100644
--- a/tests/providers/atlassian/jira/sensors/test_jira.py
+++ b/providers/tests/atlassian/jira/sensors/test_jira.py
@@ -24,7 +24,8 @@
 from airflow.models import Connection
 from airflow.providers.atlassian.jira.sensors.jira import JiraTicketSensor
 from airflow.utils import timezone
-from tests.test_utils.compat import connection_as_json
+
+from dev.tests_common.test_utils.compat import connection_as_json
 
 DEFAULT_DATE = timezone.datetime(2017, 1, 1)
 MINIMAL_TEST_TICKET = {
diff --git a/tests/providers/asana/hooks/__init__.py b/providers/tests/celery/__init__.py
similarity index 100%
rename from tests/providers/asana/hooks/__init__.py
rename to providers/tests/celery/__init__.py
diff --git a/tests/providers/amazon/aws/waiters/__init__.py b/providers/tests/celery/cli/__init__.py
similarity index 100%
rename from tests/providers/amazon/aws/waiters/__init__.py
rename to providers/tests/celery/cli/__init__.py
diff --git a/tests/providers/celery/cli/test_celery_command.py b/providers/tests/celery/cli/test_celery_command.py
similarity index 99%
rename from tests/providers/celery/cli/test_celery_command.py
rename to providers/tests/celery/cli/test_celery_command.py
index d7dcdc474191..03f5d60dabe4 100644
--- a/tests/providers/celery/cli/test_celery_command.py
+++ b/providers/tests/celery/cli/test_celery_command.py
@@ -30,8 +30,9 @@
 from airflow.configuration import conf
 from airflow.executors import executor_loader
 from airflow.providers.celery.cli import celery_command
-from tests.test_utils.compat import AIRFLOW_V_2_10_PLUS
-from tests.test_utils.config import conf_vars
+
+from dev.tests_common.test_utils.compat import AIRFLOW_V_2_10_PLUS
+from dev.tests_common.test_utils.config import conf_vars
 
 pytestmark = pytest.mark.db_test
 
diff --git a/tests/providers/asana/operators/__init__.py b/providers/tests/celery/executors/__init__.py
similarity index 100%
rename from tests/providers/asana/operators/__init__.py
rename to providers/tests/celery/executors/__init__.py
diff --git a/tests/providers/celery/executors/test_celery_executor.py b/providers/tests/celery/executors/test_celery_executor.py
similarity index 96%
rename from tests/providers/celery/executors/test_celery_executor.py
rename to providers/tests/celery/executors/test_celery_executor.py
index 63ca4a47d255..261a11f837e4 100644
--- a/tests/providers/celery/executors/test_celery_executor.py
+++ b/providers/tests/celery/executors/test_celery_executor.py
@@ -41,9 +41,10 @@
 from airflow.providers.celery.executors.celery_executor import CeleryExecutor
 from airflow.utils import timezone
 from airflow.utils.state import State
-from tests.test_utils import db
-from tests.test_utils.compat import AIRFLOW_V_2_10_PLUS
-from tests.test_utils.config import conf_vars
+
+from dev.tests_common.test_utils import db
+from dev.tests_common.test_utils.compat import AIRFLOW_V_2_10_PLUS
+from dev.tests_common.test_utils.config import conf_vars
 
 pytestmark = pytest.mark.db_test
 
@@ -165,11 +166,15 @@ def test_command_validation(self, command, raise_exception):
             ValueError, match=r'The command must start with \["airflow", "tasks", "run"\]\.'
         )
 
-        with mock.patch(
-            "airflow.providers.celery.executors.celery_executor_utils._execute_in_subprocess"
-        ) as mock_subproc, mock.patch(
-            "airflow.providers.celery.executors.celery_executor_utils._execute_in_fork"
-        ) as mock_fork, mock.patch("celery.app.task.Task.request") as mock_task:
+        with (
+            mock.patch(
+                "airflow.providers.celery.executors.celery_executor_utils._execute_in_subprocess"
+            ) as mock_subproc,
+            mock.patch(
+                "airflow.providers.celery.executors.celery_executor_utils._execute_in_fork"
+            ) as mock_fork,
+            mock.patch("celery.app.task.Task.request") as mock_task,
+        ):
             mock_task.id = "abcdef-124215-abcdef"
             with expected_context:
                 celery_executor_utils.execute_command(command)
diff --git a/tests/providers/celery/executors/test_celery_kubernetes_executor.py b/providers/tests/celery/executors/test_celery_kubernetes_executor.py
similarity index 100%
rename from tests/providers/celery/executors/test_celery_kubernetes_executor.py
rename to providers/tests/celery/executors/test_celery_kubernetes_executor.py
diff --git a/tests/providers/apache/__init__.py b/providers/tests/celery/log_handlers/__init__.py
similarity index 100%
rename from tests/providers/apache/__init__.py
rename to providers/tests/celery/log_handlers/__init__.py
diff --git a/tests/providers/celery/log_handlers/test_log_handlers.py b/providers/tests/celery/log_handlers/test_log_handlers.py
similarity index 96%
rename from tests/providers/celery/log_handlers/test_log_handlers.py
rename to providers/tests/celery/log_handlers/test_log_handlers.py
index 5e93b076e41e..9eb9e33e2ae0 100644
--- a/tests/providers/celery/log_handlers/test_log_handlers.py
+++ b/providers/tests/celery/log_handlers/test_log_handlers.py
@@ -35,8 +35,9 @@
 from airflow.utils.state import TaskInstanceState
 from airflow.utils.timezone import datetime
 from airflow.utils.types import DagRunType
-from tests.test_utils.compat import AIRFLOW_V_3_0_PLUS
-from tests.test_utils.config import conf_vars
+
+from dev.tests_common.test_utils.compat import AIRFLOW_V_3_0_PLUS
+from dev.tests_common.test_utils.config import conf_vars
 
 if AIRFLOW_V_3_0_PLUS:
     pass
diff --git a/tests/providers/celery/__init__.py b/providers/tests/celery/sensors/__init__.py
similarity index 100%
rename from tests/providers/celery/__init__.py
rename to providers/tests/celery/sensors/__init__.py
diff --git a/tests/providers/celery/sensors/test_celery_queue.py b/providers/tests/celery/sensors/test_celery_queue.py
similarity index 100%
rename from tests/providers/celery/sensors/test_celery_queue.py
rename to providers/tests/celery/sensors/test_celery_queue.py
diff --git a/tests/providers/celery/executors/__init__.py b/providers/tests/cloudant/__init__.py
similarity index 100%
rename from tests/providers/celery/executors/__init__.py
rename to providers/tests/cloudant/__init__.py
diff --git a/tests/providers/celery/sensors/__init__.py b/providers/tests/cloudant/hooks/__init__.py
similarity index 100%
rename from tests/providers/celery/sensors/__init__.py
rename to providers/tests/cloudant/hooks/__init__.py
diff --git a/tests/providers/cloudant/hooks/test_cloudant.py b/providers/tests/cloudant/hooks/test_cloudant.py
similarity index 100%
rename from tests/providers/cloudant/hooks/test_cloudant.py
rename to providers/tests/cloudant/hooks/test_cloudant.py
diff --git a/tests/providers/apache/beam/__init__.py b/providers/tests/cncf/__init__.py
similarity index 100%
rename from tests/providers/apache/beam/__init__.py
rename to providers/tests/cncf/__init__.py
diff --git a/tests/providers/cloudant/__init__.py b/providers/tests/cncf/kubernetes/__init__.py
similarity index 100%
rename from tests/providers/cloudant/__init__.py
rename to providers/tests/cncf/kubernetes/__init__.py
diff --git a/tests/providers/apache/beam/hooks/__init__.py b/providers/tests/cncf/kubernetes/cli/__init__.py
similarity index 100%
rename from tests/providers/apache/beam/hooks/__init__.py
rename to providers/tests/cncf/kubernetes/cli/__init__.py
diff --git a/tests/providers/cncf/kubernetes/cli/test_kubernetes_command.py b/providers/tests/cncf/kubernetes/cli/test_kubernetes_command.py
similarity index 99%
rename from tests/providers/cncf/kubernetes/cli/test_kubernetes_command.py
rename to providers/tests/cncf/kubernetes/cli/test_kubernetes_command.py
index 790801a3bbcf..62019a8d2fe9 100644
--- a/tests/providers/cncf/kubernetes/cli/test_kubernetes_command.py
+++ b/providers/tests/cncf/kubernetes/cli/test_kubernetes_command.py
@@ -28,7 +28,8 @@
 from airflow.cli import cli_parser
 from airflow.executors import executor_loader
 from airflow.providers.cncf.kubernetes.cli import kubernetes_command
-from tests.test_utils.config import conf_vars
+
+from dev.tests_common.test_utils.config import conf_vars
 
 pytestmark = pytest.mark.db_test
 
diff --git a/tests/providers/cncf/kubernetes/conftest.py b/providers/tests/cncf/kubernetes/conftest.py
similarity index 100%
rename from tests/providers/cncf/kubernetes/conftest.py
rename to providers/tests/cncf/kubernetes/conftest.py
diff --git a/tests/providers/apache/beam/operators/__init__.py b/providers/tests/cncf/kubernetes/data_files/__init__.py
similarity index 100%
rename from tests/providers/apache/beam/operators/__init__.py
rename to providers/tests/cncf/kubernetes/data_files/__init__.py
diff --git a/tests/providers/apache/beam/triggers/__init__.py b/providers/tests/cncf/kubernetes/data_files/executor/__init__.py
similarity index 100%
rename from tests/providers/apache/beam/triggers/__init__.py
rename to providers/tests/cncf/kubernetes/data_files/executor/__init__.py
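[Editor's note] The conf_vars helper that keeps moving to dev.tests_common.test_utils.config in these hunks changes only its home, not its interface: it maps (section, key) tuples to temporary configuration values for the duration of a test. A sketch of typical usage after the move, assuming dev.tests_common is importable from the test environment as the rewritten imports imply:

    from dev.tests_common.test_utils.config import conf_vars

    def test_runs_with_kubernetes_executor():
        with conf_vars({("core", "executor"): "KubernetesExecutor"}):
            ...  # code under test sees the overridden configuration here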
diff --git a/tests/providers/cncf/kubernetes/data_files/executor/basic_template.yaml b/providers/tests/cncf/kubernetes/data_files/executor/basic_template.yaml
similarity index 100%
rename from tests/providers/cncf/kubernetes/data_files/executor/basic_template.yaml
rename to providers/tests/cncf/kubernetes/data_files/executor/basic_template.yaml
diff --git a/tests/providers/cncf/kubernetes/data_files/kube_config b/providers/tests/cncf/kubernetes/data_files/kube_config
similarity index 100%
rename from tests/providers/cncf/kubernetes/data_files/kube_config
rename to providers/tests/cncf/kubernetes/data_files/kube_config
diff --git a/tests/providers/apache/cassandra/__init__.py b/providers/tests/cncf/kubernetes/data_files/pods/__init__.py
similarity index 100%
rename from tests/providers/apache/cassandra/__init__.py
rename to providers/tests/cncf/kubernetes/data_files/pods/__init__.py
diff --git a/tests/providers/cncf/kubernetes/data_files/pods/generator_base.yaml b/providers/tests/cncf/kubernetes/data_files/pods/generator_base.yaml
similarity index 100%
rename from tests/providers/cncf/kubernetes/data_files/pods/generator_base.yaml
rename to providers/tests/cncf/kubernetes/data_files/pods/generator_base.yaml
diff --git a/tests/providers/cncf/kubernetes/data_files/pods/generator_base_with_secrets.yaml b/providers/tests/cncf/kubernetes/data_files/pods/generator_base_with_secrets.yaml
similarity index 100%
rename from tests/providers/cncf/kubernetes/data_files/pods/generator_base_with_secrets.yaml
rename to providers/tests/cncf/kubernetes/data_files/pods/generator_base_with_secrets.yaml
diff --git a/tests/providers/cncf/kubernetes/data_files/pods/template.yaml b/providers/tests/cncf/kubernetes/data_files/pods/template.yaml
similarity index 100%
rename from tests/providers/cncf/kubernetes/data_files/pods/template.yaml
rename to providers/tests/cncf/kubernetes/data_files/pods/template.yaml
diff --git a/tests/providers/apache/cassandra/sensors/__init__.py b/providers/tests/cncf/kubernetes/data_files/spark/__init__.py
similarity index 100%
rename from tests/providers/apache/cassandra/sensors/__init__.py
rename to providers/tests/cncf/kubernetes/data_files/spark/__init__.py
diff --git a/tests/providers/cncf/kubernetes/data_files/spark/application_template.yaml b/providers/tests/cncf/kubernetes/data_files/spark/application_template.yaml
similarity index 100%
rename from tests/providers/cncf/kubernetes/data_files/spark/application_template.yaml
rename to providers/tests/cncf/kubernetes/data_files/spark/application_template.yaml
diff --git a/tests/providers/cncf/kubernetes/data_files/spark/application_test.json b/providers/tests/cncf/kubernetes/data_files/spark/application_test.json
similarity index 100%
rename from tests/providers/cncf/kubernetes/data_files/spark/application_test.json
rename to providers/tests/cncf/kubernetes/data_files/spark/application_test.json
diff --git a/tests/providers/cncf/kubernetes/data_files/spark/application_test.yaml b/providers/tests/cncf/kubernetes/data_files/spark/application_test.yaml
similarity index 100%
rename from tests/providers/cncf/kubernetes/data_files/spark/application_test.yaml
rename to providers/tests/cncf/kubernetes/data_files/spark/application_test.yaml
diff --git a/tests/providers/cncf/kubernetes/data_files/spark/application_test_with_no_name_from_config.json b/providers/tests/cncf/kubernetes/data_files/spark/application_test_with_no_name_from_config.json
similarity index 100%
rename from tests/providers/cncf/kubernetes/data_files/spark/application_test_with_no_name_from_config.json
rename to providers/tests/cncf/kubernetes/data_files/spark/application_test_with_no_name_from_config.json
diff --git a/tests/providers/cncf/kubernetes/data_files/spark/application_test_with_no_name_from_config.yaml b/providers/tests/cncf/kubernetes/data_files/spark/application_test_with_no_name_from_config.yaml
similarity index 100%
rename from tests/providers/cncf/kubernetes/data_files/spark/application_test_with_no_name_from_config.yaml
rename to providers/tests/cncf/kubernetes/data_files/spark/application_test_with_no_name_from_config.yaml
diff --git a/tests/providers/apache/druid/transfers/__init__.py b/providers/tests/cncf/kubernetes/decorators/__init__.py
similarity index 100%
rename from tests/providers/apache/druid/transfers/__init__.py
rename to providers/tests/cncf/kubernetes/decorators/__init__.py
diff --git a/tests/providers/cncf/kubernetes/decorators/test_kubernetes.py b/providers/tests/cncf/kubernetes/decorators/test_kubernetes.py
similarity index 100%
rename from tests/providers/cncf/kubernetes/decorators/test_kubernetes.py
rename to providers/tests/cncf/kubernetes/decorators/test_kubernetes.py
diff --git a/tests/providers/apache/hive/transfers/__init__.py b/providers/tests/cncf/kubernetes/executors/__init__.py
similarity index 100%
rename from tests/providers/apache/hive/transfers/__init__.py
rename to providers/tests/cncf/kubernetes/executors/__init__.py
diff --git a/tests/providers/cncf/kubernetes/executors/test_kubernetes_executor.py b/providers/tests/cncf/kubernetes/executors/test_kubernetes_executor.py
similarity index 99%
rename from tests/providers/cncf/kubernetes/executors/test_kubernetes_executor.py
rename to providers/tests/cncf/kubernetes/executors/test_kubernetes_executor.py
index 12435426dd89..5240bf0faecb 100644
--- a/tests/providers/cncf/kubernetes/executors/test_kubernetes_executor.py
+++ b/providers/tests/cncf/kubernetes/executors/test_kubernetes_executor.py
@@ -54,8 +54,9 @@
 from airflow.providers.cncf.kubernetes.pod_generator import PodGenerator
 from airflow.utils import timezone
 from airflow.utils.state import State, TaskInstanceState
-from tests.test_utils.compat import BashOperator
-from tests.test_utils.config import conf_vars
+
+from dev.tests_common.test_utils.compat import BashOperator
+from dev.tests_common.test_utils.config import conf_vars
 
 pytestmark = pytest.mark.skip_if_database_isolation_mode
 
@@ -1608,12 +1609,15 @@ def setup_method(self):
         self.events = []
 
     def _run(self):
-        with mock.patch(
-            "airflow.providers.cncf.kubernetes.executors.kubernetes_executor_utils.watch"
-        ) as mock_watch, mock.patch.object(
-            KubernetesJobWatcher,
-            "_pod_events",
-        ) as mock_pod_events:
+        with (
+            mock.patch(
+                "airflow.providers.cncf.kubernetes.executors.kubernetes_executor_utils.watch"
+            ) as mock_watch,
+            mock.patch.object(
+                KubernetesJobWatcher,
+                "_pod_events",
+            ) as mock_pod_events,
+        ):
             mock_watch.Watch.return_value.stream.return_value = self.events
             mock_pod_events.return_value = self.events
             latest_resource_version = self.watcher._run(
diff --git a/tests/providers/cncf/kubernetes/executors/test_local_kubernetes_executor.py b/providers/tests/cncf/kubernetes/executors/test_local_kubernetes_executor.py
similarity index 100%
rename from tests/providers/cncf/kubernetes/executors/test_local_kubernetes_executor.py
rename to providers/tests/cncf/kubernetes/executors/test_local_kubernetes_executor.py
diff --git a/tests/providers/apache/iceberg/__init__.py b/providers/tests/cncf/kubernetes/hooks/__init__.py
similarity index 100%
rename from tests/providers/apache/iceberg/__init__.py
rename to providers/tests/cncf/kubernetes/hooks/__init__.py
diff --git a/tests/providers/cncf/kubernetes/hooks/test_kubernetes.py b/providers/tests/cncf/kubernetes/hooks/test_kubernetes.py
similarity index 99%
rename from tests/providers/cncf/kubernetes/hooks/test_kubernetes.py
rename to providers/tests/cncf/kubernetes/hooks/test_kubernetes.py
index 065768def24e..a6847e9c2fe5 100644
--- a/tests/providers/cncf/kubernetes/hooks/test_kubernetes.py
+++ b/providers/tests/cncf/kubernetes/hooks/test_kubernetes.py
@@ -36,8 +36,9 @@
 from airflow.providers.cncf.kubernetes.hooks.kubernetes import AsyncKubernetesHook, KubernetesHook
 from airflow.utils import db
 from airflow.utils.db import merge_conn
-from tests.test_utils.db import clear_db_connections
-from tests.test_utils.providers import get_provider_min_airflow_version
+
+from dev.tests_common.test_utils.db import clear_db_connections
+from dev.tests_common.test_utils.providers import get_provider_min_airflow_version
 
 pytestmark = pytest.mark.db_test
 
@@ -677,8 +678,9 @@ class TestKubernetesHookIncorrectConfiguration:
     )
     def test_should_raise_exception_on_invalid_configuration(self, conn_uri):
         kubernetes_hook = KubernetesHook()
-        with mock.patch.dict("os.environ", AIRFLOW_CONN_KUBERNETES_DEFAULT=conn_uri), pytest.raises(
-            AirflowException, match="Invalid connection configuration"
+        with (
+            mock.patch.dict("os.environ", AIRFLOW_CONN_KUBERNETES_DEFAULT=conn_uri),
+            pytest.raises(AirflowException, match="Invalid connection configuration"),
         ):
             kubernetes_hook.get_conn()
 
diff --git a/tests/providers/apache/iceberg/hooks/__init__.py b/providers/tests/cncf/kubernetes/log_handlers/__init__.py
similarity index 100%
rename from tests/providers/apache/iceberg/hooks/__init__.py
rename to providers/tests/cncf/kubernetes/log_handlers/__init__.py
diff --git a/tests/providers/cncf/kubernetes/log_handlers/test_log_handlers.py b/providers/tests/cncf/kubernetes/log_handlers/test_log_handlers.py
similarity index 98%
rename from tests/providers/cncf/kubernetes/log_handlers/test_log_handlers.py
rename to providers/tests/cncf/kubernetes/log_handlers/test_log_handlers.py
index 3f9bc963f0e6..b02f16ac9278 100644
--- a/tests/providers/cncf/kubernetes/log_handlers/test_log_handlers.py
+++ b/providers/tests/cncf/kubernetes/log_handlers/test_log_handlers.py
@@ -41,8 +41,9 @@
 from airflow.utils.state import State, TaskInstanceState
 from airflow.utils.timezone import datetime
 from airflow.utils.types import DagRunType
-from tests.test_utils.compat import AIRFLOW_V_3_0_PLUS
-from tests.test_utils.config import conf_vars
+
+from dev.tests_common.test_utils.compat import AIRFLOW_V_3_0_PLUS
+from dev.tests_common.test_utils.config import conf_vars
 
 if AIRFLOW_V_3_0_PLUS:
     from airflow.utils.types import DagRunTriggeredByType
diff --git a/tests/providers/cloudant/hooks/__init__.py b/providers/tests/cncf/kubernetes/models/__init__.py
similarity index 100%
rename from tests/providers/cloudant/hooks/__init__.py
rename to providers/tests/cncf/kubernetes/models/__init__.py
diff --git a/tests/providers/cncf/kubernetes/models/test_secret.py b/providers/tests/cncf/kubernetes/models/test_secret.py
similarity index 100%
rename from tests/providers/cncf/kubernetes/models/test_secret.py
rename to providers/tests/cncf/kubernetes/models/test_secret.py
diff --git a/tests/providers/apache/impala/__init__.py b/providers/tests/cncf/kubernetes/operators/__init__.py
similarity index 100%
rename from tests/providers/apache/impala/__init__.py
rename to providers/tests/cncf/kubernetes/operators/__init__.py
diff --git a/tests/providers/cncf/kubernetes/operators/test_custom_object_launcher.py b/providers/tests/cncf/kubernetes/operators/test_custom_object_launcher.py
similarity index 100%
rename from tests/providers/cncf/kubernetes/operators/test_custom_object_launcher.py
rename to providers/tests/cncf/kubernetes/operators/test_custom_object_launcher.py
diff --git a/tests/providers/cncf/kubernetes/operators/test_job.py b/providers/tests/cncf/kubernetes/operators/test_job.py
similarity index 100%
rename from tests/providers/cncf/kubernetes/operators/test_job.py
rename to providers/tests/cncf/kubernetes/operators/test_job.py
diff --git a/tests/providers/cncf/kubernetes/operators/test_pod.py b/providers/tests/cncf/kubernetes/operators/test_pod.py
similarity index 99%
rename from tests/providers/cncf/kubernetes/operators/test_pod.py
rename to providers/tests/cncf/kubernetes/operators/test_pod.py
index a4ccb4b44b4b..ac50641370fe 100644
--- a/tests/providers/cncf/kubernetes/operators/test_pod.py
+++ b/providers/tests/cncf/kubernetes/operators/test_pod.py
@@ -50,7 +50,8 @@
 from airflow.utils import timezone
 from airflow.utils.session import create_session
 from airflow.utils.types import DagRunType
-from tests.test_utils import db
+
+from dev.tests_common.test_utils import db
 
 pytestmark = [pytest.mark.db_test, pytest.mark.skip_if_database_isolation_mode]
 
@@ -1513,7 +1514,8 @@ def test_get_logs_but_not_for_base_container(
     @patch(KUB_OP_PATH.format("find_pod"))
     def test_execute_sync_callbacks(self, find_pod_mock):
         from airflow.providers.cncf.kubernetes.callbacks import ExecutionMode
-        from tests.providers.cncf.kubernetes.test_callbacks import (
+
+        from providers.tests.cncf.kubernetes.test_callbacks import (
             MockKubernetesPodOperatorCallback,
             MockWrapper,
         )
@@ -1578,7 +1580,8 @@ def test_execute_sync_callbacks(self, find_pod_mock):
     @patch(HOOK_CLASS, new=MagicMock)
     def test_execute_async_callbacks(self):
         from airflow.providers.cncf.kubernetes.callbacks import ExecutionMode
-        from tests.providers.cncf.kubernetes.test_callbacks import (
+
+        from providers.tests.cncf.kubernetes.test_callbacks import (
             MockKubernetesPodOperatorCallback,
             MockWrapper,
         )
diff --git a/tests/providers/cncf/kubernetes/operators/test_resource.py b/providers/tests/cncf/kubernetes/operators/test_resource.py
similarity index 100%
rename from tests/providers/cncf/kubernetes/operators/test_resource.py
rename to providers/tests/cncf/kubernetes/operators/test_resource.py
diff --git a/tests/providers/cncf/kubernetes/operators/test_spark_kubernetes.py b/providers/tests/cncf/kubernetes/operators/test_spark_kubernetes.py
similarity index 99%
rename from tests/providers/cncf/kubernetes/operators/test_spark_kubernetes.py
rename to providers/tests/cncf/kubernetes/operators/test_spark_kubernetes.py
index 9c8c40de6558..18bce53688d4 100644
--- a/tests/providers/cncf/kubernetes/operators/test_spark_kubernetes.py
+++ b/providers/tests/cncf/kubernetes/operators/test_spark_kubernetes.py
@@ -33,7 +33,8 @@
 from airflow.providers.cncf.kubernetes.operators.spark_kubernetes import SparkKubernetesOperator
 from airflow.utils import db, timezone
 from airflow.utils.types import DagRunType
-from tests.test_utils.compat import AIRFLOW_V_2_8_PLUS
+
+from dev.tests_common.test_utils.compat import AIRFLOW_V_2_8_PLUS
 
 
 @patch("airflow.providers.cncf.kubernetes.operators.spark_kubernetes.KubernetesHook")
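[Editor's note] The KubernetesHook hunk above applies the same parenthesization to a mixed pair: an environment override and the expected-failure assertion grouped in one with-statement. The same shape in miniature, with illustrative names only:

    import os
    from unittest import mock

    import pytest

    def test_rejects_missing_setting():
        with (
            mock.patch.dict(os.environ, {"MY_SETTING": "bogus"}, clear=True),
            pytest.raises(KeyError),
        ):
            os.environ["SOME_OTHER_SETTING"]  # absent because of clear=True, so KeyError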
diff --git a/tests/providers/apache/impala/hooks/__init__.py b/providers/tests/cncf/kubernetes/resource_convert/__init__.py
similarity index 100%
rename from tests/providers/apache/impala/hooks/__init__.py
rename to providers/tests/cncf/kubernetes/resource_convert/__init__.py
diff --git a/tests/providers/cncf/kubernetes/resource_convert/test_configmap.py b/providers/tests/cncf/kubernetes/resource_convert/test_configmap.py
similarity index 100%
rename from tests/providers/cncf/kubernetes/resource_convert/test_configmap.py
rename to providers/tests/cncf/kubernetes/resource_convert/test_configmap.py
diff --git a/tests/providers/cncf/kubernetes/resource_convert/test_env_variable.py b/providers/tests/cncf/kubernetes/resource_convert/test_env_variable.py
similarity index 100%
rename from tests/providers/cncf/kubernetes/resource_convert/test_env_variable.py
rename to providers/tests/cncf/kubernetes/resource_convert/test_env_variable.py
diff --git a/tests/providers/cncf/kubernetes/resource_convert/test_secret.py b/providers/tests/cncf/kubernetes/resource_convert/test_secret.py
similarity index 100%
rename from tests/providers/cncf/kubernetes/resource_convert/test_secret.py
rename to providers/tests/cncf/kubernetes/resource_convert/test_secret.py
diff --git a/tests/providers/apache/kafka/__init__.py b/providers/tests/cncf/kubernetes/sensors/__init__.py
similarity index 100%
rename from tests/providers/apache/kafka/__init__.py
rename to providers/tests/cncf/kubernetes/sensors/__init__.py
diff --git a/tests/providers/cncf/kubernetes/sensors/test_spark_kubernetes.py b/providers/tests/cncf/kubernetes/sensors/test_spark_kubernetes.py
similarity index 100%
rename from tests/providers/cncf/kubernetes/sensors/test_spark_kubernetes.py
rename to providers/tests/cncf/kubernetes/sensors/test_spark_kubernetes.py
diff --git a/tests/providers/cncf/kubernetes/test_callbacks.py b/providers/tests/cncf/kubernetes/test_callbacks.py
similarity index 100%
rename from tests/providers/cncf/kubernetes/test_callbacks.py
rename to providers/tests/cncf/kubernetes/test_callbacks.py
diff --git a/tests/providers/cncf/kubernetes/test_client.py b/providers/tests/cncf/kubernetes/test_client.py
similarity index 98%
rename from tests/providers/cncf/kubernetes/test_client.py
rename to providers/tests/cncf/kubernetes/test_client.py
index 1384068fd286..269717ffaa0f 100644
--- a/tests/providers/cncf/kubernetes/test_client.py
+++ b/providers/tests/cncf/kubernetes/test_client.py
@@ -28,7 +28,8 @@
     _enable_tcp_keepalive,
     get_kube_client,
 )
-from tests.test_utils.config import conf_vars
+
+from dev.tests_common.test_utils.config import conf_vars
 
 
 class TestClient:
diff --git a/tests/providers/cncf/kubernetes/test_kubernetes_helper_functions.py b/providers/tests/cncf/kubernetes/test_kubernetes_helper_functions.py
similarity index 100%
rename from tests/providers/cncf/kubernetes/test_kubernetes_helper_functions.py
rename to providers/tests/cncf/kubernetes/test_kubernetes_helper_functions.py
diff --git a/tests/providers/cncf/kubernetes/test_pod_generator.py b/providers/tests/cncf/kubernetes/test_pod_generator.py
similarity index 100%
rename from tests/providers/cncf/kubernetes/test_pod_generator.py
rename to providers/tests/cncf/kubernetes/test_pod_generator.py
diff --git a/tests/providers/cncf/kubernetes/test_template_rendering.py b/providers/tests/cncf/kubernetes/test_template_rendering.py
similarity index 96%
rename from tests/providers/cncf/kubernetes/test_template_rendering.py
rename to providers/tests/cncf/kubernetes/test_template_rendering.py
index 4c087d6040e6..180b7a1e2d2e 100644
--- a/tests/providers/cncf/kubernetes/test_template_rendering.py
+++ b/providers/tests/cncf/kubernetes/test_template_rendering.py
@@ -22,16 +22,18 @@
 import pytest
 from sqlalchemy.orm import make_transient
 
-from airflow.configuration import TEST_DAGS_FOLDER
 from airflow.models.renderedtifields import RenderedTaskInstanceFields, RenderedTaskInstanceFields as RTIF
 from airflow.providers.cncf.kubernetes.template_rendering import get_rendered_k8s_spec, render_k8s_pod_yaml
+from airflow.utils import timezone
 from airflow.utils.session import create_session
 from airflow.version import version
-from tests.models import DEFAULT_DATE
-from tests.test_utils.compat import BashOperator
+
+from dev.tests_common.test_utils.compat import BashOperator
 
 pytestmark = [pytest.mark.db_test, pytest.mark.skip_if_database_isolation_mode]
 
+DEFAULT_DATE = timezone.datetime(2021, 9, 9)
+
 
 @mock.patch.dict(os.environ, {"AIRFLOW_IS_K8S_EXECUTOR_POD": "True"})
 @mock.patch("airflow.settings.pod_mutation_hook")
@@ -126,7 +128,7 @@ def test_get_k8s_pod_yaml(render_k8s_pod_yaml, redact, dag_maker, session):
     with dag_maker("test_get_k8s_pod_yaml") as dag:
         task = BashOperator(task_id="test", bash_command="echo hi")
         dr = dag_maker.create_dagrun()
-        dag.fileloc = TEST_DAGS_FOLDER + "/test_get_k8s_pod_yaml.py"
+        dag.fileloc = "/test_get_k8s_pod_yaml.py"
 
     ti = dr.task_instances[0]
     ti.task = task
diff --git a/tests/providers/apache/kafka/hooks/__init__.py b/providers/tests/cncf/kubernetes/triggers/__init__.py
similarity index 100%
rename from tests/providers/apache/kafka/hooks/__init__.py
rename to providers/tests/cncf/kubernetes/triggers/__init__.py
diff --git a/tests/providers/cncf/kubernetes/triggers/test_job.py b/providers/tests/cncf/kubernetes/triggers/test_job.py
similarity index 100%
rename from tests/providers/cncf/kubernetes/triggers/test_job.py
rename to providers/tests/cncf/kubernetes/triggers/test_job.py
diff --git a/tests/providers/cncf/kubernetes/triggers/test_pod.py b/providers/tests/cncf/kubernetes/triggers/test_pod.py
similarity index 100%
rename from tests/providers/cncf/kubernetes/triggers/test_pod.py
rename to providers/tests/cncf/kubernetes/triggers/test_pod.py
diff --git a/tests/providers/apache/kafka/operators/__init__.py b/providers/tests/cncf/kubernetes/utils/__init__.py
similarity index 100%
rename from tests/providers/apache/kafka/operators/__init__.py
rename to providers/tests/cncf/kubernetes/utils/__init__.py
diff --git a/tests/providers/cncf/kubernetes/utils/test_k8s_resource_iterator.py b/providers/tests/cncf/kubernetes/utils/test_k8s_resource_iterator.py
similarity index 100%
rename from tests/providers/cncf/kubernetes/utils/test_k8s_resource_iterator.py
rename to providers/tests/cncf/kubernetes/utils/test_k8s_resource_iterator.py
diff --git a/tests/providers/cncf/kubernetes/utils/test_pod_manager.py b/providers/tests/cncf/kubernetes/utils/test_pod_manager.py
similarity index 99%
rename from tests/providers/cncf/kubernetes/utils/test_pod_manager.py
rename to providers/tests/cncf/kubernetes/utils/test_pod_manager.py
index 73dac5255d62..b577ea969ea3 100644
--- a/tests/providers/cncf/kubernetes/utils/test_pod_manager.py
+++ b/providers/tests/cncf/kubernetes/utils/test_pod_manager.py
@@ -40,7 +40,8 @@
     container_is_terminated,
 )
 from airflow.utils.timezone import utc
-from tests.providers.cncf.kubernetes.test_callbacks import MockKubernetesPodOperatorCallback, MockWrapper
+
+from providers.tests.cncf.kubernetes.test_callbacks import MockKubernetesPodOperatorCallback, MockWrapper
 
 if TYPE_CHECKING:
     from pendulum import DateTime
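[Editor's note] The test_template_rendering.py hunk above stops importing DEFAULT_DATE from tests.models and pins its own value instead. airflow.utils.timezone.datetime returns a timezone-aware (UTC) datetime, so the pinned constant keeps the aware-datetime behavior the old import provided:

    from airflow.utils import timezone

    DEFAULT_DATE = timezone.datetime(2021, 9, 9)
    assert DEFAULT_DATE.tzinfo is not None  # aware, unlike bare datetime.datetime(2021, 9, 9)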
pendulum import DateTime diff --git a/tests/providers/apache/kafka/sensors/__init__.py b/providers/tests/cohere/__init__.py similarity index 100% rename from tests/providers/apache/kafka/sensors/__init__.py rename to providers/tests/cohere/__init__.py diff --git a/tests/providers/apache/kafka/triggers/__init__.py b/providers/tests/cohere/hooks/__init__.py similarity index 100% rename from tests/providers/apache/kafka/triggers/__init__.py rename to providers/tests/cohere/hooks/__init__.py diff --git a/tests/providers/cohere/hooks/test_cohere.py b/providers/tests/cohere/hooks/test_cohere.py similarity index 84% rename from tests/providers/cohere/hooks/test_cohere.py rename to providers/tests/cohere/hooks/test_cohere.py index 8f566ec0c640..28aef3ebaf79 100644 --- a/tests/providers/cohere/hooks/test_cohere.py +++ b/providers/tests/cohere/hooks/test_cohere.py @@ -34,11 +34,14 @@ def test__get_api_key(self): api_url = "http://some_host.com" timeout = 150 max_retries = 5 - with patch.object( - CohereHook, - "get_connection", - return_value=Connection(conn_type="cohere", password=api_key, host=api_url), - ), patch("cohere.Client") as client: + with ( + patch.object( + CohereHook, + "get_connection", + return_value=Connection(conn_type="cohere", password=api_key, host=api_url), + ), + patch("cohere.Client") as client, + ): hook = CohereHook(timeout=timeout, max_retries=max_retries) _ = hook.get_conn client.assert_called_once_with( diff --git a/tests/providers/apache/livy/__init__.py b/providers/tests/cohere/operators/__init__.py similarity index 100% rename from tests/providers/apache/livy/__init__.py rename to providers/tests/cohere/operators/__init__.py diff --git a/tests/providers/cohere/operators/test_embedding.py b/providers/tests/cohere/operators/test_embedding.py similarity index 100% rename from tests/providers/cohere/operators/test_embedding.py rename to providers/tests/cohere/operators/test_embedding.py diff --git a/tests/providers/apache/livy/hooks/__init__.py b/providers/tests/common/__init__.py similarity index 100% rename from tests/providers/apache/livy/hooks/__init__.py rename to providers/tests/common/__init__.py diff --git a/tests/providers/apache/livy/operators/__init__.py b/providers/tests/common/compat/__init__.py similarity index 100% rename from tests/providers/apache/livy/operators/__init__.py rename to providers/tests/common/compat/__init__.py diff --git a/tests/providers/apache/livy/sensors/__init__.py b/providers/tests/common/compat/lineage/__init__.py similarity index 100% rename from tests/providers/apache/livy/sensors/__init__.py rename to providers/tests/common/compat/lineage/__init__.py diff --git a/tests/providers/common/compat/lineage/test_hook.py b/providers/tests/common/compat/lineage/test_hook.py similarity index 100% rename from tests/providers/common/compat/lineage/test_hook.py rename to providers/tests/common/compat/lineage/test_hook.py diff --git a/tests/providers/apache/livy/triggers/__init__.py b/providers/tests/common/compat/openlineage/__init__.py similarity index 100% rename from tests/providers/apache/livy/triggers/__init__.py rename to providers/tests/common/compat/openlineage/__init__.py diff --git a/tests/providers/common/compat/openlineage/test_facet.py b/providers/tests/common/compat/openlineage/test_facet.py similarity index 100% rename from tests/providers/common/compat/openlineage/test_facet.py rename to providers/tests/common/compat/openlineage/test_facet.py
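The test_cohere.py hunk above, like the databricks, dbt, and docker hunks later in this patch, rewrites comma-chained context managers into the parenthesized multi-manager form that newer Python (officially 3.10+) supports. A minimal, self-contained sketch of the same pattern; the patch targets here are illustrative, not Airflow's:

    # Parenthesized context managers: each manager gets its own line and a
    # trailing comma, with no backslash continuations or dangling "patch(".
    from unittest.mock import patch


    def test_grouped_context_managers():
        with (
            patch("os.getcwd", return_value="/tmp") as mock_cwd,
            patch("os.cpu_count", return_value=1) as mock_cpus,
        ):
            assert mock_cwd() == "/tmp"
            assert mock_cpus() == 1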
diff --git a/tests/providers/apache/pig/operators/__init__.py b/providers/tests/common/compat/openlineage/utils/__init__.py similarity index 100% rename from tests/providers/apache/pig/operators/__init__.py rename to providers/tests/common/compat/openlineage/utils/__init__.py diff --git a/tests/providers/common/compat/openlineage/utils/test_utils.py b/providers/tests/common/compat/openlineage/utils/test_utils.py similarity index 100% rename from tests/providers/common/compat/openlineage/utils/test_utils.py rename to providers/tests/common/compat/openlineage/utils/test_utils.py diff --git a/tests/providers/apache/spark/decorators/__init__.py b/providers/tests/common/compat/security/__init__.py similarity index 100% rename from tests/providers/apache/spark/decorators/__init__.py rename to providers/tests/common/compat/security/__init__.py diff --git a/tests/providers/common/compat/security/test_permissions.py b/providers/tests/common/compat/security/test_permissions.py similarity index 100% rename from tests/providers/common/compat/security/test_permissions.py rename to providers/tests/common/compat/security/test_permissions.py diff --git a/tests/providers/apprise/__init__.py b/providers/tests/common/io/__init__.py similarity index 100% rename from tests/providers/apprise/__init__.py rename to providers/tests/common/io/__init__.py diff --git a/tests/providers/arangodb/__init__.py b/providers/tests/common/io/assets/__init__.py similarity index 100% rename from tests/providers/arangodb/__init__.py rename to providers/tests/common/io/assets/__init__.py diff --git a/tests/providers/common/io/assets/test_file.py b/providers/tests/common/io/assets/test_file.py similarity index 100% rename from tests/providers/common/io/assets/test_file.py rename to providers/tests/common/io/assets/test_file.py diff --git a/tests/providers/arangodb/hooks/__init__.py b/providers/tests/common/io/operators/__init__.py similarity index 100% rename from tests/providers/arangodb/hooks/__init__.py rename to providers/tests/common/io/operators/__init__.py diff --git a/tests/providers/common/io/operators/test_file_transfer.py b/providers/tests/common/io/operators/test_file_transfer.py similarity index 97% rename from tests/providers/common/io/operators/test_file_transfer.py rename to providers/tests/common/io/operators/test_file_transfer.py index 698c33582b82..55a196fa8918 100644 --- a/tests/providers/common/io/operators/test_file_transfer.py +++ b/providers/tests/common/io/operators/test_file_transfer.py @@ -20,7 +20,8 @@ from unittest import mock from airflow.providers.common.compat.openlineage.facet import Dataset -from tests.test_utils.compat import ignore_provider_compatibility_error + +from dev.tests_common.test_utils.compat import ignore_provider_compatibility_error with ignore_provider_compatibility_error("2.8.0", __file__): from airflow.providers.common.io.operators.file_transfer import FileTransferOperator diff --git a/tests/providers/arangodb/operators/__init__.py b/providers/tests/common/io/xcom/__init__.py similarity index 100% rename from tests/providers/arangodb/operators/__init__.py rename to providers/tests/common/io/xcom/__init__.py diff --git a/tests/providers/common/io/xcom/test_backend.py b/providers/tests/common/io/xcom/test_backend.py similarity index 96% rename from tests/providers/common/io/xcom/test_backend.py rename to providers/tests/common/io/xcom/test_backend.py index b0516ad487d4..ae45a2f863b7 100644 --- a/tests/providers/common/io/xcom/test_backend.py +++ b/providers/tests/common/io/xcom/test_backend.py @@ -19,8 +19,8 @@ import pytest -from
tests.test_utils.compat import AIRFLOW_V_2_9_PLUS, ignore_provider_compatibility_error -from tests.www.test_utils import is_db_isolation_mode +from dev.tests_common.test_utils.compat import AIRFLOW_V_2_9_PLUS, ignore_provider_compatibility_error +from dev.tests_common.test_utils.db import is_db_isolation_mode pytestmark = [ pytest.mark.db_test, @@ -37,8 +37,9 @@ from airflow.utils import timezone from airflow.utils.xcom import XCOM_RETURN_KEY -from tests.test_utils import db -from tests.test_utils.config import conf_vars + +from dev.tests_common.test_utils import db +from dev.tests_common.test_utils.config import conf_vars @pytest.fixture(autouse=True) diff --git a/tests/providers/arangodb/sensors/__init__.py b/providers/tests/common/sql/__init__.py similarity index 100% rename from tests/providers/arangodb/sensors/__init__.py rename to providers/tests/common/sql/__init__.py diff --git a/tests/providers/atlassian/__init__.py b/providers/tests/common/sql/hooks/__init__.py similarity index 100% rename from tests/providers/atlassian/__init__.py rename to providers/tests/common/sql/hooks/__init__.py diff --git a/tests/providers/common/sql/hooks/test_dbapi.py b/providers/tests/common/sql/hooks/test_dbapi.py similarity index 99% rename from tests/providers/common/sql/hooks/test_dbapi.py rename to providers/tests/common/sql/hooks/test_dbapi.py index 6a744e9718da..5017a5f347c8 100644 --- a/tests/providers/common/sql/hooks/test_dbapi.py +++ b/providers/tests/common/sql/hooks/test_dbapi.py @@ -29,7 +29,8 @@ from airflow.hooks.base import BaseHook from airflow.models import Connection from airflow.providers.common.sql.hooks.sql import DbApiHook, fetch_all_handler, fetch_one_handler -from tests.test_utils.compat import AIRFLOW_V_2_8_PLUS + +from dev.tests_common.test_utils.compat import AIRFLOW_V_2_8_PLUS pytestmark = [ pytest.mark.skipif(not AIRFLOW_V_2_8_PLUS, reason="Tests for Airflow 2.8.0+ only"), diff --git a/tests/providers/common/sql/hooks/test_sql.py b/providers/tests/common/sql/hooks/test_sql.py similarity index 98% rename from tests/providers/common/sql/hooks/test_sql.py rename to providers/tests/common/sql/hooks/test_sql.py index 9bc246192b77..87673f6de62e 100644 --- a/tests/providers/common/sql/hooks/test_sql.py +++ b/providers/tests/common/sql/hooks/test_sql.py @@ -30,8 +30,9 @@ from airflow.models import Connection from airflow.providers.common.sql.hooks.sql import DbApiHook, fetch_all_handler from airflow.utils.session import provide_session -from tests.providers.common.sql.test_utils import mock_hook -from tests.test_utils.compat import AIRFLOW_V_2_8_PLUS + +from dev.tests_common.test_utils.compat import AIRFLOW_V_2_8_PLUS +from providers.tests.common.sql.test_utils import mock_hook pytestmark = [ pytest.mark.skipif(not AIRFLOW_V_2_8_PLUS, reason="Tests for Airflow 2.8.0+ only"), diff --git a/tests/providers/common/sql/hooks/test_sqlparse.py b/providers/tests/common/sql/hooks/test_sqlparse.py similarity index 96% rename from tests/providers/common/sql/hooks/test_sqlparse.py rename to providers/tests/common/sql/hooks/test_sqlparse.py index 1ce7cb5d2183..b3622bd8eff2 100644 --- a/tests/providers/common/sql/hooks/test_sqlparse.py +++ b/providers/tests/common/sql/hooks/test_sqlparse.py @@ -19,7 +19,8 @@ import pytest from airflow.providers.common.sql.hooks.sql import DbApiHook -from tests.test_utils.compat import AIRFLOW_V_2_8_PLUS + +from dev.tests_common.test_utils.compat import AIRFLOW_V_2_8_PLUS pytestmark = [ pytest.mark.skipif(not AIRFLOW_V_2_8_PLUS, reason="Tests for Airflow 2.8.0+ 
only"), diff --git a/tests/providers/atlassian/jira/__init__.py b/providers/tests/common/sql/operators/__init__.py similarity index 100% rename from tests/providers/atlassian/jira/__init__.py rename to providers/tests/common/sql/operators/__init__.py diff --git a/tests/providers/common/sql/operators/test_sql.py b/providers/tests/common/sql/operators/test_sql.py similarity index 99% rename from tests/providers/common/sql/operators/test_sql.py rename to providers/tests/common/sql/operators/test_sql.py index 5144b05b07e2..c2e4bc6f2e86 100644 --- a/tests/providers/common/sql/operators/test_sql.py +++ b/providers/tests/common/sql/operators/test_sql.py @@ -43,7 +43,8 @@ from airflow.utils import timezone from airflow.utils.session import create_session from airflow.utils.state import State -from tests.test_utils.compat import AIRFLOW_V_2_8_PLUS, AIRFLOW_V_3_0_PLUS + +from dev.tests_common.test_utils.compat import AIRFLOW_V_2_8_PLUS, AIRFLOW_V_3_0_PLUS if AIRFLOW_V_3_0_PLUS: from airflow.utils.types import DagRunTriggeredByType diff --git a/tests/providers/common/sql/operators/test_sql_execute.py b/providers/tests/common/sql/operators/test_sql_execute.py similarity index 99% rename from tests/providers/common/sql/operators/test_sql_execute.py rename to providers/tests/common/sql/operators/test_sql_execute.py index 1527f3190d5b..bb42cbaba202 100644 --- a/tests/providers/common/sql/operators/test_sql_execute.py +++ b/providers/tests/common/sql/operators/test_sql_execute.py @@ -33,7 +33,8 @@ from airflow.providers.common.sql.hooks.sql import DbApiHook, fetch_all_handler from airflow.providers.common.sql.operators.sql import SQLExecuteQueryOperator from airflow.providers.openlineage.extractors.base import OperatorLineage -from tests.test_utils.compat import AIRFLOW_V_2_8_PLUS + +from dev.tests_common.test_utils.compat import AIRFLOW_V_2_8_PLUS pytestmark = [ pytest.mark.skipif(not AIRFLOW_V_2_8_PLUS, reason="Tests for Airflow 2.8.0+ only"), diff --git a/tests/providers/atlassian/jira/hooks/__init__.py b/providers/tests/common/sql/sensors/__init__.py similarity index 100% rename from tests/providers/atlassian/jira/hooks/__init__.py rename to providers/tests/common/sql/sensors/__init__.py diff --git a/tests/providers/common/sql/sensors/test_sql.py b/providers/tests/common/sql/sensors/test_sql.py similarity index 99% rename from tests/providers/common/sql/sensors/test_sql.py rename to providers/tests/common/sql/sensors/test_sql.py index f4437a265a08..33e8fe639973 100644 --- a/tests/providers/common/sql/sensors/test_sql.py +++ b/providers/tests/common/sql/sensors/test_sql.py @@ -26,7 +26,8 @@ from airflow.providers.common.sql.hooks.sql import DbApiHook from airflow.providers.common.sql.sensors.sql import SqlSensor from airflow.utils.timezone import datetime -from tests.test_utils.compat import AIRFLOW_V_2_9_PLUS + +from dev.tests_common.test_utils.compat import AIRFLOW_V_2_9_PLUS pytestmark = [ pytest.mark.skipif(not AIRFLOW_V_2_9_PLUS, reason="Tests for Airflow 2.8.0+ only"), diff --git a/tests/providers/common/sql/test_utils.py b/providers/tests/common/sql/test_utils.py similarity index 96% rename from tests/providers/common/sql/test_utils.py rename to providers/tests/common/sql/test_utils.py index 7c76f3a7fa50..19b7bcc33941 100644 --- a/tests/providers/common/sql/test_utils.py +++ b/providers/tests/common/sql/test_utils.py @@ -23,7 +23,8 @@ import pytest from airflow.models import Connection -from tests.test_utils.compat import AIRFLOW_V_2_8_PLUS + +from dev.tests_common.test_utils.compat import 
AIRFLOW_V_2_8_PLUS pytestmark = [ pytest.mark.skipif(not AIRFLOW_V_2_8_PLUS, reason="Tests for Airflow 2.8.0+ only"), diff --git a/tests/providers/conftest.py b/providers/tests/conftest.py similarity index 81% rename from tests/providers/conftest.py rename to providers/tests/conftest.py index 7dd0079ae6c7..6ca7990e6054 100644 --- a/tests/providers/conftest.py +++ b/providers/tests/conftest.py @@ -17,11 +17,20 @@ from __future__ import annotations +import pathlib from unittest import mock import pytest -from airflow.models import Connection +pytest_plugins = "dev.tests_common.pytest_plugin" + + +@pytest.hookimpl(tryfirst=True) +def pytest_configure(config: pytest.Config) -> None: + dep_path = [pathlib.Path(__file__).parent.joinpath("deprecations_ignore.yml")] + config.inicfg["airflow_deprecations_ignore"] = ( + config.inicfg.get("airflow_deprecations_ignore", []) + dep_path # type: ignore[assignment,operator] + ) @pytest.fixture @@ -34,6 +43,8 @@ def hook_conn(request): If param is exception then add side effect. Otherwise, it raises an error """ + from airflow.models import Connection + try: conn = request.param except AttributeError: diff --git a/tests/providers/cncf/kubernetes/__init__.py b/providers/tests/databricks/__init__.py similarity index 100% rename from tests/providers/cncf/kubernetes/__init__.py rename to providers/tests/databricks/__init__.py diff --git a/tests/providers/cncf/kubernetes/models/__init__.py b/providers/tests/databricks/hooks/__init__.py similarity index 100% rename from tests/providers/cncf/kubernetes/models/__init__.py rename to providers/tests/databricks/hooks/__init__.py diff --git a/tests/providers/databricks/hooks/test_databricks.py b/providers/tests/databricks/hooks/test_databricks.py similarity index 100% rename from tests/providers/databricks/hooks/test_databricks.py rename to providers/tests/databricks/hooks/test_databricks.py diff --git a/tests/providers/databricks/hooks/test_databricks_base.py b/providers/tests/databricks/hooks/test_databricks_base.py similarity index 100% rename from tests/providers/databricks/hooks/test_databricks_base.py rename to providers/tests/databricks/hooks/test_databricks_base.py diff --git a/tests/providers/databricks/hooks/test_databricks_sql.py b/providers/tests/databricks/hooks/test_databricks_sql.py similarity index 98% rename from tests/providers/databricks/hooks/test_databricks_sql.py rename to providers/tests/databricks/hooks/test_databricks_sql.py index a1b318ad00dc..fc1582db5d90 100644 --- a/tests/providers/databricks/hooks/test_databricks_sql.py +++ b/providers/tests/databricks/hooks/test_databricks_sql.py @@ -243,9 +243,10 @@ def test_query( hook_descriptions, hook_results, ): - with patch( - "airflow.providers.databricks.hooks.databricks_sql.DatabricksSqlHook.get_conn" - ) as mock_conn, patch("airflow.providers.databricks.hooks.databricks_base.requests") as mock_requests: + with ( + patch("airflow.providers.databricks.hooks.databricks_sql.DatabricksSqlHook.get_conn") as mock_conn, + patch("airflow.providers.databricks.hooks.databricks_base.requests") as mock_requests, + ): mock_requests.codes.ok = 200 mock_requests.get.return_value.json.return_value = { "endpoints": [ diff --git a/tests/providers/databricks/__init__.py b/providers/tests/databricks/operators/__init__.py similarity index 100% rename from tests/providers/databricks/__init__.py rename to providers/tests/databricks/operators/__init__.py
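The conftest.py hunk above makes two moves: the test tree now loads shared fixtures through the pytest_plugins variable, and a tryfirst pytest_configure hook appends the sibling deprecations_ignore.yml to an ini-style option before other plugins read it. A hedged sketch of the same hook pattern follows; the module and option names are placeholders, not Airflow's real ones:

    # Sketch of the conftest.py pattern above. "my_pkg.pytest_plugin" and
    # "my_deprecations_ignore" are assumed placeholder names.
    from __future__ import annotations

    import pathlib

    import pytest

    pytest_plugins = "my_pkg.pytest_plugin"  # loaded as if declared in an ini file


    @pytest.hookimpl(tryfirst=True)
    def pytest_configure(config: pytest.Config) -> None:
        # Extend the option in place so plugins configured later in the
        # session also see this directory's ignore file.
        extra = [pathlib.Path(__file__).parent / "deprecations_ignore.yml"]
        config.inicfg["my_deprecations_ignore"] = (
            config.inicfg.get("my_deprecations_ignore", []) + extra
        )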
diff --git a/tests/providers/databricks/operators/test_databricks.py b/providers/tests/databricks/operators/test_databricks.py similarity index 100% rename from tests/providers/databricks/operators/test_databricks.py rename to providers/tests/databricks/operators/test_databricks.py diff --git a/tests/providers/databricks/operators/test_databricks_copy.py b/providers/tests/databricks/operators/test_databricks_copy.py similarity index 100% rename from tests/providers/databricks/operators/test_databricks_copy.py rename to providers/tests/databricks/operators/test_databricks_copy.py diff --git a/tests/providers/databricks/operators/test_databricks_repos.py b/providers/tests/databricks/operators/test_databricks_repos.py similarity index 100% rename from tests/providers/databricks/operators/test_databricks_repos.py rename to providers/tests/databricks/operators/test_databricks_repos.py diff --git a/tests/providers/databricks/operators/test_databricks_sql.py b/providers/tests/databricks/operators/test_databricks_sql.py similarity index 100% rename from tests/providers/databricks/operators/test_databricks_sql.py rename to providers/tests/databricks/operators/test_databricks_sql.py diff --git a/tests/providers/databricks/operators/test_databricks_workflow.py b/providers/tests/databricks/operators/test_databricks_workflow.py similarity index 100% rename from tests/providers/databricks/operators/test_databricks_workflow.py rename to providers/tests/databricks/operators/test_databricks_workflow.py diff --git a/tests/providers/atlassian/jira/notifications/__init__.py b/providers/tests/databricks/plugins/__init__.py similarity index 100% rename from tests/providers/atlassian/jira/notifications/__init__.py rename to providers/tests/databricks/plugins/__init__.py diff --git a/tests/providers/databricks/plugins/test_databricks_workflow.py b/providers/tests/databricks/plugins/test_databricks_workflow.py similarity index 97% rename from tests/providers/databricks/plugins/test_databricks_workflow.py rename to providers/tests/databricks/plugins/test_databricks_workflow.py index c140ac4450b8..695466c62d26 100644 --- a/tests/providers/databricks/plugins/test_databricks_workflow.py +++ b/providers/tests/databricks/plugins/test_databricks_workflow.py @@ -41,6 +41,8 @@ ) from airflow.www.app import create_app +from dev.tests_common import RUNNING_TESTS_AGAINST_AIRFLOW_PACKAGES + DAG_ID = "test_dag" TASK_ID = "test_task" RUN_ID = "test_run_1" @@ -190,6 +192,9 @@ def test_workflow_job_run_link(app): assert "https://mockhost/#job/1/run/1" in result +@pytest.mark.skipif( + RUNNING_TESTS_AGAINST_AIRFLOW_PACKAGES, reason="Web plugin test doesn't work when not against sources" +) @pytest.mark.db_test def test_workflow_job_repair_single_failed_link(app): with app.app_context(): diff --git a/tests/providers/atlassian/jira/operators/__init__.py b/providers/tests/databricks/sensors/__init__.py similarity index 100% rename from tests/providers/atlassian/jira/operators/__init__.py rename to providers/tests/databricks/sensors/__init__.py diff --git a/tests/providers/databricks/sensors/test_databricks_partition.py b/providers/tests/databricks/sensors/test_databricks_partition.py similarity index 100% rename from tests/providers/databricks/sensors/test_databricks_partition.py rename to providers/tests/databricks/sensors/test_databricks_partition.py diff --git a/tests/providers/databricks/sensors/test_databricks_sql.py b/providers/tests/databricks/sensors/test_databricks_sql.py similarity index 100% rename from tests/providers/databricks/sensors/test_databricks_sql.py rename to
providers/tests/databricks/sensors/test_databricks_sql.py diff --git a/tests/providers/databricks/hooks/__init__.py b/providers/tests/databricks/triggers/__init__.py similarity index 100% rename from tests/providers/databricks/hooks/__init__.py rename to providers/tests/databricks/triggers/__init__.py diff --git a/tests/providers/databricks/triggers/test_databricks.py b/providers/tests/databricks/triggers/test_databricks.py similarity index 100% rename from tests/providers/databricks/triggers/test_databricks.py rename to providers/tests/databricks/triggers/test_databricks.py diff --git a/tests/providers/atlassian/jira/sensors/__init__.py b/providers/tests/databricks/utils/__init__.py similarity index 100% rename from tests/providers/atlassian/jira/sensors/__init__.py rename to providers/tests/databricks/utils/__init__.py diff --git a/tests/providers/databricks/utils/test_databricks.py b/providers/tests/databricks/utils/test_databricks.py similarity index 100% rename from tests/providers/databricks/utils/test_databricks.py rename to providers/tests/databricks/utils/test_databricks.py diff --git a/tests/providers/databricks/operators/__init__.py b/providers/tests/datadog/__init__.py similarity index 100% rename from tests/providers/databricks/operators/__init__.py rename to providers/tests/datadog/__init__.py diff --git a/tests/providers/databricks/triggers/__init__.py b/providers/tests/datadog/hooks/__init__.py similarity index 100% rename from tests/providers/databricks/triggers/__init__.py rename to providers/tests/datadog/hooks/__init__.py diff --git a/tests/providers/datadog/hooks/test_datadog.py b/providers/tests/datadog/hooks/test_datadog.py similarity index 100% rename from tests/providers/datadog/hooks/test_datadog.py rename to providers/tests/datadog/hooks/test_datadog.py diff --git a/tests/providers/datadog/__init__.py b/providers/tests/datadog/sensors/__init__.py similarity index 100% rename from tests/providers/datadog/__init__.py rename to providers/tests/datadog/sensors/__init__.py diff --git a/tests/providers/datadog/sensors/test_datadog.py b/providers/tests/datadog/sensors/test_datadog.py similarity index 100% rename from tests/providers/datadog/sensors/test_datadog.py rename to providers/tests/datadog/sensors/test_datadog.py diff --git a/tests/providers/celery/cli/__init__.py b/providers/tests/dbt/__init__.py similarity index 100% rename from tests/providers/celery/cli/__init__.py rename to providers/tests/dbt/__init__.py diff --git a/tests/providers/celery/log_handlers/__init__.py b/providers/tests/dbt/cloud/__init__.py similarity index 100% rename from tests/providers/celery/log_handlers/__init__.py rename to providers/tests/dbt/cloud/__init__.py diff --git a/tests/providers/cncf/__init__.py b/providers/tests/dbt/cloud/hooks/__init__.py similarity index 100% rename from tests/providers/cncf/__init__.py rename to providers/tests/dbt/cloud/hooks/__init__.py diff --git a/tests/providers/dbt/cloud/hooks/test_dbt.py b/providers/tests/dbt/cloud/hooks/test_dbt.py similarity index 99% rename from tests/providers/dbt/cloud/hooks/test_dbt.py rename to providers/tests/dbt/cloud/hooks/test_dbt.py index 0d84189bc804..590f1b677f10 100644 --- a/tests/providers/dbt/cloud/hooks/test_dbt.py +++ b/providers/tests/dbt/cloud/hooks/test_dbt.py @@ -676,8 +676,9 @@ def fake_sleep(seconds): # Shift frozen time every time we call a ``time.sleep`` during this test case. 
time_machine.shift(timedelta(seconds=seconds)) - with patch.object(DbtCloudHook, "get_job_run_status") as mock_job_run_status, patch( - "airflow.providers.dbt.cloud.hooks.dbt.time.sleep", side_effect=fake_sleep + with ( + patch.object(DbtCloudHook, "get_job_run_status") as mock_job_run_status, + patch("airflow.providers.dbt.cloud.hooks.dbt.time.sleep", side_effect=fake_sleep), ): mock_job_run_status.return_value = job_run_status diff --git a/tests/providers/cncf/kubernetes/cli/__init__.py b/providers/tests/dbt/cloud/operators/__init__.py similarity index 100% rename from tests/providers/cncf/kubernetes/cli/__init__.py rename to providers/tests/dbt/cloud/operators/__init__.py diff --git a/tests/providers/dbt/cloud/operators/test_dbt.py b/providers/tests/dbt/cloud/operators/test_dbt.py similarity index 99% rename from tests/providers/dbt/cloud/operators/test_dbt.py rename to providers/tests/dbt/cloud/operators/test_dbt.py index 658fe84a49d6..eb50bd5a22a2 100644 --- a/tests/providers/dbt/cloud/operators/test_dbt.py +++ b/providers/tests/dbt/cloud/operators/test_dbt.py @@ -235,8 +235,9 @@ def fake_sleep(seconds): overall_delta = timedelta(seconds=seconds) + timedelta(microseconds=42) time_machine.shift(overall_delta) - with patch.object(DbtCloudHook, "get_job_run") as mock_get_job_run, patch( - "airflow.providers.dbt.cloud.hooks.dbt.time.sleep", side_effect=fake_sleep + with ( + patch.object(DbtCloudHook, "get_job_run") as mock_get_job_run, + patch("airflow.providers.dbt.cloud.hooks.dbt.time.sleep", side_effect=fake_sleep), + ): mock_get_job_run.return_value.json.return_value = { "data": {"status": job_run_status, "id": RUN_ID} diff --git a/tests/providers/cncf/kubernetes/data_files/__init__.py b/providers/tests/dbt/cloud/sensors/__init__.py similarity index 100% rename from tests/providers/cncf/kubernetes/data_files/__init__.py rename to providers/tests/dbt/cloud/sensors/__init__.py diff --git a/tests/providers/dbt/cloud/sensors/test_dbt.py b/providers/tests/dbt/cloud/sensors/test_dbt.py similarity index 100% rename from tests/providers/dbt/cloud/sensors/test_dbt.py rename to providers/tests/dbt/cloud/sensors/test_dbt.py diff --git a/tests/providers/cncf/kubernetes/data_files/executor/__init__.py b/providers/tests/dbt/cloud/test_data/__init__.py similarity index 100% rename from tests/providers/cncf/kubernetes/data_files/executor/__init__.py rename to providers/tests/dbt/cloud/test_data/__init__.py diff --git a/tests/providers/dbt/cloud/test_data/catalog.json b/providers/tests/dbt/cloud/test_data/catalog.json similarity index 100% rename from tests/providers/dbt/cloud/test_data/catalog.json rename to providers/tests/dbt/cloud/test_data/catalog.json diff --git a/tests/providers/dbt/cloud/test_data/job_run.json b/providers/tests/dbt/cloud/test_data/job_run.json similarity index 100% rename from tests/providers/dbt/cloud/test_data/job_run.json rename to providers/tests/dbt/cloud/test_data/job_run.json diff --git a/tests/providers/dbt/cloud/test_data/manifest.json b/providers/tests/dbt/cloud/test_data/manifest.json similarity index 100% rename from tests/providers/dbt/cloud/test_data/manifest.json rename to providers/tests/dbt/cloud/test_data/manifest.json diff --git a/tests/providers/dbt/cloud/test_data/run_results.json b/providers/tests/dbt/cloud/test_data/run_results.json similarity index 100% rename from tests/providers/dbt/cloud/test_data/run_results.json rename to providers/tests/dbt/cloud/test_data/run_results.json
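Both dbt hunks above lean on the same clock trick: the hook's time.sleep is patched with a fake_sleep whose side effect advances a time_machine frozen clock, so polling loops finish instantly while timeout arithmetic still sees time passing. A self-contained sketch of that pattern, assuming the time-machine package is installed (the loop under test is invented for illustration):

    import time
    from datetime import timedelta
    from unittest.mock import patch

    import time_machine


    def test_polling_loop_without_real_sleeping():
        with time_machine.travel("2024-01-01 00:00:00 +0000", tick=False) as frozen:

            def fake_sleep(seconds):
                # Shift frozen time instead of blocking the test process.
                frozen.shift(timedelta(seconds=seconds))

            with patch("time.sleep", side_effect=fake_sleep):
                start = time.time()
                for _ in range(3):
                    time.sleep(10)  # returns immediately; the clock jumps 10s
                assert time.time() - start == 30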
diff --git a/tests/providers/cncf/kubernetes/data_files/pods/__init__.py b/providers/tests/dbt/cloud/triggers/__init__.py similarity index 100% rename from tests/providers/cncf/kubernetes/data_files/pods/__init__.py rename to providers/tests/dbt/cloud/triggers/__init__.py diff --git a/tests/providers/dbt/cloud/triggers/test_dbt.py b/providers/tests/dbt/cloud/triggers/test_dbt.py similarity index 100% rename from tests/providers/dbt/cloud/triggers/test_dbt.py rename to providers/tests/dbt/cloud/triggers/test_dbt.py diff --git a/tests/providers/cncf/kubernetes/data_files/spark/__init__.py b/providers/tests/dbt/cloud/utils/__init__.py similarity index 100% rename from tests/providers/cncf/kubernetes/data_files/spark/__init__.py rename to providers/tests/dbt/cloud/utils/__init__.py diff --git a/tests/providers/dbt/cloud/utils/test_openlineage.py b/providers/tests/dbt/cloud/utils/test_openlineage.py similarity index 100% rename from tests/providers/dbt/cloud/utils/test_openlineage.py rename to providers/tests/dbt/cloud/utils/test_openlineage.py diff --git a/providers/tests/deprecations_ignore.yml b/providers/tests/deprecations_ignore.yml new file mode 100644 index 000000000000..b5aff3c84ea5 --- /dev/null +++ b/providers/tests/deprecations_ignore.yml @@ -0,0 +1,128 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License.
+--- + +- providers/tests/amazon/aws/deferrable/hooks/test_base_aws.py::TestAwsBaseAsyncHook::test_get_client_async +- providers/tests/amazon/aws/deferrable/hooks/test_redshift_cluster.py::TestRedshiftAsyncHook::test_cluster_status +- providers/tests/amazon/aws/deferrable/hooks/test_redshift_cluster.py::TestRedshiftAsyncHook::test_get_cluster_status +- providers/tests/amazon/aws/deferrable/hooks/test_redshift_cluster.py::TestRedshiftAsyncHook::test_get_cluster_status_exception +- providers/tests/amazon/aws/deferrable/hooks/test_redshift_cluster.py::TestRedshiftAsyncHook::test_pause_cluster +- providers/tests/amazon/aws/deferrable/hooks/test_redshift_cluster.py::TestRedshiftAsyncHook::test_resume_cluster +- providers/tests/amazon/aws/deferrable/hooks/test_redshift_cluster.py::TestRedshiftAsyncHook::test_resume_cluster_exception +- providers/tests/amazon/aws/triggers/test_redshift_cluster.py::TestRedshiftClusterTrigger::test_redshift_cluster_sensor_trigger_exception +- providers/tests/amazon/aws/triggers/test_redshift_cluster.py::TestRedshiftClusterTrigger::test_redshift_cluster_sensor_trigger_resuming_status +- providers/tests/amazon/aws/triggers/test_redshift_cluster.py::TestRedshiftClusterTrigger::test_redshift_cluster_sensor_trigger_success +- providers/tests/google/common/auth_backend/test_google_openid.py::TestGoogleOpenID::test_success +- providers/tests/google/cloud/hooks/test_bigquery.py::TestBigQueryHookMethods::test_api_resource_configs +- providers/tests/google/cloud/hooks/test_bigquery.py::TestBigQueryHookMethods::test_api_resource_configs_duplication_warning +- providers/tests/google/cloud/hooks/test_bigquery.py::TestBigQueryHookMethods::test_cancel_queries +- providers/tests/google/cloud/hooks/test_bigquery.py::TestBigQueryHookMethods::test_cancel_query_cancel_completed +- providers/tests/google/cloud/hooks/test_bigquery.py::TestBigQueryHookMethods::test_cancel_query_cancel_timeout +- providers/tests/google/cloud/hooks/test_bigquery.py::TestBigQueryHookMethods::test_cancel_query_jobs_to_cancel +- providers/tests/google/cloud/hooks/test_bigquery.py::TestBigQueryHookMethods::test_get_dataset_tables_list +- providers/tests/google/cloud/hooks/test_bigquery.py::TestBigQueryHookMethods::test_invalid_schema_update_and_write_disposition +- providers/tests/google/cloud/hooks/test_bigquery.py::TestBigQueryHookMethods::test_invalid_schema_update_options +- providers/tests/google/cloud/hooks/test_bigquery.py::TestBigQueryHookMethods::test_invalid_source_format +- providers/tests/google/cloud/hooks/test_bigquery.py::TestBigQueryHookMethods::test_run_extract +- providers/tests/google/cloud/hooks/test_bigquery.py::TestBigQueryHookMethods::test_run_load_with_non_csv_as_src_fmt +- providers/tests/google/cloud/hooks/test_bigquery.py::TestBigQueryHookMethods::test_run_query_schema_update_options +- providers/tests/google/cloud/hooks/test_bigquery.py::TestBigQueryHookMethods::test_run_query_schema_update_options_incorrect +- providers/tests/google/cloud/hooks/test_bigquery.py::TestBigQueryHookMethods::test_run_query_sql_dialect +- providers/tests/google/cloud/hooks/test_bigquery.py::TestBigQueryHookMethods::test_run_query_sql_dialect_default +- providers/tests/google/cloud/hooks/test_bigquery.py::TestBigQueryHookMethods::test_run_query_sql_dialect_legacy_with_query_params +- providers/tests/google/cloud/hooks/test_bigquery.py::TestBigQueryHookMethods::test_run_query_sql_dialect_legacy_with_query_params_fails +- 
providers/tests/google/cloud/hooks/test_bigquery.py::TestBigQueryHookMethods::test_run_query_with_arg +- providers/tests/google/cloud/hooks/test_bigquery.py::TestBigQueryHookMethods::test_run_query_without_sql_fails +- providers/tests/google/cloud/hooks/test_bigquery.py::TestBigQueryHookMethods::test_run_table_delete +- providers/tests/google/cloud/hooks/test_bigquery.py::TestBigQueryHookRunWithConfiguration::test_run_with_configuration_location +- providers/tests/google/cloud/hooks/test_bigquery.py::TestBigQueryWithKMS::test_create_external_table_with_kms +- providers/tests/google/cloud/hooks/test_bigquery.py::TestBigQueryWithKMS::test_run_copy_with_kms +- providers/tests/google/cloud/hooks/test_bigquery.py::TestBigQueryWithKMS::test_run_load_with_kms +- providers/tests/google/cloud/hooks/test_bigquery.py::TestBigQueryWithKMS::test_run_query_with_kms +- providers/tests/google/cloud/hooks/test_bigquery.py::TestBigQueryWithLabelsAndDescription::test_create_external_table_description +- providers/tests/google/cloud/hooks/test_bigquery.py::TestBigQueryWithLabelsAndDescription::test_create_external_table_labels +- providers/tests/google/cloud/hooks/test_bigquery.py::TestBigQueryWithLabelsAndDescription::test_run_load_description +- providers/tests/google/cloud/hooks/test_bigquery.py::TestBigQueryWithLabelsAndDescription::test_run_load_labels +- providers/tests/google/cloud/hooks/test_bigquery.py::TestClusteringInRunJob::test_run_load_default +- providers/tests/google/cloud/hooks/test_bigquery.py::TestClusteringInRunJob::test_run_load_with_arg +- providers/tests/google/cloud/hooks/test_bigquery.py::TestClusteringInRunJob::test_run_query_default +- providers/tests/google/cloud/hooks/test_bigquery.py::TestClusteringInRunJob::test_run_query_with_arg +- providers/tests/google/cloud/hooks/test_bigquery.py::TestDatasetsOperations::test_patch_dataset +- providers/tests/google/cloud/hooks/test_bigquery.py::TestTableOperations::test_patch_table +- providers/tests/google/cloud/hooks/test_bigquery.py::TestTimePartitioningInRunJob::test_run_load_default +- providers/tests/google/cloud/hooks/test_bigquery.py::TestTimePartitioningInRunJob::test_run_load_with_arg +- providers/tests/google/cloud/hooks/test_bigquery.py::TestTimePartitioningInRunJob::test_run_query_with_arg +- providers/tests/google/cloud/hooks/test_bigquery.py::TestTimePartitioningInRunJob::test_run_with_auto_detect +- providers/tests/google/cloud/hooks/test_gcs.py::TestGCSHook::test_list__error_match_glob_and_invalid_delimiter +- providers/tests/google/cloud/hooks/test_life_sciences.py::TestLifeSciencesHookWithDefaultProjectIdFromConnection::test_error_operation +- providers/tests/google/cloud/hooks/test_life_sciences.py::TestLifeSciencesHookWithDefaultProjectIdFromConnection::test_life_science_client_creation +- providers/tests/google/cloud/hooks/test_life_sciences.py::TestLifeSciencesHookWithDefaultProjectIdFromConnection::test_run_pipeline_immediately_complete +- providers/tests/google/cloud/hooks/test_life_sciences.py::TestLifeSciencesHookWithDefaultProjectIdFromConnection::test_waiting_operation +- providers/tests/google/cloud/hooks/test_life_sciences.py::TestLifeSciencesHookWithPassedProjectId::test_delegate_to_runtime_error +- providers/tests/google/cloud/hooks/test_life_sciences.py::TestLifeSciencesHookWithPassedProjectId::test_error_operation +- providers/tests/google/cloud/hooks/test_life_sciences.py::TestLifeSciencesHookWithPassedProjectId::test_life_science_client_creation +- 
providers/tests/google/cloud/hooks/test_life_sciences.py::TestLifeSciencesHookWithPassedProjectId::test_location_path +- providers/tests/google/cloud/hooks/test_life_sciences.py::TestLifeSciencesHookWithPassedProjectId::test_run_pipeline_immediately_complete +- providers/tests/google/cloud/hooks/test_life_sciences.py::TestLifeSciencesHookWithPassedProjectId::test_waiting_operation +- providers/tests/google/cloud/hooks/test_life_sciences.py::TestLifeSciencesHookWithoutProjectId::test_life_science_client_creation +- providers/tests/google/cloud/hooks/test_life_sciences.py::TestLifeSciencesHookWithoutProjectId::test_run_pipeline +- providers/tests/google/cloud/hooks/vertex_ai/test_custom_job.py::TestCustomJobWithDefaultProjectIdHook::test_cancel_pipeline_job +- providers/tests/google/cloud/hooks/vertex_ai/test_custom_job.py::TestCustomJobWithDefaultProjectIdHook::test_create_pipeline_job +- providers/tests/google/cloud/hooks/vertex_ai/test_custom_job.py::TestCustomJobWithDefaultProjectIdHook::test_delete_pipeline_job +- providers/tests/google/cloud/hooks/vertex_ai/test_custom_job.py::TestCustomJobWithDefaultProjectIdHook::test_get_pipeline_job +- providers/tests/google/cloud/hooks/vertex_ai/test_custom_job.py::TestCustomJobWithDefaultProjectIdHook::test_list_pipeline_jobs +- providers/tests/google/cloud/hooks/vertex_ai/test_custom_job.py::TestCustomJobWithoutDefaultProjectIdHook::test_cancel_pipeline_job +- providers/tests/google/cloud/hooks/vertex_ai/test_custom_job.py::TestCustomJobWithoutDefaultProjectIdHook::test_create_pipeline_job +- providers/tests/google/cloud/hooks/vertex_ai/test_custom_job.py::TestCustomJobWithoutDefaultProjectIdHook::test_delete_pipeline_job +- providers/tests/google/cloud/hooks/vertex_ai/test_custom_job.py::TestCustomJobWithoutDefaultProjectIdHook::test_get_pipeline_job +- providers/tests/google/cloud/hooks/vertex_ai/test_custom_job.py::TestCustomJobWithoutDefaultProjectIdHook::test_list_pipeline_jobs +- providers/tests/google/cloud/operators/test_dataproc.py::TestDataProcHadoopOperator::test_execute +- providers/tests/google/cloud/operators/test_dataproc.py::TestDataProcHiveOperator::test_builder +- providers/tests/google/cloud/operators/test_dataproc.py::TestDataProcHiveOperator::test_execute +- providers/tests/google/cloud/operators/test_dataproc.py::TestDataProcPigOperator::test_builder +- providers/tests/google/cloud/operators/test_dataproc.py::TestDataProcPigOperator::test_execute +- providers/tests/google/cloud/operators/test_dataproc.py::TestDataProcPySparkOperator::test_execute +- providers/tests/google/cloud/operators/test_dataproc.py::TestDataProcSparkOperator::test_execute +- providers/tests/google/cloud/operators/test_dataproc.py::TestDataProcSparkSqlOperator::test_builder +- providers/tests/google/cloud/operators/test_dataproc.py::TestDataProcSparkSqlOperator::test_execute +- providers/tests/google/cloud/operators/test_dataproc.py::TestDataProcSparkSqlOperator::test_execute_override_project_id +- providers/tests/google/cloud/operators/test_dataproc.py::TestDataprocClusterScaleOperator::test_execute +- providers/tests/google/cloud/operators/test_dataproc.py::test_create_cluster_operator_extra_links +- providers/tests/google/cloud/operators/test_dataproc.py::test_scale_cluster_operator_extra_links +- providers/tests/google/cloud/operators/test_dataproc.py::test_submit_spark_job_operator_extra_links +- providers/tests/google/cloud/operators/test_gcs.py::TestGoogleCloudStorageListOperator::test_execute__delimiter +- 
providers/tests/google/cloud/operators/test_kubernetes_engine.py::TestGoogleCloudPlatformContainerOperator::test_create_execute_error_body +- providers/tests/google/cloud/operators/test_life_sciences.py::TestLifeSciencesRunPipelineOperator::test_executes +- providers/tests/google/cloud/operators/test_life_sciences.py::TestLifeSciencesRunPipelineOperator::test_executes_without_project_id +- providers/tests/google/cloud/transfers/test_gcs_to_gcs.py::TestGoogleCloudStorageToCloudStorageOperator::test_copy_files_into_a_folder +- providers/tests/google/cloud/transfers/test_gcs_to_gcs.py::TestGoogleCloudStorageToCloudStorageOperator::test_execute_last_modified_time +- providers/tests/google/cloud/transfers/test_gcs_to_gcs.py::TestGoogleCloudStorageToCloudStorageOperator::test_execute_more_than_1_wildcard +- providers/tests/google/cloud/transfers/test_gcs_to_gcs.py::TestGoogleCloudStorageToCloudStorageOperator::test_execute_no_prefix +- providers/tests/google/cloud/transfers/test_gcs_to_gcs.py::TestGoogleCloudStorageToCloudStorageOperator::test_execute_no_suffix +- providers/tests/google/cloud/transfers/test_gcs_to_gcs.py::TestGoogleCloudStorageToCloudStorageOperator::test_execute_prefix_and_suffix +- providers/tests/google/cloud/transfers/test_gcs_to_gcs.py::TestGoogleCloudStorageToCloudStorageOperator::test_execute_wildcard_empty_destination_object +- providers/tests/google/cloud/transfers/test_gcs_to_gcs.py::TestGoogleCloudStorageToCloudStorageOperator::test_execute_wildcard_with_destination_object +- providers/tests/google/cloud/transfers/test_gcs_to_gcs.py::TestGoogleCloudStorageToCloudStorageOperator::test_execute_wildcard_with_destination_object_retained_prefix +- providers/tests/google/cloud/transfers/test_gcs_to_gcs.py::TestGoogleCloudStorageToCloudStorageOperator::test_execute_wildcard_with_replace_flag_false +- providers/tests/google/cloud/transfers/test_gcs_to_gcs.py::TestGoogleCloudStorageToCloudStorageOperator::test_execute_wildcard_with_replace_flag_false_with_destination_object +- providers/tests/google/cloud/transfers/test_gcs_to_gcs.py::TestGoogleCloudStorageToCloudStorageOperator::test_execute_wildcard_without_destination_object +- providers/tests/google/cloud/transfers/test_gcs_to_gcs.py::TestGoogleCloudStorageToCloudStorageOperator::test_executes_with_a_delimiter +- providers/tests/google/cloud/transfers/test_gcs_to_gcs.py::TestGoogleCloudStorageToCloudStorageOperator::test_executes_with_delimiter_and_destination_object +- providers/tests/google/cloud/transfers/test_gcs_to_gcs.py::TestGoogleCloudStorageToCloudStorageOperator::test_executes_with_different_delimiter_and_destination_object +- providers/tests/google/cloud/transfers/test_gcs_to_gcs.py::TestGoogleCloudStorageToCloudStorageOperator::test_wc_with_last_modified_time_with_all_true_cond +- providers/tests/google/cloud/transfers/test_gcs_to_gcs.py::TestGoogleCloudStorageToCloudStorageOperator::test_wc_with_last_modified_time_with_one_true_cond +- providers/tests/google/cloud/transfers/test_gcs_to_gcs.py::TestGoogleCloudStorageToCloudStorageOperator::test_wc_with_no_last_modified_time
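The new providers/tests/deprecations_ignore.yml is nothing more than a YAML list of test ids; the conftest.py hook shown earlier appends its path to the session config so shared tooling can exempt exactly these tests from deprecation-warning enforcement. A hedged sketch of how such a list can be consumed (this is not Airflow's actual plugin code, just one plausible reader):

    from __future__ import annotations

    import yaml  # PyYAML


    def load_ignored_test_ids(paths: list[str]) -> set[str]:
        """Merge the test ids from every deprecations_ignore.yml style file."""
        ignored: set[str] = set()
        for path in paths:
            with open(path) as f:
                # Each file is a flat YAML list of "<file>::<class>::<test>" ids.
                ignored.update(yaml.safe_load(f) or [])
        return ignored


    def enforces_deprecations(test_id: str, ignored: set[str]) -> bool:
        # Tests on the ignore list may keep emitting deprecation warnings
        # without failing the run.
        return test_id not in ignored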
diff --git a/tests/providers/datadog/hooks/__init__.py b/providers/tests/dingding/__init__.py similarity index 100% rename from tests/providers/datadog/hooks/__init__.py rename to providers/tests/dingding/__init__.py diff --git a/tests/providers/datadog/sensors/__init__.py b/providers/tests/dingding/hooks/__init__.py similarity index 100% rename from tests/providers/datadog/sensors/__init__.py rename to providers/tests/dingding/hooks/__init__.py diff --git a/tests/providers/dingding/hooks/test_dingding.py b/providers/tests/dingding/hooks/test_dingding.py similarity index 100% rename from tests/providers/dingding/hooks/test_dingding.py rename to providers/tests/dingding/hooks/test_dingding.py diff --git a/tests/providers/dingding/__init__.py b/providers/tests/dingding/operators/__init__.py similarity index 100% rename from tests/providers/dingding/__init__.py rename to providers/tests/dingding/operators/__init__.py diff --git a/tests/providers/dingding/operators/test_dingding.py b/providers/tests/dingding/operators/test_dingding.py similarity index 100% rename from tests/providers/dingding/operators/test_dingding.py rename to providers/tests/dingding/operators/test_dingding.py diff --git a/tests/providers/dingding/hooks/__init__.py b/providers/tests/discord/__init__.py similarity index 100% rename from tests/providers/dingding/hooks/__init__.py rename to providers/tests/discord/__init__.py diff --git a/tests/providers/dingding/operators/__init__.py b/providers/tests/discord/hooks/__init__.py similarity index 100% rename from tests/providers/dingding/operators/__init__.py rename to providers/tests/discord/hooks/__init__.py diff --git a/tests/providers/discord/hooks/test_discord_webhook.py b/providers/tests/discord/hooks/test_discord_webhook.py similarity index 100% rename from tests/providers/discord/hooks/test_discord_webhook.py rename to providers/tests/discord/hooks/test_discord_webhook.py diff --git a/tests/providers/cncf/kubernetes/decorators/__init__.py b/providers/tests/discord/notifications/__init__.py similarity index 100% rename from tests/providers/cncf/kubernetes/decorators/__init__.py rename to providers/tests/discord/notifications/__init__.py diff --git a/tests/providers/discord/notifications/test_discord.py b/providers/tests/discord/notifications/test_discord.py similarity index 100% rename from tests/providers/discord/notifications/test_discord.py rename to providers/tests/discord/notifications/test_discord.py diff --git a/tests/providers/discord/__init__.py b/providers/tests/discord/operators/__init__.py similarity index 100% rename from tests/providers/discord/__init__.py rename to providers/tests/discord/operators/__init__.py diff --git a/tests/providers/discord/operators/test_discord_webhook.py b/providers/tests/discord/operators/test_discord_webhook.py similarity index 100% rename from tests/providers/discord/operators/test_discord_webhook.py rename to providers/tests/discord/operators/test_discord_webhook.py diff --git a/tests/providers/discord/hooks/__init__.py b/providers/tests/docker/__init__.py similarity index 100% rename from tests/providers/discord/hooks/__init__.py rename to providers/tests/docker/__init__.py diff --git a/tests/providers/docker/conftest.py b/providers/tests/docker/conftest.py similarity index 100% rename from tests/providers/docker/conftest.py rename to providers/tests/docker/conftest.py diff --git a/tests/providers/cncf/kubernetes/executors/__init__.py b/providers/tests/docker/decorators/__init__.py similarity index 100% rename from tests/providers/cncf/kubernetes/executors/__init__.py rename to providers/tests/docker/decorators/__init__.py diff --git a/tests/providers/docker/decorators/test_docker.py b/providers/tests/docker/decorators/test_docker.py similarity index 100% rename from tests/providers/docker/decorators/test_docker.py rename to providers/tests/docker/decorators/test_docker.py diff --git
a/tests/providers/discord/operators/__init__.py b/providers/tests/docker/hooks/__init__.py similarity index 100% rename from tests/providers/discord/operators/__init__.py rename to providers/tests/docker/hooks/__init__.py diff --git a/tests/providers/docker/hooks/test_docker.py b/providers/tests/docker/hooks/test_docker.py similarity index 100% rename from tests/providers/docker/hooks/test_docker.py rename to providers/tests/docker/hooks/test_docker.py diff --git a/tests/providers/docker/__init__.py b/providers/tests/docker/operators/__init__.py similarity index 100% rename from tests/providers/docker/__init__.py rename to providers/tests/docker/operators/__init__.py diff --git a/tests/providers/docker/operators/test_docker.py b/providers/tests/docker/operators/test_docker.py similarity index 99% rename from tests/providers/docker/operators/test_docker.py rename to providers/tests/docker/operators/test_docker.py index 8a461f7c084a..9dbc84657f4f 100644 --- a/tests/providers/docker/operators/test_docker.py +++ b/providers/tests/docker/operators/test_docker.py @@ -860,7 +860,8 @@ def test_partial_deprecated_skip_exit_code_ambiguous( with set_current_task_instance_session(session=session): warning_match = r"`skip_exit_code` is deprecated and will be removed" for ti in tis: - with pytest.warns(AirflowProviderDeprecationWarning, match=warning_match), pytest.raises( - ValueError, match="Conflicting `skip_on_exit_code` provided" + with ( + pytest.warns(AirflowProviderDeprecationWarning, match=warning_match), + pytest.raises(ValueError, match="Conflicting `skip_on_exit_code` provided"), ): ti.render_templates() diff --git a/tests/providers/docker/operators/test_docker_swarm.py b/providers/tests/docker/operators/test_docker_swarm.py similarity index 100% rename from tests/providers/docker/operators/test_docker_swarm.py rename to providers/tests/docker/operators/test_docker_swarm.py diff --git a/tests/providers/docker/test_exceptions.py b/providers/tests/docker/test_exceptions.py similarity index 100% rename from tests/providers/docker/test_exceptions.py rename to providers/tests/docker/test_exceptions.py diff --git a/tests/providers/docker/hooks/__init__.py b/providers/tests/edge/__init__.py similarity index 100% rename from tests/providers/docker/hooks/__init__.py rename to providers/tests/edge/__init__.py diff --git a/tests/providers/docker/operators/__init__.py b/providers/tests/edge/api_endpoints/__init__.py similarity index 100% rename from tests/providers/docker/operators/__init__.py rename to providers/tests/edge/api_endpoints/__init__.py diff --git a/tests/providers/edge/api_endpoints/test_health_endpoint.py b/providers/tests/edge/api_endpoints/test_health_endpoint.py similarity index 100% rename from tests/providers/edge/api_endpoints/test_health_endpoint.py rename to providers/tests/edge/api_endpoints/test_health_endpoint.py diff --git a/tests/providers/edge/api_endpoints/test_rpc_api_endpoint.py b/providers/tests/edge/api_endpoints/test_rpc_api_endpoint.py similarity index 98% rename from tests/providers/edge/api_endpoints/test_rpc_api_endpoint.py rename to providers/tests/edge/api_endpoints/test_rpc_api_endpoint.py index becf2f9397e3..9a9b4a7dd885 100644 --- a/tests/providers/edge/api_endpoints/test_rpc_api_endpoint.py +++ b/providers/tests/edge/api_endpoints/test_rpc_api_endpoint.py @@ -40,8 +40,9 @@ from airflow.utils.jwt_signer import JWTSigner from airflow.utils.state import State from airflow.www import app -from tests.test_utils.decorators import 
dont_initialize_flask_app_submodules -from tests.test_utils.mock_plugins import mock_plugin_manager + +from dev.tests_common.test_utils.decorators import dont_initialize_flask_app_submodules +from dev.tests_common.test_utils.mock_plugins import mock_plugin_manager # Note: Sounds a bit strange to disable internal API tests in isolation mode but... # As long as the test is modelled to run its own internal API endpoints, it is conflicting diff --git a/tests/providers/edge/__init__.py b/providers/tests/edge/cli/__init__.py similarity index 100% rename from tests/providers/edge/__init__.py rename to providers/tests/edge/cli/__init__.py diff --git a/tests/providers/edge/cli/test_edge_command.py b/providers/tests/edge/cli/test_edge_command.py similarity index 99% rename from tests/providers/edge/cli/test_edge_command.py rename to providers/tests/edge/cli/test_edge_command.py index 398c221db02f..af3e2c00e299 100644 --- a/tests/providers/edge/cli/test_edge_command.py +++ b/providers/tests/edge/cli/test_edge_command.py @@ -33,7 +33,8 @@ from airflow.providers.edge.models.edge_job import EdgeJob from airflow.providers.edge.models.edge_worker import EdgeWorker, EdgeWorkerState from airflow.utils.state import TaskInstanceState -from tests.test_utils.config import conf_vars + +from dev.tests_common.test_utils.config import conf_vars pytest.importorskip("pydantic", minversion="2.0.0") diff --git a/tests/providers/edge/api_endpoints/__init__.py b/providers/tests/edge/models/__init__.py similarity index 100% rename from tests/providers/edge/api_endpoints/__init__.py rename to providers/tests/edge/models/__init__.py diff --git a/tests/providers/edge/models/test_edge_job.py b/providers/tests/edge/models/test_edge_job.py similarity index 100% rename from tests/providers/edge/models/test_edge_job.py rename to providers/tests/edge/models/test_edge_job.py diff --git a/tests/providers/edge/models/test_edge_logs.py b/providers/tests/edge/models/test_edge_logs.py similarity index 100% rename from tests/providers/edge/models/test_edge_logs.py rename to providers/tests/edge/models/test_edge_logs.py diff --git a/tests/providers/edge/models/test_edge_worker.py b/providers/tests/edge/models/test_edge_worker.py similarity index 100% rename from tests/providers/edge/models/test_edge_worker.py rename to providers/tests/edge/models/test_edge_worker.py diff --git a/tests/providers/edge/cli/__init__.py b/providers/tests/edge/plugins/__init__.py similarity index 100% rename from tests/providers/edge/cli/__init__.py rename to providers/tests/edge/plugins/__init__.py diff --git a/tests/providers/edge/plugins/test_edge_executor_plugin.py b/providers/tests/edge/plugins/test_edge_executor_plugin.py similarity index 97% rename from tests/providers/edge/plugins/test_edge_executor_plugin.py rename to providers/tests/edge/plugins/test_edge_executor_plugin.py index e3422b17da3c..d0c5a40770b3 100644 --- a/tests/providers/edge/plugins/test_edge_executor_plugin.py +++ b/providers/tests/edge/plugins/test_edge_executor_plugin.py @@ -22,7 +22,8 @@ from airflow.plugins_manager import AirflowPlugin from airflow.providers.edge.plugins import edge_executor_plugin -from tests.test_utils.config import conf_vars + +from dev.tests_common.test_utils.config import conf_vars def test_plugin_inactive(): diff --git a/tests/providers/edge/models/__init__.py b/providers/tests/elasticsearch/__init__.py similarity index 100% rename from tests/providers/edge/models/__init__.py rename to providers/tests/elasticsearch/__init__.py diff --git 
a/tests/providers/edge/plugins/__init__.py b/providers/tests/elasticsearch/hooks/__init__.py similarity index 100% rename from tests/providers/edge/plugins/__init__.py rename to providers/tests/elasticsearch/hooks/__init__.py diff --git a/tests/providers/elasticsearch/hooks/test_elasticsearch.py b/providers/tests/elasticsearch/hooks/test_elasticsearch.py similarity index 100% rename from tests/providers/elasticsearch/hooks/test_elasticsearch.py rename to providers/tests/elasticsearch/hooks/test_elasticsearch.py diff --git a/tests/providers/cncf/kubernetes/hooks/__init__.py b/providers/tests/elasticsearch/log/__init__.py similarity index 100% rename from tests/providers/cncf/kubernetes/hooks/__init__.py rename to providers/tests/elasticsearch/log/__init__.py diff --git a/tests/providers/elasticsearch/log/elasticmock/__init__.py b/providers/tests/elasticsearch/log/elasticmock/__init__.py similarity index 98% rename from tests/providers/elasticsearch/log/elasticmock/__init__.py rename to providers/tests/elasticsearch/log/elasticmock/__init__.py index 00d92471ba9f..44e242d11457 100644 --- a/tests/providers/elasticsearch/log/elasticmock/__init__.py +++ b/providers/tests/elasticsearch/log/elasticmock/__init__.py @@ -43,7 +43,7 @@ from unittest.mock import patch from urllib.parse import unquote, urlparse -from tests.providers.elasticsearch.log.elasticmock.fake_elasticsearch import FakeElasticsearch +from providers.tests.elasticsearch.log.elasticmock.fake_elasticsearch import FakeElasticsearch ELASTIC_INSTANCES: dict[str, FakeElasticsearch] = {} diff --git a/tests/providers/elasticsearch/log/elasticmock/fake_elasticsearch.py b/providers/tests/elasticsearch/log/elasticmock/fake_elasticsearch.py similarity index 99% rename from tests/providers/elasticsearch/log/elasticmock/fake_elasticsearch.py rename to providers/tests/elasticsearch/log/elasticmock/fake_elasticsearch.py index 26e47cbf08f7..1d975ee718c3 100644 --- a/tests/providers/elasticsearch/log/elasticmock/fake_elasticsearch.py +++ b/providers/tests/elasticsearch/log/elasticmock/fake_elasticsearch.py @@ -22,7 +22,7 @@ from elasticsearch import Elasticsearch from elasticsearch.exceptions import NotFoundError -from tests.providers.elasticsearch.log.elasticmock.utilities import ( +from providers.tests.elasticsearch.log.elasticmock.utilities import ( MissingIndexException, get_random_id, query_params, diff --git a/tests/providers/elasticsearch/log/elasticmock/utilities/__init__.py b/providers/tests/elasticsearch/log/elasticmock/utilities/__init__.py similarity index 100% rename from tests/providers/elasticsearch/log/elasticmock/utilities/__init__.py rename to providers/tests/elasticsearch/log/elasticmock/utilities/__init__.py diff --git a/tests/providers/elasticsearch/log/test_es_json_formatter.py b/providers/tests/elasticsearch/log/test_es_json_formatter.py similarity index 100% rename from tests/providers/elasticsearch/log/test_es_json_formatter.py rename to providers/tests/elasticsearch/log/test_es_json_formatter.py diff --git a/tests/providers/elasticsearch/log/test_es_response.py b/providers/tests/elasticsearch/log/test_es_response.py similarity index 100% rename from tests/providers/elasticsearch/log/test_es_response.py rename to providers/tests/elasticsearch/log/test_es_response.py
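The elasticmock package being moved here backs the Elasticsearch log-handler tests with an in-memory FakeElasticsearch and an elasticmock decorator that swaps it in for the real client. A generic sketch of that substitution pattern follows; the fake's methods and names are illustrative and far smaller than the real FakeElasticsearch:

    import functools
    from unittest.mock import patch


    class FakeSearchClient:
        """Tiny in-memory stand-in for an Elasticsearch client."""

        def __init__(self, *args, **kwargs):
            self._docs = {}

        def index(self, index, id, document):
            self._docs[(index, id)] = document

        def get(self, index, id):
            return {"_source": self._docs[(index, id)]}


    def fake_search(func):
        """Replace elasticsearch.Elasticsearch with the fake for one test."""

        @functools.wraps(func)
        def wrapper(*args, **kwargs):
            with patch("elasticsearch.Elasticsearch", FakeSearchClient):
                return func(*args, **kwargs)

        return wrapper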
providers/tests/elasticsearch/log/test_es_task_handler.py index 9321f49d7787..abde5daf8bf1 100644 --- a/tests/providers/elasticsearch/log/test_es_task_handler.py +++ b/providers/tests/elasticsearch/log/test_es_task_handler.py @@ -43,10 +43,11 @@ from airflow.utils import timezone from airflow.utils.state import DagRunState, TaskInstanceState from airflow.utils.timezone import datetime -from tests.providers.elasticsearch.log.elasticmock import elasticmock -from tests.providers.elasticsearch.log.elasticmock.utilities import SearchFailedException -from tests.test_utils.config import conf_vars -from tests.test_utils.db import clear_db_dags, clear_db_runs + +from dev.tests_common.test_utils.config import conf_vars +from dev.tests_common.test_utils.db import clear_db_dags, clear_db_runs +from providers.tests.elasticsearch.log.elasticmock import elasticmock +from providers.tests.elasticsearch.log.elasticmock.utilities import SearchFailedException pytestmark = pytest.mark.db_test diff --git a/tests/providers/elasticsearch/__init__.py b/providers/tests/email/__init__.py similarity index 100% rename from tests/providers/elasticsearch/__init__.py rename to providers/tests/email/__init__.py diff --git a/tests/providers/elasticsearch/hooks/__init__.py b/providers/tests/email/operators/__init__.py similarity index 100% rename from tests/providers/elasticsearch/hooks/__init__.py rename to providers/tests/email/operators/__init__.py diff --git a/tests/providers/email/__init__.py b/providers/tests/exasol/__init__.py similarity index 100% rename from tests/providers/email/__init__.py rename to providers/tests/exasol/__init__.py diff --git a/tests/providers/email/operators/__init__.py b/providers/tests/exasol/hooks/__init__.py similarity index 100% rename from tests/providers/email/operators/__init__.py rename to providers/tests/exasol/hooks/__init__.py diff --git a/tests/providers/exasol/hooks/test_exasol.py b/providers/tests/exasol/hooks/test_exasol.py similarity index 100% rename from tests/providers/exasol/hooks/test_exasol.py rename to providers/tests/exasol/hooks/test_exasol.py diff --git a/tests/providers/exasol/hooks/test_sql.py b/providers/tests/exasol/hooks/test_sql.py similarity index 100% rename from tests/providers/exasol/hooks/test_sql.py rename to providers/tests/exasol/hooks/test_sql.py diff --git a/tests/providers/exasol/__init__.py b/providers/tests/exasol/operators/__init__.py similarity index 100% rename from tests/providers/exasol/__init__.py rename to providers/tests/exasol/operators/__init__.py diff --git a/tests/providers/exasol/operators/test_exasol.py b/providers/tests/exasol/operators/test_exasol.py similarity index 100% rename from tests/providers/exasol/operators/test_exasol.py rename to providers/tests/exasol/operators/test_exasol.py diff --git a/tests/providers/exasol/operators/test_exasol_sql.py b/providers/tests/exasol/operators/test_exasol_sql.py similarity index 100% rename from tests/providers/exasol/operators/test_exasol_sql.py rename to providers/tests/exasol/operators/test_exasol_sql.py diff --git a/tests/providers/exasol/hooks/__init__.py b/providers/tests/fab/__init__.py similarity index 100% rename from tests/providers/exasol/hooks/__init__.py rename to providers/tests/fab/__init__.py diff --git a/tests/providers/exasol/operators/__init__.py b/providers/tests/fab/auth_manager/__init__.py similarity index 100% rename from tests/providers/exasol/operators/__init__.py rename to providers/tests/fab/auth_manager/__init__.py diff --git a/tests/providers/fab/__init__.py 
b/providers/tests/fab/auth_manager/api/__init__.py similarity index 100% rename from tests/providers/fab/__init__.py rename to providers/tests/fab/auth_manager/api/__init__.py diff --git a/tests/providers/cncf/kubernetes/log_handlers/__init__.py b/providers/tests/fab/auth_manager/api/auth/__init__.py similarity index 100% rename from tests/providers/cncf/kubernetes/log_handlers/__init__.py rename to providers/tests/fab/auth_manager/api/auth/__init__.py diff --git a/tests/providers/cncf/kubernetes/operators/__init__.py b/providers/tests/fab/auth_manager/api/auth/backend/__init__.py similarity index 100% rename from tests/providers/cncf/kubernetes/operators/__init__.py rename to providers/tests/fab/auth_manager/api/auth/backend/__init__.py diff --git a/tests/providers/fab/auth_manager/api/auth/backend/test_basic_auth.py b/providers/tests/fab/auth_manager/api/auth/backend/test_basic_auth.py similarity index 98% rename from tests/providers/fab/auth_manager/api/auth/backend/test_basic_auth.py rename to providers/tests/fab/auth_manager/api/auth/backend/test_basic_auth.py index 4a5104829a06..2c783e2046f8 100644 --- a/tests/providers/fab/auth_manager/api/auth/backend/test_basic_auth.py +++ b/providers/tests/fab/auth_manager/api/auth/backend/test_basic_auth.py @@ -24,7 +24,8 @@ from airflow.providers.fab.auth_manager.api.auth.backend.basic_auth import requires_authentication from airflow.www import app as application -from tests.test_utils.compat import AIRFLOW_V_2_9_PLUS + +from dev.tests_common.test_utils.compat import AIRFLOW_V_2_9_PLUS pytestmark = [ pytest.mark.skipif(not AIRFLOW_V_2_9_PLUS, reason="Tests for Airflow 2.9.0+ only"), diff --git a/tests/providers/fab/auth_manager/api/auth/backend/test_kerberos_auth.py b/providers/tests/fab/auth_manager/api/auth/backend/test_kerberos_auth.py similarity index 92% rename from tests/providers/fab/auth_manager/api/auth/backend/test_kerberos_auth.py rename to providers/tests/fab/auth_manager/api/auth/backend/test_kerberos_auth.py index c763042e1c95..7d13a89e8db8 100644 --- a/tests/providers/fab/auth_manager/api/auth/backend/test_kerberos_auth.py +++ b/providers/tests/fab/auth_manager/api/auth/backend/test_kerberos_auth.py @@ -16,7 +16,7 @@ # under the License. 
 from __future__ import annotations

-from tests.test_utils.compat import ignore_provider_compatibility_error
+from dev.tests_common.test_utils.compat import ignore_provider_compatibility_error

 with ignore_provider_compatibility_error("2.9.0+", __file__):
     from airflow.providers.fab.auth_manager.api.auth.backend.kerberos_auth import init_app
diff --git a/tests/providers/cncf/kubernetes/resource_convert/__init__.py b/providers/tests/fab/auth_manager/api_endpoints/__init__.py
similarity index 100%
rename from tests/providers/cncf/kubernetes/resource_convert/__init__.py
rename to providers/tests/fab/auth_manager/api_endpoints/__init__.py
diff --git a/tests/providers/fab/auth_manager/api_endpoints/api_connexion_utils.py b/providers/tests/fab/auth_manager/api_endpoints/api_connexion_utils.py
similarity index 97%
rename from tests/providers/fab/auth_manager/api_endpoints/api_connexion_utils.py
rename to providers/tests/fab/auth_manager/api_endpoints/api_connexion_utils.py
index 61d923d5ff12..e4cbe93c9d62 100644
--- a/tests/providers/fab/auth_manager/api_endpoints/api_connexion_utils.py
+++ b/providers/tests/fab/auth_manager/api_endpoints/api_connexion_utils.py
@@ -18,7 +18,7 @@

 from contextlib import contextmanager

-from tests.test_utils.compat import ignore_provider_compatibility_error
+from dev.tests_common.test_utils.compat import ignore_provider_compatibility_error

 with ignore_provider_compatibility_error("2.9.0+", __file__):
     from airflow.providers.fab.auth_manager.security_manager.override import EXISTING_ROLES
diff --git a/tests/providers/fab/auth_manager/api_endpoints/remote_user_api_auth_backend.py b/providers/tests/fab/auth_manager/api_endpoints/remote_user_api_auth_backend.py
similarity index 100%
rename from tests/providers/fab/auth_manager/api_endpoints/remote_user_api_auth_backend.py
rename to providers/tests/fab/auth_manager/api_endpoints/remote_user_api_auth_backend.py
diff --git a/tests/providers/fab/auth_manager/api_endpoints/test_asset_endpoint.py b/providers/tests/fab/auth_manager/api_endpoints/test_asset_endpoint.py
similarity index 97%
rename from tests/providers/fab/auth_manager/api_endpoints/test_asset_endpoint.py
rename to providers/tests/fab/auth_manager/api_endpoints/test_asset_endpoint.py
index 8e749d1dd18f..5c22e5ba7ff9 100644
--- a/tests/providers/fab/auth_manager/api_endpoints/test_asset_endpoint.py
+++ b/providers/tests/fab/auth_manager/api_endpoints/test_asset_endpoint.py
@@ -22,7 +22,13 @@
 import time_machine

 from airflow.api_connexion.exceptions import EXCEPTIONS_LINK_MAP
-from tests.test_utils.compat import AIRFLOW_V_3_0_PLUS
+from airflow.security import permissions
+from airflow.utils import timezone
+
+from dev.tests_common.test_utils.compat import AIRFLOW_V_3_0_PLUS
+from dev.tests_common.test_utils.db import clear_db_assets, clear_db_runs
+from dev.tests_common.test_utils.www import _check_last_log
+from providers.tests.fab.auth_manager.api_endpoints.api_connexion_utils import create_user, delete_user

 try:
     from airflow.models.asset import AssetDagRunQueue, AssetModel
@@ -31,11 +37,6 @@
         raise
     else:
         pass
-from airflow.security import permissions
-from airflow.utils import timezone
-from tests.providers.fab.auth_manager.api_endpoints.api_connexion_utils import create_user, delete_user
-from tests.test_utils.db import clear_db_assets, clear_db_runs
-from tests.test_utils.www import _check_last_log

 pytestmark = [
     pytest.mark.db_test,
diff --git a/tests/providers/fab/auth_manager/api_endpoints/test_auth.py b/providers/tests/fab/auth_manager/api_endpoints/test_auth.py
similarity index 95%
rename from tests/providers/fab/auth_manager/api_endpoints/test_auth.py
rename to providers/tests/fab/auth_manager/api_endpoints/test_auth.py
index d3012e2f1b43..630ce7050bed 100644
--- a/tests/providers/fab/auth_manager/api_endpoints/test_auth.py
+++ b/providers/tests/fab/auth_manager/api_endpoints/test_auth.py
@@ -21,11 +21,11 @@
 import pytest
 from flask_login import current_user

-from tests.test_utils.api_connexion_utils import assert_401
-from tests.test_utils.compat import AIRFLOW_V_3_0_PLUS
-from tests.test_utils.config import conf_vars
-from tests.test_utils.db import clear_db_pools
-from tests.test_utils.www import client_with_login
+from dev.tests_common.test_utils.api_connexion_utils import assert_401
+from dev.tests_common.test_utils.compat import AIRFLOW_V_3_0_PLUS
+from dev.tests_common.test_utils.config import conf_vars
+from dev.tests_common.test_utils.db import clear_db_pools
+from dev.tests_common.test_utils.www import client_with_login

 pytestmark = [
     pytest.mark.db_test,
diff --git a/tests/providers/fab/auth_manager/api_endpoints/test_backfill_endpoint.py b/providers/tests/fab/auth_manager/api_endpoints/test_backfill_endpoint.py
similarity index 97%
rename from tests/providers/fab/auth_manager/api_endpoints/test_backfill_endpoint.py
rename to providers/tests/fab/auth_manager/api_endpoints/test_backfill_endpoint.py
index 56f135d457e9..9d9a79af5113 100644
--- a/tests/providers/fab/auth_manager/api_endpoints/test_backfill_endpoint.py
+++ b/providers/tests/fab/auth_manager/api_endpoints/test_backfill_endpoint.py
@@ -25,7 +25,21 @@
 import pytest

 from airflow.models import DagBag, DagModel
-from tests.test_utils.compat import AIRFLOW_V_3_0_PLUS
+from airflow.models.dag import DAG
+from airflow.models.serialized_dag import SerializedDagModel
+from airflow.operators.empty import EmptyOperator
+from airflow.security import permissions
+from airflow.utils import timezone
+from airflow.utils.session import provide_session
+
+from dev.tests_common.test_utils.compat import AIRFLOW_V_3_0_PLUS
+from dev.tests_common.test_utils.db import (
+    clear_db_backfills,
+    clear_db_dags,
+    clear_db_runs,
+    clear_db_serialized_dags,
+)
+from providers.tests.fab.auth_manager.api_endpoints.api_connexion_utils import create_user, delete_user

 try:
     from airflow.models.backfill import Backfill
@@ -34,14 +48,6 @@
         raise
     else:
         pass
-from airflow.models.dag import DAG
-from airflow.models.serialized_dag import SerializedDagModel
-from airflow.operators.empty import EmptyOperator
-from airflow.security import permissions
-from airflow.utils import timezone
-from airflow.utils.session import provide_session
-from tests.providers.fab.auth_manager.api_endpoints.api_connexion_utils import create_user, delete_user
-from tests.test_utils.db import clear_db_backfills, clear_db_dags, clear_db_runs, clear_db_serialized_dags

 pytestmark = [
     pytest.mark.db_test,
diff --git a/tests/providers/fab/auth_manager/api_endpoints/test_cors.py b/providers/tests/fab/auth_manager/api_endpoints/test_cors.py
similarity index 96%
rename from tests/providers/fab/auth_manager/api_endpoints/test_cors.py
rename to providers/tests/fab/auth_manager/api_endpoints/test_cors.py
index b44eab8820ec..8dbc4f964e61 100644
--- a/tests/providers/fab/auth_manager/api_endpoints/test_cors.py
+++ b/providers/tests/fab/auth_manager/api_endpoints/test_cors.py
@@ -20,9 +20,9 @@

 import pytest

-from tests.test_utils.compat import AIRFLOW_V_3_0_PLUS
-from tests.test_utils.config import conf_vars
-from tests.test_utils.db import clear_db_pools
+from dev.tests_common.test_utils.compat import AIRFLOW_V_3_0_PLUS
+from dev.tests_common.test_utils.config import conf_vars
+from dev.tests_common.test_utils.db import clear_db_pools

 pytestmark = [
     pytest.mark.db_test,
diff --git a/tests/providers/fab/auth_manager/api_endpoints/test_dag_endpoint.py b/providers/tests/fab/auth_manager/api_endpoints/test_dag_endpoint.py
similarity index 96%
rename from tests/providers/fab/auth_manager/api_endpoints/test_dag_endpoint.py
rename to providers/tests/fab/auth_manager/api_endpoints/test_dag_endpoint.py
index b78ac58e442e..e93425591f70 100644
--- a/tests/providers/fab/auth_manager/api_endpoints/test_dag_endpoint.py
+++ b/providers/tests/fab/auth_manager/api_endpoints/test_dag_endpoint.py
@@ -28,10 +28,11 @@
 from airflow.operators.empty import EmptyOperator
 from airflow.security import permissions
 from airflow.utils.session import provide_session
-from tests.providers.fab.auth_manager.api_endpoints.api_connexion_utils import create_user, delete_user
-from tests.test_utils.compat import AIRFLOW_V_3_0_PLUS
-from tests.test_utils.db import clear_db_dags, clear_db_runs, clear_db_serialized_dags
-from tests.test_utils.www import _check_last_log
+
+from dev.tests_common.test_utils.compat import AIRFLOW_V_3_0_PLUS
+from dev.tests_common.test_utils.db import clear_db_dags, clear_db_runs, clear_db_serialized_dags
+from dev.tests_common.test_utils.www import _check_last_log
+from providers.tests.fab.auth_manager.api_endpoints.api_connexion_utils import create_user, delete_user

 pytestmark = [
     pytest.mark.db_test,
diff --git a/tests/providers/fab/auth_manager/api_endpoints/test_dag_run_endpoint.py b/providers/tests/fab/auth_manager/api_endpoints/test_dag_run_endpoint.py
similarity index 97%
rename from tests/providers/fab/auth_manager/api_endpoints/test_dag_run_endpoint.py
rename to providers/tests/fab/auth_manager/api_endpoints/test_dag_run_endpoint.py
index a58ea08ff31c..fa09df1be74e 100644
--- a/tests/providers/fab/auth_manager/api_endpoints/test_dag_run_endpoint.py
+++ b/providers/tests/fab/auth_manager/api_endpoints/test_dag_run_endpoint.py
@@ -27,7 +27,14 @@
 from airflow.utils import timezone
 from airflow.utils.session import create_session
 from airflow.utils.state import DagRunState
-from tests.test_utils.compat import AIRFLOW_V_3_0_PLUS
+
+from dev.tests_common.test_utils.compat import AIRFLOW_V_3_0_PLUS
+from dev.tests_common.test_utils.db import clear_db_dags, clear_db_runs, clear_db_serialized_dags
+from providers.tests.fab.auth_manager.api_endpoints.api_connexion_utils import (
+    create_user,
+    delete_roles,
+    delete_user,
+)

 try:
     from airflow.utils.types import DagRunTriggeredByType, DagRunType
@@ -36,12 +43,6 @@
         raise
     else:
         pass
-from tests.providers.fab.auth_manager.api_endpoints.api_connexion_utils import (
-    create_user,
-    delete_roles,
-    delete_user,
-)
-from tests.test_utils.db import clear_db_dags, clear_db_runs, clear_db_serialized_dags

 pytestmark = [
     pytest.mark.db_test,
diff --git a/tests/providers/fab/auth_manager/api_endpoints/test_dag_source_endpoint.py b/providers/tests/fab/auth_manager/api_endpoints/test_dag_source_endpoint.py
similarity index 95%
rename from tests/providers/fab/auth_manager/api_endpoints/test_dag_source_endpoint.py
rename to providers/tests/fab/auth_manager/api_endpoints/test_dag_source_endpoint.py
index f0d9b0da298c..27e41136555e 100644
--- a/tests/providers/fab/auth_manager/api_endpoints/test_dag_source_endpoint.py
+++ b/providers/tests/fab/auth_manager/api_endpoints/test_dag_source_endpoint.py
@@ -24,9 +24,10 @@

 from airflow.models import DagBag
 from airflow.security import permissions
-from tests.providers.fab.auth_manager.api_endpoints.api_connexion_utils import create_user, delete_user
-from tests.test_utils.compat import AIRFLOW_V_3_0_PLUS
-from tests.test_utils.db import clear_db_dag_code, clear_db_dags, clear_db_serialized_dags
+
+from dev.tests_common.test_utils.compat import AIRFLOW_V_3_0_PLUS
+from dev.tests_common.test_utils.db import clear_db_dag_code, clear_db_dags, clear_db_serialized_dags
+from providers.tests.fab.auth_manager.api_endpoints.api_connexion_utils import create_user, delete_user

 pytestmark = [
     pytest.mark.db_test,
diff --git a/tests/providers/fab/auth_manager/api_endpoints/test_dag_warning_endpoint.py b/providers/tests/fab/auth_manager/api_endpoints/test_dag_warning_endpoint.py
similarity index 92%
rename from tests/providers/fab/auth_manager/api_endpoints/test_dag_warning_endpoint.py
rename to providers/tests/fab/auth_manager/api_endpoints/test_dag_warning_endpoint.py
index adfde1cc5b3e..01a2c68091ce 100644
--- a/tests/providers/fab/auth_manager/api_endpoints/test_dag_warning_endpoint.py
+++ b/providers/tests/fab/auth_manager/api_endpoints/test_dag_warning_endpoint.py
@@ -22,9 +22,10 @@
 from airflow.models.dagwarning import DagWarning
 from airflow.security import permissions
 from airflow.utils.session import create_session
-from tests.providers.fab.auth_manager.api_endpoints.api_connexion_utils import create_user, delete_user
-from tests.test_utils.compat import AIRFLOW_V_3_0_PLUS
-from tests.test_utils.db import clear_db_dag_warnings, clear_db_dags
+
+from dev.tests_common.test_utils.compat import AIRFLOW_V_3_0_PLUS
+from dev.tests_common.test_utils.db import clear_db_dag_warnings, clear_db_dags
+from providers.tests.fab.auth_manager.api_endpoints.api_connexion_utils import create_user, delete_user

 pytestmark = [
     pytest.mark.db_test,
diff --git a/tests/providers/fab/auth_manager/api_endpoints/test_event_log_endpoint.py b/providers/tests/fab/auth_manager/api_endpoints/test_event_log_endpoint.py
similarity index 96%
rename from tests/providers/fab/auth_manager/api_endpoints/test_event_log_endpoint.py
rename to providers/tests/fab/auth_manager/api_endpoints/test_event_log_endpoint.py
index acf3ca62684a..4794893241f6 100644
--- a/tests/providers/fab/auth_manager/api_endpoints/test_event_log_endpoint.py
+++ b/providers/tests/fab/auth_manager/api_endpoints/test_event_log_endpoint.py
@@ -21,9 +21,10 @@
 from airflow.models import Log
 from airflow.security import permissions
 from airflow.utils import timezone
-from tests.providers.fab.auth_manager.api_endpoints.api_connexion_utils import create_user, delete_user
-from tests.test_utils.compat import AIRFLOW_V_3_0_PLUS
-from tests.test_utils.db import clear_db_logs
+
+from dev.tests_common.test_utils.compat import AIRFLOW_V_3_0_PLUS
+from dev.tests_common.test_utils.db import clear_db_logs
+from providers.tests.fab.auth_manager.api_endpoints.api_connexion_utils import create_user, delete_user

 pytestmark = [
     pytest.mark.db_test,
diff --git a/tests/providers/fab/auth_manager/api_endpoints/test_import_error_endpoint.py b/providers/tests/fab/auth_manager/api_endpoints/test_import_error_endpoint.py
similarity index 96%
rename from tests/providers/fab/auth_manager/api_endpoints/test_import_error_endpoint.py
rename to providers/tests/fab/auth_manager/api_endpoints/test_import_error_endpoint.py
index a2fa1d028a3f..110e8e630255 100644
--- a/tests/providers/fab/auth_manager/api_endpoints/test_import_error_endpoint.py
+++ b/providers/tests/fab/auth_manager/api_endpoints/test_import_error_endpoint.py
@@ -21,10 +21,11 @@
 from airflow.models.dag import DagModel
 from airflow.security import permissions
 from airflow.utils import timezone
-from tests.providers.fab.auth_manager.api_endpoints.api_connexion_utils import create_user, delete_user
-from tests.test_utils.compat import AIRFLOW_V_3_0_PLUS, ParseImportError
-from tests.test_utils.db import clear_db_dags, clear_db_import_errors
-from tests.test_utils.permissions import _resource_name
+
+from dev.tests_common.test_utils.compat import AIRFLOW_V_3_0_PLUS, ParseImportError
+from dev.tests_common.test_utils.db import clear_db_dags, clear_db_import_errors
+from dev.tests_common.test_utils.permissions import _resource_name
+from providers.tests.fab.auth_manager.api_endpoints.api_connexion_utils import create_user, delete_user

 pytestmark = [
     pytest.mark.db_test,
diff --git a/tests/providers/fab/auth_manager/api_endpoints/test_role_and_permission_endpoint.py b/providers/tests/fab/auth_manager/api_endpoints/test_role_and_permission_endpoint.py
similarity index 99%
rename from tests/providers/fab/auth_manager/api_endpoints/test_role_and_permission_endpoint.py
rename to providers/tests/fab/auth_manager/api_endpoints/test_role_and_permission_endpoint.py
index 413a49a9d86a..b72c6fe6612f 100644
--- a/tests/providers/fab/auth_manager/api_endpoints/test_role_and_permission_endpoint.py
+++ b/providers/tests/fab/auth_manager/api_endpoints/test_role_and_permission_endpoint.py
@@ -19,22 +19,21 @@
 import pytest

 from airflow.api_connexion.exceptions import EXCEPTIONS_LINK_MAP
-from tests.providers.fab.auth_manager.api_endpoints.api_connexion_utils import (
+from airflow.security import permissions
+
+from dev.tests_common.test_utils.api_connexion_utils import assert_401
+from dev.tests_common.test_utils.compat import ignore_provider_compatibility_error
+from providers.tests.fab.auth_manager.api_endpoints.api_connexion_utils import (
     create_role,
     create_user,
     delete_role,
     delete_user,
 )
-from tests.test_utils.api_connexion_utils import assert_401
-from tests.test_utils.compat import ignore_provider_compatibility_error

 with ignore_provider_compatibility_error("2.9.0+", __file__):
     from airflow.providers.fab.auth_manager.models import Role
     from airflow.providers.fab.auth_manager.security_manager.override import EXISTING_ROLES

-
-from airflow.security import permissions
-
 pytestmark = pytest.mark.db_test
diff --git a/tests/providers/fab/auth_manager/api_endpoints/test_role_and_permission_schema.py b/providers/tests/fab/auth_manager/api_endpoints/test_role_and_permission_schema.py
similarity index 97%
rename from tests/providers/fab/auth_manager/api_endpoints/test_role_and_permission_schema.py
rename to providers/tests/fab/auth_manager/api_endpoints/test_role_and_permission_schema.py
index 4a2f0068e5e4..f5ec73a3e989 100644
--- a/tests/providers/fab/auth_manager/api_endpoints/test_role_and_permission_schema.py
+++ b/providers/tests/fab/auth_manager/api_endpoints/test_role_and_permission_schema.py
@@ -24,7 +24,8 @@
     role_schema,
 )
 from airflow.security import permissions
-from tests.test_utils.api_connexion_utils import create_role, delete_role
+
+from providers.tests.fab.auth_manager.api_endpoints.api_connexion_utils import create_role, delete_role

 pytestmark = [pytest.mark.db_test, pytest.mark.skip_if_database_isolation_mode]
diff --git a/tests/providers/fab/auth_manager/api_endpoints/test_task_instance_endpoint.py b/providers/tests/fab/auth_manager/api_endpoints/test_task_instance_endpoint.py
similarity index 98%
rename from tests/providers/fab/auth_manager/api_endpoints/test_task_instance_endpoint.py
rename to providers/tests/fab/auth_manager/api_endpoints/test_task_instance_endpoint.py
index 69b3c221eae9..7d379b6f8c00 100644
--- a/tests/providers/fab/auth_manager/api_endpoints/test_task_instance_endpoint.py
+++ b/providers/tests/fab/auth_manager/api_endpoints/test_task_instance_endpoint.py
@@ -28,13 +28,14 @@
 from airflow.utils.state import State
 from airflow.utils.timezone import datetime
 from airflow.utils.types import DagRunType
-from tests.providers.fab.auth_manager.api_endpoints.api_connexion_utils import (
+
+from dev.tests_common.test_utils.compat import AIRFLOW_V_3_0_PLUS
+from dev.tests_common.test_utils.db import clear_db_runs, clear_db_sla_miss, clear_rendered_ti_fields
+from providers.tests.fab.auth_manager.api_endpoints.api_connexion_utils import (
     create_user,
     delete_roles,
     delete_user,
 )
-from tests.test_utils.compat import AIRFLOW_V_3_0_PLUS
-from tests.test_utils.db import clear_db_runs, clear_db_sla_miss, clear_rendered_ti_fields

 pytestmark = [
     pytest.mark.db_test,
diff --git a/tests/providers/fab/auth_manager/api_endpoints/test_user_endpoint.py b/providers/tests/fab/auth_manager/api_endpoints/test_user_endpoint.py
similarity index 99%
rename from tests/providers/fab/auth_manager/api_endpoints/test_user_endpoint.py
rename to providers/tests/fab/auth_manager/api_endpoints/test_user_endpoint.py
index 7f2c885bab52..c7a20cb59c32 100644
--- a/tests/providers/fab/auth_manager/api_endpoints/test_user_endpoint.py
+++ b/providers/tests/fab/auth_manager/api_endpoints/test_user_endpoint.py
@@ -25,18 +25,19 @@
 from airflow.security import permissions
 from airflow.utils import timezone
 from airflow.utils.session import create_session
-from tests.test_utils.compat import ignore_provider_compatibility_error
-with ignore_provider_compatibility_error("2.9.0+", __file__):
-    from airflow.providers.fab.auth_manager.models import User
-
-from tests.providers.fab.auth_manager.api_endpoints.api_connexion_utils import (
+from dev.tests_common.test_utils.api_connexion_utils import assert_401
+from dev.tests_common.test_utils.compat import ignore_provider_compatibility_error
+from dev.tests_common.test_utils.config import conf_vars
+from providers.tests.fab.auth_manager.api_endpoints.api_connexion_utils import (
     create_user,
     delete_role,
     delete_user,
 )
-from tests.test_utils.api_connexion_utils import assert_401
-from tests.test_utils.config import conf_vars
+
+with ignore_provider_compatibility_error("2.9.0+", __file__):
+    from airflow.providers.fab.auth_manager.models import User
+

 pytestmark = pytest.mark.db_test
diff --git a/tests/providers/fab/auth_manager/api_endpoints/test_user_schema.py b/providers/tests/fab/auth_manager/api_endpoints/test_user_schema.py
similarity index 97%
rename from tests/providers/fab/auth_manager/api_endpoints/test_user_schema.py
rename to providers/tests/fab/auth_manager/api_endpoints/test_user_schema.py
index f3399de6a977..b37f27abe039 100644
--- a/tests/providers/fab/auth_manager/api_endpoints/test_user_schema.py
+++ b/providers/tests/fab/auth_manager/api_endpoints/test_user_schema.py
@@ -18,14 +18,15 @@

 import pytest

-from tests.providers.fab.auth_manager.api_endpoints.api_connexion_utils import create_role, delete_role
-from tests.test_utils.compat import ignore_provider_compatibility_error
+from airflow.utils import timezone
+
+from dev.tests_common.test_utils.compat import ignore_provider_compatibility_error
+from providers.tests.fab.auth_manager.api_endpoints.api_connexion_utils import create_role, delete_role

 with ignore_provider_compatibility_error("2.9.0+", __file__):
     from airflow.api_connexion.schemas.user_schema import user_collection_item_schema, user_schema
     from airflow.providers.fab.auth_manager.models import User

-from airflow.utils import timezone

 TEST_EMAIL = "test@example.org"
diff --git a/tests/providers/fab/auth_manager/api_endpoints/test_variable_endpoint.py b/providers/tests/fab/auth_manager/api_endpoints/test_variable_endpoint.py
similarity index 93%
rename from tests/providers/fab/auth_manager/api_endpoints/test_variable_endpoint.py
rename to providers/tests/fab/auth_manager/api_endpoints/test_variable_endpoint.py
index a8e71e1a8246..802eb4824087 100644
--- a/tests/providers/fab/auth_manager/api_endpoints/test_variable_endpoint.py
+++ b/providers/tests/fab/auth_manager/api_endpoints/test_variable_endpoint.py
@@ -20,9 +20,10 @@

 from airflow.models import Variable
 from airflow.security import permissions
-from tests.providers.fab.auth_manager.api_endpoints.api_connexion_utils import create_user, delete_user
-from tests.test_utils.compat import AIRFLOW_V_3_0_PLUS
-from tests.test_utils.db import clear_db_variables
+
+from dev.tests_common.test_utils.compat import AIRFLOW_V_3_0_PLUS
+from dev.tests_common.test_utils.db import clear_db_variables
+from providers.tests.fab.auth_manager.api_endpoints.api_connexion_utils import create_user, delete_user

 pytestmark = [
     pytest.mark.db_test,
diff --git a/tests/providers/fab/auth_manager/api_endpoints/test_xcom_endpoint.py b/providers/tests/fab/auth_manager/api_endpoints/test_xcom_endpoint.py
similarity index 97%
rename from tests/providers/fab/auth_manager/api_endpoints/test_xcom_endpoint.py
rename to providers/tests/fab/auth_manager/api_endpoints/test_xcom_endpoint.py
index 01336f9957c6..06e8ee484776 100644
--- a/tests/providers/fab/auth_manager/api_endpoints/test_xcom_endpoint.py
+++ b/providers/tests/fab/auth_manager/api_endpoints/test_xcom_endpoint.py
@@ -29,9 +29,10 @@
 from airflow.utils.dates import parse_execution_date
 from airflow.utils.session import create_session
 from airflow.utils.types import DagRunType
-from tests.providers.fab.auth_manager.api_endpoints.api_connexion_utils import create_user, delete_user
-from tests.test_utils.compat import AIRFLOW_V_3_0_PLUS
-from tests.test_utils.db import clear_db_dags, clear_db_runs, clear_db_xcom
+
+from dev.tests_common.test_utils.compat import AIRFLOW_V_3_0_PLUS
+from dev.tests_common.test_utils.db import clear_db_dags, clear_db_runs, clear_db_xcom
+from providers.tests.fab.auth_manager.api_endpoints.api_connexion_utils import create_user, delete_user

 pytestmark = [
     pytest.mark.db_test,
diff --git a/tests/providers/cncf/kubernetes/sensors/__init__.py b/providers/tests/fab/auth_manager/cli_commands/__init__.py
similarity index 100%
rename from tests/providers/cncf/kubernetes/sensors/__init__.py
rename to providers/tests/fab/auth_manager/cli_commands/__init__.py
diff --git a/tests/providers/fab/auth_manager/cli_commands/test_db_command.py b/providers/tests/fab/auth_manager/cli_commands/test_db_command.py
similarity index 100%
rename from tests/providers/fab/auth_manager/cli_commands/test_db_command.py
rename to providers/tests/fab/auth_manager/cli_commands/test_db_command.py
diff --git a/tests/providers/fab/auth_manager/cli_commands/test_definition.py b/providers/tests/fab/auth_manager/cli_commands/test_definition.py
similarity index 94%
rename from tests/providers/fab/auth_manager/cli_commands/test_definition.py
rename to providers/tests/fab/auth_manager/cli_commands/test_definition.py
index 2db5d352ecc1..de906bef1ba3 100644
--- a/tests/providers/fab/auth_manager/cli_commands/test_definition.py
+++ b/providers/tests/fab/auth_manager/cli_commands/test_definition.py
@@ -16,7 +16,7 @@
 # under the License.
 from __future__ import annotations

-from tests.test_utils.compat import ignore_provider_compatibility_error
+from dev.tests_common.test_utils.compat import ignore_provider_compatibility_error

 with ignore_provider_compatibility_error("2.9.0+", __file__):
     from airflow.providers.fab.auth_manager.cli_commands.definition import (
diff --git a/tests/providers/fab/auth_manager/cli_commands/test_role_command.py b/providers/tests/fab/auth_manager/cli_commands/test_role_command.py
similarity index 98%
rename from tests/providers/fab/auth_manager/cli_commands/test_role_command.py
rename to providers/tests/fab/auth_manager/cli_commands/test_role_command.py
index 5f12c01860d1..fd176702c721 100644
--- a/tests/providers/fab/auth_manager/cli_commands/test_role_command.py
+++ b/providers/tests/fab/auth_manager/cli_commands/test_role_command.py
@@ -25,8 +25,9 @@
 import pytest

 from airflow.cli import cli_parser
-from tests.test_utils.compat import ignore_provider_compatibility_error
-from tests.test_utils.config import conf_vars
+
+from dev.tests_common.test_utils.compat import ignore_provider_compatibility_error
+from dev.tests_common.test_utils.config import conf_vars

 with ignore_provider_compatibility_error("2.9.0+", __file__):
     from airflow.providers.fab.auth_manager.cli_commands import role_command
diff --git a/tests/providers/fab/auth_manager/cli_commands/test_sync_perm_command.py b/providers/tests/fab/auth_manager/cli_commands/test_sync_perm_command.py
similarity index 96%
rename from tests/providers/fab/auth_manager/cli_commands/test_sync_perm_command.py
rename to providers/tests/fab/auth_manager/cli_commands/test_sync_perm_command.py
index 9e1817bd5617..e0e4a70f4ac8 100644
--- a/tests/providers/fab/auth_manager/cli_commands/test_sync_perm_command.py
+++ b/providers/tests/fab/auth_manager/cli_commands/test_sync_perm_command.py
@@ -22,7 +22,8 @@
 import pytest

 from airflow.cli import cli_parser
-from tests.test_utils.compat import ignore_provider_compatibility_error
+
+from dev.tests_common.test_utils.compat import ignore_provider_compatibility_error

 with ignore_provider_compatibility_error("2.9.0+", __file__):
     from airflow.providers.fab.auth_manager.cli_commands import sync_perm_command
diff --git a/tests/providers/fab/auth_manager/cli_commands/test_user_command.py b/providers/tests/fab/auth_manager/cli_commands/test_user_command.py
similarity index 99%
rename from tests/providers/fab/auth_manager/cli_commands/test_user_command.py
rename to providers/tests/fab/auth_manager/cli_commands/test_user_command.py
index b8ce2f48d6c0..5f2f66b0866a 100644
--- a/tests/providers/fab/auth_manager/cli_commands/test_user_command.py
+++ b/providers/tests/fab/auth_manager/cli_commands/test_user_command.py
@@ -26,7 +26,8 @@
 import pytest

 from airflow.cli import cli_parser
-from tests.test_utils.compat import ignore_provider_compatibility_error
+
+from dev.tests_common.test_utils.compat import ignore_provider_compatibility_error

 with ignore_provider_compatibility_error("2.9.0+", __file__):
     from airflow.providers.fab.auth_manager.cli_commands import user_command
diff --git a/tests/providers/fab/auth_manager/cli_commands/test_utils.py b/providers/tests/fab/auth_manager/cli_commands/test_utils.py
similarity index 93%
rename from tests/providers/fab/auth_manager/cli_commands/test_utils.py
rename to providers/tests/fab/auth_manager/cli_commands/test_utils.py
index fd8b1dfd50c8..f52defae6bb8 100644
--- a/tests/providers/fab/auth_manager/cli_commands/test_utils.py
+++ b/providers/tests/fab/auth_manager/cli_commands/test_utils.py
@@ -18,7 +18,7 @@

 import pytest

-from tests.test_utils.compat import ignore_provider_compatibility_error
+from dev.tests_common.test_utils.compat import ignore_provider_compatibility_error

 with ignore_provider_compatibility_error("2.9.0+", __file__):
     from airflow.providers.fab.auth_manager.cli_commands.utils import get_application_builder
diff --git a/tests/providers/fab/auth_manager/conftest.py b/providers/tests/fab/auth_manager/conftest.py
similarity index 91%
rename from tests/providers/fab/auth_manager/conftest.py
rename to providers/tests/fab/auth_manager/conftest.py
index a8fbe5fbdaaa..9102f5d0f65d 100644
--- a/tests/providers/fab/auth_manager/conftest.py
+++ b/providers/tests/fab/auth_manager/conftest.py
@@ -19,8 +19,9 @@
 import pytest

 from airflow.www import app
-from tests.test_utils.config import conf_vars
-from tests.test_utils.decorators import dont_initialize_flask_app_submodules
+
+from dev.tests_common.test_utils.config import conf_vars
+from dev.tests_common.test_utils.decorators import dont_initialize_flask_app_submodules


 @pytest.fixture(scope="session")
@@ -42,7 +43,7 @@ def factory():
             (
                 "api",
                 "auth_backends",
-            ): "tests.providers.fab.auth_manager.api_endpoints.remote_user_api_auth_backend,airflow.api.auth.backend.session",
+            ): "providers.tests.fab.auth_manager.api_endpoints.remote_user_api_auth_backend,airflow.api.auth.backend.session",
             (
                 "core",
                 "auth_manager",
diff --git a/tests/providers/cncf/kubernetes/triggers/__init__.py b/providers/tests/fab/auth_manager/decorators/__init__.py
similarity index 100%
rename from tests/providers/cncf/kubernetes/triggers/__init__.py
rename to providers/tests/fab/auth_manager/decorators/__init__.py
diff --git a/tests/providers/fab/auth_manager/decorators/test_auth.py b/providers/tests/fab/auth_manager/decorators/test_auth.py
similarity index 98%
rename from tests/providers/fab/auth_manager/decorators/test_auth.py
rename to providers/tests/fab/auth_manager/decorators/test_auth.py
index 98f77a4f3427..202f0d622707 100644
--- a/tests/providers/fab/auth_manager/decorators/test_auth.py
+++ b/providers/tests/fab/auth_manager/decorators/test_auth.py
@@ -21,7 +21,8 @@
 import pytest

 from airflow.security.permissions import ACTION_CAN_READ, RESOURCE_DAG
-from tests.test_utils.compat import ignore_provider_compatibility_error
+
+from dev.tests_common.test_utils.compat import ignore_provider_compatibility_error

 permissions = [(ACTION_CAN_READ, RESOURCE_DAG)]
diff --git a/tests/providers/fab/auth_manager/__init__.py b/providers/tests/fab/auth_manager/models/__init__.py
similarity index 100%
rename from tests/providers/fab/auth_manager/__init__.py
rename to providers/tests/fab/auth_manager/models/__init__.py
diff --git a/tests/providers/fab/auth_manager/models/test_anonymous_user.py b/providers/tests/fab/auth_manager/models/test_anonymous_user.py
similarity index 93%
rename from tests/providers/fab/auth_manager/models/test_anonymous_user.py
rename to providers/tests/fab/auth_manager/models/test_anonymous_user.py
index 4e365e3c8b70..419d17aa9f3f 100644
--- a/tests/providers/fab/auth_manager/models/test_anonymous_user.py
+++ b/providers/tests/fab/auth_manager/models/test_anonymous_user.py
@@ -17,7 +17,7 @@
 # under the License.
 from __future__ import annotations

-from tests.test_utils.compat import ignore_provider_compatibility_error
+from dev.tests_common.test_utils.compat import ignore_provider_compatibility_error

 with ignore_provider_compatibility_error("2.9.0+", __file__):
     from airflow.providers.fab.auth_manager.models.anonymous_user import AnonymousUser
diff --git a/tests/providers/fab/auth_manager/models/test_db.py b/providers/tests/fab/auth_manager/models/test_db.py
similarity index 93%
rename from tests/providers/fab/auth_manager/models/test_db.py
rename to providers/tests/fab/auth_manager/models/test_db.py
index 528e1cbf099f..3af94ceed7b1 100644
--- a/tests/providers/fab/auth_manager/models/test_db.py
+++ b/providers/tests/fab/auth_manager/models/test_db.py
@@ -16,7 +16,6 @@
 # under the License.
 from __future__ import annotations

-import os
 import re
 from unittest import mock

@@ -25,7 +24,7 @@
 from alembic.migration import MigrationContext
 from sqlalchemy import MetaData

-import airflow
+import airflow.providers
 from airflow.settings import engine
 from airflow.utils.db import (
     compare_server_default,
@@ -38,20 +37,16 @@ class TestFABDBManager:
     def setup_method(self):
-        self.airflow_dir = os.path.dirname(airflow.__file__)
+        self.providers_dir: str = airflow.providers.__path__[0]

     def test_version_table_name_set(self, session):
         assert FABDBManager(session=session).version_table_name == "alembic_version_fab"

     def test_migration_dir_set(self, session):
-        assert (
-            FABDBManager(session=session).migration_dir == f"{self.airflow_dir}/providers/fab/migrations"
-        )
+        assert FABDBManager(session=session).migration_dir == f"{self.providers_dir}/fab/migrations"

     def test_alembic_file_set(self, session):
-        assert (
-            FABDBManager(session=session).alembic_file == f"{self.airflow_dir}/providers/fab/alembic.ini"
-        )
+        assert FABDBManager(session=session).alembic_file == f"{self.providers_dir}/fab/alembic.ini"

     def test_supports_table_dropping_set(self, session):
         assert FABDBManager(session=session).supports_table_dropping is True
diff --git a/tests/providers/cncf/kubernetes/utils/__init__.py b/providers/tests/fab/auth_manager/security_manager/__init__.py
similarity index 100%
rename from tests/providers/cncf/kubernetes/utils/__init__.py
rename to providers/tests/fab/auth_manager/security_manager/__init__.py
diff --git a/tests/providers/fab/auth_manager/security_manager/test_constants.py b/providers/tests/fab/auth_manager/security_manager/test_constants.py
similarity index 93%
rename from tests/providers/fab/auth_manager/security_manager/test_constants.py
rename to providers/tests/fab/auth_manager/security_manager/test_constants.py
index 5a718eee4b63..a6566fd98710 100644
--- a/tests/providers/fab/auth_manager/security_manager/test_constants.py
+++ b/providers/tests/fab/auth_manager/security_manager/test_constants.py
@@ -16,7 +16,7 @@
 # under the License.
from __future__ import annotations -from tests.test_utils.compat import ignore_provider_compatibility_error +from dev.tests_common.test_utils.compat import ignore_provider_compatibility_error with ignore_provider_compatibility_error("2.9.0+", __file__): from airflow.providers.fab.auth_manager.security_manager.constants import EXISTING_ROLES diff --git a/tests/providers/fab/auth_manager/security_manager/test_override.py b/providers/tests/fab/auth_manager/security_manager/test_override.py similarity index 95% rename from tests/providers/fab/auth_manager/security_manager/test_override.py rename to providers/tests/fab/auth_manager/security_manager/test_override.py index 6d85c0319dc4..2733b688d1ae 100644 --- a/tests/providers/fab/auth_manager/security_manager/test_override.py +++ b/providers/tests/fab/auth_manager/security_manager/test_override.py @@ -19,7 +19,7 @@ from unittest import mock from unittest.mock import Mock -from tests.test_utils.compat import ignore_provider_compatibility_error +from dev.tests_common.test_utils.compat import ignore_provider_compatibility_error with ignore_provider_compatibility_error("2.9.0+", __file__): from airflow.providers.fab.auth_manager.security_manager.override import FabAirflowSecurityManagerOverride diff --git a/tests/providers/fab/auth_manager/test_fab_auth_manager.py b/providers/tests/fab/auth_manager/test_fab_auth_manager.py similarity index 99% rename from tests/providers/fab/auth_manager/test_fab_auth_manager.py rename to providers/tests/fab/auth_manager/test_fab_auth_manager.py index d727b6090822..064c8e0dfd4c 100644 --- a/tests/providers/fab/auth_manager/test_fab_auth_manager.py +++ b/providers/tests/fab/auth_manager/test_fab_auth_manager.py @@ -31,7 +31,7 @@ except ImportError: pass -from tests.test_utils.compat import ignore_provider_compatibility_error +from dev.tests_common.test_utils.compat import ignore_provider_compatibility_error with ignore_provider_compatibility_error("2.9.0+", __file__): from airflow.providers.fab.auth_manager.fab_auth_manager import FabAuthManager diff --git a/tests/providers/fab/auth_manager/test_models.py b/providers/tests/fab/auth_manager/test_models.py similarity index 96% rename from tests/providers/fab/auth_manager/test_models.py rename to providers/tests/fab/auth_manager/test_models.py index 30677d709575..6f03be373187 100644 --- a/tests/providers/fab/auth_manager/test_models.py +++ b/providers/tests/fab/auth_manager/test_models.py @@ -20,7 +20,7 @@ from sqlalchemy import Column, MetaData, String, Table -from tests.test_utils.compat import ignore_provider_compatibility_error +from dev.tests_common.test_utils.compat import ignore_provider_compatibility_error with ignore_provider_compatibility_error("2.9.0+", __file__): from airflow.providers.fab.auth_manager.models import ( diff --git a/tests/providers/fab/auth_manager/test_security.py b/providers/tests/fab/auth_manager/test_security.py similarity index 98% rename from tests/providers/fab/auth_manager/test_security.py rename to providers/tests/fab/auth_manager/test_security.py index bebb52c256fc..8c9e221b71df 100644 --- a/tests/providers/fab/auth_manager/test_security.py +++ b/providers/tests/fab/auth_manager/test_security.py @@ -36,7 +36,8 @@ from airflow.exceptions import AirflowException from airflow.models import DagModel from airflow.models.dag import DAG -from tests.test_utils.compat import ignore_provider_compatibility_error + +from dev.tests_common.test_utils.compat import ignore_provider_compatibility_error with 
ignore_provider_compatibility_error("2.9.0+", __file__): from airflow.providers.fab.auth_manager.fab_auth_manager import FabAuthManager @@ -49,17 +50,18 @@ from airflow.www.auth import get_access_denied_message from airflow.www.extensions.init_auth_manager import get_auth_manager from airflow.www.utils import CustomSQLAInterface -from tests.providers.fab.auth_manager.api_endpoints.api_connexion_utils import ( + +from dev.tests_common.test_utils.asserts import assert_queries_count +from dev.tests_common.test_utils.db import clear_db_dags, clear_db_runs +from dev.tests_common.test_utils.mock_security_manager import MockSecurityManager +from dev.tests_common.test_utils.permissions import _resource_name +from providers.tests.fab.auth_manager.api_endpoints.api_connexion_utils import ( create_user, create_user_scope, delete_role, delete_user, set_user_single_role, ) -from tests.test_utils.asserts import assert_queries_count -from tests.test_utils.db import clear_db_dags, clear_db_runs -from tests.test_utils.mock_security_manager import MockSecurityManager -from tests.test_utils.permissions import _resource_name if TYPE_CHECKING: from airflow.security.permissions import RESOURCE_ASSET diff --git a/tests/providers/fab/auth_manager/api/__init__.py b/providers/tests/fab/auth_manager/views/__init__.py similarity index 100% rename from tests/providers/fab/auth_manager/api/__init__.py rename to providers/tests/fab/auth_manager/views/__init__.py diff --git a/tests/providers/fab/auth_manager/views/test_permissions.py b/providers/tests/fab/auth_manager/views/test_permissions.py similarity index 93% rename from tests/providers/fab/auth_manager/views/test_permissions.py rename to providers/tests/fab/auth_manager/views/test_permissions.py index f24d9b738343..2ac26ffe45e8 100644 --- a/tests/providers/fab/auth_manager/views/test_permissions.py +++ b/providers/tests/fab/auth_manager/views/test_permissions.py @@ -21,9 +21,10 @@ from airflow.security import permissions from airflow.www import app as application -from tests.providers.fab.auth_manager.api_endpoints.api_connexion_utils import create_user, delete_user -from tests.test_utils.compat import AIRFLOW_V_2_9_PLUS -from tests.test_utils.www import client_with_login + +from dev.tests_common.test_utils.compat import AIRFLOW_V_2_9_PLUS +from dev.tests_common.test_utils.www import client_with_login +from providers.tests.fab.auth_manager.api_endpoints.api_connexion_utils import create_user, delete_user pytestmark = [ pytest.mark.skipif(not AIRFLOW_V_2_9_PLUS, reason="Tests for Airflow 2.9.0+ only"), diff --git a/tests/providers/fab/auth_manager/views/test_roles_list.py b/providers/tests/fab/auth_manager/views/test_roles_list.py similarity index 91% rename from tests/providers/fab/auth_manager/views/test_roles_list.py rename to providers/tests/fab/auth_manager/views/test_roles_list.py index 8de63ad5ba88..3c1509d75350 100644 --- a/tests/providers/fab/auth_manager/views/test_roles_list.py +++ b/providers/tests/fab/auth_manager/views/test_roles_list.py @@ -21,9 +21,10 @@ from airflow.security import permissions from airflow.www import app as application -from tests.providers.fab.auth_manager.api_endpoints.api_connexion_utils import create_user, delete_user -from tests.test_utils.compat import AIRFLOW_V_2_9_PLUS -from tests.test_utils.www import client_with_login + +from dev.tests_common.test_utils.compat import AIRFLOW_V_2_9_PLUS +from dev.tests_common.test_utils.www import client_with_login +from providers.tests.fab.auth_manager.api_endpoints.api_connexion_utils 
import create_user, delete_user pytestmark = [ pytest.mark.skipif(not AIRFLOW_V_2_9_PLUS, reason="Tests for Airflow 2.9.0+ only"), diff --git a/tests/providers/fab/auth_manager/views/test_user.py b/providers/tests/fab/auth_manager/views/test_user.py similarity index 91% rename from tests/providers/fab/auth_manager/views/test_user.py rename to providers/tests/fab/auth_manager/views/test_user.py index 62b03a99e7c2..ee068f7f319f 100644 --- a/tests/providers/fab/auth_manager/views/test_user.py +++ b/providers/tests/fab/auth_manager/views/test_user.py @@ -21,9 +21,10 @@ from airflow.security import permissions from airflow.www import app as application -from tests.providers.fab.auth_manager.api_endpoints.api_connexion_utils import create_user, delete_user -from tests.test_utils.compat import AIRFLOW_V_2_9_PLUS -from tests.test_utils.www import client_with_login + +from dev.tests_common.test_utils.compat import AIRFLOW_V_2_9_PLUS +from dev.tests_common.test_utils.www import client_with_login +from providers.tests.fab.auth_manager.api_endpoints.api_connexion_utils import create_user, delete_user pytestmark = [ pytest.mark.skipif(not AIRFLOW_V_2_9_PLUS, reason="Tests for Airflow 2.9.0+ only"), diff --git a/tests/providers/fab/auth_manager/views/test_user_edit.py b/providers/tests/fab/auth_manager/views/test_user_edit.py similarity index 91% rename from tests/providers/fab/auth_manager/views/test_user_edit.py rename to providers/tests/fab/auth_manager/views/test_user_edit.py index 8099f6794818..7cdc1a493b00 100644 --- a/tests/providers/fab/auth_manager/views/test_user_edit.py +++ b/providers/tests/fab/auth_manager/views/test_user_edit.py @@ -21,9 +21,10 @@ from airflow.security import permissions from airflow.www import app as application -from tests.providers.fab.auth_manager.api_endpoints.api_connexion_utils import create_user, delete_user -from tests.test_utils.compat import AIRFLOW_V_2_9_PLUS -from tests.test_utils.www import client_with_login + +from dev.tests_common.test_utils.compat import AIRFLOW_V_2_9_PLUS +from dev.tests_common.test_utils.www import client_with_login +from providers.tests.fab.auth_manager.api_endpoints.api_connexion_utils import create_user, delete_user pytestmark = [ pytest.mark.skipif(not AIRFLOW_V_2_9_PLUS, reason="Tests for Airflow 2.9.0+ only"), diff --git a/tests/providers/fab/auth_manager/views/test_user_stats.py b/providers/tests/fab/auth_manager/views/test_user_stats.py similarity index 91% rename from tests/providers/fab/auth_manager/views/test_user_stats.py rename to providers/tests/fab/auth_manager/views/test_user_stats.py index ae09cf92252c..e50bc87535a4 100644 --- a/tests/providers/fab/auth_manager/views/test_user_stats.py +++ b/providers/tests/fab/auth_manager/views/test_user_stats.py @@ -21,9 +21,10 @@ from airflow.security import permissions from airflow.www import app as application -from tests.providers.fab.auth_manager.api_endpoints.api_connexion_utils import create_user, delete_user -from tests.test_utils.compat import AIRFLOW_V_2_9_PLUS -from tests.test_utils.www import client_with_login + +from dev.tests_common.test_utils.compat import AIRFLOW_V_2_9_PLUS +from dev.tests_common.test_utils.www import client_with_login +from providers.tests.fab.auth_manager.api_endpoints.api_connexion_utils import create_user, delete_user pytestmark = [ pytest.mark.skipif(not AIRFLOW_V_2_9_PLUS, reason="Tests for Airflow 2.9.0+ only"), diff --git a/tests/providers/cohere/__init__.py b/providers/tests/facebook/__init__.py similarity index 100% rename from 
tests/providers/cohere/__init__.py rename to providers/tests/facebook/__init__.py diff --git a/tests/providers/cohere/hooks/__init__.py b/providers/tests/facebook/ads/__init__.py similarity index 100% rename from tests/providers/cohere/hooks/__init__.py rename to providers/tests/facebook/ads/__init__.py diff --git a/tests/providers/cohere/operators/__init__.py b/providers/tests/facebook/ads/hooks/__init__.py similarity index 100% rename from tests/providers/cohere/operators/__init__.py rename to providers/tests/facebook/ads/hooks/__init__.py diff --git a/tests/providers/facebook/ads/hooks/test_ads.py b/providers/tests/facebook/ads/hooks/test_ads.py similarity index 100% rename from tests/providers/facebook/ads/hooks/test_ads.py rename to providers/tests/facebook/ads/hooks/test_ads.py diff --git a/tests/providers/fab/auth_manager/models/__init__.py b/providers/tests/ftp/__init__.py similarity index 100% rename from tests/providers/fab/auth_manager/models/__init__.py rename to providers/tests/ftp/__init__.py diff --git a/tests/providers/fab/auth_manager/views/__init__.py b/providers/tests/ftp/hooks/__init__.py similarity index 100% rename from tests/providers/fab/auth_manager/views/__init__.py rename to providers/tests/ftp/hooks/__init__.py diff --git a/tests/providers/ftp/hooks/test_ftp.py b/providers/tests/ftp/hooks/test_ftp.py similarity index 100% rename from tests/providers/ftp/hooks/test_ftp.py rename to providers/tests/ftp/hooks/test_ftp.py diff --git a/tests/providers/common/__init__.py b/providers/tests/ftp/operators/__init__.py similarity index 100% rename from tests/providers/common/__init__.py rename to providers/tests/ftp/operators/__init__.py diff --git a/tests/providers/ftp/operators/test_ftp.py b/providers/tests/ftp/operators/test_ftp.py similarity index 100% rename from tests/providers/ftp/operators/test_ftp.py rename to providers/tests/ftp/operators/test_ftp.py diff --git a/tests/providers/ftp/__init__.py b/providers/tests/ftp/sensors/__init__.py similarity index 100% rename from tests/providers/ftp/__init__.py rename to providers/tests/ftp/sensors/__init__.py diff --git a/tests/providers/ftp/sensors/test_ftp.py b/providers/tests/ftp/sensors/test_ftp.py similarity index 100% rename from tests/providers/ftp/sensors/test_ftp.py rename to providers/tests/ftp/sensors/test_ftp.py diff --git a/tests/providers/common/compat/__init__.py b/providers/tests/github/__init__.py similarity index 100% rename from tests/providers/common/compat/__init__.py rename to providers/tests/github/__init__.py diff --git a/tests/providers/common/compat/lineage/__init__.py b/providers/tests/github/hooks/__init__.py similarity index 100% rename from tests/providers/common/compat/lineage/__init__.py rename to providers/tests/github/hooks/__init__.py diff --git a/tests/providers/github/hooks/test_github.py b/providers/tests/github/hooks/test_github.py similarity index 100% rename from tests/providers/github/hooks/test_github.py rename to providers/tests/github/hooks/test_github.py diff --git a/tests/providers/common/compat/openlineage/__init__.py b/providers/tests/github/operators/__init__.py similarity index 100% rename from tests/providers/common/compat/openlineage/__init__.py rename to providers/tests/github/operators/__init__.py diff --git a/tests/providers/github/operators/test_github.py b/providers/tests/github/operators/test_github.py similarity index 100% rename from tests/providers/github/operators/test_github.py rename to providers/tests/github/operators/test_github.py diff --git 
a/tests/providers/common/compat/openlineage/utils/__init__.py b/providers/tests/github/sensors/__init__.py similarity index 100% rename from tests/providers/common/compat/openlineage/utils/__init__.py rename to providers/tests/github/sensors/__init__.py diff --git a/tests/providers/github/sensors/test_github.py b/providers/tests/github/sensors/test_github.py similarity index 100% rename from tests/providers/github/sensors/test_github.py rename to providers/tests/github/sensors/test_github.py diff --git a/tests/providers/common/compat/security/__init__.py b/providers/tests/google/__init__.py similarity index 100% rename from tests/providers/common/compat/security/__init__.py rename to providers/tests/google/__init__.py diff --git a/tests/providers/common/io/__init__.py b/providers/tests/google/ads/__init__.py similarity index 100% rename from tests/providers/common/io/__init__.py rename to providers/tests/google/ads/__init__.py diff --git a/tests/providers/common/io/assets/__init__.py b/providers/tests/google/ads/hooks/__init__.py similarity index 100% rename from tests/providers/common/io/assets/__init__.py rename to providers/tests/google/ads/hooks/__init__.py diff --git a/tests/providers/google/ads/hooks/test_ads.py b/providers/tests/google/ads/hooks/test_ads.py similarity index 100% rename from tests/providers/google/ads/hooks/test_ads.py rename to providers/tests/google/ads/hooks/test_ads.py diff --git a/tests/providers/common/io/operators/__init__.py b/providers/tests/google/ads/operators/__init__.py similarity index 100% rename from tests/providers/common/io/operators/__init__.py rename to providers/tests/google/ads/operators/__init__.py diff --git a/tests/providers/google/ads/operators/test_ads.py b/providers/tests/google/ads/operators/test_ads.py similarity index 100% rename from tests/providers/google/ads/operators/test_ads.py rename to providers/tests/google/ads/operators/test_ads.py diff --git a/tests/providers/common/io/xcom/__init__.py b/providers/tests/google/ads/transfers/__init__.py similarity index 100% rename from tests/providers/common/io/xcom/__init__.py rename to providers/tests/google/ads/transfers/__init__.py diff --git a/tests/providers/google/ads/transfers/test_ads_to_gcs.py b/providers/tests/google/ads/transfers/test_ads_to_gcs.py similarity index 97% rename from tests/providers/google/ads/transfers/test_ads_to_gcs.py rename to providers/tests/google/ads/transfers/test_ads_to_gcs.py index a97131203fa1..3d9c42849449 100644 --- a/tests/providers/google/ads/transfers/test_ads_to_gcs.py +++ b/providers/tests/google/ads/transfers/test_ads_to_gcs.py @@ -19,7 +19,8 @@ from unittest import mock from airflow.providers.google.ads.transfers.ads_to_gcs import GoogleAdsToGcsOperator -from tests.providers.google.ads.operators.test_ads import ( + +from providers.tests.google.ads.operators.test_ads import ( BUCKET, CLIENT_IDS, FIELDS_TO_EXTRACT, diff --git a/tests/providers/common/sql/__init__.py b/providers/tests/google/assets/__init__.py similarity index 100% rename from tests/providers/common/sql/__init__.py rename to providers/tests/google/assets/__init__.py diff --git a/tests/providers/google/datasets/test_bigquery.py b/providers/tests/google/assets/test_bigquery.py similarity index 100% rename from tests/providers/google/datasets/test_bigquery.py rename to providers/tests/google/assets/test_bigquery.py diff --git a/tests/providers/common/sql/hooks/__init__.py b/providers/tests/google/cloud/__init__.py similarity index 100% rename from 
rename to providers/tests/google/cloud/__init__.py
diff --git a/tests/providers/common/sql/operators/__init__.py b/providers/tests/google/cloud/_internal_client/__init__.py
similarity index 100%
rename from tests/providers/common/sql/operators/__init__.py
rename to providers/tests/google/cloud/_internal_client/__init__.py
diff --git a/tests/providers/google/cloud/_internal_client/test_secret_manager_client.py b/providers/tests/google/cloud/_internal_client/test_secret_manager_client.py
similarity index 100%
rename from tests/providers/google/cloud/_internal_client/test_secret_manager_client.py
rename to providers/tests/google/cloud/_internal_client/test_secret_manager_client.py
diff --git a/tests/providers/common/sql/sensors/__init__.py b/providers/tests/google/cloud/hooks/__init__.py
similarity index 100%
rename from tests/providers/common/sql/sensors/__init__.py
rename to providers/tests/google/cloud/hooks/__init__.py
diff --git a/tests/providers/google/cloud/hooks/test_automl.py b/providers/tests/google/cloud/hooks/test_automl.py
similarity index 99%
rename from tests/providers/google/cloud/hooks/test_automl.py
rename to providers/tests/google/cloud/hooks/test_automl.py
index 47f956c6d103..26db131a9eef 100644
--- a/tests/providers/google/cloud/hooks/test_automl.py
+++ b/providers/tests/google/cloud/hooks/test_automl.py
@@ -25,7 +25,8 @@
 from airflow.providers.google.cloud.hooks.automl import CloudAutoMLHook
 from airflow.providers.google.common.consts import CLIENT_INFO
-from tests.providers.google.cloud.utils.base_gcp_mock import mock_base_gcp_hook_no_default_project_id
+
+from providers.tests.google.cloud.utils.base_gcp_mock import mock_base_gcp_hook_no_default_project_id
 
 CREDENTIALS = "test-creds"
 TASK_ID = "test-automl-hook"
diff --git a/tests/providers/google/cloud/hooks/test_bigquery.py b/providers/tests/google/cloud/hooks/test_bigquery.py
similarity index 100%
rename from tests/providers/google/cloud/hooks/test_bigquery.py
rename to providers/tests/google/cloud/hooks/test_bigquery.py
diff --git a/tests/providers/google/cloud/hooks/test_bigquery_dts.py b/providers/tests/google/cloud/hooks/test_bigquery_dts.py
similarity index 98%
rename from tests/providers/google/cloud/hooks/test_bigquery_dts.py
rename to providers/tests/google/cloud/hooks/test_bigquery_dts.py
index 1f973a9f21d8..163ca25273b1 100644
--- a/tests/providers/google/cloud/hooks/test_bigquery_dts.py
+++ b/providers/tests/google/cloud/hooks/test_bigquery_dts.py
@@ -30,7 +30,8 @@
     AsyncBiqQueryDataTransferServiceHook,
     BiqQueryDataTransferServiceHook,
 )
-from tests.providers.google.cloud.utils.base_gcp_mock import mock_base_gcp_hook_no_default_project_id
+
+from providers.tests.google.cloud.utils.base_gcp_mock import mock_base_gcp_hook_no_default_project_id
 
 CREDENTIALS = "test-creds"
 PROJECT_ID = "id"
diff --git a/tests/providers/google/cloud/hooks/test_bigquery_system.py b/providers/tests/google/cloud/hooks/test_bigquery_system.py
similarity index 94%
rename from tests/providers/google/cloud/hooks/test_bigquery_system.py
rename to providers/tests/google/cloud/hooks/test_bigquery_system.py
index 3076f4a123e3..676ab3503583 100644
--- a/tests/providers/google/cloud/hooks/test_bigquery_system.py
+++ b/providers/tests/google/cloud/hooks/test_bigquery_system.py
@@ -22,8 +22,9 @@
 import pytest
 
 from airflow.providers.google.cloud.hooks import bigquery as hook
-from tests.providers.google.cloud.utils.gcp_authenticator import GCP_BIGQUERY_KEY
-from tests.test_utils.gcp_system_helpers import GoogleSystemTest
+
+from dev.tests_common.test_utils.gcp_system_helpers import GoogleSystemTest
+from providers.tests.google.cloud.utils.gcp_authenticator import GCP_BIGQUERY_KEY
 
 
 @pytest.mark.system("google.cloud")
diff --git a/tests/providers/google/cloud/hooks/test_bigtable.py b/providers/tests/google/cloud/hooks/test_bigtable.py
similarity index 99%
rename from tests/providers/google/cloud/hooks/test_bigtable.py
rename to providers/tests/google/cloud/hooks/test_bigtable.py
index 4dda2fb009e7..f1929ba00d58 100644
--- a/tests/providers/google/cloud/hooks/test_bigtable.py
+++ b/providers/tests/google/cloud/hooks/test_bigtable.py
@@ -27,7 +27,8 @@
 from airflow.providers.google.cloud.hooks.bigtable import BigtableHook
 from airflow.providers.google.common.consts import CLIENT_INFO
-from tests.providers.google.cloud.utils.base_gcp_mock import (
+
+from providers.tests.google.cloud.utils.base_gcp_mock import (
     GCP_PROJECT_ID_HOOK_UNIT_TEST,
     mock_base_gcp_hook_default_project_id,
     mock_base_gcp_hook_no_default_project_id,
diff --git a/tests/providers/google/cloud/hooks/test_cloud_batch.py b/providers/tests/google/cloud/hooks/test_cloud_batch.py
similarity index 99%
rename from tests/providers/google/cloud/hooks/test_cloud_batch.py
rename to providers/tests/google/cloud/hooks/test_cloud_batch.py
index de83b64fbe79..05cc26b6b479 100644
--- a/tests/providers/google/cloud/hooks/test_cloud_batch.py
+++ b/providers/tests/google/cloud/hooks/test_cloud_batch.py
@@ -25,7 +25,8 @@
 from airflow.exceptions import AirflowException
 from airflow.providers.google.cloud.hooks.cloud_batch import CloudBatchAsyncHook, CloudBatchHook
-from tests.providers.google.cloud.utils.base_gcp_mock import mock_base_gcp_hook_default_project_id
+
+from providers.tests.google.cloud.utils.base_gcp_mock import mock_base_gcp_hook_default_project_id
 
 pytestmark = pytest.mark.db_test
diff --git a/tests/providers/google/cloud/hooks/test_cloud_build.py b/providers/tests/google/cloud/hooks/test_cloud_build.py
similarity index 99%
rename from tests/providers/google/cloud/hooks/test_cloud_build.py
rename to providers/tests/google/cloud/hooks/test_cloud_build.py
index 7672d460d0fb..e65a5cdb8ba3 100644
--- a/tests/providers/google/cloud/hooks/test_cloud_build.py
+++ b/providers/tests/google/cloud/hooks/test_cloud_build.py
@@ -32,7 +32,8 @@
 from airflow.exceptions import AirflowException, AirflowProviderDeprecationWarning
 from airflow.providers.google.cloud.hooks.cloud_build import CloudBuildAsyncHook, CloudBuildHook
 from airflow.providers.google.common.consts import CLIENT_INFO
-from tests.providers.google.cloud.utils.base_gcp_mock import mock_base_gcp_hook_no_default_project_id
+
+from providers.tests.google.cloud.utils.base_gcp_mock import mock_base_gcp_hook_no_default_project_id
 
 PROJECT_ID = "cloud-build-project"
 LOCATION = "test-location"
diff --git a/tests/providers/google/cloud/hooks/test_cloud_composer.py b/providers/tests/google/cloud/hooks/test_cloud_composer.py
similarity index 100%
rename from tests/providers/google/cloud/hooks/test_cloud_composer.py
rename to providers/tests/google/cloud/hooks/test_cloud_composer.py
diff --git a/tests/providers/google/cloud/hooks/test_cloud_memorystore.py b/providers/tests/google/cloud/hooks/test_cloud_memorystore.py
similarity index 99%
rename from tests/providers/google/cloud/hooks/test_cloud_memorystore.py
rename to providers/tests/google/cloud/hooks/test_cloud_memorystore.py
index 592acb82c236..d96ebc5b4322 100644
--- a/tests/providers/google/cloud/hooks/test_cloud_memorystore.py
+++ b/providers/tests/google/cloud/hooks/test_cloud_memorystore.py
@@ -32,7 +32,8 @@
     CloudMemorystoreHook,
     CloudMemorystoreMemcachedHook,
 )
-from tests.providers.google.cloud.utils.base_gcp_mock import (
+
+from providers.tests.google.cloud.utils.base_gcp_mock import (
     GCP_PROJECT_ID_HOOK_UNIT_TEST,
     mock_base_gcp_hook_default_project_id,
     mock_base_gcp_hook_no_default_project_id,
diff --git a/tests/providers/google/cloud/hooks/test_cloud_run.py b/providers/tests/google/cloud/hooks/test_cloud_run.py
similarity index 99%
rename from tests/providers/google/cloud/hooks/test_cloud_run.py
rename to providers/tests/google/cloud/hooks/test_cloud_run.py
index 48b76e1bca12..d23e96ff6e03 100644
--- a/tests/providers/google/cloud/hooks/test_cloud_run.py
+++ b/providers/tests/google/cloud/hooks/test_cloud_run.py
@@ -41,7 +41,8 @@
     CloudRunServiceAsyncHook,
     CloudRunServiceHook,
 )
-from tests.providers.google.cloud.utils.base_gcp_mock import mock_base_gcp_hook_default_project_id
+
+from providers.tests.google.cloud.utils.base_gcp_mock import mock_base_gcp_hook_default_project_id
 
 
 @pytest.mark.db_test
diff --git a/tests/providers/google/cloud/hooks/test_cloud_sql.py b/providers/tests/google/cloud/hooks/test_cloud_sql.py
similarity index 99%
rename from tests/providers/google/cloud/hooks/test_cloud_sql.py
rename to providers/tests/google/cloud/hooks/test_cloud_sql.py
index a5d6e1666407..3365dc76aec3 100644
--- a/tests/providers/google/cloud/hooks/test_cloud_sql.py
+++ b/providers/tests/google/cloud/hooks/test_cloud_sql.py
@@ -40,7 +40,8 @@
     CloudSQLHook,
     CloudSqlProxyRunner,
 )
-from tests.providers.google.cloud.utils.base_gcp_mock import (
+
+from providers.tests.google.cloud.utils.base_gcp_mock import (
     mock_base_gcp_hook_default_project_id,
     mock_base_gcp_hook_no_default_project_id,
 )
diff --git a/tests/providers/google/cloud/hooks/test_cloud_storage_transfer_service.py b/providers/tests/google/cloud/hooks/test_cloud_storage_transfer_service.py
similarity index 99%
rename from tests/providers/google/cloud/hooks/test_cloud_storage_transfer_service.py
rename to providers/tests/google/cloud/hooks/test_cloud_storage_transfer_service.py
index 672a80d2fae1..a68bcd3afef2 100644
--- a/tests/providers/google/cloud/hooks/test_cloud_storage_transfer_service.py
+++ b/providers/tests/google/cloud/hooks/test_cloud_storage_transfer_service.py
@@ -46,7 +46,8 @@
     GcpTransferOperationStatus,
     gen_job_name,
 )
-from tests.providers.google.cloud.utils.base_gcp_mock import (
+
+from providers.tests.google.cloud.utils.base_gcp_mock import (
     GCP_PROJECT_ID_HOOK_UNIT_TEST,
     mock_base_gcp_hook_default_project_id,
     mock_base_gcp_hook_no_default_project_id,
diff --git a/tests/providers/google/cloud/hooks/test_cloud_storage_transfer_service_async.py b/providers/tests/google/cloud/hooks/test_cloud_storage_transfer_service_async.py
similarity index 98%
rename from tests/providers/google/cloud/hooks/test_cloud_storage_transfer_service_async.py
rename to providers/tests/google/cloud/hooks/test_cloud_storage_transfer_service_async.py
index e05bacbbd2c3..968c1a95efbd 100644
--- a/tests/providers/google/cloud/hooks/test_cloud_storage_transfer_service_async.py
+++ b/providers/tests/google/cloud/hooks/test_cloud_storage_transfer_service_async.py
@@ -26,7 +26,8 @@
 from airflow.providers.google.cloud.hooks.cloud_storage_transfer_service import (
     CloudDataTransferServiceAsyncHook,
 )
-from tests.providers.google.cloud.utils.base_gcp_mock import mock_base_gcp_hook_default_project_id
+
+from providers.tests.google.cloud.utils.base_gcp_mock import mock_base_gcp_hook_default_project_id
 
 TEST_PROJECT_ID = "project-id"
 TRANSFER_HOOK_PATH = "airflow.providers.google.cloud.hooks.cloud_storage_transfer_service"
diff --git a/tests/providers/google/cloud/hooks/test_compute.py b/providers/tests/google/cloud/hooks/test_compute.py
similarity index 99%
rename from tests/providers/google/cloud/hooks/test_compute.py
rename to providers/tests/google/cloud/hooks/test_compute.py
index f4d5da5414ac..24a43ac3d87b 100644
--- a/tests/providers/google/cloud/hooks/test_compute.py
+++ b/providers/tests/google/cloud/hooks/test_compute.py
@@ -25,7 +25,8 @@
 from airflow.exceptions import AirflowException
 from airflow.providers.google.cloud.hooks.compute import ComputeEngineHook, GceOperationStatus
-from tests.providers.google.cloud.utils.base_gcp_mock import (
+
+from providers.tests.google.cloud.utils.base_gcp_mock import (
     GCP_PROJECT_ID_HOOK_UNIT_TEST,
     mock_base_gcp_hook_default_project_id,
     mock_base_gcp_hook_no_default_project_id,
diff --git a/tests/providers/google/cloud/hooks/test_compute_ssh.py b/providers/tests/google/cloud/hooks/test_compute_ssh.py
similarity index 100%
rename from tests/providers/google/cloud/hooks/test_compute_ssh.py
rename to providers/tests/google/cloud/hooks/test_compute_ssh.py
diff --git a/tests/providers/google/cloud/hooks/test_datacatalog.py b/providers/tests/google/cloud/hooks/test_datacatalog.py
similarity index 99%
rename from tests/providers/google/cloud/hooks/test_datacatalog.py
rename to providers/tests/google/cloud/hooks/test_datacatalog.py
index c97cef7c4ae1..29ce515e8886 100644
--- a/tests/providers/google/cloud/hooks/test_datacatalog.py
+++ b/providers/tests/google/cloud/hooks/test_datacatalog.py
@@ -28,7 +28,8 @@
 from airflow.exceptions import AirflowException
 from airflow.providers.google.cloud.hooks.datacatalog import CloudDataCatalogHook
-from tests.providers.google.cloud.utils.base_gcp_mock import (
+
+from providers.tests.google.cloud.utils.base_gcp_mock import (
     mock_base_gcp_hook_default_project_id,
     mock_base_gcp_hook_no_default_project_id,
 )
diff --git a/tests/providers/google/cloud/hooks/test_dataflow.py b/providers/tests/google/cloud/hooks/test_dataflow.py
similarity index 100%
rename from tests/providers/google/cloud/hooks/test_dataflow.py
rename to providers/tests/google/cloud/hooks/test_dataflow.py
diff --git a/tests/providers/google/cloud/hooks/test_dataform.py b/providers/tests/google/cloud/hooks/test_dataform.py
similarity index 99%
rename from tests/providers/google/cloud/hooks/test_dataform.py
rename to providers/tests/google/cloud/hooks/test_dataform.py
index d5c94e40d5f9..bedf91dadf6e 100644
--- a/tests/providers/google/cloud/hooks/test_dataform.py
+++ b/providers/tests/google/cloud/hooks/test_dataform.py
@@ -25,7 +25,8 @@
 from airflow.exceptions import AirflowException
 from airflow.providers.google.cloud.hooks.dataform import DataformHook
-from tests.providers.google.cloud.utils.base_gcp_mock import mock_base_gcp_hook_default_project_id
+
+from providers.tests.google.cloud.utils.base_gcp_mock import mock_base_gcp_hook_default_project_id
 
 pytestmark = pytest.mark.db_test
diff --git a/tests/providers/google/cloud/hooks/test_datafusion.py b/providers/tests/google/cloud/hooks/test_datafusion.py
similarity index 99%
rename from tests/providers/google/cloud/hooks/test_datafusion.py
rename to providers/tests/google/cloud/hooks/test_datafusion.py
index 7358c334fda1..271662f7bca5 100644
--- a/tests/providers/google/cloud/hooks/test_datafusion.py
+++ b/providers/tests/google/cloud/hooks/test_datafusion.py
@@ -27,7 +27,8 @@
 from airflow.exceptions import AirflowException
 from airflow.providers.google.cloud.hooks.datafusion import DataFusionAsyncHook, DataFusionHook
 from airflow.providers.google.cloud.utils.datafusion import DataFusionPipelineType
-from tests.providers.google.cloud.utils.base_gcp_mock import mock_base_gcp_hook_default_project_id
+
+from providers.tests.google.cloud.utils.base_gcp_mock import mock_base_gcp_hook_default_project_id
 
 API_VERSION = "v1beta1"
 GCP_CONN_ID = "google_cloud_default"
diff --git a/tests/providers/google/cloud/hooks/test_datapipeline.py b/providers/tests/google/cloud/hooks/test_datapipeline.py
similarity index 100%
rename from tests/providers/google/cloud/hooks/test_datapipeline.py
rename to providers/tests/google/cloud/hooks/test_datapipeline.py
diff --git a/tests/providers/google/cloud/hooks/test_dataplex.py b/providers/tests/google/cloud/hooks/test_dataplex.py
similarity index 99%
rename from tests/providers/google/cloud/hooks/test_dataplex.py
rename to providers/tests/google/cloud/hooks/test_dataplex.py
index deca942a02fc..9221a0e09465 100644
--- a/tests/providers/google/cloud/hooks/test_dataplex.py
+++ b/providers/tests/google/cloud/hooks/test_dataplex.py
@@ -22,7 +22,8 @@
 from google.api_core.gapic_v1.method import DEFAULT
 
 from airflow.providers.google.cloud.operators.dataplex import DataplexHook
-from tests.providers.google.cloud.utils.base_gcp_mock import mock_base_gcp_hook_default_project_id
+
+from providers.tests.google.cloud.utils.base_gcp_mock import mock_base_gcp_hook_default_project_id
 
 BASE_STRING = "airflow.providers.google.common.hooks.base_google.{}"
 DATAPLEX_STRING = "airflow.providers.google.cloud.hooks.dataplex.{}"
diff --git a/tests/providers/google/cloud/hooks/test_dataprep.py b/providers/tests/google/cloud/hooks/test_dataprep.py
similarity index 100%
rename from tests/providers/google/cloud/hooks/test_dataprep.py
rename to providers/tests/google/cloud/hooks/test_dataprep.py
diff --git a/tests/providers/google/cloud/hooks/test_dataproc.py b/providers/tests/google/cloud/hooks/test_dataproc.py
similarity index 100%
rename from tests/providers/google/cloud/hooks/test_dataproc.py
rename to providers/tests/google/cloud/hooks/test_dataproc.py
diff --git a/tests/providers/google/cloud/hooks/test_dataproc_metastore.py b/providers/tests/google/cloud/hooks/test_dataproc_metastore.py
similarity index 99%
rename from tests/providers/google/cloud/hooks/test_dataproc_metastore.py
rename to providers/tests/google/cloud/hooks/test_dataproc_metastore.py
index 28f506ce301b..58a04d6bc835 100644
--- a/tests/providers/google/cloud/hooks/test_dataproc_metastore.py
+++ b/providers/tests/google/cloud/hooks/test_dataproc_metastore.py
@@ -23,7 +23,8 @@
 from google.api_core.gapic_v1.method import DEFAULT
 
 from airflow.providers.google.cloud.hooks.dataproc_metastore import DataprocMetastoreHook
-from tests.providers.google.cloud.utils.base_gcp_mock import (
+
+from providers.tests.google.cloud.utils.base_gcp_mock import (
     mock_base_gcp_hook_default_project_id,
     mock_base_gcp_hook_no_default_project_id,
 )
diff --git a/tests/providers/google/cloud/hooks/test_datastore.py b/providers/tests/google/cloud/hooks/test_datastore.py
similarity index 100%
rename from tests/providers/google/cloud/hooks/test_datastore.py
rename to providers/tests/google/cloud/hooks/test_datastore.py
diff --git a/tests/providers/google/cloud/hooks/test_dlp.py b/providers/tests/google/cloud/hooks/test_dlp.py
similarity index 99%
rename from tests/providers/google/cloud/hooks/test_dlp.py
rename to providers/tests/google/cloud/hooks/test_dlp.py
index 186d5de1732b..f0c63fcd2362 100644
--- a/tests/providers/google/cloud/hooks/test_dlp.py
+++ b/providers/tests/google/cloud/hooks/test_dlp.py
@@ -33,7 +33,8 @@
 from airflow.exceptions import AirflowException
 from airflow.providers.google.cloud.hooks.dlp import CloudDLPHook
 from airflow.providers.google.common.consts import CLIENT_INFO
-from tests.providers.google.cloud.utils.base_gcp_mock import mock_base_gcp_hook_no_default_project_id
+
+from providers.tests.google.cloud.utils.base_gcp_mock import mock_base_gcp_hook_no_default_project_id
 
 API_RESPONSE: dict[Any, Any] = {}
 ORGANIZATION_ID = "test-org"
diff --git a/tests/providers/google/cloud/hooks/test_functions.py b/providers/tests/google/cloud/hooks/test_functions.py
similarity index 99%
rename from tests/providers/google/cloud/hooks/test_functions.py
rename to providers/tests/google/cloud/hooks/test_functions.py
index f7bb02b1e2c9..4e3c48795117 100644
--- a/tests/providers/google/cloud/hooks/test_functions.py
+++ b/providers/tests/google/cloud/hooks/test_functions.py
@@ -24,7 +24,8 @@
 from airflow.exceptions import AirflowException
 from airflow.providers.google.cloud.hooks.functions import CloudFunctionsHook
-from tests.providers.google.cloud.utils.base_gcp_mock import (
+
+from providers.tests.google.cloud.utils.base_gcp_mock import (
     GCP_PROJECT_ID_HOOK_UNIT_TEST,
     get_open_mock,
     mock_base_gcp_hook_default_project_id,
diff --git a/tests/providers/google/cloud/hooks/test_gcs.py b/providers/tests/google/cloud/hooks/test_gcs.py
similarity index 99%
rename from tests/providers/google/cloud/hooks/test_gcs.py
rename to providers/tests/google/cloud/hooks/test_gcs.py
index 5d2735834a95..464534bd11d9 100644
--- a/tests/providers/google/cloud/hooks/test_gcs.py
+++ b/providers/tests/google/cloud/hooks/test_gcs.py
@@ -41,7 +41,8 @@
 from airflow.providers.google.common.consts import CLIENT_INFO
 from airflow.utils import timezone
 from airflow.version import version
-from tests.providers.google.cloud.utils.base_gcp_mock import mock_base_gcp_hook_default_project_id
+
+from providers.tests.google.cloud.utils.base_gcp_mock import mock_base_gcp_hook_default_project_id
 
 BASE_STRING = "airflow.providers.google.common.hooks.base_google.{}"
 GCS_STRING = "airflow.providers.google.cloud.hooks.gcs.{}"
diff --git a/tests/providers/google/cloud/hooks/test_gdm.py b/providers/tests/google/cloud/hooks/test_gdm.py
similarity index 100%
rename from tests/providers/google/cloud/hooks/test_gdm.py
rename to providers/tests/google/cloud/hooks/test_gdm.py
diff --git a/tests/providers/google/cloud/hooks/test_kms.py b/providers/tests/google/cloud/hooks/test_kms.py
similarity index 100%
rename from tests/providers/google/cloud/hooks/test_kms.py
rename to providers/tests/google/cloud/hooks/test_kms.py
diff --git a/tests/providers/google/cloud/hooks/test_kms_system.py b/providers/tests/google/cloud/hooks/test_kms_system.py
similarity index 96%
rename from tests/providers/google/cloud/hooks/test_kms_system.py
rename to providers/tests/google/cloud/hooks/test_kms_system.py
index afb4de1d1d04..374fabb5246d 100644
--- a/tests/providers/google/cloud/hooks/test_kms_system.py
+++ b/providers/tests/google/cloud/hooks/test_kms_system.py
@@ -23,8 +23,9 @@
 import pytest
 
 from airflow.providers.google.cloud.hooks.kms import CloudKMSHook
-from tests.providers.google.cloud.utils.gcp_authenticator import GCP_KMS_KEY
-from tests.test_utils.gcp_system_helpers import GoogleSystemTest, provide_gcp_context
+
+from dev.tests_common.test_utils.gcp_system_helpers import GoogleSystemTest, provide_gcp_context
+from providers.tests.google.cloud.utils.gcp_authenticator import GCP_KMS_KEY
 
 # To prevent resource name collisions, key ring and key resources CANNOT be deleted, so
 # to avoid cluttering the project, we only create the key once during project initialization.
diff --git a/tests/providers/google/cloud/hooks/test_kubernetes_engine.py b/providers/tests/google/cloud/hooks/test_kubernetes_engine.py
similarity index 99%
rename from tests/providers/google/cloud/hooks/test_kubernetes_engine.py
rename to providers/tests/google/cloud/hooks/test_kubernetes_engine.py
index 479cceed0581..e535323f4d4c 100644
--- a/tests/providers/google/cloud/hooks/test_kubernetes_engine.py
+++ b/providers/tests/google/cloud/hooks/test_kubernetes_engine.py
@@ -34,7 +34,8 @@
     GKEKubernetesHook,
 )
 from airflow.providers.google.common.consts import CLIENT_INFO
-from tests.providers.google.cloud.utils.base_gcp_mock import mock_base_gcp_hook_default_project_id
+
+from providers.tests.google.cloud.utils.base_gcp_mock import mock_base_gcp_hook_default_project_id
 
 TASK_ID = "test-gke-cluster-operator"
 CLUSTER_NAME = "test-cluster"
diff --git a/tests/providers/google/cloud/hooks/test_life_sciences.py b/providers/tests/google/cloud/hooks/test_life_sciences.py
similarity index 99%
rename from tests/providers/google/cloud/hooks/test_life_sciences.py
rename to providers/tests/google/cloud/hooks/test_life_sciences.py
index 582b856b22c8..ca113ec648f5 100644
--- a/tests/providers/google/cloud/hooks/test_life_sciences.py
+++ b/providers/tests/google/cloud/hooks/test_life_sciences.py
@@ -28,7 +28,8 @@
 from airflow.exceptions import AirflowException
 from airflow.providers.google.cloud.hooks.life_sciences import LifeSciencesHook
-from tests.providers.google.cloud.utils.base_gcp_mock import (
+
+from providers.tests.google.cloud.utils.base_gcp_mock import (
     GCP_PROJECT_ID_HOOK_UNIT_TEST,
     mock_base_gcp_hook_default_project_id,
     mock_base_gcp_hook_no_default_project_id,
diff --git a/tests/providers/google/cloud/hooks/test_looker.py b/providers/tests/google/cloud/hooks/test_looker.py
similarity index 100%
rename from tests/providers/google/cloud/hooks/test_looker.py
rename to providers/tests/google/cloud/hooks/test_looker.py
diff --git a/tests/providers/google/cloud/hooks/test_mlengine.py b/providers/tests/google/cloud/hooks/test_mlengine.py
similarity index 99%
rename from tests/providers/google/cloud/hooks/test_mlengine.py
rename to providers/tests/google/cloud/hooks/test_mlengine.py
index 7e8d7d68032f..85ac30327d4c 100644
--- a/tests/providers/google/cloud/hooks/test_mlengine.py
+++ b/providers/tests/google/cloud/hooks/test_mlengine.py
@@ -31,7 +31,8 @@
 from airflow.exceptions import AirflowException
 from airflow.providers.google.cloud.hooks import mlengine as hook
 from airflow.providers.google.cloud.hooks.mlengine import MLEngineAsyncHook
-from tests.providers.google.cloud.utils.base_gcp_mock import (
+
+from providers.tests.google.cloud.utils.base_gcp_mock import (
     GCP_PROJECT_ID_HOOK_UNIT_TEST,
     mock_base_gcp_hook_default_project_id,
 )
diff --git a/tests/providers/google/cloud/hooks/test_natural_language.py b/providers/tests/google/cloud/hooks/test_natural_language.py
similarity index 98%
rename from tests/providers/google/cloud/hooks/test_natural_language.py
rename to providers/tests/google/cloud/hooks/test_natural_language.py
index 22ba3d87c10f..a3228439b10e 100644
--- a/tests/providers/google/cloud/hooks/test_natural_language.py
+++ b/providers/tests/google/cloud/hooks/test_natural_language.py
@@ -26,7 +26,8 @@
 from airflow.providers.google.cloud.hooks.natural_language import CloudNaturalLanguageHook
 from airflow.providers.google.common.consts import CLIENT_INFO
-from tests.providers.google.cloud.utils.base_gcp_mock import mock_base_gcp_hook_no_default_project_id
+
+from providers.tests.google.cloud.utils.base_gcp_mock import mock_base_gcp_hook_no_default_project_id
 
 API_RESPONSE: dict[Any, Any] = {}
 DOCUMENT = Document(
diff --git a/tests/providers/google/cloud/hooks/test_os_login.py b/providers/tests/google/cloud/hooks/test_os_login.py
similarity index 99%
rename from tests/providers/google/cloud/hooks/test_os_login.py
rename to providers/tests/google/cloud/hooks/test_os_login.py
index 568e96610c76..48861f695469 100644
--- a/tests/providers/google/cloud/hooks/test_os_login.py
+++ b/providers/tests/google/cloud/hooks/test_os_login.py
@@ -24,7 +24,8 @@
 from airflow.exceptions import AirflowException
 from airflow.providers.google.cloud.hooks.os_login import OSLoginHook
-from tests.providers.google.cloud.utils.base_gcp_mock import (
+
+from providers.tests.google.cloud.utils.base_gcp_mock import (
     mock_base_gcp_hook_default_project_id,
     mock_base_gcp_hook_no_default_project_id,
 )
diff --git a/tests/providers/google/cloud/hooks/test_pubsub.py b/providers/tests/google/cloud/hooks/test_pubsub.py
similarity index 100%
rename from tests/providers/google/cloud/hooks/test_pubsub.py
rename to providers/tests/google/cloud/hooks/test_pubsub.py
diff --git a/tests/providers/google/cloud/hooks/test_secret_manager.py b/providers/tests/google/cloud/hooks/test_secret_manager.py
similarity index 99%
rename from tests/providers/google/cloud/hooks/test_secret_manager.py
rename to providers/tests/google/cloud/hooks/test_secret_manager.py
index a800f1f785b8..e758f76a9dcd 100644
--- a/tests/providers/google/cloud/hooks/test_secret_manager.py
+++ b/providers/tests/google/cloud/hooks/test_secret_manager.py
@@ -29,7 +29,8 @@
     SecretsManagerHook,
 )
 from airflow.providers.google.common.consts import CLIENT_INFO
-from tests.providers.google.cloud.utils.base_gcp_mock import (
+
+from providers.tests.google.cloud.utils.base_gcp_mock import (
     GCP_PROJECT_ID_HOOK_UNIT_TEST,
     mock_base_gcp_hook_default_project_id,
 )
diff --git a/tests/providers/google/cloud/hooks/test_secret_manager_system.py b/providers/tests/google/cloud/hooks/test_secret_manager_system.py
similarity index 95%
rename from tests/providers/google/cloud/hooks/test_secret_manager_system.py
rename to providers/tests/google/cloud/hooks/test_secret_manager_system.py
index 6aacc078b9a2..d13305f52b42 100644
--- a/tests/providers/google/cloud/hooks/test_secret_manager_system.py
+++ b/providers/tests/google/cloud/hooks/test_secret_manager_system.py
@@ -21,8 +21,9 @@
 import pytest
 
 from airflow.providers.google.cloud.hooks.secret_manager import SecretsManagerHook
-from tests.providers.google.cloud.utils.gcp_authenticator import GCP_SECRET_MANAGER_KEY
-from tests.test_utils.gcp_system_helpers import GoogleSystemTest, provide_gcp_context
+
+from dev.tests_common.test_utils.gcp_system_helpers import GoogleSystemTest, provide_gcp_context
+from providers.tests.google.cloud.utils.gcp_authenticator import GCP_SECRET_MANAGER_KEY
 
 TEST_SECRET_ID = os.environ.get("GCP_SECRET_MANAGER_SECRET_ID", "test-secret")
 TEST_SECRET_VALUE = os.environ.get("GCP_SECRET_MANAGER_SECRET_VALUE", "test-secret-value")
diff --git a/tests/providers/google/cloud/hooks/test_spanner.py b/providers/tests/google/cloud/hooks/test_spanner.py
similarity index 99%
rename from tests/providers/google/cloud/hooks/test_spanner.py
rename to providers/tests/google/cloud/hooks/test_spanner.py
index 1633d558166d..3f6af9dcdcaf 100644
--- a/tests/providers/google/cloud/hooks/test_spanner.py
+++ b/providers/tests/google/cloud/hooks/test_spanner.py
@@ -25,7 +25,8 @@
 from airflow.providers.google.cloud.hooks.spanner import SpannerHook
 from airflow.providers.google.common.consts import CLIENT_INFO
-from tests.providers.google.cloud.utils.base_gcp_mock import (
+
+from providers.tests.google.cloud.utils.base_gcp_mock import (
     GCP_PROJECT_ID_HOOK_UNIT_TEST,
     mock_base_gcp_hook_default_project_id,
     mock_base_gcp_hook_no_default_project_id,
diff --git a/tests/providers/google/cloud/hooks/test_speech_to_text.py b/providers/tests/google/cloud/hooks/test_speech_to_text.py
similarity index 97%
rename from tests/providers/google/cloud/hooks/test_speech_to_text.py
rename to providers/tests/google/cloud/hooks/test_speech_to_text.py
index 8cda07197624..97ef56f790b4 100644
--- a/tests/providers/google/cloud/hooks/test_speech_to_text.py
+++ b/providers/tests/google/cloud/hooks/test_speech_to_text.py
@@ -25,7 +25,8 @@
 from airflow.providers.google.cloud.hooks.speech_to_text import CloudSpeechToTextHook
 from airflow.providers.google.common.consts import CLIENT_INFO
-from tests.providers.google.cloud.utils.base_gcp_mock import mock_base_gcp_hook_default_project_id
+
+from providers.tests.google.cloud.utils.base_gcp_mock import mock_base_gcp_hook_default_project_id
 
 PROJECT_ID = "project-id"
 CONFIG = {"encoding": "LINEAR16"}
diff --git a/tests/providers/google/cloud/hooks/test_stackdriver.py b/providers/tests/google/cloud/hooks/test_stackdriver.py
similarity index 100%
rename from tests/providers/google/cloud/hooks/test_stackdriver.py
rename to providers/tests/google/cloud/hooks/test_stackdriver.py
diff --git a/tests/providers/google/cloud/hooks/test_tasks.py b/providers/tests/google/cloud/hooks/test_tasks.py
similarity index 99%
rename from tests/providers/google/cloud/hooks/test_tasks.py
rename to providers/tests/google/cloud/hooks/test_tasks.py
index 9885248db45e..71228ba7e717 100644
--- a/tests/providers/google/cloud/hooks/test_tasks.py
+++ b/providers/tests/google/cloud/hooks/test_tasks.py
@@ -26,7 +26,8 @@
 from airflow.providers.google.cloud.hooks.tasks import CloudTasksHook
 from airflow.providers.google.common.consts import CLIENT_INFO
-from tests.providers.google.cloud.utils.base_gcp_mock import mock_base_gcp_hook_no_default_project_id
+
+from providers.tests.google.cloud.utils.base_gcp_mock import mock_base_gcp_hook_no_default_project_id
 
 API_RESPONSE: dict[Any, Any] = {}
 PROJECT_ID = "test-project"
diff --git a/tests/providers/google/cloud/hooks/test_text_to_speech.py b/providers/tests/google/cloud/hooks/test_text_to_speech.py
similarity index 97%
rename from tests/providers/google/cloud/hooks/test_text_to_speech.py
rename to providers/tests/google/cloud/hooks/test_text_to_speech.py
index 573c56e098f5..4f3eb9068449 100644
--- a/tests/providers/google/cloud/hooks/test_text_to_speech.py
+++ b/providers/tests/google/cloud/hooks/test_text_to_speech.py
@@ -29,7 +29,8 @@
 from airflow.providers.google.cloud.hooks.text_to_speech import CloudTextToSpeechHook
 from airflow.providers.google.common.consts import CLIENT_INFO
-from tests.providers.google.cloud.utils.base_gcp_mock import mock_base_gcp_hook_default_project_id
+
+from providers.tests.google.cloud.utils.base_gcp_mock import mock_base_gcp_hook_default_project_id
 
 INPUT = {"text": "test text"}
 VOICE = {"language_code": "en-US", "ssml_gender": "FEMALE"}
diff --git a/tests/providers/google/cloud/hooks/test_translate.py b/providers/tests/google/cloud/hooks/test_translate.py
similarity index 98%
rename from tests/providers/google/cloud/hooks/test_translate.py
rename to providers/tests/google/cloud/hooks/test_translate.py
index 068fe60fa373..addc54d430e7 100644
--- a/tests/providers/google/cloud/hooks/test_translate.py
+++ b/providers/tests/google/cloud/hooks/test_translate.py
@@ -23,7 +23,8 @@
 from airflow.providers.google.cloud.hooks.translate import CloudTranslateHook
 from airflow.providers.google.common.consts import CLIENT_INFO
-from tests.providers.google.cloud.utils.base_gcp_mock import mock_base_gcp_hook_default_project_id
+
+from providers.tests.google.cloud.utils.base_gcp_mock import mock_base_gcp_hook_default_project_id
 
 PROJECT_ID_TEST = "project-id"
diff --git a/tests/providers/google/cloud/hooks/test_video_intelligence.py b/providers/tests/google/cloud/hooks/test_video_intelligence.py
similarity index 98%
rename from tests/providers/google/cloud/hooks/test_video_intelligence.py
rename to providers/tests/google/cloud/hooks/test_video_intelligence.py
index 75bb9b92f71c..17e047e2da95 100644
--- a/tests/providers/google/cloud/hooks/test_video_intelligence.py
+++ b/providers/tests/google/cloud/hooks/test_video_intelligence.py
@@ -25,7 +25,8 @@
 from airflow.providers.google.cloud.hooks.video_intelligence import CloudVideoIntelligenceHook
 from airflow.providers.google.common.consts import CLIENT_INFO
-from tests.providers.google.cloud.utils.base_gcp_mock import mock_base_gcp_hook_default_project_id
+
+from providers.tests.google.cloud.utils.base_gcp_mock import mock_base_gcp_hook_default_project_id
 
 INPUT_URI = "gs://bucket-name/input-file"
 OUTPUT_URI = "gs://bucket-name/output-file"
diff --git a/tests/providers/google/cloud/hooks/test_vision.py b/providers/tests/google/cloud/hooks/test_vision.py
similarity index 99%
rename from tests/providers/google/cloud/hooks/test_vision.py
rename to providers/tests/google/cloud/hooks/test_vision.py
index 814e412ee0fc..6a414d59e1a1 100644
--- a/tests/providers/google/cloud/hooks/test_vision.py
+++ b/providers/tests/google/cloud/hooks/test_vision.py
@@ -37,7 +37,8 @@
 from airflow.exceptions import AirflowException
 from airflow.providers.google.cloud.hooks.vision import ERR_DIFF_NAMES, ERR_UNABLE_TO_CREATE, CloudVisionHook
 from airflow.providers.google.common.consts import CLIENT_INFO
-from tests.providers.google.cloud.utils.base_gcp_mock import mock_base_gcp_hook_default_project_id
+
+from providers.tests.google.cloud.utils.base_gcp_mock import mock_base_gcp_hook_default_project_id
 
 PROJECT_ID_TEST = "project-id"
 PROJECT_ID_TEST_2 = "project-id-2"
diff --git a/tests/providers/google/cloud/hooks/test_workflows.py b/providers/tests/google/cloud/hooks/test_workflows.py
similarity index 100%
rename from tests/providers/google/cloud/hooks/test_workflows.py
rename to providers/tests/google/cloud/hooks/test_workflows.py
diff --git a/tests/providers/databricks/plugins/__init__.py b/providers/tests/google/cloud/hooks/vertex_ai/__init__.py
similarity index 100%
rename from tests/providers/databricks/plugins/__init__.py
rename to providers/tests/google/cloud/hooks/vertex_ai/__init__.py
diff --git a/tests/providers/google/cloud/hooks/vertex_ai/test_auto_ml.py b/providers/tests/google/cloud/hooks/vertex_ai/test_auto_ml.py
similarity index 99%
rename from tests/providers/google/cloud/hooks/vertex_ai/test_auto_ml.py
rename to providers/tests/google/cloud/hooks/vertex_ai/test_auto_ml.py
index 2e90cb7a1778..6b64c966f54c 100644
--- a/tests/providers/google/cloud/hooks/vertex_ai/test_auto_ml.py
+++ b/providers/tests/google/cloud/hooks/vertex_ai/test_auto_ml.py
@@ -27,7 +27,8 @@
 from google.api_core.gapic_v1.method import DEFAULT
 
 from airflow.providers.google.cloud.hooks.vertex_ai.auto_ml import AutoMLHook
-from tests.providers.google.cloud.utils.base_gcp_mock import (
+
+from providers.tests.google.cloud.utils.base_gcp_mock import (
     mock_base_gcp_hook_default_project_id,
     mock_base_gcp_hook_no_default_project_id,
 )
diff --git a/tests/providers/google/cloud/hooks/vertex_ai/test_batch_prediction_job.py b/providers/tests/google/cloud/hooks/vertex_ai/test_batch_prediction_job.py
similarity index 99%
rename from tests/providers/google/cloud/hooks/vertex_ai/test_batch_prediction_job.py
rename to providers/tests/google/cloud/hooks/vertex_ai/test_batch_prediction_job.py
index 1b2e0e1fbb9e..15efe3e88bac 100644
--- a/tests/providers/google/cloud/hooks/vertex_ai/test_batch_prediction_job.py
+++ b/providers/tests/google/cloud/hooks/vertex_ai/test_batch_prediction_job.py
@@ -33,7 +33,8 @@
     BatchPredictionJobAsyncHook,
     BatchPredictionJobHook,
 )
-from tests.providers.google.cloud.utils.base_gcp_mock import (
+
+from providers.tests.google.cloud.utils.base_gcp_mock import (
     mock_base_gcp_hook_default_project_id,
     mock_base_gcp_hook_no_default_project_id,
 )
diff --git a/tests/providers/google/cloud/hooks/vertex_ai/test_custom_job.py b/providers/tests/google/cloud/hooks/vertex_ai/test_custom_job.py
similarity index 99%
rename from tests/providers/google/cloud/hooks/vertex_ai/test_custom_job.py
rename to providers/tests/google/cloud/hooks/vertex_ai/test_custom_job.py
index c6f31c48470e..a2cedce2a92a 100644
--- a/tests/providers/google/cloud/hooks/vertex_ai/test_custom_job.py
+++ b/providers/tests/google/cloud/hooks/vertex_ai/test_custom_job.py
@@ -36,7 +36,8 @@
     PipelineState,
     types,
 )
-from tests.providers.google.cloud.utils.base_gcp_mock import (
+
+from providers.tests.google.cloud.utils.base_gcp_mock import (
     mock_base_gcp_hook_default_project_id,
     mock_base_gcp_hook_no_default_project_id,
 )
diff --git a/tests/providers/google/cloud/hooks/vertex_ai/test_dataset.py b/providers/tests/google/cloud/hooks/vertex_ai/test_dataset.py
similarity index 99%
rename from tests/providers/google/cloud/hooks/vertex_ai/test_dataset.py
rename to providers/tests/google/cloud/hooks/vertex_ai/test_dataset.py
index f20c9142a2c0..d54eb48c2d68 100644
--- a/tests/providers/google/cloud/hooks/vertex_ai/test_dataset.py
+++ b/providers/tests/google/cloud/hooks/vertex_ai/test_dataset.py
@@ -27,7 +27,8 @@
 from google.api_core.gapic_v1.method import DEFAULT
 
 from airflow.providers.google.cloud.hooks.vertex_ai.dataset import DatasetHook
-from tests.providers.google.cloud.utils.base_gcp_mock import (
+
+from providers.tests.google.cloud.utils.base_gcp_mock import (
     mock_base_gcp_hook_default_project_id,
     mock_base_gcp_hook_no_default_project_id,
 )
diff --git a/tests/providers/google/cloud/hooks/vertex_ai/test_endpoint_service.py b/providers/tests/google/cloud/hooks/vertex_ai/test_endpoint_service.py
similarity index 99%
rename from tests/providers/google/cloud/hooks/vertex_ai/test_endpoint_service.py
rename to providers/tests/google/cloud/hooks/vertex_ai/test_endpoint_service.py
index 273a97fa19af..a284eae13a41 100644
--- a/tests/providers/google/cloud/hooks/vertex_ai/test_endpoint_service.py
+++ b/providers/tests/google/cloud/hooks/vertex_ai/test_endpoint_service.py
@@ -27,7 +27,8 @@
 from google.api_core.gapic_v1.method import DEFAULT
 
 from airflow.providers.google.cloud.hooks.vertex_ai.endpoint_service import EndpointServiceHook
-from tests.providers.google.cloud.utils.base_gcp_mock import (
+
+from providers.tests.google.cloud.utils.base_gcp_mock import (
     mock_base_gcp_hook_default_project_id,
     mock_base_gcp_hook_no_default_project_id,
 )
diff --git a/tests/providers/google/cloud/hooks/vertex_ai/test_generative_model.py b/providers/tests/google/cloud/hooks/vertex_ai/test_generative_model.py
similarity index 99%
rename from tests/providers/google/cloud/hooks/vertex_ai/test_generative_model.py
rename to providers/tests/google/cloud/hooks/vertex_ai/test_generative_model.py
index 19723a51b1dd..52a8c417c6ac 100644
--- a/tests/providers/google/cloud/hooks/vertex_ai/test_generative_model.py
+++ b/providers/tests/google/cloud/hooks/vertex_ai/test_generative_model.py
@@ -33,7 +33,8 @@
 from airflow.providers.google.cloud.hooks.vertex_ai.generative_model import (
     GenerativeModelHook,
 )
-from tests.providers.google.cloud.utils.base_gcp_mock import (
+
+from providers.tests.google.cloud.utils.base_gcp_mock import (
     mock_base_gcp_hook_default_project_id,
 )
diff --git a/tests/providers/google/cloud/hooks/vertex_ai/test_hyperparameter_tuning_job.py b/providers/tests/google/cloud/hooks/vertex_ai/test_hyperparameter_tuning_job.py
similarity index 99%
rename from tests/providers/google/cloud/hooks/vertex_ai/test_hyperparameter_tuning_job.py
rename to providers/tests/google/cloud/hooks/vertex_ai/test_hyperparameter_tuning_job.py
index eca6fc17ebaf..05fbf9c12344 100644
--- a/tests/providers/google/cloud/hooks/vertex_ai/test_hyperparameter_tuning_job.py
+++ b/providers/tests/google/cloud/hooks/vertex_ai/test_hyperparameter_tuning_job.py
@@ -37,7 +37,8 @@
     HyperparameterTuningJobAsyncHook,
     HyperparameterTuningJobHook,
 )
-from tests.providers.google.cloud.utils.base_gcp_mock import (
+
+from providers.tests.google.cloud.utils.base_gcp_mock import (
     mock_base_gcp_hook_default_project_id,
     mock_base_gcp_hook_no_default_project_id,
 )
diff --git a/tests/providers/google/cloud/hooks/vertex_ai/test_model_service.py b/providers/tests/google/cloud/hooks/vertex_ai/test_model_service.py
similarity index 99%
rename from tests/providers/google/cloud/hooks/vertex_ai/test_model_service.py
rename to providers/tests/google/cloud/hooks/vertex_ai/test_model_service.py
index 1505e8276d07..c95d3fecd492 100644
--- a/tests/providers/google/cloud/hooks/vertex_ai/test_model_service.py
+++ b/providers/tests/google/cloud/hooks/vertex_ai/test_model_service.py
@@ -27,7 +27,8 @@
 from google.api_core.gapic_v1.method import DEFAULT
 
 from airflow.providers.google.cloud.hooks.vertex_ai.model_service import ModelServiceHook
-from tests.providers.google.cloud.utils.base_gcp_mock import (
+
+from providers.tests.google.cloud.utils.base_gcp_mock import (
     mock_base_gcp_hook_default_project_id,
     mock_base_gcp_hook_no_default_project_id,
 )
diff --git a/tests/providers/google/cloud/hooks/vertex_ai/test_pipeline_job.py b/providers/tests/google/cloud/hooks/vertex_ai/test_pipeline_job.py
similarity index 99%
rename from tests/providers/google/cloud/hooks/vertex_ai/test_pipeline_job.py
rename to providers/tests/google/cloud/hooks/vertex_ai/test_pipeline_job.py
index 02068ec57051..21937251c3c5 100644
--- a/tests/providers/google/cloud/hooks/vertex_ai/test_pipeline_job.py
+++ b/providers/tests/google/cloud/hooks/vertex_ai/test_pipeline_job.py
@@ -34,7 +34,8 @@
     PipelineState,
     types,
 )
-from tests.providers.google.cloud.utils.base_gcp_mock import (
+
+from providers.tests.google.cloud.utils.base_gcp_mock import (
     mock_base_gcp_hook_default_project_id,
     mock_base_gcp_hook_no_default_project_id,
 )
diff --git a/tests/providers/google/cloud/hooks/vertex_ai/test_prediction_service.py b/providers/tests/google/cloud/hooks/vertex_ai/test_prediction_service.py
similarity index 98%
rename from tests/providers/google/cloud/hooks/vertex_ai/test_prediction_service.py
rename to providers/tests/google/cloud/hooks/vertex_ai/test_prediction_service.py
index 987578b7c11e..7e56ca92813a 100644
--- a/tests/providers/google/cloud/hooks/vertex_ai/test_prediction_service.py
+++ b/providers/tests/google/cloud/hooks/vertex_ai/test_prediction_service.py
@@ -28,7 +28,8 @@
 from airflow.providers.google.cloud.hooks.vertex_ai.prediction_service import (
     PredictionServiceHook,
 )
-from tests.providers.google.cloud.utils.base_gcp_mock import (
+
+from providers.tests.google.cloud.utils.base_gcp_mock import (
     mock_base_gcp_hook_default_project_id,
     mock_base_gcp_hook_no_default_project_id,
 )
diff --git a/tests/providers/databricks/sensors/__init__.py b/providers/tests/google/cloud/links/__init__.py
similarity index 100%
rename from tests/providers/databricks/sensors/__init__.py
rename to providers/tests/google/cloud/links/__init__.py
diff --git a/tests/providers/google/cloud/links/test_translate.py b/providers/tests/google/cloud/links/test_translate.py
similarity index 100%
rename from tests/providers/google/cloud/links/test_translate.py
rename to providers/tests/google/cloud/links/test_translate.py
diff --git a/tests/providers/databricks/utils/__init__.py b/providers/tests/google/cloud/log/__init__.py
similarity index 100%
rename from tests/providers/databricks/utils/__init__.py
rename to providers/tests/google/cloud/log/__init__.py
diff --git a/tests/providers/google/cloud/log/test_gcs_task_handler.py b/providers/tests/google/cloud/log/test_gcs_task_handler.py
similarity index 98%
rename from tests/providers/google/cloud/log/test_gcs_task_handler.py
rename to providers/tests/google/cloud/log/test_gcs_task_handler.py
index 7653c4d837be..9f27c618de29 100644
--- a/tests/providers/google/cloud/log/test_gcs_task_handler.py
+++ b/providers/tests/google/cloud/log/test_gcs_task_handler.py
@@ -27,8 +27,9 @@
 from airflow.providers.google.cloud.log.gcs_task_handler import GCSTaskHandler
 from airflow.utils.state import TaskInstanceState
 from airflow.utils.timezone import datetime
-from tests.test_utils.config import conf_vars
-from tests.test_utils.db import clear_db_dags, clear_db_runs
+
+from dev.tests_common.test_utils.config import conf_vars
+from dev.tests_common.test_utils.db import clear_db_dags, clear_db_runs
 
 
 @pytest.mark.db_test
diff --git a/tests/providers/google/cloud/log/test_gcs_task_handler_system.py b/providers/tests/google/cloud/log/test_gcs_task_handler_system.py
similarity index 84%
rename from tests/providers/google/cloud/log/test_gcs_task_handler_system.py
rename to providers/tests/google/cloud/log/test_gcs_task_handler_system.py
index 406bb0387df5..eb1ce08d14f5 100644
--- a/tests/providers/google/cloud/log/test_gcs_task_handler_system.py
+++ b/providers/tests/google/cloud/log/test_gcs_task_handler_system.py
@@ -29,14 +29,15 @@
 from airflow.models import DagBag, TaskInstance
 from airflow.utils.log.log_reader import TaskLogReader
 from airflow.utils.session import provide_session
-from tests.providers.google.cloud.utils.gcp_authenticator import GCP_GCS_KEY
-from tests.test_utils.config import conf_vars
-from tests.test_utils.db import clear_db_connections, clear_db_runs
-from tests.test_utils.gcp_system_helpers import (
+
+from dev.tests_common.test_utils.config import conf_vars
+from dev.tests_common.test_utils.db import clear_db_connections, clear_db_runs
+from dev.tests_common.test_utils.gcp_system_helpers import (
     GoogleSystemTest,
     provide_gcp_context,
     resolve_full_gcp_key_path,
 )
+from providers.tests.google.cloud.utils.gcp_authenticator import GCP_GCS_KEY
 
 
 @pytest.mark.system("google")
@@ -84,12 +85,15 @@ def test_should_read_logs(self, session):
         self.assert_remote_logs("INFO - Task exited with return code 0", ti)
 
     def assert_remote_logs(self, expected_message, ti):
-        with provide_gcp_context(GCP_GCS_KEY), conf_vars(
-            {
-                ("logging", "remote_logging"): "True",
-                ("logging", "remote_base_log_folder"): f"gs://{self.bucket_name}/path/to/logs",
-                ("logging", "remote_log_conn_id"): "google_cloud_default",
-            }
+        with (
+            provide_gcp_context(GCP_GCS_KEY),
+            conf_vars(
+                {
+                    ("logging", "remote_logging"): "True",
+                    ("logging", "remote_base_log_folder"): f"gs://{self.bucket_name}/path/to/logs",
+                    ("logging", "remote_log_conn_id"): "google_cloud_default",
+                }
+            ),
         ):
             from airflow.config_templates import airflow_local_settings
diff --git a/tests/providers/google/cloud/log/test_stackdriver_task_handler.py b/providers/tests/google/cloud/log/test_stackdriver_task_handler.py
similarity index 99%
rename from tests/providers/google/cloud/log/test_stackdriver_task_handler.py
rename to providers/tests/google/cloud/log/test_stackdriver_task_handler.py
index 1ac8b91df5a6..783f1a34b762 100644
--- a/tests/providers/google/cloud/log/test_stackdriver_task_handler.py
+++ b/providers/tests/google/cloud/log/test_stackdriver_task_handler.py
@@ -29,9 +29,10 @@
 from airflow.providers.google.cloud.log.stackdriver_task_handler import StackdriverTaskHandler
 from airflow.utils import timezone
 from airflow.utils.state import TaskInstanceState
-from tests.test_utils.compat import AIRFLOW_V_2_9_PLUS
-from tests.test_utils.config import conf_vars
-from tests.test_utils.db import clear_db_dags, clear_db_runs
+
+from dev.tests_common.test_utils.compat import AIRFLOW_V_2_9_PLUS
+from dev.tests_common.test_utils.config import conf_vars
+from dev.tests_common.test_utils.db import clear_db_dags, clear_db_runs
 
 
 def _create_list_log_entries_response_mock(messages, token):
diff --git a/tests/providers/google/cloud/log/test_stackdriver_task_handler_system.py b/providers/tests/google/cloud/log/test_stackdriver_task_handler_system.py
similarity index 88%
rename from tests/providers/google/cloud/log/test_stackdriver_task_handler_system.py
rename to providers/tests/google/cloud/log/test_stackdriver_task_handler_system.py
index b5daac0d8149..a53dd43d08fe 100644
--- a/tests/providers/google/cloud/log/test_stackdriver_task_handler_system.py
+++ b/providers/tests/google/cloud/log/test_stackdriver_task_handler_system.py
@@ -29,14 +29,15 @@
 from airflow.models import TaskInstance
 from airflow.utils.log.log_reader import TaskLogReader
 from airflow.utils.session import provide_session
-from tests.providers.google.cloud.utils.gcp_authenticator import GCP_STACKDRIVER
-from tests.test_utils.config import conf_vars
-from tests.test_utils.db import clear_db_runs
-from tests.test_utils.gcp_system_helpers import (
+
+from dev.tests_common.test_utils.config import conf_vars
+from dev.tests_common.test_utils.db import clear_db_runs
+from dev.tests_common.test_utils.gcp_system_helpers import (
     GoogleSystemTest,
     provide_gcp_context,
     resolve_full_gcp_key_path,
 )
+from providers.tests.google.cloud.utils.gcp_authenticator import GCP_STACKDRIVER
 
 
 @pytest.mark.system("google")
@@ -86,11 +87,14 @@ def test_should_support_adc(self, session):
         self.assert_remote_logs("terminated with exit code 0", ti)
 
     def assert_remote_logs(self, expected_message, ti):
-        with provide_gcp_context(GCP_STACKDRIVER), conf_vars(
-            {
-                ("logging", "remote_logging"): "True",
-                ("logging", "remote_base_log_folder"): f"stackdriver://{self.log_name}",
-            }
+        with (
+            provide_gcp_context(GCP_STACKDRIVER),
+            conf_vars(
+                {
+                    ("logging", "remote_logging"): "True",
+                    ("logging", "remote_base_log_folder"): f"stackdriver://{self.log_name}",
+                }
+            ),
         ):
             from airflow.config_templates import airflow_local_settings
diff --git a/tests/providers/dbt/__init__.py b/providers/tests/google/cloud/openlineage/__init__.py
similarity index 100%
rename from tests/providers/dbt/__init__.py
rename to providers/tests/google/cloud/openlineage/__init__.py
diff --git a/tests/providers/google/cloud/openlineage/test_mixins.py b/providers/tests/google/cloud/openlineage/test_mixins.py
similarity index 95%
rename from tests/providers/google/cloud/openlineage/test_mixins.py
rename to providers/tests/google/cloud/openlineage/test_mixins.py
index f7feade65d36..5229db3a6071 100644
--- a/tests/providers/google/cloud/openlineage/test_mixins.py
+++ b/providers/tests/google/cloud/openlineage/test_mixins.py
@@ -17,6 +17,7 @@
 from __future__ import annotations
 
 import json
+import os
 from unittest.mock import MagicMock
 
 import pytest
@@ -35,17 +36,17 @@
 )
 
 
-def read_file_json(file):
-    with open(file=file) as f:
-        return json.loads(f.read())
+def read_common_json_file(rel: str):
+    with open(os.path.dirname(__file__) + "/../utils/" + rel) as f:
+        return json.load(f)
 
 
 class TableMock(MagicMock):
     def __init__(self, *args, **kwargs):
         super().__init__(*args, **kwargs)
         self.inputs = [
-            read_file_json("tests/providers/google/cloud/utils/table_details.json"),
-            read_file_json("tests/providers/google/cloud/utils/out_table_details.json"),
+            read_common_json_file("table_details.json"),
+            read_common_json_file("out_table_details.json"),
        ]

     @property
@@ -55,8 +56,8 @@ def _properties(self):
 class TestBigQueryOpenLineageMixin:
     def setup_method(self):
-        self.job_details = read_common_json_file("job_details.json")
-        self.script_job_details = read_common_json_file("script_job_details.json")
+        self.job_details = read_common_json_file("job_details.json")
+        self.script_job_details = read_common_json_file("script_job_details.json")
 
         hook = MagicMock()
         self.client = MagicMock()
diff --git a/tests/providers/google/cloud/openlineage/test_utils.py b/providers/tests/google/cloud/openlineage/test_utils.py
similarity index 97%
rename from tests/providers/google/cloud/openlineage/test_utils.py
rename to providers/tests/google/cloud/openlineage/test_utils.py
index e47f14332f45..4f2db0038b7b 100644
--- a/tests/providers/google/cloud/openlineage/test_utils.py
+++ b/providers/tests/google/cloud/openlineage/test_utils.py
@@ -65,8 +65,8 @@ class TableMock(MagicMock):
     def __init__(self, *args, **kwargs):
         super().__init__(*args, **kwargs)
         self.inputs = [
-            read_file_json("tests/providers/google/cloud/utils/table_details.json"),
-            read_file_json("tests/providers/google/cloud/utils/out_table_details.json"),
+            read_file_json("providers/tests/google/cloud/utils/table_details.json"),
+            read_file_json("providers/tests/google/cloud/utils/out_table_details.json"),
         ]
 
     @property
diff --git a/tests/providers/dbt/cloud/__init__.py b/providers/tests/google/cloud/operators/__init__.py
similarity index 100%
rename from tests/providers/dbt/cloud/__init__.py
rename to providers/tests/google/cloud/operators/__init__.py
diff --git a/tests/providers/dbt/cloud/hooks/__init__.py b/providers/tests/google/cloud/operators/source/__init__.py
similarity index 100%
rename from tests/providers/dbt/cloud/hooks/__init__.py
rename to providers/tests/google/cloud/operators/source/__init__.py
diff --git a/tests/providers/dbt/cloud/operators/__init__.py b/providers/tests/google/cloud/operators/source/source_prefix/__init__.py
similarity index 100%
rename from tests/providers/dbt/cloud/operators/__init__.py
rename to providers/tests/google/cloud/operators/source/source_prefix/__init__.py
diff --git a/tests/providers/google/cloud/operators/test_automl.py b/providers/tests/google/cloud/operators/test_automl.py
similarity index 100%
rename from tests/providers/google/cloud/operators/test_automl.py
rename to providers/tests/google/cloud/operators/test_automl.py
diff --git a/tests/providers/google/cloud/operators/test_bigquery.py b/providers/tests/google/cloud/operators/test_bigquery.py
similarity index 99%
rename from tests/providers/google/cloud/operators/test_bigquery.py
rename to providers/tests/google/cloud/operators/test_bigquery.py
index ab9e8e6ac019..3836d737662f 100644
--- a/tests/providers/google/cloud/operators/test_bigquery.py
+++ b/providers/tests/google/cloud/operators/test_bigquery.py
@@ -18,6 +18,7 @@
 from __future__ import annotations
 
 import json
+import os
 from contextlib import suppress
 from unittest import mock
 from unittest.mock import ANY, MagicMock
@@ -73,7 +74,13 @@
 )
 from airflow.serialization.serialized_objects import SerializedDAG
 from airflow.utils.timezone import datetime
-from tests.test_utils.db import clear_db_dags, clear_db_runs, clear_db_serialized_dags, clear_db_xcom
+
+from dev.tests_common.test_utils.db import (
+    clear_db_dags,
+    clear_db_runs,
+    clear_db_serialized_dags,
+    clear_db_xcom,
+)
 
 pytestmark = pytest.mark.db_test
 
@@ -1840,7 +1847,7 @@ def test_execute_openlineage_events(self, mock_hook):
 
         assert result == real_job_id
 
-        with open(file="tests/providers/google/cloud/utils/job_details.json") as f:
+        with open(os.path.dirname(__file__) + "/../utils/job_details.json") as f:
             job_details = json.loads(f.read())
         mock_hook.return_value.get_client.return_value.get_job.return_value._properties = job_details
         mock_hook.return_value.get_client.return_value.get_table.side_effect = Exception()
diff --git a/tests/providers/google/cloud/operators/test_bigquery_dts.py b/providers/tests/google/cloud/operators/test_bigquery_dts.py
similarity index 98%
rename from tests/providers/google/cloud/operators/test_bigquery_dts.py
rename to providers/tests/google/cloud/operators/test_bigquery_dts.py
index f44479bbce9e..f50c42805b49 100644
--- a/tests/providers/google/cloud/operators/test_bigquery_dts.py
+++ b/providers/tests/google/cloud/operators/test_bigquery_dts.py
@@ -27,7 +27,8 @@
     BigQueryDataTransferServiceStartTransferRunsOperator,
     BigQueryDeleteDataTransferConfigOperator,
 )
-from tests.test_utils.compat import AIRFLOW_V_3_0_PLUS
+
+from dev.tests_common.test_utils.compat import AIRFLOW_V_3_0_PLUS
 
 PROJECT_ID = "id"
diff --git a/tests/providers/google/cloud/operators/test_bigtable.py b/providers/tests/google/cloud/operators/test_bigtable.py
similarity index 100%
rename from tests/providers/google/cloud/operators/test_bigtable.py
rename to providers/tests/google/cloud/operators/test_bigtable.py
diff --git a/tests/providers/google/cloud/operators/test_cloud_base.py b/providers/tests/google/cloud/operators/test_cloud_base.py
similarity index 100%
rename from tests/providers/google/cloud/operators/test_cloud_base.py
rename to providers/tests/google/cloud/operators/test_cloud_base.py
diff --git a/tests/providers/google/cloud/operators/test_cloud_batch.py b/providers/tests/google/cloud/operators/test_cloud_batch.py
similarity index 100%
rename from tests/providers/google/cloud/operators/test_cloud_batch.py
rename to providers/tests/google/cloud/operators/test_cloud_batch.py
diff --git a/tests/providers/google/cloud/operators/test_cloud_build.py b/providers/tests/google/cloud/operators/test_cloud_build.py
similarity index 100%
rename from tests/providers/google/cloud/operators/test_cloud_build.py
rename to providers/tests/google/cloud/operators/test_cloud_build.py
diff --git a/tests/providers/google/cloud/operators/test_cloud_composer.py b/providers/tests/google/cloud/operators/test_cloud_composer.py
similarity index 100%
rename from tests/providers/google/cloud/operators/test_cloud_composer.py
rename to providers/tests/google/cloud/operators/test_cloud_composer.py
diff --git a/tests/providers/google/cloud/operators/test_cloud_memorystore.py b/providers/tests/google/cloud/operators/test_cloud_memorystore.py
similarity index 100%
rename from tests/providers/google/cloud/operators/test_cloud_memorystore.py
rename to providers/tests/google/cloud/operators/test_cloud_memorystore.py
diff --git a/tests/providers/google/cloud/operators/test_cloud_run.py b/providers/tests/google/cloud/operators/test_cloud_run.py
similarity index 100%
rename from tests/providers/google/cloud/operators/test_cloud_run.py
rename to providers/tests/google/cloud/operators/test_cloud_run.py
diff --git a/tests/providers/google/cloud/operators/test_cloud_sql.py b/providers/tests/google/cloud/operators/test_cloud_sql.py
similarity index 100%
rename from tests/providers/google/cloud/operators/test_cloud_sql.py
rename to providers/tests/google/cloud/operators/test_cloud_sql.py
diff --git a/tests/providers/google/cloud/operators/test_cloud_storage_transfer_service.py b/providers/tests/google/cloud/operators/test_cloud_storage_transfer_service.py
similarity index 100%
rename from tests/providers/google/cloud/operators/test_cloud_storage_transfer_service.py
rename to providers/tests/google/cloud/operators/test_cloud_storage_transfer_service.py
diff --git a/tests/providers/google/cloud/operators/test_compute.py b/providers/tests/google/cloud/operators/test_compute.py
similarity index 100%
rename from tests/providers/google/cloud/operators/test_compute.py
rename to providers/tests/google/cloud/operators/test_compute.py
diff --git a/tests/providers/google/cloud/operators/test_datacatalog.py b/providers/tests/google/cloud/operators/test_datacatalog.py
similarity index 100%
rename from tests/providers/google/cloud/operators/test_datacatalog.py
rename to providers/tests/google/cloud/operators/test_datacatalog.py
diff --git a/tests/providers/google/cloud/operators/test_dataflow.py b/providers/tests/google/cloud/operators/test_dataflow.py
similarity index 100%
rename from tests/providers/google/cloud/operators/test_dataflow.py
rename to providers/tests/google/cloud/operators/test_dataflow.py
diff --git a/tests/providers/google/cloud/operators/test_dataform.py b/providers/tests/google/cloud/operators/test_dataform.py
similarity index 100%
rename from tests/providers/google/cloud/operators/test_dataform.py
rename to providers/tests/google/cloud/operators/test_dataform.py
diff --git a/tests/providers/google/cloud/operators/test_datafusion.py b/providers/tests/google/cloud/operators/test_datafusion.py
similarity index 100%
rename from tests/providers/google/cloud/operators/test_datafusion.py
rename to providers/tests/google/cloud/operators/test_datafusion.py
diff --git a/tests/providers/google/cloud/operators/test_datapipeline.py b/providers/tests/google/cloud/operators/test_datapipeline.py
similarity index 100%
rename from tests/providers/google/cloud/operators/test_datapipeline.py
rename to providers/tests/google/cloud/operators/test_datapipeline.py
diff --git a/tests/providers/google/cloud/operators/test_dataplex.py b/providers/tests/google/cloud/operators/test_dataplex.py
similarity index 100%
rename from tests/providers/google/cloud/operators/test_dataplex.py
rename to providers/tests/google/cloud/operators/test_dataplex.py
diff --git a/tests/providers/google/cloud/operators/test_dataprep.py b/providers/tests/google/cloud/operators/test_dataprep.py
similarity index 100%
rename from tests/providers/google/cloud/operators/test_dataprep.py
rename to providers/tests/google/cloud/operators/test_dataprep.py
diff --git a/tests/providers/google/cloud/operators/test_dataprep_system.py b/providers/tests/google/cloud/operators/test_dataprep_system.py
similarity index 91%
rename from tests/providers/google/cloud/operators/test_dataprep_system.py
rename to providers/tests/google/cloud/operators/test_dataprep_system.py
index fcba01fe599c..96f47fa3e365 100644
--- a/tests/providers/google/cloud/operators/test_dataprep_system.py
+++ b/providers/tests/google/cloud/operators/test_dataprep_system.py
@@ -24,8 +24,9 @@
 from airflow.models import Connection
 from airflow.utils.session import create_session
-from tests.test_utils.db import clear_db_connections
-from tests.test_utils.gcp_system_helpers import CLOUD_DAG_FOLDER, GoogleSystemTest
+
+from dev.tests_common.test_utils.db import clear_db_connections
+from dev.tests_common.test_utils.gcp_system_helpers import CLOUD_DAG_FOLDER, GoogleSystemTest
 
 TOKEN = os.environ.get("DATAPREP_TOKEN")
 EXTRA = {"token": TOKEN}
diff --git a/tests/providers/google/cloud/operators/test_dataproc.py b/providers/tests/google/cloud/operators/test_dataproc.py
similarity index 99%
rename from tests/providers/google/cloud/operators/test_dataproc.py
rename to providers/tests/google/cloud/operators/test_dataproc.py
index 58b38125ee1d..cf4bffa3a092 100644
--- a/tests/providers/google/cloud/operators/test_dataproc.py
+++ b/providers/tests/google/cloud/operators/test_dataproc.py
@@ -79,8 +79,9 @@
 from airflow.providers.google.common.consts import GOOGLE_DEFAULT_DEFERRABLE_METHOD_NAME
 from airflow.serialization.serialized_objects import SerializedDAG
 from airflow.utils.timezone import datetime
-from tests.test_utils.compat import AIRFLOW_V_3_0_PLUS, AIRFLOW_VERSION
-from tests.test_utils.db import clear_db_runs, clear_db_xcom
+
+from dev.tests_common.test_utils.compat import AIRFLOW_V_3_0_PLUS, AIRFLOW_VERSION
+from dev.tests_common.test_utils.db import clear_db_runs, clear_db_xcom
 
 AIRFLOW_VERSION_LABEL = "v" + str(AIRFLOW_VERSION).replace(".", "-").replace("+", "-")
diff --git a/tests/providers/google/cloud/operators/test_dataproc_metastore.py b/providers/tests/google/cloud/operators/test_dataproc_metastore.py
similarity index 100%
rename from tests/providers/google/cloud/operators/test_dataproc_metastore.py
rename to providers/tests/google/cloud/operators/test_dataproc_metastore.py
providers/tests/google/cloud/operators/test_dataproc_metastore.py diff --git a/tests/providers/google/cloud/operators/test_datastore.py b/providers/tests/google/cloud/operators/test_datastore.py similarity index 100% rename from tests/providers/google/cloud/operators/test_datastore.py rename to providers/tests/google/cloud/operators/test_datastore.py diff --git a/tests/providers/google/cloud/operators/test_datastore_system.py b/providers/tests/google/cloud/operators/test_datastore_system.py similarity index 89% rename from tests/providers/google/cloud/operators/test_datastore_system.py rename to providers/tests/google/cloud/operators/test_datastore_system.py index 5835a46673db..a98215a5317a 100644 --- a/tests/providers/google/cloud/operators/test_datastore_system.py +++ b/providers/tests/google/cloud/operators/test_datastore_system.py @@ -21,8 +21,12 @@ import pytest -from tests.providers.google.cloud.utils.gcp_authenticator import GCP_DATASTORE_KEY -from tests.test_utils.gcp_system_helpers import CLOUD_DAG_FOLDER, GoogleSystemTest, provide_gcp_context +from dev.tests_common.test_utils.gcp_system_helpers import ( + CLOUD_DAG_FOLDER, + GoogleSystemTest, + provide_gcp_context, +) +from providers.tests.google.cloud.utils.gcp_authenticator import GCP_DATASTORE_KEY BUCKET = os.environ.get("GCP_DATASTORE_BUCKET", "datastore-system-test") diff --git a/tests/providers/google/cloud/operators/test_dlp.py b/providers/tests/google/cloud/operators/test_dlp.py similarity index 100% rename from tests/providers/google/cloud/operators/test_dlp.py rename to providers/tests/google/cloud/operators/test_dlp.py diff --git a/tests/providers/google/cloud/operators/test_functions.py b/providers/tests/google/cloud/operators/test_functions.py similarity index 100% rename from tests/providers/google/cloud/operators/test_functions.py rename to providers/tests/google/cloud/operators/test_functions.py diff --git a/tests/providers/google/cloud/operators/test_gcs.py b/providers/tests/google/cloud/operators/test_gcs.py similarity index 100% rename from tests/providers/google/cloud/operators/test_gcs.py rename to providers/tests/google/cloud/operators/test_gcs.py diff --git a/tests/providers/google/cloud/operators/test_kubernetes_engine.py b/providers/tests/google/cloud/operators/test_kubernetes_engine.py similarity index 100% rename from tests/providers/google/cloud/operators/test_kubernetes_engine.py rename to providers/tests/google/cloud/operators/test_kubernetes_engine.py diff --git a/tests/providers/google/cloud/operators/test_life_sciences.py b/providers/tests/google/cloud/operators/test_life_sciences.py similarity index 100% rename from tests/providers/google/cloud/operators/test_life_sciences.py rename to providers/tests/google/cloud/operators/test_life_sciences.py diff --git a/tests/providers/google/cloud/operators/test_looker.py b/providers/tests/google/cloud/operators/test_looker.py similarity index 98% rename from tests/providers/google/cloud/operators/test_looker.py rename to providers/tests/google/cloud/operators/test_looker.py index 5dfea013a251..b368259c503e 100644 --- a/tests/providers/google/cloud/operators/test_looker.py +++ b/providers/tests/google/cloud/operators/test_looker.py @@ -25,7 +25,8 @@ from airflow.models import DAG, DagBag from airflow.providers.google.cloud.operators.looker import LookerStartPdtBuildOperator from airflow.utils.timezone import datetime -from tests.test_utils.db import clear_db_runs, clear_db_xcom + +from dev.tests_common.test_utils.db import clear_db_runs, clear_db_xcom 
OPERATOR_PATH = "airflow.providers.google.cloud.operators.looker.{}" diff --git a/tests/providers/google/cloud/operators/test_mlengine.py b/providers/tests/google/cloud/operators/test_mlengine.py similarity index 100% rename from tests/providers/google/cloud/operators/test_mlengine.py rename to providers/tests/google/cloud/operators/test_mlengine.py diff --git a/tests/providers/google/cloud/operators/test_natural_language.py b/providers/tests/google/cloud/operators/test_natural_language.py similarity index 100% rename from tests/providers/google/cloud/operators/test_natural_language.py rename to providers/tests/google/cloud/operators/test_natural_language.py diff --git a/tests/providers/google/cloud/operators/test_pubsub.py b/providers/tests/google/cloud/operators/test_pubsub.py similarity index 100% rename from tests/providers/google/cloud/operators/test_pubsub.py rename to providers/tests/google/cloud/operators/test_pubsub.py diff --git a/tests/providers/google/cloud/operators/test_spanner.py b/providers/tests/google/cloud/operators/test_spanner.py similarity index 100% rename from tests/providers/google/cloud/operators/test_spanner.py rename to providers/tests/google/cloud/operators/test_spanner.py diff --git a/tests/providers/google/cloud/operators/test_speech_to_text.py b/providers/tests/google/cloud/operators/test_speech_to_text.py similarity index 100% rename from tests/providers/google/cloud/operators/test_speech_to_text.py rename to providers/tests/google/cloud/operators/test_speech_to_text.py diff --git a/tests/providers/google/cloud/operators/test_stackdriver.py b/providers/tests/google/cloud/operators/test_stackdriver.py similarity index 100% rename from tests/providers/google/cloud/operators/test_stackdriver.py rename to providers/tests/google/cloud/operators/test_stackdriver.py diff --git a/tests/providers/google/cloud/operators/test_tasks.py b/providers/tests/google/cloud/operators/test_tasks.py similarity index 100% rename from tests/providers/google/cloud/operators/test_tasks.py rename to providers/tests/google/cloud/operators/test_tasks.py diff --git a/tests/providers/google/cloud/operators/test_text_to_speech.py b/providers/tests/google/cloud/operators/test_text_to_speech.py similarity index 100% rename from tests/providers/google/cloud/operators/test_text_to_speech.py rename to providers/tests/google/cloud/operators/test_text_to_speech.py diff --git a/tests/providers/google/cloud/operators/test_translate.py b/providers/tests/google/cloud/operators/test_translate.py similarity index 100% rename from tests/providers/google/cloud/operators/test_translate.py rename to providers/tests/google/cloud/operators/test_translate.py diff --git a/tests/providers/google/cloud/operators/test_translate_speech.py b/providers/tests/google/cloud/operators/test_translate_speech.py similarity index 100% rename from tests/providers/google/cloud/operators/test_translate_speech.py rename to providers/tests/google/cloud/operators/test_translate_speech.py diff --git a/tests/providers/google/cloud/operators/test_vertex_ai.py b/providers/tests/google/cloud/operators/test_vertex_ai.py similarity index 99% rename from tests/providers/google/cloud/operators/test_vertex_ai.py rename to providers/tests/google/cloud/operators/test_vertex_ai.py index 25deb188aaa4..f79542006e26 100644 --- a/tests/providers/google/cloud/operators/test_vertex_ai.py +++ b/providers/tests/google/cloud/operators/test_vertex_ai.py @@ -2178,9 +2178,12 @@ def test_execute_deferrable(self, mock_hook, mock_link_persist): 
             deferrable=True,
         )
         context = {"ti": mock.MagicMock()}
-        with pytest.raises(TaskDeferred) as exception_info, pytest.warns(
-            AirflowProviderDeprecationWarning,
-            match=SYNC_DEPRECATION_WARNING.format("28.08.2024"),
+        with (
+            pytest.raises(TaskDeferred) as exception_info,
+            pytest.warns(
+                AirflowProviderDeprecationWarning,
+                match=SYNC_DEPRECATION_WARNING.format("28.08.2024"),
+            ),
         ):
             op.execute(context=context)
@@ -2587,9 +2590,12 @@ def test_deferrable_sync_error(self):
             parallel_trial_count=3,
             deferrable=True,
         )
-        with pytest.raises(AirflowException), pytest.warns(
-            AirflowProviderDeprecationWarning,
-            match=SYNC_DEPRECATION_WARNING.format("01.09.2024"),
+        with (
+            pytest.raises(AirflowException),
+            pytest.warns(
+                AirflowProviderDeprecationWarning,
+                match=SYNC_DEPRECATION_WARNING.format("01.09.2024"),
+            ),
         ):
             op.execute(context={"ti": mock.MagicMock()})
diff --git a/tests/providers/google/cloud/operators/test_video_intelligence.py b/providers/tests/google/cloud/operators/test_video_intelligence.py similarity index 100% rename from tests/providers/google/cloud/operators/test_video_intelligence.py rename to providers/tests/google/cloud/operators/test_video_intelligence.py
diff --git a/tests/providers/google/cloud/operators/test_vision.py b/providers/tests/google/cloud/operators/test_vision.py similarity index 100% rename from tests/providers/google/cloud/operators/test_vision.py rename to providers/tests/google/cloud/operators/test_vision.py
diff --git a/tests/providers/google/cloud/operators/test_workflows.py b/providers/tests/google/cloud/operators/test_workflows.py similarity index 100% rename from tests/providers/google/cloud/operators/test_workflows.py rename to providers/tests/google/cloud/operators/test_workflows.py
diff --git a/tests/providers/dbt/cloud/sensors/__init__.py b/providers/tests/google/cloud/operators/vertex_ai/__init__.py similarity index 100% rename from tests/providers/dbt/cloud/sensors/__init__.py rename to providers/tests/google/cloud/operators/vertex_ai/__init__.py
diff --git a/tests/providers/google/cloud/operators/vertex_ai/test_generative_model.py b/providers/tests/google/cloud/operators/vertex_ai/test_generative_model.py similarity index 100% rename from tests/providers/google/cloud/operators/vertex_ai/test_generative_model.py rename to providers/tests/google/cloud/operators/vertex_ai/test_generative_model.py
diff --git a/tests/providers/dbt/cloud/test_data/__init__.py b/providers/tests/google/cloud/secrets/__init__.py similarity index 100% rename from tests/providers/dbt/cloud/test_data/__init__.py rename to providers/tests/google/cloud/secrets/__init__.py
diff --git a/tests/providers/google/cloud/secrets/test_secret_manager.py b/providers/tests/google/cloud/secrets/test_secret_manager.py similarity index 100% rename from tests/providers/google/cloud/secrets/test_secret_manager.py rename to providers/tests/google/cloud/secrets/test_secret_manager.py
diff --git a/tests/providers/google/cloud/secrets/test_secret_manager_system.py b/providers/tests/google/cloud/secrets/test_secret_manager_system.py similarity index 95% rename from tests/providers/google/cloud/secrets/test_secret_manager_system.py rename to providers/tests/google/cloud/secrets/test_secret_manager_system.py index 6f0e88282dc9..b9b8b6d0cac8 100644 --- a/tests/providers/google/cloud/secrets/test_secret_manager_system.py +++ b/providers/tests/google/cloud/secrets/test_secret_manager_system.py
@@ -23,8 +23,8 @@
 
 import pytest
 
-from tests.providers.google.cloud.utils.gcp_authenticator import GCP_SECRET_MANAGER_KEY
-from tests.test_utils.gcp_system_helpers import GoogleSystemTest, provide_gcp_context
+from dev.tests_common.test_utils.gcp_system_helpers import GoogleSystemTest, provide_gcp_context
+from providers.tests.google.cloud.utils.gcp_authenticator import GCP_SECRET_MANAGER_KEY
 
 BACKEND_IMPORT_PATH = "airflow.providers.google.cloud.secrets.secret_manager.CloudSecretManagerBackend"
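The two hunks above, like many later ones in this patch, rewrite chained context managers into the parenthesized with form. A minimal sketch of the before/after spelling, assuming Python 3.10+ for the parenthesized syntax:

from tempfile import TemporaryDirectory

def chained_managers_old_style() -> None:
    # Pre-3.10 spelling: all managers share one logical line.
    with TemporaryDirectory() as a, TemporaryDirectory() as b:
        print(a, b)

def chained_managers_new_style() -> None:
    # Parenthesized spelling adopted throughout this patch: one manager per
    # line with a trailing comma (requires Python 3.10+).
    with (
        TemporaryDirectory() as a,
        TemporaryDirectory() as b,
    ):
        print(a, b)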
diff --git a/tests/providers/dbt/cloud/triggers/__init__.py b/providers/tests/google/cloud/sensors/__init__.py similarity index 100% rename from tests/providers/dbt/cloud/triggers/__init__.py rename to providers/tests/google/cloud/sensors/__init__.py
diff --git a/tests/providers/google/cloud/sensors/test_bigquery.py b/providers/tests/google/cloud/sensors/test_bigquery.py similarity index 100% rename from tests/providers/google/cloud/sensors/test_bigquery.py rename to providers/tests/google/cloud/sensors/test_bigquery.py
diff --git a/tests/providers/google/cloud/sensors/test_bigquery_dts.py b/providers/tests/google/cloud/sensors/test_bigquery_dts.py similarity index 100% rename from tests/providers/google/cloud/sensors/test_bigquery_dts.py rename to providers/tests/google/cloud/sensors/test_bigquery_dts.py
diff --git a/tests/providers/google/cloud/sensors/test_bigtable.py b/providers/tests/google/cloud/sensors/test_bigtable.py similarity index 100% rename from tests/providers/google/cloud/sensors/test_bigtable.py rename to providers/tests/google/cloud/sensors/test_bigtable.py
diff --git a/tests/providers/google/cloud/sensors/test_cloud_composer.py b/providers/tests/google/cloud/sensors/test_cloud_composer.py similarity index 100% rename from tests/providers/google/cloud/sensors/test_cloud_composer.py rename to providers/tests/google/cloud/sensors/test_cloud_composer.py
diff --git a/tests/providers/google/cloud/sensors/test_cloud_storage_transfer_service.py b/providers/tests/google/cloud/sensors/test_cloud_storage_transfer_service.py similarity index 100% rename from tests/providers/google/cloud/sensors/test_cloud_storage_transfer_service.py rename to providers/tests/google/cloud/sensors/test_cloud_storage_transfer_service.py
diff --git a/tests/providers/google/cloud/sensors/test_dataflow.py b/providers/tests/google/cloud/sensors/test_dataflow.py similarity index 100% rename from tests/providers/google/cloud/sensors/test_dataflow.py rename to providers/tests/google/cloud/sensors/test_dataflow.py
diff --git a/tests/providers/google/cloud/sensors/test_datafusion.py b/providers/tests/google/cloud/sensors/test_datafusion.py similarity index 100% rename from tests/providers/google/cloud/sensors/test_datafusion.py rename to providers/tests/google/cloud/sensors/test_datafusion.py
diff --git a/tests/providers/google/cloud/sensors/test_dataplex.py b/providers/tests/google/cloud/sensors/test_dataplex.py similarity index 100% rename from tests/providers/google/cloud/sensors/test_dataplex.py rename to providers/tests/google/cloud/sensors/test_dataplex.py
diff --git a/tests/providers/google/cloud/sensors/test_dataprep.py b/providers/tests/google/cloud/sensors/test_dataprep.py similarity index 100% rename from tests/providers/google/cloud/sensors/test_dataprep.py rename to providers/tests/google/cloud/sensors/test_dataprep.py
diff --git a/tests/providers/google/cloud/sensors/test_dataproc.py b/providers/tests/google/cloud/sensors/test_dataproc.py similarity index 100% rename from tests/providers/google/cloud/sensors/test_dataproc.py rename to providers/tests/google/cloud/sensors/test_dataproc.py
diff --git
a/tests/providers/google/cloud/sensors/test_dataproc_metastore.py b/providers/tests/google/cloud/sensors/test_dataproc_metastore.py similarity index 100% rename from tests/providers/google/cloud/sensors/test_dataproc_metastore.py rename to providers/tests/google/cloud/sensors/test_dataproc_metastore.py diff --git a/tests/providers/google/cloud/sensors/test_gcs.py b/providers/tests/google/cloud/sensors/test_gcs.py similarity index 100% rename from tests/providers/google/cloud/sensors/test_gcs.py rename to providers/tests/google/cloud/sensors/test_gcs.py diff --git a/tests/providers/google/cloud/sensors/test_looker.py b/providers/tests/google/cloud/sensors/test_looker.py similarity index 100% rename from tests/providers/google/cloud/sensors/test_looker.py rename to providers/tests/google/cloud/sensors/test_looker.py diff --git a/tests/providers/google/cloud/sensors/test_pubsub.py b/providers/tests/google/cloud/sensors/test_pubsub.py similarity index 100% rename from tests/providers/google/cloud/sensors/test_pubsub.py rename to providers/tests/google/cloud/sensors/test_pubsub.py diff --git a/tests/providers/google/cloud/sensors/test_tasks.py b/providers/tests/google/cloud/sensors/test_tasks.py similarity index 100% rename from tests/providers/google/cloud/sensors/test_tasks.py rename to providers/tests/google/cloud/sensors/test_tasks.py diff --git a/tests/providers/google/cloud/sensors/test_workflows.py b/providers/tests/google/cloud/sensors/test_workflows.py similarity index 100% rename from tests/providers/google/cloud/sensors/test_workflows.py rename to providers/tests/google/cloud/sensors/test_workflows.py diff --git a/tests/providers/dbt/cloud/utils/__init__.py b/providers/tests/google/cloud/transfers/__init__.py similarity index 100% rename from tests/providers/dbt/cloud/utils/__init__.py rename to providers/tests/google/cloud/transfers/__init__.py diff --git a/tests/providers/google/cloud/transfers/test_adls_to_gcs.py b/providers/tests/google/cloud/transfers/test_adls_to_gcs.py similarity index 100% rename from tests/providers/google/cloud/transfers/test_adls_to_gcs.py rename to providers/tests/google/cloud/transfers/test_adls_to_gcs.py diff --git a/tests/providers/google/cloud/transfers/test_azure_blob_to_gcs.py b/providers/tests/google/cloud/transfers/test_azure_blob_to_gcs.py similarity index 100% rename from tests/providers/google/cloud/transfers/test_azure_blob_to_gcs.py rename to providers/tests/google/cloud/transfers/test_azure_blob_to_gcs.py diff --git a/tests/providers/google/cloud/transfers/test_azure_fileshare_to_gcs.py b/providers/tests/google/cloud/transfers/test_azure_fileshare_to_gcs.py similarity index 100% rename from tests/providers/google/cloud/transfers/test_azure_fileshare_to_gcs.py rename to providers/tests/google/cloud/transfers/test_azure_fileshare_to_gcs.py diff --git a/tests/providers/google/cloud/transfers/test_bigquery_to_bigquery.py b/providers/tests/google/cloud/transfers/test_bigquery_to_bigquery.py similarity index 100% rename from tests/providers/google/cloud/transfers/test_bigquery_to_bigquery.py rename to providers/tests/google/cloud/transfers/test_bigquery_to_bigquery.py diff --git a/tests/providers/google/cloud/transfers/test_bigquery_to_gcs.py b/providers/tests/google/cloud/transfers/test_bigquery_to_gcs.py similarity index 100% rename from tests/providers/google/cloud/transfers/test_bigquery_to_gcs.py rename to providers/tests/google/cloud/transfers/test_bigquery_to_gcs.py diff --git 
a/tests/providers/google/cloud/transfers/test_bigquery_to_mssql.py b/providers/tests/google/cloud/transfers/test_bigquery_to_mssql.py similarity index 100% rename from tests/providers/google/cloud/transfers/test_bigquery_to_mssql.py rename to providers/tests/google/cloud/transfers/test_bigquery_to_mssql.py diff --git a/tests/providers/google/cloud/transfers/test_bigquery_to_mysql.py b/providers/tests/google/cloud/transfers/test_bigquery_to_mysql.py similarity index 100% rename from tests/providers/google/cloud/transfers/test_bigquery_to_mysql.py rename to providers/tests/google/cloud/transfers/test_bigquery_to_mysql.py diff --git a/tests/providers/google/cloud/transfers/test_bigquery_to_postgres.py b/providers/tests/google/cloud/transfers/test_bigquery_to_postgres.py similarity index 100% rename from tests/providers/google/cloud/transfers/test_bigquery_to_postgres.py rename to providers/tests/google/cloud/transfers/test_bigquery_to_postgres.py diff --git a/tests/providers/google/cloud/transfers/test_calendar_to_gcs.py b/providers/tests/google/cloud/transfers/test_calendar_to_gcs.py similarity index 100% rename from tests/providers/google/cloud/transfers/test_calendar_to_gcs.py rename to providers/tests/google/cloud/transfers/test_calendar_to_gcs.py diff --git a/tests/providers/google/cloud/transfers/test_cassandra_to_gcs.py b/providers/tests/google/cloud/transfers/test_cassandra_to_gcs.py similarity index 91% rename from tests/providers/google/cloud/transfers/test_cassandra_to_gcs.py rename to providers/tests/google/cloud/transfers/test_cassandra_to_gcs.py index 75e3cbac1789..caa9d5ea76a1 100644 --- a/tests/providers/google/cloud/transfers/test_cassandra_to_gcs.py +++ b/providers/tests/google/cloud/transfers/test_cassandra_to_gcs.py @@ -46,13 +46,17 @@ def test_execute(self): except cassandra.DependencyException: pytest.skip("cassandra-driver not installed with libev support. 
Skipping test.")
 
-        with mock.patch(
-            "airflow.providers.google.cloud.transfers.cassandra_to_gcs.NamedTemporaryFile"
-        ) as mock_tempfile, mock.patch(
-            "airflow.providers.google.cloud.transfers.cassandra_to_gcs.GCSHook.upload"
-        ) as mock_upload, mock.patch(
-            "airflow.providers.google.cloud.transfers.cassandra_to_gcs.CassandraHook"
-        ) as mock_hook:
+        with (
+            mock.patch(
+                "airflow.providers.google.cloud.transfers.cassandra_to_gcs.NamedTemporaryFile"
+            ) as mock_tempfile,
+            mock.patch(
+                "airflow.providers.google.cloud.transfers.cassandra_to_gcs.GCSHook.upload"
+            ) as mock_upload,
+            mock.patch(
+                "airflow.providers.google.cloud.transfers.cassandra_to_gcs.CassandraHook"
+            ) as mock_hook,
+        ):
             mock_tempfile.return_value.name = TMP_FILE_NAME
             operator = CassandraToGCSOperator(
                 task_id=TASK_ID,
diff --git a/tests/providers/google/cloud/transfers/test_facebook_ads_to_gcs.py b/providers/tests/google/cloud/transfers/test_facebook_ads_to_gcs.py similarity index 100% rename from tests/providers/google/cloud/transfers/test_facebook_ads_to_gcs.py rename to providers/tests/google/cloud/transfers/test_facebook_ads_to_gcs.py
diff --git a/tests/providers/google/cloud/transfers/test_facebook_ads_to_gcs_system.py b/providers/tests/google/cloud/transfers/test_facebook_ads_to_gcs_system.py similarity index 93% rename from tests/providers/google/cloud/transfers/test_facebook_ads_to_gcs_system.py rename to providers/tests/google/cloud/transfers/test_facebook_ads_to_gcs_system.py index 85c6f69ae59e..ba24a0c34da2 100644 --- a/tests/providers/google/cloud/transfers/test_facebook_ads_to_gcs_system.py +++ b/providers/tests/google/cloud/transfers/test_facebook_ads_to_gcs_system.py
@@ -26,8 +26,13 @@
 from airflow.exceptions import AirflowException
 from airflow.models import Connection
 from airflow.utils.process_utils import patch_environ
-from tests.providers.google.cloud.utils.gcp_authenticator import GCP_BIGQUERY_KEY
-from tests.test_utils.gcp_system_helpers import CLOUD_DAG_FOLDER, GoogleSystemTest, provide_gcp_context
+
+from dev.tests_common.test_utils.gcp_system_helpers import (
+    CLOUD_DAG_FOLDER,
+    GoogleSystemTest,
+    provide_gcp_context,
+)
+from providers.tests.google.cloud.utils.gcp_authenticator import GCP_BIGQUERY_KEY
 
 CREDENTIALS_DIR = os.environ.get("CREDENTIALS_DIR", "/files/airflow-breeze-config/keys")
 FACEBOOK_KEY = "facebook.json"
diff --git a/tests/providers/google/cloud/transfers/test_gcs_to_bigquery.py b/providers/tests/google/cloud/transfers/test_gcs_to_bigquery.py similarity index 100% rename from tests/providers/google/cloud/transfers/test_gcs_to_bigquery.py rename to providers/tests/google/cloud/transfers/test_gcs_to_bigquery.py
diff --git a/tests/providers/google/cloud/transfers/test_gcs_to_gcs.py b/providers/tests/google/cloud/transfers/test_gcs_to_gcs.py similarity index 100% rename from tests/providers/google/cloud/transfers/test_gcs_to_gcs.py rename to providers/tests/google/cloud/transfers/test_gcs_to_gcs.py
diff --git a/tests/providers/google/cloud/transfers/test_gcs_to_local.py b/providers/tests/google/cloud/transfers/test_gcs_to_local.py similarity index 100% rename from tests/providers/google/cloud/transfers/test_gcs_to_local.py rename to providers/tests/google/cloud/transfers/test_gcs_to_local.py
diff --git a/tests/providers/google/cloud/transfers/test_gcs_to_sftp.py b/providers/tests/google/cloud/transfers/test_gcs_to_sftp.py similarity index 100% rename from tests/providers/google/cloud/transfers/test_gcs_to_sftp.py rename to providers/tests/google/cloud/transfers/test_gcs_to_sftp.py
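The cassandra_to_gcs hunk above stacks three mock.patch managers in one parenthesized with statement. When such a stack keeps growing, contextlib.ExitStack is a common alternative; the sketch below is not what the patch does, and the module path from the hunk is reused purely for illustration:

from contextlib import ExitStack
from unittest import mock

# Target module path taken from the hunk above, for illustration only.
MOD = "airflow.providers.google.cloud.transfers.cassandra_to_gcs"

def test_execute_with_exit_stack():
    with ExitStack() as stack:
        # Each enter_context call activates a patch that stays in effect
        # until the with block exits, mirroring the parenthesized form.
        mock_tempfile = stack.enter_context(mock.patch(f"{MOD}.NamedTemporaryFile"))
        mock_upload = stack.enter_context(mock.patch(f"{MOD}.GCSHook.upload"))
        mock_hook = stack.enter_context(mock.patch(f"{MOD}.CassandraHook"))
        # ... test body unchanged: all three patches are active here.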
diff --git a/tests/providers/google/cloud/transfers/test_gdrive_to_gcs.py b/providers/tests/google/cloud/transfers/test_gdrive_to_gcs.py similarity index 100% rename from tests/providers/google/cloud/transfers/test_gdrive_to_gcs.py rename to providers/tests/google/cloud/transfers/test_gdrive_to_gcs.py
diff --git a/tests/providers/google/cloud/transfers/test_gdrive_to_local.py b/providers/tests/google/cloud/transfers/test_gdrive_to_local.py similarity index 100% rename from tests/providers/google/cloud/transfers/test_gdrive_to_local.py rename to providers/tests/google/cloud/transfers/test_gdrive_to_local.py
diff --git a/tests/providers/google/cloud/transfers/test_local_to_gcs.py b/providers/tests/google/cloud/transfers/test_local_to_gcs.py similarity index 100% rename from tests/providers/google/cloud/transfers/test_local_to_gcs.py rename to providers/tests/google/cloud/transfers/test_local_to_gcs.py
diff --git a/tests/providers/google/cloud/transfers/test_mssql_to_gcs.py b/providers/tests/google/cloud/transfers/test_mssql_to_gcs.py similarity index 100% rename from tests/providers/google/cloud/transfers/test_mssql_to_gcs.py rename to providers/tests/google/cloud/transfers/test_mssql_to_gcs.py
diff --git a/tests/providers/google/cloud/transfers/test_mysql_to_gcs.py b/providers/tests/google/cloud/transfers/test_mysql_to_gcs.py similarity index 100% rename from tests/providers/google/cloud/transfers/test_mysql_to_gcs.py rename to providers/tests/google/cloud/transfers/test_mysql_to_gcs.py
diff --git a/tests/providers/google/cloud/transfers/test_oracle_to_gcs.py b/providers/tests/google/cloud/transfers/test_oracle_to_gcs.py similarity index 100% rename from tests/providers/google/cloud/transfers/test_oracle_to_gcs.py rename to providers/tests/google/cloud/transfers/test_oracle_to_gcs.py
diff --git a/tests/providers/google/cloud/transfers/test_postgres_to_gcs.py b/providers/tests/google/cloud/transfers/test_postgres_to_gcs.py similarity index 100% rename from tests/providers/google/cloud/transfers/test_postgres_to_gcs.py rename to providers/tests/google/cloud/transfers/test_postgres_to_gcs.py
diff --git a/tests/providers/google/cloud/transfers/test_s3_to_gcs.py b/providers/tests/google/cloud/transfers/test_s3_to_gcs.py similarity index 100% rename from tests/providers/google/cloud/transfers/test_s3_to_gcs.py rename to providers/tests/google/cloud/transfers/test_s3_to_gcs.py
diff --git a/tests/providers/google/cloud/transfers/test_salesforce_to_gcs.py b/providers/tests/google/cloud/transfers/test_salesforce_to_gcs.py similarity index 100% rename from tests/providers/google/cloud/transfers/test_salesforce_to_gcs.py rename to providers/tests/google/cloud/transfers/test_salesforce_to_gcs.py
diff --git a/tests/providers/google/cloud/transfers/test_salesforce_to_gcs_system.py b/providers/tests/google/cloud/transfers/test_salesforce_to_gcs_system.py similarity index 84% rename from tests/providers/google/cloud/transfers/test_salesforce_to_gcs_system.py rename to providers/tests/google/cloud/transfers/test_salesforce_to_gcs_system.py index c2eee9bd0911..afd0856fad24 100644 --- a/tests/providers/google/cloud/transfers/test_salesforce_to_gcs_system.py +++ b/providers/tests/google/cloud/transfers/test_salesforce_to_gcs_system.py
@@ -20,9 +20,13 @@
 
 import pytest
 
-from tests.providers.google.cloud.utils.gcp_authenticator import GCP_BIGQUERY_KEY
-from tests.test_utils.gcp_system_helpers import CLOUD_DAG_FOLDER, GoogleSystemTest, provide_gcp_context
-from tests.test_utils.salesforce_system_helpers import provide_salesforce_connection +from dev.tests_common.test_utils.gcp_system_helpers import ( + CLOUD_DAG_FOLDER, + GoogleSystemTest, + provide_gcp_context, +) +from dev.tests_common.test_utils.salesforce_system_helpers import provide_salesforce_connection +from providers.tests.google.cloud.utils.gcp_authenticator import GCP_BIGQUERY_KEY CREDENTIALS_DIR = os.environ.get("CREDENTIALS_DIR", "/files/airflow-breeze-config/keys") SALESFORCE_KEY = "salesforce.json" diff --git a/tests/providers/google/cloud/transfers/test_sftp_to_gcs.py b/providers/tests/google/cloud/transfers/test_sftp_to_gcs.py similarity index 100% rename from tests/providers/google/cloud/transfers/test_sftp_to_gcs.py rename to providers/tests/google/cloud/transfers/test_sftp_to_gcs.py diff --git a/tests/providers/google/cloud/transfers/test_sheets_to_gcs.py b/providers/tests/google/cloud/transfers/test_sheets_to_gcs.py similarity index 100% rename from tests/providers/google/cloud/transfers/test_sheets_to_gcs.py rename to providers/tests/google/cloud/transfers/test_sheets_to_gcs.py diff --git a/tests/providers/google/cloud/transfers/test_sql_to_gcs.py b/providers/tests/google/cloud/transfers/test_sql_to_gcs.py similarity index 100% rename from tests/providers/google/cloud/transfers/test_sql_to_gcs.py rename to providers/tests/google/cloud/transfers/test_sql_to_gcs.py diff --git a/tests/providers/google/cloud/transfers/test_trino_to_gcs.py b/providers/tests/google/cloud/transfers/test_trino_to_gcs.py similarity index 100% rename from tests/providers/google/cloud/transfers/test_trino_to_gcs.py rename to providers/tests/google/cloud/transfers/test_trino_to_gcs.py diff --git a/tests/providers/discord/notifications/__init__.py b/providers/tests/google/cloud/triggers/__init__.py similarity index 100% rename from tests/providers/discord/notifications/__init__.py rename to providers/tests/google/cloud/triggers/__init__.py diff --git a/tests/providers/google/cloud/triggers/test_bigquery.py b/providers/tests/google/cloud/triggers/test_bigquery.py similarity index 100% rename from tests/providers/google/cloud/triggers/test_bigquery.py rename to providers/tests/google/cloud/triggers/test_bigquery.py diff --git a/tests/providers/google/cloud/triggers/test_bigquery_dts.py b/providers/tests/google/cloud/triggers/test_bigquery_dts.py similarity index 100% rename from tests/providers/google/cloud/triggers/test_bigquery_dts.py rename to providers/tests/google/cloud/triggers/test_bigquery_dts.py diff --git a/tests/providers/google/cloud/triggers/test_cloud_batch.py b/providers/tests/google/cloud/triggers/test_cloud_batch.py similarity index 100% rename from tests/providers/google/cloud/triggers/test_cloud_batch.py rename to providers/tests/google/cloud/triggers/test_cloud_batch.py diff --git a/tests/providers/google/cloud/triggers/test_cloud_build.py b/providers/tests/google/cloud/triggers/test_cloud_build.py similarity index 100% rename from tests/providers/google/cloud/triggers/test_cloud_build.py rename to providers/tests/google/cloud/triggers/test_cloud_build.py diff --git a/tests/providers/google/cloud/triggers/test_cloud_composer.py b/providers/tests/google/cloud/triggers/test_cloud_composer.py similarity index 100% rename from tests/providers/google/cloud/triggers/test_cloud_composer.py rename to providers/tests/google/cloud/triggers/test_cloud_composer.py diff --git a/tests/providers/google/cloud/triggers/test_cloud_run.py b/providers/tests/google/cloud/triggers/test_cloud_run.py 
similarity index 100% rename from tests/providers/google/cloud/triggers/test_cloud_run.py rename to providers/tests/google/cloud/triggers/test_cloud_run.py diff --git a/tests/providers/google/cloud/triggers/test_cloud_sql.py b/providers/tests/google/cloud/triggers/test_cloud_sql.py similarity index 100% rename from tests/providers/google/cloud/triggers/test_cloud_sql.py rename to providers/tests/google/cloud/triggers/test_cloud_sql.py diff --git a/tests/providers/google/cloud/triggers/test_cloud_storage_transfer_service.py b/providers/tests/google/cloud/triggers/test_cloud_storage_transfer_service.py similarity index 100% rename from tests/providers/google/cloud/triggers/test_cloud_storage_transfer_service.py rename to providers/tests/google/cloud/triggers/test_cloud_storage_transfer_service.py diff --git a/tests/providers/google/cloud/triggers/test_dataflow.py b/providers/tests/google/cloud/triggers/test_dataflow.py similarity index 100% rename from tests/providers/google/cloud/triggers/test_dataflow.py rename to providers/tests/google/cloud/triggers/test_dataflow.py diff --git a/tests/providers/google/cloud/triggers/test_datafusion.py b/providers/tests/google/cloud/triggers/test_datafusion.py similarity index 100% rename from tests/providers/google/cloud/triggers/test_datafusion.py rename to providers/tests/google/cloud/triggers/test_datafusion.py diff --git a/tests/providers/google/cloud/triggers/test_dataplex.py b/providers/tests/google/cloud/triggers/test_dataplex.py similarity index 100% rename from tests/providers/google/cloud/triggers/test_dataplex.py rename to providers/tests/google/cloud/triggers/test_dataplex.py diff --git a/tests/providers/google/cloud/triggers/test_dataproc.py b/providers/tests/google/cloud/triggers/test_dataproc.py similarity index 100% rename from tests/providers/google/cloud/triggers/test_dataproc.py rename to providers/tests/google/cloud/triggers/test_dataproc.py diff --git a/tests/providers/google/cloud/triggers/test_gcs.py b/providers/tests/google/cloud/triggers/test_gcs.py similarity index 100% rename from tests/providers/google/cloud/triggers/test_gcs.py rename to providers/tests/google/cloud/triggers/test_gcs.py diff --git a/tests/providers/google/cloud/triggers/test_kubernetes_engine.py b/providers/tests/google/cloud/triggers/test_kubernetes_engine.py similarity index 100% rename from tests/providers/google/cloud/triggers/test_kubernetes_engine.py rename to providers/tests/google/cloud/triggers/test_kubernetes_engine.py diff --git a/tests/providers/google/cloud/triggers/test_mlengine.py b/providers/tests/google/cloud/triggers/test_mlengine.py similarity index 100% rename from tests/providers/google/cloud/triggers/test_mlengine.py rename to providers/tests/google/cloud/triggers/test_mlengine.py diff --git a/tests/providers/google/cloud/triggers/test_pubsub.py b/providers/tests/google/cloud/triggers/test_pubsub.py similarity index 100% rename from tests/providers/google/cloud/triggers/test_pubsub.py rename to providers/tests/google/cloud/triggers/test_pubsub.py diff --git a/tests/providers/google/cloud/triggers/test_vertex_ai.py b/providers/tests/google/cloud/triggers/test_vertex_ai.py similarity index 99% rename from tests/providers/google/cloud/triggers/test_vertex_ai.py rename to providers/tests/google/cloud/triggers/test_vertex_ai.py index 946ebf49487f..e2e588a2bfdd 100644 --- a/tests/providers/google/cloud/triggers/test_vertex_ai.py +++ b/providers/tests/google/cloud/triggers/test_vertex_ai.py @@ -46,7 +46,8 @@ RunPipelineJobTrigger, ) from 
airflow.triggers.base import TriggerEvent
-from tests.providers.google.cloud.utils.base_gcp_mock import mock_base_gcp_hook_default_project_id
+
+from providers.tests.google.cloud.utils.base_gcp_mock import mock_base_gcp_hook_default_project_id
 
 TEST_CONN_ID = "test_connection"
 TEST_PROJECT_ID = "test_propject_id"
diff --git a/tests/providers/ftp/hooks/__init__.py b/providers/tests/google/cloud/utils/__init__.py similarity index 100% rename from tests/providers/ftp/hooks/__init__.py rename to providers/tests/google/cloud/utils/__init__.py
diff --git a/tests/providers/google/cloud/utils/airflow_util.py b/providers/tests/google/cloud/utils/airflow_util.py similarity index 100% rename from tests/providers/google/cloud/utils/airflow_util.py rename to providers/tests/google/cloud/utils/airflow_util.py
diff --git a/tests/providers/google/cloud/utils/base_gcp_mock.py b/providers/tests/google/cloud/utils/base_gcp_mock.py similarity index 100% rename from tests/providers/google/cloud/utils/base_gcp_mock.py rename to providers/tests/google/cloud/utils/base_gcp_mock.py
diff --git a/tests/providers/google/cloud/utils/gcp_authenticator.py b/providers/tests/google/cloud/utils/gcp_authenticator.py similarity index 98% rename from tests/providers/google/cloud/utils/gcp_authenticator.py rename to providers/tests/google/cloud/utils/gcp_authenticator.py index 7c95e57dc328..6bb11b260644 100644 --- a/tests/providers/google/cloud/utils/gcp_authenticator.py +++ b/providers/tests/google/cloud/utils/gcp_authenticator.py
@@ -25,9 +25,10 @@
 from airflow.exceptions import AirflowException
 from airflow.models import Connection
 
+from dev.tests_common.test_utils import AIRFLOW_MAIN_FOLDER
+from dev.tests_common.test_utils.logging_command_executor import CommandExecutor
+
 # Please keep these variables in alphabetical order.
-from tests.test_utils import AIRFLOW_MAIN_FOLDER
-from tests.test_utils.logging_command_executor import CommandExecutor
 
 GCP_AI_KEY = "gcp_ai.json"
 GCP_BIGQUERY_KEY = "gcp_bigquery.json"
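Hunks like the one above import test helpers as top-level packages (providers.tests..., dev.tests_common...). One plausible way such packages resolve is a repo-root entry on sys.path; the conftest.py below is a hypothetical sketch of that wiring, not the repository's actual configuration:

# Hypothetical repo-root conftest.py -- an assumption, not Airflow's actual
# wiring -- showing one way `providers.tests...` and `dev.tests_common...`
# can become importable: put the repo root on sys.path so the `providers/`
# and `dev/` package directories are found beneath it.
from __future__ import annotations

import sys
from pathlib import Path

REPO_ROOT = Path(__file__).resolve().parent

if str(REPO_ROOT) not in sys.path:
    sys.path.insert(0, str(REPO_ROOT))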
diff --git a/tests/providers/google/cloud/utils/job_details.json b/providers/tests/google/cloud/utils/job_details.json similarity index 100% rename from tests/providers/google/cloud/utils/job_details.json rename to providers/tests/google/cloud/utils/job_details.json
diff --git a/tests/providers/google/cloud/utils/out_table_details.json b/providers/tests/google/cloud/utils/out_table_details.json similarity index 100% rename from tests/providers/google/cloud/utils/out_table_details.json rename to providers/tests/google/cloud/utils/out_table_details.json
diff --git a/tests/providers/google/cloud/utils/script_job_details.json b/providers/tests/google/cloud/utils/script_job_details.json similarity index 100% rename from tests/providers/google/cloud/utils/script_job_details.json rename to providers/tests/google/cloud/utils/script_job_details.json
diff --git a/tests/providers/google/cloud/utils/table_details.json b/providers/tests/google/cloud/utils/table_details.json similarity index 100% rename from tests/providers/google/cloud/utils/table_details.json rename to providers/tests/google/cloud/utils/table_details.json
diff --git a/tests/providers/google/cloud/utils/test_credentials_provider.py b/providers/tests/google/cloud/utils/test_credentials_provider.py similarity index 98% rename from tests/providers/google/cloud/utils/test_credentials_provider.py rename to providers/tests/google/cloud/utils/test_credentials_provider.py index 3cab06cee8a4..2e6873775745 100644 --- a/tests/providers/google/cloud/utils/test_credentials_provider.py +++ b/providers/tests/google/cloud/utils/test_credentials_provider.py
@@ -319,9 +319,10 @@ def test_get_credentials_and_project_id_with_service_account_info(
 
     @mock.patch("google.auth.load_credentials_from_file", return_value=("CREDENTIALS", "PROJECT_ID"))
     def test_get_credentials_using_credential_config_file(self, mock_load_credentials_from_file, caplog):
-        with caplog.at_level(
-            level=logging.DEBUG, logger=CRED_PROVIDER_LOGGER_NAME
-        ), NamedTemporaryFile() as temp_file:
+        with (
+            caplog.at_level(level=logging.DEBUG, logger=CRED_PROVIDER_LOGGER_NAME),
+            NamedTemporaryFile() as temp_file,
+        ):
             caplog.clear()
             result = get_credentials_and_project_id(credential_config_file=temp_file.name)
             mock_load_credentials_from_file.assert_called_once_with(temp_file.name, scopes=None)
@@ -350,8 +351,9 @@ def test_get_credentials_using_credential_config_string(self, mock_load_credenti
 
     def test_get_credentials_using_credential_config_invalid_string(self, caplog):
         caplog.clear()
-        with pytest.raises(DefaultCredentialsError), caplog.at_level(
-            level=logging.DEBUG, logger=CRED_PROVIDER_LOGGER_NAME
+        with (
+            pytest.raises(DefaultCredentialsError),
+            caplog.at_level(level=logging.DEBUG, logger=CRED_PROVIDER_LOGGER_NAME),
         ):
             get_credentials_and_project_id(credential_config_file="invalid json}}}}")
         assert "Getting connection using credential configuration string."
in caplog.messages diff --git a/tests/providers/google/cloud/utils/test_datafusion.py b/providers/tests/google/cloud/utils/test_datafusion.py similarity index 100% rename from tests/providers/google/cloud/utils/test_datafusion.py rename to providers/tests/google/cloud/utils/test_datafusion.py diff --git a/tests/providers/google/cloud/utils/test_dataproc.py b/providers/tests/google/cloud/utils/test_dataproc.py similarity index 100% rename from tests/providers/google/cloud/utils/test_dataproc.py rename to providers/tests/google/cloud/utils/test_dataproc.py diff --git a/tests/providers/google/cloud/utils/test_external_token_supplier.py b/providers/tests/google/cloud/utils/test_external_token_supplier.py similarity index 100% rename from tests/providers/google/cloud/utils/test_external_token_supplier.py rename to providers/tests/google/cloud/utils/test_external_token_supplier.py diff --git a/tests/providers/google/cloud/utils/test_field_sanitizer.py b/providers/tests/google/cloud/utils/test_field_sanitizer.py similarity index 100% rename from tests/providers/google/cloud/utils/test_field_sanitizer.py rename to providers/tests/google/cloud/utils/test_field_sanitizer.py diff --git a/tests/providers/google/cloud/utils/test_field_validator.py b/providers/tests/google/cloud/utils/test_field_validator.py similarity index 100% rename from tests/providers/google/cloud/utils/test_field_validator.py rename to providers/tests/google/cloud/utils/test_field_validator.py diff --git a/tests/providers/google/cloud/utils/test_helpers.py b/providers/tests/google/cloud/utils/test_helpers.py similarity index 100% rename from tests/providers/google/cloud/utils/test_helpers.py rename to providers/tests/google/cloud/utils/test_helpers.py diff --git a/tests/providers/google/cloud/utils/test_mlengine_operator_utils.py b/providers/tests/google/cloud/utils/test_mlengine_operator_utils.py similarity index 100% rename from tests/providers/google/cloud/utils/test_mlengine_operator_utils.py rename to providers/tests/google/cloud/utils/test_mlengine_operator_utils.py diff --git a/tests/providers/google/cloud/utils/test_mlengine_prediction_summary.py b/providers/tests/google/cloud/utils/test_mlengine_prediction_summary.py similarity index 91% rename from tests/providers/google/cloud/utils/test_mlengine_prediction_summary.py rename to providers/tests/google/cloud/utils/test_mlengine_prediction_summary.py index 7d28cf1e6841..984c4847d942 100644 --- a/tests/providers/google/cloud/utils/test_mlengine_prediction_summary.py +++ b/providers/tests/google/cloud/utils/test_mlengine_prediction_summary.py @@ -87,13 +87,11 @@ def test_run_should_fail_if_enc_fn_is_not_callable(self): ) def test_run_should_not_fail_with_valid_fn(self): - with mock.patch.object( - mlengine_prediction_summary.beam.pipeline, "PipelineOptions" - ) as pipeline_mock, mock.patch.object( - mlengine_prediction_summary.beam, "Pipeline" - ) as pipeline_obj_mock, mock.patch.object( - mlengine_prediction_summary.beam.io, "ReadFromText" - ) as io_mock: + with ( + mock.patch.object(mlengine_prediction_summary.beam.pipeline, "PipelineOptions") as pipeline_mock, + mock.patch.object(mlengine_prediction_summary.beam, "Pipeline") as pipeline_obj_mock, + mock.patch.object(mlengine_prediction_summary.beam.io, "ReadFromText") as io_mock, + ): def metric_function(): return 1 diff --git a/tests/providers/docker/decorators/__init__.py b/providers/tests/google/common/__init__.py similarity index 100% rename from tests/providers/docker/decorators/__init__.py rename to 
providers/tests/google/common/__init__.py diff --git a/tests/providers/elasticsearch/log/__init__.py b/providers/tests/google/common/auth_backend/__init__.py similarity index 100% rename from tests/providers/elasticsearch/log/__init__.py rename to providers/tests/google/common/auth_backend/__init__.py diff --git a/tests/providers/google/common/auth_backend/test_google_openid.py b/providers/tests/google/common/auth_backend/test_google_openid.py similarity index 95% rename from tests/providers/google/common/auth_backend/test_google_openid.py rename to providers/tests/google/common/auth_backend/test_google_openid.py index 260ae0d6fb5e..67b0ff2003d2 100644 --- a/tests/providers/google/common/auth_backend/test_google_openid.py +++ b/providers/tests/google/common/auth_backend/test_google_openid.py @@ -22,10 +22,11 @@ from google.auth.exceptions import GoogleAuthError from airflow.www.app import create_app -from tests.test_utils.compat import AIRFLOW_V_2_9_PLUS -from tests.test_utils.config import conf_vars -from tests.test_utils.db import clear_db_pools -from tests.test_utils.decorators import dont_initialize_flask_app_submodules + +from dev.tests_common.test_utils.compat import AIRFLOW_V_2_9_PLUS +from dev.tests_common.test_utils.config import conf_vars +from dev.tests_common.test_utils.db import clear_db_pools +from dev.tests_common.test_utils.decorators import dont_initialize_flask_app_submodules @pytest.fixture(scope="module") diff --git a/tests/providers/fab/auth_manager/api/auth/__init__.py b/providers/tests/google/common/hooks/__init__.py similarity index 100% rename from tests/providers/fab/auth_manager/api/auth/__init__.py rename to providers/tests/google/common/hooks/__init__.py diff --git a/tests/providers/google/common/hooks/test_base_google.py b/providers/tests/google/common/hooks/test_base_google.py similarity index 99% rename from tests/providers/google/common/hooks/test_base_google.py rename to providers/tests/google/common/hooks/test_base_google.py index 2b67b8844ecb..a4317c7e8b9e 100644 --- a/tests/providers/google/common/hooks/test_base_google.py +++ b/providers/tests/google/common/hooks/test_base_google.py @@ -38,7 +38,8 @@ from airflow.providers.google.cloud.utils.credentials_provider import _DEFAULT_SCOPES from airflow.providers.google.common.hooks import base_google as hook from airflow.providers.google.common.hooks.base_google import GoogleBaseHook, is_refresh_credentials_exception -from tests.providers.google.cloud.utils.base_gcp_mock import mock_base_gcp_hook_default_project_id + +from providers.tests.google.cloud.utils.base_gcp_mock import mock_base_gcp_hook_default_project_id default_creds_available = True default_project = None diff --git a/tests/providers/google/common/hooks/test_discovery_api.py b/providers/tests/google/common/hooks/test_discovery_api.py similarity index 100% rename from tests/providers/google/common/hooks/test_discovery_api.py rename to providers/tests/google/common/hooks/test_discovery_api.py diff --git a/tests/providers/google/common/test_deprecated.py b/providers/tests/google/common/test_deprecated.py similarity index 100% rename from tests/providers/google/common/test_deprecated.py rename to providers/tests/google/common/test_deprecated.py diff --git a/tests/providers/fab/auth_manager/api/auth/backend/__init__.py b/providers/tests/google/common/utils/__init__.py similarity index 100% rename from tests/providers/fab/auth_manager/api/auth/backend/__init__.py rename to providers/tests/google/common/utils/__init__.py diff --git 
a/tests/providers/google/common/utils/test_id_token_credentials.py b/providers/tests/google/common/utils/test_id_token_credentials.py similarity index 100% rename from tests/providers/google/common/utils/test_id_token_credentials.py rename to providers/tests/google/common/utils/test_id_token_credentials.py diff --git a/tests/providers/fab/auth_manager/api_endpoints/__init__.py b/providers/tests/google/firebase/__init__.py similarity index 100% rename from tests/providers/fab/auth_manager/api_endpoints/__init__.py rename to providers/tests/google/firebase/__init__.py diff --git a/tests/providers/fab/auth_manager/cli_commands/__init__.py b/providers/tests/google/firebase/hooks/__init__.py similarity index 100% rename from tests/providers/fab/auth_manager/cli_commands/__init__.py rename to providers/tests/google/firebase/hooks/__init__.py diff --git a/tests/providers/google/firebase/hooks/test_firestore.py b/providers/tests/google/firebase/hooks/test_firestore.py similarity index 99% rename from tests/providers/google/firebase/hooks/test_firestore.py rename to providers/tests/google/firebase/hooks/test_firestore.py index e6c17105a60f..b7308c5f9b9c 100644 --- a/tests/providers/google/firebase/hooks/test_firestore.py +++ b/providers/tests/google/firebase/hooks/test_firestore.py @@ -27,7 +27,8 @@ from airflow.exceptions import AirflowException from airflow.providers.google.firebase.hooks.firestore import CloudFirestoreHook -from tests.providers.google.cloud.utils.base_gcp_mock import ( + +from providers.tests.google.cloud.utils.base_gcp_mock import ( GCP_PROJECT_ID_HOOK_UNIT_TEST, mock_base_gcp_hook_default_project_id, mock_base_gcp_hook_no_default_project_id, diff --git a/tests/providers/fab/auth_manager/decorators/__init__.py b/providers/tests/google/firebase/operators/__init__.py similarity index 100% rename from tests/providers/fab/auth_manager/decorators/__init__.py rename to providers/tests/google/firebase/operators/__init__.py diff --git a/tests/providers/google/firebase/operators/test_firestore.py b/providers/tests/google/firebase/operators/test_firestore.py similarity index 100% rename from tests/providers/google/firebase/operators/test_firestore.py rename to providers/tests/google/firebase/operators/test_firestore.py diff --git a/tests/providers/ftp/sensors/__init__.py b/providers/tests/google/leveldb/__init__.py similarity index 100% rename from tests/providers/ftp/sensors/__init__.py rename to providers/tests/google/leveldb/__init__.py diff --git a/tests/providers/google/cloud/utils/__init__.py b/providers/tests/google/leveldb/hooks/__init__.py similarity index 100% rename from tests/providers/google/cloud/utils/__init__.py rename to providers/tests/google/leveldb/hooks/__init__.py diff --git a/tests/providers/google/leveldb/hooks/test_leveldb.py b/providers/tests/google/leveldb/hooks/test_leveldb.py similarity index 100% rename from tests/providers/google/leveldb/hooks/test_leveldb.py rename to providers/tests/google/leveldb/hooks/test_leveldb.py diff --git a/tests/providers/google/leveldb/__init__.py b/providers/tests/google/leveldb/operators/__init__.py similarity index 100% rename from tests/providers/google/leveldb/__init__.py rename to providers/tests/google/leveldb/operators/__init__.py diff --git a/tests/providers/google/leveldb/operators/test_leveldb.py b/providers/tests/google/leveldb/operators/test_leveldb.py similarity index 100% rename from tests/providers/google/leveldb/operators/test_leveldb.py rename to providers/tests/google/leveldb/operators/test_leveldb.py diff 
diff --git a/tests/providers/fab/auth_manager/security_manager/__init__.py b/providers/tests/google/marketing_platform/__init__.py similarity index 100% rename from tests/providers/fab/auth_manager/security_manager/__init__.py rename to providers/tests/google/marketing_platform/__init__.py
diff --git a/tests/providers/facebook/__init__.py b/providers/tests/google/marketing_platform/hooks/__init__.py similarity index 100% rename from tests/providers/facebook/__init__.py rename to providers/tests/google/marketing_platform/hooks/__init__.py
diff --git a/tests/providers/google/marketing_platform/hooks/test_analytics.py b/providers/tests/google/marketing_platform/hooks/test_analytics.py similarity index 96% rename from tests/providers/google/marketing_platform/hooks/test_analytics.py rename to providers/tests/google/marketing_platform/hooks/test_analytics.py index e88e8fe862a3..9944e2a9947a 100644 --- a/tests/providers/google/marketing_platform/hooks/test_analytics.py +++ b/providers/tests/google/marketing_platform/hooks/test_analytics.py
@@ -24,7 +24,8 @@
 
 from airflow.exceptions import AirflowProviderDeprecationWarning
 from airflow.providers.google.marketing_platform.hooks.analytics import GoogleAnalyticsHook
-from tests.providers.google.cloud.utils.base_gcp_mock import mock_base_gcp_hook_default_project_id
+
+from providers.tests.google.cloud.utils.base_gcp_mock import mock_base_gcp_hook_default_project_id
 
 WEB_PROPERTY_AD_WORDS_LINK_ID = "AAIIRRFFLLOOWW"
 WEB_PROPERTY_ID = "web_property_id"
@@ -38,10 +39,13 @@ class TestGoogleAnalyticsHook:
     def setup_method(self):
-        with mock.patch(
-            "airflow.providers.google.common.hooks.base_google.GoogleBaseHook.__init__",
-            new=mock_base_gcp_hook_default_project_id,
-        ), warnings.catch_warnings():
+        with (
+            mock.patch(
+                "airflow.providers.google.common.hooks.base_google.GoogleBaseHook.__init__",
+                new=mock_base_gcp_hook_default_project_id,
+            ),
+            warnings.catch_warnings(),
+        ):
             warnings.simplefilter("ignore", AirflowProviderDeprecationWarning)
             self.hook = GoogleAnalyticsHook(API_VERSION, GCP_CONN_ID)
diff --git a/tests/providers/google/marketing_platform/hooks/test_analytics_admin.py b/providers/tests/google/marketing_platform/hooks/test_analytics_admin.py similarity index 99% rename from tests/providers/google/marketing_platform/hooks/test_analytics_admin.py rename to providers/tests/google/marketing_platform/hooks/test_analytics_admin.py index 81a5210d499a..9887dac4db24 100644 --- a/tests/providers/google/marketing_platform/hooks/test_analytics_admin.py +++ b/providers/tests/google/marketing_platform/hooks/test_analytics_admin.py
@@ -20,7 +20,8 @@
 from unittest import mock
 
 from airflow.providers.google.marketing_platform.hooks.analytics_admin import GoogleAnalyticsAdminHook
-from tests.providers.google.cloud.utils.base_gcp_mock import mock_base_gcp_hook_default_project_id
+
+from providers.tests.google.cloud.utils.base_gcp_mock import mock_base_gcp_hook_default_project_id
 
 GCP_CONN_ID = "test_gcp_conn_id"
 IMPERSONATION_CHAIN = ["ACCOUNT_1", "ACCOUNT_2", "ACCOUNT_3"]
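The setup_method hunk above pairs warnings.catch_warnings() with simplefilter to silence AirflowProviderDeprecationWarning while constructing the hook. A sketch of the equivalent per-test suppression via pytest's filterwarnings marker; this is illustrative only, and the warning class is generalized to DeprecationWarning here:

import warnings

import pytest


@pytest.mark.filterwarnings("ignore::DeprecationWarning")
def test_constructs_legacy_hook_quietly():
    # Matching warnings raised inside this test are ignored, mirroring the
    # catch_warnings()/simplefilter("ignore", ...) pair in the hunk above.
    warnings.warn("deprecated constructor path", DeprecationWarning)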
diff --git a/tests/providers/google/marketing_platform/hooks/test_campaign_manager.py b/providers/tests/google/marketing_platform/hooks/test_campaign_manager.py similarity index 99% rename from tests/providers/google/marketing_platform/hooks/test_campaign_manager.py rename to providers/tests/google/marketing_platform/hooks/test_campaign_manager.py index 2e7cd821b3c2..0c16e1979592 100644 --- a/tests/providers/google/marketing_platform/hooks/test_campaign_manager.py +++ b/providers/tests/google/marketing_platform/hooks/test_campaign_manager.py
@@ -20,7 +20,8 @@
 from unittest import mock
 
 from airflow.providers.google.marketing_platform.hooks.campaign_manager import GoogleCampaignManagerHook
-from tests.providers.google.cloud.utils.base_gcp_mock import mock_base_gcp_hook_default_project_id
+
+from providers.tests.google.cloud.utils.base_gcp_mock import mock_base_gcp_hook_default_project_id
 
 API_VERSION = "v4"
 GCP_CONN_ID = "google_cloud_default"
diff --git a/tests/providers/google/marketing_platform/hooks/test_display_video.py b/providers/tests/google/marketing_platform/hooks/test_display_video.py similarity index 99% rename from tests/providers/google/marketing_platform/hooks/test_display_video.py rename to providers/tests/google/marketing_platform/hooks/test_display_video.py index c6eae2aa8f9b..2c476428e89f 100644 --- a/tests/providers/google/marketing_platform/hooks/test_display_video.py +++ b/providers/tests/google/marketing_platform/hooks/test_display_video.py
@@ -20,7 +20,8 @@
 from unittest import mock
 
 from airflow.providers.google.marketing_platform.hooks.display_video import GoogleDisplayVideo360Hook
-from tests.providers.google.cloud.utils.base_gcp_mock import mock_base_gcp_hook_default_project_id
+
+from providers.tests.google.cloud.utils.base_gcp_mock import mock_base_gcp_hook_default_project_id
 
 API_VERSION = "v2"
 GCP_CONN_ID = "google_cloud_default"
diff --git a/tests/providers/google/marketing_platform/hooks/test_search_ads.py b/providers/tests/google/marketing_platform/hooks/test_search_ads.py similarity index 99% rename from tests/providers/google/marketing_platform/hooks/test_search_ads.py rename to providers/tests/google/marketing_platform/hooks/test_search_ads.py index 1fc08237f145..c3b585364cea 100644 --- a/tests/providers/google/marketing_platform/hooks/test_search_ads.py +++ b/providers/tests/google/marketing_platform/hooks/test_search_ads.py
@@ -25,7 +25,8 @@
     GoogleSearchAdsHook,
     GoogleSearchAdsReportingHook,
 )
-from tests.providers.google.cloud.utils.base_gcp_mock import mock_base_gcp_hook_default_project_id
+
+from providers.tests.google.cloud.utils.base_gcp_mock import mock_base_gcp_hook_default_project_id
 
 GCP_CONN_ID = "google_cloud_default"
 API_VERSION = "v0"
diff --git a/tests/providers/google/leveldb/hooks/__init__.py b/providers/tests/google/marketing_platform/links/__init__.py similarity index 100% rename from tests/providers/google/leveldb/hooks/__init__.py rename to providers/tests/google/marketing_platform/links/__init__.py
diff --git a/tests/providers/google/marketing_platform/links/test_analytics_admin.py b/providers/tests/google/marketing_platform/links/test_analytics_admin.py similarity index 100% rename from tests/providers/google/marketing_platform/links/test_analytics_admin.py rename to providers/tests/google/marketing_platform/links/test_analytics_admin.py
diff --git a/tests/providers/facebook/ads/__init__.py b/providers/tests/google/marketing_platform/operators/__init__.py similarity index 100% rename from tests/providers/facebook/ads/__init__.py rename to providers/tests/google/marketing_platform/operators/__init__.py
diff --git a/tests/providers/google/marketing_platform/operators/test_analytics.py b/providers/tests/google/marketing_platform/operators/test_analytics.py similarity index 100% rename from tests/providers/google/marketing_platform/operators/test_analytics.py rename to providers/tests/google/marketing_platform/operators/test_analytics.py
diff --git
a/tests/providers/google/marketing_platform/operators/test_analytics_admin.py b/providers/tests/google/marketing_platform/operators/test_analytics_admin.py similarity index 100% rename from tests/providers/google/marketing_platform/operators/test_analytics_admin.py rename to providers/tests/google/marketing_platform/operators/test_analytics_admin.py diff --git a/tests/providers/google/marketing_platform/operators/test_campaign_manager.py b/providers/tests/google/marketing_platform/operators/test_campaign_manager.py similarity index 100% rename from tests/providers/google/marketing_platform/operators/test_campaign_manager.py rename to providers/tests/google/marketing_platform/operators/test_campaign_manager.py diff --git a/tests/providers/google/marketing_platform/operators/test_display_video.py b/providers/tests/google/marketing_platform/operators/test_display_video.py similarity index 100% rename from tests/providers/google/marketing_platform/operators/test_display_video.py rename to providers/tests/google/marketing_platform/operators/test_display_video.py diff --git a/tests/providers/google/marketing_platform/operators/test_display_video_system.py b/providers/tests/google/marketing_platform/operators/test_display_video_system.py similarity index 91% rename from tests/providers/google/marketing_platform/operators/test_display_video_system.py rename to providers/tests/google/marketing_platform/operators/test_display_video_system.py index 10422e8ccb91..78f5d4ee021f 100644 --- a/tests/providers/google/marketing_platform/operators/test_display_video_system.py +++ b/providers/tests/google/marketing_platform/operators/test_display_video_system.py @@ -20,8 +20,13 @@ from airflow.providers.google.cloud.hooks.bigquery import BigQueryHook from airflow.providers.google.marketing_platform.example_dags.example_display_video import BUCKET -from tests.providers.google.cloud.utils.gcp_authenticator import GCP_BIGQUERY_KEY, GMP_KEY -from tests.test_utils.gcp_system_helpers import MARKETING_DAG_FOLDER, GoogleSystemTest, provide_gcp_context + +from dev.tests_common.test_utils.gcp_system_helpers import ( + MARKETING_DAG_FOLDER, + GoogleSystemTest, + provide_gcp_context, +) +from providers.tests.google.cloud.utils.gcp_authenticator import GCP_BIGQUERY_KEY, GMP_KEY # Requires the following scope: SCOPES = [ diff --git a/tests/providers/google/marketing_platform/operators/test_search_ads.py b/providers/tests/google/marketing_platform/operators/test_search_ads.py similarity index 100% rename from tests/providers/google/marketing_platform/operators/test_search_ads.py rename to providers/tests/google/marketing_platform/operators/test_search_ads.py diff --git a/tests/providers/facebook/ads/hooks/__init__.py b/providers/tests/google/marketing_platform/sensors/__init__.py similarity index 100% rename from tests/providers/facebook/ads/hooks/__init__.py rename to providers/tests/google/marketing_platform/sensors/__init__.py diff --git a/tests/providers/google/marketing_platform/sensors/test_campaign_manager.py b/providers/tests/google/marketing_platform/sensors/test_campaign_manager.py similarity index 100% rename from tests/providers/google/marketing_platform/sensors/test_campaign_manager.py rename to providers/tests/google/marketing_platform/sensors/test_campaign_manager.py diff --git a/tests/providers/google/marketing_platform/sensors/test_display_video.py b/providers/tests/google/marketing_platform/sensors/test_display_video.py similarity index 100% rename from 
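The test_analytics.py hunk above shows the one behavior-neutral rewrite that recurs throughout this patch: context managers chained with commas on a single with statement are regrouped inside parentheses. A minimal, self-contained sketch of the before/after (illustrative patch targets, not code from this patch; the parenthesized form requires Python 3.10+):

    from unittest import mock

    # Old form: managers chained on one logical line; long patch targets force
    # awkward intra-call line breaks.
    with mock.patch("os.getcwd", return_value="/tmp") as cwd, mock.patch(
        "os.cpu_count", return_value=1
    ) as cpus:
        assert cwd() == "/tmp"
        assert cpus() == 1

    # New form used in the hunks: parentheses group the managers, one per line
    # with a trailing comma. Semantics are identical; only the layout changes.
    with (
        mock.patch("os.getcwd", return_value="/tmp") as cwd,
        mock.patch("os.cpu_count", return_value=1) as cpus,
    ):
        assert cwd() == "/tmp"
        assert cpus() == 1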
diff --git a/tests/providers/google/marketing_platform/sensors/test_display_video.py b/providers/tests/google/marketing_platform/sensors/test_display_video.py
similarity index 100%
rename from tests/providers/google/marketing_platform/sensors/test_display_video.py
rename to providers/tests/google/marketing_platform/sensors/test_display_video.py
diff --git a/tests/providers/google/leveldb/operators/__init__.py b/providers/tests/google/suite/__init__.py
similarity index 100%
rename from tests/providers/google/leveldb/operators/__init__.py
rename to providers/tests/google/suite/__init__.py
diff --git a/tests/providers/google/marketing_platform/links/__init__.py b/providers/tests/google/suite/hooks/__init__.py
similarity index 100%
rename from tests/providers/google/marketing_platform/links/__init__.py
rename to providers/tests/google/suite/hooks/__init__.py
diff --git a/tests/providers/google/suite/hooks/test_calendar.py b/providers/tests/google/suite/hooks/test_calendar.py
similarity index 98%
rename from tests/providers/google/suite/hooks/test_calendar.py
rename to providers/tests/google/suite/hooks/test_calendar.py
index e072043f6ea5..dfcf3e10626f 100644
--- a/tests/providers/google/suite/hooks/test_calendar.py
+++ b/providers/tests/google/suite/hooks/test_calendar.py
@@ -24,7 +24,8 @@
 from unittest import mock

 from airflow.providers.google.suite.hooks.calendar import GoogleCalendarHook
-from tests.providers.google.cloud.utils.base_gcp_mock import mock_base_gcp_hook_default_project_id
+
+from providers.tests.google.cloud.utils.base_gcp_mock import mock_base_gcp_hook_default_project_id

 API_VERSION = "api_version"
 GCP_CONN_ID = "test"
diff --git a/tests/providers/google/suite/hooks/test_drive.py b/providers/tests/google/suite/hooks/test_drive.py
similarity index 99%
rename from tests/providers/google/suite/hooks/test_drive.py
rename to providers/tests/google/suite/hooks/test_drive.py
index 00423f263401..a6fb0391d2c8 100644
--- a/tests/providers/google/suite/hooks/test_drive.py
+++ b/providers/tests/google/suite/hooks/test_drive.py
@@ -22,7 +22,8 @@
 import pytest

 from airflow.providers.google.suite.hooks.drive import GoogleDriveHook
-from tests.providers.google.cloud.utils.base_gcp_mock import GCP_CONNECTION_WITH_PROJECT_ID
+
+from providers.tests.google.cloud.utils.base_gcp_mock import GCP_CONNECTION_WITH_PROJECT_ID

 @pytest.mark.db_test
diff --git a/tests/providers/google/suite/hooks/test_sheets.py b/providers/tests/google/suite/hooks/test_sheets.py
similarity index 99%
rename from tests/providers/google/suite/hooks/test_sheets.py
rename to providers/tests/google/suite/hooks/test_sheets.py
index ca3159d7d1c9..1be75426a336 100644
--- a/tests/providers/google/suite/hooks/test_sheets.py
+++ b/providers/tests/google/suite/hooks/test_sheets.py
@@ -27,7 +27,8 @@
 from airflow.exceptions import AirflowException
 from airflow.providers.google.suite.hooks.sheets import GSheetsHook
-from tests.providers.google.cloud.utils.base_gcp_mock import mock_base_gcp_hook_default_project_id
+
+from providers.tests.google.cloud.utils.base_gcp_mock import mock_base_gcp_hook_default_project_id

 GCP_CONN_ID = "test"
 SPREADSHEET_ID = "1234567890"
diff --git a/tests/providers/google/suite/__init__.py b/providers/tests/google/suite/operators/__init__.py
similarity index 100%
rename from tests/providers/google/suite/__init__.py
rename to providers/tests/google/suite/operators/__init__.py
diff --git a/tests/providers/google/suite/operators/test_sheets.py b/providers/tests/google/suite/operators/test_sheets.py
similarity index 100%
rename from tests/providers/google/suite/operators/test_sheets.py
rename to providers/tests/google/suite/operators/test_sheets.py
diff --git a/tests/providers/google/suite/hooks/__init__.py b/providers/tests/google/suite/sensors/__init__.py
similarity index 100%
rename from tests/providers/google/suite/hooks/__init__.py
rename to providers/tests/google/suite/sensors/__init__.py
diff --git a/tests/providers/google/suite/sensors/test_drive.py b/providers/tests/google/suite/sensors/test_drive.py
similarity index 100%
rename from tests/providers/google/suite/sensors/test_drive.py
rename to providers/tests/google/suite/sensors/test_drive.py
diff --git a/tests/providers/ftp/operators/__init__.py b/providers/tests/google/suite/transfers/__init__.py
similarity index 100%
rename from tests/providers/ftp/operators/__init__.py
rename to providers/tests/google/suite/transfers/__init__.py
diff --git a/tests/providers/google/suite/transfers/test_gcs_to_gdrive.py b/providers/tests/google/suite/transfers/test_gcs_to_gdrive.py
similarity index 100%
rename from tests/providers/google/suite/transfers/test_gcs_to_gdrive.py
rename to providers/tests/google/suite/transfers/test_gcs_to_gdrive.py
diff --git a/tests/providers/google/suite/transfers/test_gcs_to_sheets.py b/providers/tests/google/suite/transfers/test_gcs_to_sheets.py
similarity index 100%
rename from tests/providers/google/suite/transfers/test_gcs_to_sheets.py
rename to providers/tests/google/suite/transfers/test_gcs_to_sheets.py
diff --git a/tests/providers/google/suite/transfers/test_local_to_drive.py b/providers/tests/google/suite/transfers/test_local_to_drive.py
similarity index 100%
rename from tests/providers/google/suite/transfers/test_local_to_drive.py
rename to providers/tests/google/suite/transfers/test_local_to_drive.py
diff --git a/tests/providers/google/suite/transfers/test_sql_to_sheets.py b/providers/tests/google/suite/transfers/test_sql_to_sheets.py
similarity index 100%
rename from tests/providers/google/suite/transfers/test_sql_to_sheets.py
rename to providers/tests/google/suite/transfers/test_sql_to_sheets.py
diff --git a/tests/providers/google/test_go_module.py b/providers/tests/google/test_go_module.py
similarity index 100%
rename from tests/providers/google/test_go_module.py
rename to providers/tests/google/test_go_module.py
diff --git a/tests/providers/google/suite/operators/__init__.py b/providers/tests/grpc/__init__.py
similarity index 100%
rename from tests/providers/google/suite/operators/__init__.py
rename to providers/tests/grpc/__init__.py
diff --git a/tests/providers/google/suite/sensors/__init__.py b/providers/tests/grpc/hooks/__init__.py
similarity index 100%
rename from tests/providers/google/suite/sensors/__init__.py
rename to providers/tests/grpc/hooks/__init__.py
diff --git a/tests/providers/grpc/hooks/test_grpc.py b/providers/tests/grpc/hooks/test_grpc.py
similarity index 100%
rename from tests/providers/grpc/hooks/test_grpc.py
rename to providers/tests/grpc/hooks/test_grpc.py
diff --git a/tests/providers/grpc/__init__.py b/providers/tests/grpc/operators/__init__.py
similarity index 100%
rename from tests/providers/grpc/__init__.py
rename to providers/tests/grpc/operators/__init__.py
diff --git a/tests/providers/grpc/operators/test_grpc.py b/providers/tests/grpc/operators/test_grpc.py
similarity index 100%
rename from tests/providers/grpc/operators/test_grpc.py
rename to providers/tests/grpc/operators/test_grpc.py
diff --git a/tests/providers/github/__init__.py b/providers/tests/hashicorp/__init__.py
similarity index 100%
rename from tests/providers/github/__init__.py
rename to providers/tests/hashicorp/__init__.py
diff --git a/tests/providers/github/hooks/__init__.py b/providers/tests/hashicorp/_internal_client/__init__.py
similarity index 100%
rename from tests/providers/github/hooks/__init__.py
rename to providers/tests/hashicorp/_internal_client/__init__.py
diff --git a/tests/providers/hashicorp/_internal_client/test_vault_client.py b/providers/tests/hashicorp/_internal_client/test_vault_client.py
similarity index 100%
rename from tests/providers/hashicorp/_internal_client/test_vault_client.py
rename to providers/tests/hashicorp/_internal_client/test_vault_client.py
diff --git a/tests/providers/github/operators/__init__.py b/providers/tests/hashicorp/hooks/__init__.py
similarity index 100%
rename from tests/providers/github/operators/__init__.py
rename to providers/tests/hashicorp/hooks/__init__.py
diff --git a/tests/providers/hashicorp/hooks/test_vault.py b/providers/tests/hashicorp/hooks/test_vault.py
similarity index 99%
rename from tests/providers/hashicorp/hooks/test_vault.py
rename to providers/tests/hashicorp/hooks/test_vault.py
index 1880be99749e..442a656d2006 100644
--- a/tests/providers/hashicorp/hooks/test_vault.py
+++ b/providers/tests/hashicorp/hooks/test_vault.py
@@ -26,7 +26,8 @@
 from airflow.configuration import AirflowConfigParser
 from airflow.exceptions import AirflowConfigException
 from airflow.providers.hashicorp.hooks.vault import VaultHook
-from tests.test_utils.config import conf_vars
+
+from dev.tests_common.test_utils.config import conf_vars

 class TestVaultHook:
diff --git a/tests/providers/github/sensors/__init__.py b/providers/tests/hashicorp/secrets/__init__.py
similarity index 100%
rename from tests/providers/github/sensors/__init__.py
rename to providers/tests/hashicorp/secrets/__init__.py
diff --git a/tests/providers/hashicorp/secrets/test_vault.py b/providers/tests/hashicorp/secrets/test_vault.py
similarity index 100%
rename from tests/providers/hashicorp/secrets/test_vault.py
rename to providers/tests/hashicorp/secrets/test_vault.py
diff --git a/tests/providers/grpc/hooks/__init__.py b/providers/tests/http/__init__.py
similarity index 100%
rename from tests/providers/grpc/hooks/__init__.py
rename to providers/tests/http/__init__.py
diff --git a/tests/providers/grpc/operators/__init__.py b/providers/tests/http/hooks/__init__.py
similarity index 100%
rename from tests/providers/grpc/operators/__init__.py
rename to providers/tests/http/hooks/__init__.py
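The test_vault.py hunk above changes only where conf_vars is imported from (tests.test_utils.config becomes dev.tests_common.test_utils.config); the helper itself is untouched. As a rough, hypothetical illustration of what a conf_vars-style override does (the real helper patches Airflow's config parser; every name below is invented for the sketch):

    from contextlib import contextmanager

    _CONFIG: dict[tuple[str, str], str] = {("core", "unit_test_mode"): "False"}

    @contextmanager
    def conf_vars(overrides: dict[tuple[str, str], str]):
        # Save the shadowed values, apply the overrides, and always restore.
        saved = {key: _CONFIG.get(key) for key in overrides}
        _CONFIG.update(overrides)
        try:
            yield
        finally:
            for key, old in saved.items():
                if old is None:
                    _CONFIG.pop(key, None)
                else:
                    _CONFIG[key] = old

    with conf_vars({("core", "unit_test_mode"): "True"}):
        assert _CONFIG[("core", "unit_test_mode")] == "True"
    assert _CONFIG[("core", "unit_test_mode")] == "False"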
diff --git a/tests/providers/http/hooks/test_http.py b/providers/tests/http/hooks/test_http.py
similarity index 95%
rename from tests/providers/http/hooks/test_http.py
rename to providers/tests/http/hooks/test_http.py
index 5b631df8a4d6..e09fd2d034e6 100644
--- a/tests/providers/http/hooks/test_http.py
+++ b/providers/tests/http/hooks/test_http.py
@@ -487,11 +487,15 @@ def test_default_auth_not_initialized(self, auth):
         auth.assert_not_called()

     def test_keep_alive_enabled(self):
-        with mock.patch(
-            "airflow.hooks.base.BaseHook.get_connection", side_effect=get_airflow_connection_with_port
-        ), mock.patch(
-            "requests_toolbelt.adapters.socket_options.TCPKeepAliveAdapter.send"
-        ) as tcp_keep_alive_send, mock.patch("requests.adapters.HTTPAdapter.send") as http_send:
+        with (
+            mock.patch(
+                "airflow.hooks.base.BaseHook.get_connection", side_effect=get_airflow_connection_with_port
+            ),
+            mock.patch(
+                "requests_toolbelt.adapters.socket_options.TCPKeepAliveAdapter.send"
+            ) as tcp_keep_alive_send,
+            mock.patch("requests.adapters.HTTPAdapter.send") as http_send,
+        ):
             hook = HttpHook(method="GET")
             response = Response()
             response.status_code = HTTPStatus.OK
@@ -502,11 +506,15 @@ def test_keep_alive_enabled(self):
             http_send.assert_not_called()

     def test_keep_alive_disabled(self):
-        with mock.patch(
-            "airflow.hooks.base.BaseHook.get_connection", side_effect=get_airflow_connection_with_port
-        ), mock.patch(
-            "requests_toolbelt.adapters.socket_options.TCPKeepAliveAdapter.send"
-        ) as tcp_keep_alive_send, mock.patch("requests.adapters.HTTPAdapter.send") as http_send:
+        with (
+            mock.patch(
+                "airflow.hooks.base.BaseHook.get_connection", side_effect=get_airflow_connection_with_port
+            ),
+            mock.patch(
+                "requests_toolbelt.adapters.socket_options.TCPKeepAliveAdapter.send"
+            ) as tcp_keep_alive_send,
+            mock.patch("requests.adapters.HTTPAdapter.send") as http_send,
+        ):
             hook = HttpHook(method="GET", tcp_keep_alive=False)
             response = Response()
             response.status_code = HTTPStatus.OK
@@ -536,9 +544,12 @@ async def test_do_api_call_async_non_retryable_error(self, aioresponse):
         hook = HttpAsyncHook(method="GET")
         aioresponse.get("http://httpbin.org/non_existent_endpoint", status=400)

-        with pytest.raises(AirflowException, match="400:Bad Request"), mock.patch.dict(
-            "os.environ",
-            AIRFLOW_CONN_HTTP_DEFAULT="http://httpbin.org/",
+        with (
+            pytest.raises(AirflowException, match="400:Bad Request"),
+            mock.patch.dict(
+                "os.environ",
+                AIRFLOW_CONN_HTTP_DEFAULT="http://httpbin.org/",
+            ),
         ):
             await hook.run(endpoint="non_existent_endpoint")
@@ -549,9 +560,12 @@ async def test_do_api_call_async_retryable_error(self, caplog, aioresponse):
         hook = HttpAsyncHook(method="GET")
         aioresponse.get("http://httpbin.org/non_existent_endpoint", status=500, repeat=True)

-        with pytest.raises(AirflowException, match="500:Internal Server Error"), mock.patch.dict(
-            "os.environ",
-            AIRFLOW_CONN_HTTP_DEFAULT="http://httpbin.org/",
+        with (
+            pytest.raises(AirflowException, match="500:Internal Server Error"),
+            mock.patch.dict(
+                "os.environ",
+                AIRFLOW_CONN_HTTP_DEFAULT="http://httpbin.org/",
+            ),
         ):
             await hook.run(endpoint="non_existent_endpoint")
@@ -700,8 +714,9 @@ def get_empty_conn(conn_id: str = "http_default"):
             return Connection(conn_id=conn_id, conn_type="http")

         hook = HttpAsyncHook()
-        with mock.patch("airflow.hooks.base.BaseHook.get_connection", side_effect=get_empty_conn), mock.patch(
-            "aiohttp.ClientSession.post", new_callable=mock.AsyncMock
-        ) as mocked_function:
+        with (
+            mock.patch("airflow.hooks.base.BaseHook.get_connection", side_effect=get_empty_conn),
+            mock.patch("aiohttp.ClientSession.post", new_callable=mock.AsyncMock) as mocked_function,
+        ):
             await hook.run("test.com:8080/v1/test")
             assert mocked_function.call_args.args[0] == "http://test.com:8080/v1/test"
diff --git a/tests/providers/http/__init__.py b/providers/tests/http/operators/__init__.py
similarity index 100%
rename from tests/providers/http/__init__.py
rename to providers/tests/http/operators/__init__.py
diff --git a/tests/providers/http/operators/test_http.py b/providers/tests/http/operators/test_http.py
similarity index 100%
rename from tests/providers/http/operators/test_http.py
rename to providers/tests/http/operators/test_http.py
diff --git a/tests/providers/http/hooks/__init__.py b/providers/tests/http/sensors/__init__.py
similarity index 100%
rename from tests/providers/http/hooks/__init__.py
rename to providers/tests/http/sensors/__init__.py
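The async HttpHook hunks above fold pytest.raises and mock.patch.dict into one parenthesized with-block. A small runnable sketch of that pairing (the function and the test are hypothetical; only the env-var name is taken from the hunk; assumes pytest is installed):

    import os
    from unittest import mock

    import pytest

    def read_required_env(name: str) -> str:
        value = os.environ.get(name)
        if value is None:
            raise KeyError(f"missing env var: {name}")
        return value

    def test_missing_connection_env():
        # clear=True empties os.environ for the duration of the block, so the
        # lookup is guaranteed to fail and pytest.raises captures the error.
        with (
            pytest.raises(KeyError, match="missing env var"),
            mock.patch.dict("os.environ", {}, clear=True),
        ):
            read_required_env("AIRFLOW_CONN_HTTP_DEFAULT")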
diff --git a/tests/providers/http/sensors/test_http.py b/providers/tests/http/sensors/test_http.py
similarity index 100%
rename from tests/providers/http/sensors/test_http.py
rename to providers/tests/http/sensors/test_http.py
diff --git a/tests/providers/http/operators/__init__.py b/providers/tests/http/triggers/__init__.py
similarity index 100%
rename from tests/providers/http/operators/__init__.py
rename to providers/tests/http/triggers/__init__.py
diff --git a/tests/providers/http/triggers/test_http.py b/providers/tests/http/triggers/test_http.py
similarity index 100%
rename from tests/providers/http/triggers/test_http.py
rename to providers/tests/http/triggers/test_http.py
diff --git a/tests/providers/http/sensors/__init__.py b/providers/tests/imap/__init__.py
similarity index 100%
rename from tests/providers/http/sensors/__init__.py
rename to providers/tests/imap/__init__.py
diff --git a/tests/providers/http/triggers/__init__.py b/providers/tests/imap/hooks/__init__.py
similarity index 100%
rename from tests/providers/http/triggers/__init__.py
rename to providers/tests/imap/hooks/__init__.py
diff --git a/tests/providers/imap/hooks/test_imap.py b/providers/tests/imap/hooks/test_imap.py
similarity index 99%
rename from tests/providers/imap/hooks/test_imap.py
rename to providers/tests/imap/hooks/test_imap.py
index 2430d3f66488..2971cfd1924c 100644
--- a/tests/providers/imap/hooks/test_imap.py
+++ b/providers/tests/imap/hooks/test_imap.py
@@ -27,7 +27,8 @@
 from airflow.models import Connection
 from airflow.providers.imap.hooks.imap import ImapHook
 from airflow.utils import db
-from tests.test_utils.config import conf_vars
+
+from dev.tests_common.test_utils.config import conf_vars

 pytestmark = pytest.mark.db_test
diff --git a/tests/providers/imap/__init__.py b/providers/tests/imap/sensors/__init__.py
similarity index 100%
rename from tests/providers/imap/__init__.py
rename to providers/tests/imap/sensors/__init__.py
diff --git a/tests/providers/imap/sensors/test_imap_attachment.py b/providers/tests/imap/sensors/test_imap_attachment.py
similarity index 100%
rename from tests/providers/imap/sensors/test_imap_attachment.py
rename to providers/tests/imap/sensors/test_imap_attachment.py
diff --git a/tests/providers/google/__init__.py b/providers/tests/influxdb/__init__.py
similarity index 100%
rename from tests/providers/google/__init__.py
rename to providers/tests/influxdb/__init__.py
diff --git a/tests/providers/google/ads/__init__.py b/providers/tests/influxdb/hooks/__init__.py
similarity index 100%
rename from tests/providers/google/ads/__init__.py
rename to providers/tests/influxdb/hooks/__init__.py
diff --git a/tests/providers/influxdb/hooks/test_influxdb.py b/providers/tests/influxdb/hooks/test_influxdb.py
similarity index 100%
rename from tests/providers/influxdb/hooks/test_influxdb.py
rename to providers/tests/influxdb/hooks/test_influxdb.py
diff --git a/tests/providers/google/ads/hooks/__init__.py b/providers/tests/influxdb/operators/__init__.py
similarity index 100%
rename from tests/providers/google/ads/hooks/__init__.py
rename to providers/tests/influxdb/operators/__init__.py
diff --git a/tests/providers/influxdb/operators/test_influxdb.py b/providers/tests/influxdb/operators/test_influxdb.py
similarity index 100%
rename from tests/providers/influxdb/operators/test_influxdb.py
rename to providers/tests/influxdb/operators/test_influxdb.py
diff --git a/tests/providers/google/ads/operators/__init__.py b/providers/tests/integration/__init__.py
similarity index 100%
rename from tests/providers/google/ads/operators/__init__.py
rename to providers/tests/integration/__init__.py
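test_imap.py above keeps its module-level marker, pytestmark = pytest.mark.db_test, while only the conf_vars import moves. One such assignment marks every test in the module, which is what lets whole files be selected or skipped by marker; a minimal sketch (marker name taken from the diff, test body illustrative):

    import pytest

    pytestmark = pytest.mark.db_test

    def test_something_that_touches_the_db():
        assert True

    # Select only these tests:  pytest -m db_test
    # Skip all of them:         pytest -m "not db_test"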
diff --git a/tests/providers/google/ads/transfers/__init__.py b/providers/tests/integration/apache/__init__.py
similarity index 100%
rename from tests/providers/google/ads/transfers/__init__.py
rename to providers/tests/integration/apache/__init__.py
diff --git a/tests/providers/google/cloud/__init__.py b/providers/tests/integration/apache/cassandra/__init__.py
similarity index 100%
rename from tests/providers/google/cloud/__init__.py
rename to providers/tests/integration/apache/cassandra/__init__.py
diff --git a/tests/providers/google/cloud/_internal_client/__init__.py b/providers/tests/integration/apache/cassandra/hooks/__init__.py
similarity index 100%
rename from tests/providers/google/cloud/_internal_client/__init__.py
rename to providers/tests/integration/apache/cassandra/hooks/__init__.py
diff --git a/tests/integration/providers/apache/cassandra/hooks/test_cassandra.py b/providers/tests/integration/apache/cassandra/hooks/test_cassandra.py
similarity index 100%
rename from tests/integration/providers/apache/cassandra/hooks/test_cassandra.py
rename to providers/tests/integration/apache/cassandra/hooks/test_cassandra.py
diff --git a/tests/providers/google/cloud/hooks/__init__.py b/providers/tests/integration/apache/drill/__init__.py
similarity index 100%
rename from tests/providers/google/cloud/hooks/__init__.py
rename to providers/tests/integration/apache/drill/__init__.py
diff --git a/tests/providers/google/cloud/hooks/vertex_ai/__init__.py b/providers/tests/integration/apache/drill/hooks/__init__.py
similarity index 100%
rename from tests/providers/google/cloud/hooks/vertex_ai/__init__.py
rename to providers/tests/integration/apache/drill/hooks/__init__.py
diff --git a/tests/integration/providers/apache/drill/hooks/test_drill.py b/providers/tests/integration/apache/drill/hooks/test_drill.py
similarity index 100%
rename from tests/integration/providers/apache/drill/hooks/test_drill.py
rename to providers/tests/integration/apache/drill/hooks/test_drill.py
diff --git a/tests/providers/google/cloud/links/__init__.py b/providers/tests/integration/apache/drill/operators/__init__.py
similarity index 100%
rename from tests/providers/google/cloud/links/__init__.py
rename to providers/tests/integration/apache/drill/operators/__init__.py
diff --git a/tests/providers/google/cloud/log/__init__.py b/providers/tests/integration/apache/hive/__init__.py
similarity index 100%
rename from tests/providers/google/cloud/log/__init__.py
rename to providers/tests/integration/apache/hive/__init__.py
diff --git a/tests/providers/google/cloud/openlineage/__init__.py b/providers/tests/integration/apache/hive/transfers/__init__.py
similarity index 100%
rename from tests/providers/google/cloud/openlineage/__init__.py
rename to providers/tests/integration/apache/hive/transfers/__init__.py
diff --git a/tests/integration/providers/apache/hive/transfers/test_mssql_to_hive.py b/providers/tests/integration/apache/hive/transfers/test_mssql_to_hive.py
similarity index 100%
rename from tests/integration/providers/apache/hive/transfers/test_mssql_to_hive.py
rename to providers/tests/integration/apache/hive/transfers/test_mssql_to_hive.py
diff --git a/tests/providers/google/cloud/operators/__init__.py b/providers/tests/integration/apache/kafka/__init__.py
similarity index 100%
rename from tests/providers/google/cloud/operators/__init__.py
rename to providers/tests/integration/apache/kafka/__init__.py
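The import edits repeated across these hunks are mechanical: tests.providers.* becomes providers.tests.*, and tests.test_utils.* becomes dev.tests_common.test_utils.*. A hypothetical helper (not part of this patch) showing how such a rewrite can be applied across a tree:

    import re
    from pathlib import Path

    # Patterns mirror the two rewrites visible in the hunks above.
    REWRITES = [
        (re.compile(r"\bfrom tests\.providers\."), "from providers.tests."),
        (re.compile(r"\bfrom tests\.test_utils"), "from dev.tests_common.test_utils"),
    ]

    def rewrite_imports(path: Path) -> bool:
        """Rewrite one file in place; return True if anything changed."""
        original = path.read_text()
        text = original
        for pattern, replacement in REWRITES:
            text = pattern.sub(replacement, text)
        if text != original:
            path.write_text(text)
            return True
        return False

    # Usage: for p in Path("providers/tests").rglob("*.py"): rewrite_imports(p)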
diff --git a/tests/providers/google/cloud/operators/source/__init__.py b/providers/tests/integration/apache/kafka/hooks/__init__.py
similarity index 100%
rename from tests/providers/google/cloud/operators/source/__init__.py
rename to providers/tests/integration/apache/kafka/hooks/__init__.py
diff --git a/tests/integration/providers/apache/kafka/hooks/test_admin_client.py b/providers/tests/integration/apache/kafka/hooks/test_admin_client.py
similarity index 100%
rename from tests/integration/providers/apache/kafka/hooks/test_admin_client.py
rename to providers/tests/integration/apache/kafka/hooks/test_admin_client.py
diff --git a/tests/integration/providers/apache/kafka/hooks/test_consumer.py b/providers/tests/integration/apache/kafka/hooks/test_consumer.py
similarity index 100%
rename from tests/integration/providers/apache/kafka/hooks/test_consumer.py
rename to providers/tests/integration/apache/kafka/hooks/test_consumer.py
diff --git a/tests/integration/providers/apache/kafka/hooks/test_producer.py b/providers/tests/integration/apache/kafka/hooks/test_producer.py
similarity index 100%
rename from tests/integration/providers/apache/kafka/hooks/test_producer.py
rename to providers/tests/integration/apache/kafka/hooks/test_producer.py
diff --git a/tests/providers/google/cloud/operators/source/source_prefix/__init__.py b/providers/tests/integration/apache/kafka/operators/__init__.py
similarity index 100%
rename from tests/providers/google/cloud/operators/source/source_prefix/__init__.py
rename to providers/tests/integration/apache/kafka/operators/__init__.py
diff --git a/tests/integration/providers/apache/kafka/operators/test_consume.py b/providers/tests/integration/apache/kafka/operators/test_consume.py
similarity index 100%
rename from tests/integration/providers/apache/kafka/operators/test_consume.py
rename to providers/tests/integration/apache/kafka/operators/test_consume.py
diff --git a/tests/integration/providers/apache/kafka/operators/test_produce.py b/providers/tests/integration/apache/kafka/operators/test_produce.py
similarity index 100%
rename from tests/integration/providers/apache/kafka/operators/test_produce.py
rename to providers/tests/integration/apache/kafka/operators/test_produce.py
diff --git a/tests/providers/google/cloud/operators/vertex_ai/__init__.py b/providers/tests/integration/apache/kafka/sensors/__init__.py
similarity index 100%
rename from tests/providers/google/cloud/operators/vertex_ai/__init__.py
rename to providers/tests/integration/apache/kafka/sensors/__init__.py
diff --git a/tests/providers/google/cloud/secrets/__init__.py b/providers/tests/integration/apache/kafka/triggers/__init__.py
similarity index 100%
rename from tests/providers/google/cloud/secrets/__init__.py
rename to providers/tests/integration/apache/kafka/triggers/__init__.py
diff --git a/tests/integration/providers/apache/kafka/triggers/test_await_message.py b/providers/tests/integration/apache/kafka/triggers/test_await_message.py
similarity index 100%
rename from tests/integration/providers/apache/kafka/triggers/test_await_message.py
rename to providers/tests/integration/apache/kafka/triggers/test_await_message.py
diff --git a/tests/providers/google/cloud/sensors/__init__.py b/providers/tests/integration/apache/pinot/__init__.py
similarity index 100%
rename from tests/providers/google/cloud/sensors/__init__.py
rename to providers/tests/integration/apache/pinot/__init__.py
diff --git a/tests/providers/google/cloud/transfers/__init__.py b/providers/tests/integration/apache/pinot/hooks/__init__.py
similarity index 100%
rename from tests/providers/google/cloud/transfers/__init__.py
rename to providers/tests/integration/apache/pinot/hooks/__init__.py
diff --git a/tests/integration/providers/apache/pinot/hooks/test_pinot.py b/providers/tests/integration/apache/pinot/hooks/test_pinot.py
similarity index 100%
rename from tests/integration/providers/apache/pinot/hooks/test_pinot.py
rename to providers/tests/integration/apache/pinot/hooks/test_pinot.py
diff --git a/tests/providers/google/cloud/triggers/__init__.py b/providers/tests/integration/google/__init__.py
similarity index 100%
rename from tests/providers/google/cloud/triggers/__init__.py
rename to providers/tests/integration/google/__init__.py
diff --git a/tests/providers/google/common/__init__.py b/providers/tests/integration/google/cloud/__init__.py
similarity index 100%
rename from tests/providers/google/common/__init__.py
rename to providers/tests/integration/google/cloud/__init__.py
diff --git a/tests/providers/google/common/auth_backend/__init__.py b/providers/tests/integration/google/cloud/transfers/__init__.py
similarity index 100%
rename from tests/providers/google/common/auth_backend/__init__.py
rename to providers/tests/integration/google/cloud/transfers/__init__.py
diff --git a/tests/integration/providers/google/cloud/transfers/test_bigquery_to_mssql.py b/providers/tests/integration/google/cloud/transfers/test_bigquery_to_mssql.py
similarity index 100%
rename from tests/integration/providers/google/cloud/transfers/test_bigquery_to_mssql.py
rename to providers/tests/integration/google/cloud/transfers/test_bigquery_to_mssql.py
diff --git a/tests/integration/providers/google/cloud/transfers/test_mssql_to_gcs.py b/providers/tests/integration/google/cloud/transfers/test_mssql_to_gcs.py
similarity index 100%
rename from tests/integration/providers/google/cloud/transfers/test_mssql_to_gcs.py
rename to providers/tests/integration/google/cloud/transfers/test_mssql_to_gcs.py
diff --git a/tests/integration/providers/google/cloud/transfers/test_trino_to_gcs.py b/providers/tests/integration/google/cloud/transfers/test_trino_to_gcs.py
similarity index 100%
rename from tests/integration/providers/google/cloud/transfers/test_trino_to_gcs.py
rename to providers/tests/integration/google/cloud/transfers/test_trino_to_gcs.py
diff --git a/tests/providers/google/common/hooks/__init__.py b/providers/tests/integration/microsoft/__init__.py
similarity index 100%
rename from tests/providers/google/common/hooks/__init__.py
rename to providers/tests/integration/microsoft/__init__.py
diff --git a/tests/providers/google/common/utils/__init__.py b/providers/tests/integration/microsoft/mssql/__init__.py
similarity index 100%
rename from tests/providers/google/common/utils/__init__.py
rename to providers/tests/integration/microsoft/mssql/__init__.py
diff --git a/tests/providers/google/datasets/__init__.py b/providers/tests/integration/microsoft/mssql/hooks/__init__.py
similarity index 100%
rename from tests/providers/google/datasets/__init__.py
rename to providers/tests/integration/microsoft/mssql/hooks/__init__.py
diff --git a/tests/integration/providers/microsoft/mssql/hooks/test_mssql.py b/providers/tests/integration/microsoft/mssql/hooks/test_mssql.py
similarity index 100%
rename from tests/integration/providers/microsoft/mssql/hooks/test_mssql.py
rename to providers/tests/integration/microsoft/mssql/hooks/test_mssql.py
diff --git a/tests/providers/google/firebase/__init__.py b/providers/tests/integration/mongo/__init__.py
similarity index 100%
rename from tests/providers/google/firebase/__init__.py
rename to providers/tests/integration/mongo/__init__.py
diff --git a/tests/providers/google/firebase/hooks/__init__.py b/providers/tests/integration/mongo/sensors/__init__.py
similarity index 100%
rename from tests/providers/google/firebase/hooks/__init__.py
rename to providers/tests/integration/mongo/sensors/__init__.py
diff --git a/tests/integration/providers/mongo/sensors/test_mongo.py b/providers/tests/integration/mongo/sensors/test_mongo.py
similarity index 100%
rename from tests/integration/providers/mongo/sensors/test_mongo.py
rename to providers/tests/integration/mongo/sensors/test_mongo.py
diff --git a/tests/providers/google/firebase/operators/__init__.py b/providers/tests/integration/openlineage/__init__.py
similarity index 100%
rename from tests/providers/google/firebase/operators/__init__.py
rename to providers/tests/integration/openlineage/__init__.py
diff --git a/tests/providers/google/marketing_platform/__init__.py b/providers/tests/integration/openlineage/operators/__init__.py
similarity index 100%
rename from tests/providers/google/marketing_platform/__init__.py
rename to providers/tests/integration/openlineage/operators/__init__.py
diff --git a/tests/providers/google/marketing_platform/hooks/__init__.py b/providers/tests/integration/qdrant/__init__.py
similarity index 100%
rename from tests/providers/google/marketing_platform/hooks/__init__.py
rename to providers/tests/integration/qdrant/__init__.py
diff --git a/tests/providers/google/marketing_platform/operators/__init__.py b/providers/tests/integration/qdrant/hooks/__init__.py
similarity index 100%
rename from tests/providers/google/marketing_platform/operators/__init__.py
rename to providers/tests/integration/qdrant/hooks/__init__.py
diff --git a/tests/integration/providers/qdrant/hooks/test_qdrant.py b/providers/tests/integration/qdrant/hooks/test_qdrant.py
similarity index 100%
rename from tests/integration/providers/qdrant/hooks/test_qdrant.py
rename to providers/tests/integration/qdrant/hooks/test_qdrant.py
diff --git a/tests/providers/google/marketing_platform/sensors/__init__.py b/providers/tests/integration/qdrant/operators/__init__.py
similarity index 100%
rename from tests/providers/google/marketing_platform/sensors/__init__.py
rename to providers/tests/integration/qdrant/operators/__init__.py
diff --git a/tests/integration/providers/qdrant/operators/test_qdrant_ingest.py b/providers/tests/integration/qdrant/operators/test_qdrant_ingest.py
similarity index 100%
rename from tests/integration/providers/qdrant/operators/test_qdrant_ingest.py
rename to providers/tests/integration/qdrant/operators/test_qdrant_ingest.py
diff --git a/tests/providers/google/suite/transfers/__init__.py b/providers/tests/integration/redis/__init__.py
similarity index 100%
rename from tests/providers/google/suite/transfers/__init__.py
rename to providers/tests/integration/redis/__init__.py
diff --git a/tests/providers/hashicorp/__init__.py b/providers/tests/integration/redis/hooks/__init__.py
similarity index 100%
rename from tests/providers/hashicorp/__init__.py
rename to providers/tests/integration/redis/hooks/__init__.py
diff --git a/tests/integration/providers/redis/hooks/test_redis.py b/providers/tests/integration/redis/hooks/test_redis.py
similarity index 100%
rename from tests/integration/providers/redis/hooks/test_redis.py
rename to providers/tests/integration/redis/hooks/test_redis.py
diff --git a/tests/providers/hashicorp/_internal_client/__init__.py b/providers/tests/integration/redis/operators/__init__.py
similarity index 100%
rename from tests/providers/hashicorp/_internal_client/__init__.py
rename to providers/tests/integration/redis/operators/__init__.py
diff --git a/tests/integration/providers/redis/operators/test_redis_publish.py b/providers/tests/integration/redis/operators/test_redis_publish.py
similarity index 100%
rename from tests/integration/providers/redis/operators/test_redis_publish.py
rename to providers/tests/integration/redis/operators/test_redis_publish.py
diff --git a/tests/providers/hashicorp/hooks/__init__.py b/providers/tests/integration/redis/sensors/__init__.py
similarity index 100%
rename from tests/providers/hashicorp/hooks/__init__.py
rename to providers/tests/integration/redis/sensors/__init__.py
diff --git a/tests/integration/providers/redis/sensors/test_redis_key.py b/providers/tests/integration/redis/sensors/test_redis_key.py
similarity index 100%
rename from tests/integration/providers/redis/sensors/test_redis_key.py
rename to providers/tests/integration/redis/sensors/test_redis_key.py
diff --git a/tests/integration/providers/redis/sensors/test_redis_pub_sub.py b/providers/tests/integration/redis/sensors/test_redis_pub_sub.py
similarity index 100%
rename from tests/integration/providers/redis/sensors/test_redis_pub_sub.py
rename to providers/tests/integration/redis/sensors/test_redis_pub_sub.py
diff --git a/tests/providers/hashicorp/secrets/__init__.py b/providers/tests/integration/trino/__init__.py
similarity index 100%
rename from tests/providers/hashicorp/secrets/__init__.py
rename to providers/tests/integration/trino/__init__.py
diff --git a/tests/providers/influxdb/__init__.py b/providers/tests/integration/trino/hooks/__init__.py
similarity index 100%
rename from tests/providers/influxdb/__init__.py
rename to providers/tests/integration/trino/hooks/__init__.py
diff --git a/tests/integration/providers/trino/hooks/test_trino.py b/providers/tests/integration/trino/hooks/test_trino.py
similarity index 100%
rename from tests/integration/providers/trino/hooks/test_trino.py
rename to providers/tests/integration/trino/hooks/test_trino.py
diff --git a/tests/providers/influxdb/hooks/__init__.py b/providers/tests/integration/ydb/__init__.py
similarity index 100%
rename from tests/providers/influxdb/hooks/__init__.py
rename to providers/tests/integration/ydb/__init__.py
diff --git a/tests/providers/influxdb/operators/__init__.py b/providers/tests/integration/ydb/hooks/__init__.py
similarity index 100%
rename from tests/providers/influxdb/operators/__init__.py
rename to providers/tests/integration/ydb/hooks/__init__.py
diff --git a/tests/providers/jenkins/hooks/__init__.py b/providers/tests/integration/ydb/operators/__init__.py
similarity index 100%
rename from tests/providers/jenkins/hooks/__init__.py
rename to providers/tests/integration/ydb/operators/__init__.py
diff --git a/tests/integration/providers/ydb/operators/test_ydb.py b/providers/tests/integration/ydb/operators/test_ydb.py
similarity index 100%
rename from tests/integration/providers/ydb/operators/test_ydb.py
rename to providers/tests/integration/ydb/operators/test_ydb.py
diff --git a/tests/providers/imap/hooks/__init__.py b/providers/tests/jdbc/__init__.py
similarity index 100%
rename from tests/providers/imap/hooks/__init__.py
rename to providers/tests/jdbc/__init__.py
diff --git a/tests/providers/imap/sensors/__init__.py b/providers/tests/jdbc/hooks/__init__.py
similarity index 100%
rename from tests/providers/imap/sensors/__init__.py
rename to providers/tests/jdbc/hooks/__init__.py
diff --git a/tests/providers/jdbc/hooks/test_jdbc.py b/providers/tests/jdbc/hooks/test_jdbc.py
similarity index 97%
rename from tests/providers/jdbc/hooks/test_jdbc.py
rename to providers/tests/jdbc/hooks/test_jdbc.py
index f26a9d7ffb5b..cfb27934d86d 100644
--- a/tests/providers/jdbc/hooks/test_jdbc.py
+++ b/providers/tests/jdbc/hooks/test_jdbc.py
@@ -157,9 +157,10 @@ def test_driver_extra_works_when_allow_driver_extra(self):
         assert hook.driver_class == "Blah driver class"

     def test_default_driver_set(self):
-        with patch.object(JdbcHook, "default_driver_path", "Blah driver path") as _, patch.object(
-            JdbcHook, "default_driver_class", "Blah driver class"
-        ) as _:
+        with (
+            patch.object(JdbcHook, "default_driver_path", "Blah driver path") as _,
+            patch.object(JdbcHook, "default_driver_class", "Blah driver class") as _,
+        ):
             hook = get_hook()
             assert hook.driver_path == "Blah driver path"
             assert hook.driver_class == "Blah driver class"
diff --git a/tests/providers/jdbc/__init__.py b/providers/tests/jdbc/operators/__init__.py
similarity index 100%
rename from tests/providers/jdbc/__init__.py
rename to providers/tests/jdbc/operators/__init__.py
diff --git a/tests/providers/jdbc/operators/test_jdbc.py b/providers/tests/jdbc/operators/test_jdbc.py
similarity index 100%
rename from tests/providers/jdbc/operators/test_jdbc.py
rename to providers/tests/jdbc/operators/test_jdbc.py
diff --git a/tests/providers/jdbc/hooks/__init__.py b/providers/tests/jenkins/__init__.py
similarity index 100%
rename from tests/providers/jdbc/hooks/__init__.py
rename to providers/tests/jenkins/__init__.py
diff --git a/tests/providers/microsoft/azure/log/__init__.py b/providers/tests/jenkins/hooks/__init__.py
similarity index 100%
rename from tests/providers/microsoft/azure/log/__init__.py
rename to providers/tests/jenkins/hooks/__init__.py
diff --git a/tests/providers/jenkins/hooks/test_jenkins.py b/providers/tests/jenkins/hooks/test_jenkins.py
similarity index 100%
rename from tests/providers/jenkins/hooks/test_jenkins.py
rename to providers/tests/jenkins/hooks/test_jenkins.py
diff --git a/tests/providers/jdbc/operators/__init__.py b/providers/tests/jenkins/operators/__init__.py
similarity index 100%
rename from tests/providers/jdbc/operators/__init__.py
rename to providers/tests/jenkins/operators/__init__.py
diff --git a/tests/providers/jenkins/operators/test_jenkins_job_trigger.py b/providers/tests/jenkins/operators/test_jenkins_job_trigger.py
similarity index 85%
rename from tests/providers/jenkins/operators/test_jenkins_job_trigger.py
rename to providers/tests/jenkins/operators/test_jenkins_job_trigger.py
index d21cdbedc007..2d740da1fe9f 100644
--- a/tests/providers/jenkins/operators/test_jenkins_job_trigger.py
+++ b/providers/tests/jenkins/operators/test_jenkins_job_trigger.py
@@ -46,13 +46,16 @@ def test_execute(self, parameters, mocker):
         hook_mock = Mock(spec=JenkinsHook)
         hook_mock.get_jenkins_server.return_value = jenkins_mock

-        with patch.object(
-            JenkinsJobTriggerOperator,
-            "hook",
-            new_callable=mocker.PropertyMock,
-        ) as hook_mocked, patch(
-            "airflow.providers.jenkins.operators.jenkins_job_trigger.jenkins_request_with_headers"
-        ) as mock_make_request:
+        with (
+            patch.object(
+                JenkinsJobTriggerOperator,
+                "hook",
+                new_callable=mocker.PropertyMock,
+            ) as hook_mocked,
+            patch(
+                "airflow.providers.jenkins.operators.jenkins_job_trigger.jenkins_request_with_headers"
+            ) as mock_make_request,
+        ):
             mock_make_request.side_effect = [
                 {"body": "", "headers": {"Location": "http://what-a-strange.url/18"}},
                 {"body": '{"executable":{"number":"1"}}', "headers": {}},
@@ -86,13 +89,16 @@ def test_execute_job_polling_loop(self, parameters, mocker):
         hook_mock = Mock(spec=JenkinsHook)
         hook_mock.get_jenkins_server.return_value = jenkins_mock

-        with patch.object(
-            JenkinsJobTriggerOperator,
-            "hook",
-            new_callable=mocker.PropertyMock,
-        ) as hook_mocked, patch(
-            "airflow.providers.jenkins.operators.jenkins_job_trigger.jenkins_request_with_headers"
-        ) as mock_make_request:
+        with (
+            patch.object(
+                JenkinsJobTriggerOperator,
+                "hook",
+                new_callable=mocker.PropertyMock,
+            ) as hook_mocked,
+            patch(
+                "airflow.providers.jenkins.operators.jenkins_job_trigger.jenkins_request_with_headers"
+            ) as mock_make_request,
+        ):
             mock_make_request.side_effect = [
                 {"body": "", "headers": {"Location": "http://what-a-strange.url/18"}},
                 {"body": '{"executable":{"number":"1"}}', "headers": {}},
@@ -124,13 +130,16 @@ def test_execute_job_failure(self, parameters, mocker):
         hook_mock = Mock(spec=JenkinsHook)
         hook_mock.get_jenkins_server.return_value = jenkins_mock

-        with patch.object(
-            JenkinsJobTriggerOperator,
-            "hook",
-            new_callable=mocker.PropertyMock,
-        ) as hook_mocked, patch(
-            "airflow.providers.jenkins.operators.jenkins_job_trigger.jenkins_request_with_headers"
-        ) as mock_make_request:
+        with (
+            patch.object(
+                JenkinsJobTriggerOperator,
+                "hook",
+                new_callable=mocker.PropertyMock,
+            ) as hook_mocked,
+            patch(
+                "airflow.providers.jenkins.operators.jenkins_job_trigger.jenkins_request_with_headers"
+            ) as mock_make_request,
+        ):
             mock_make_request.side_effect = [
                 {"body": "", "headers": {"Location": "http://what-a-strange.url/18"}},
                 {"body": '{"executable":{"number":"1"}}', "headers": {}},
@@ -182,13 +191,16 @@ def test_allowed_jenkins_states(self, state, allowed_jenkins_states, mocker):
         hook_mock = Mock(spec=JenkinsHook)
         hook_mock.get_jenkins_server.return_value = jenkins_mock

-        with patch.object(
-            JenkinsJobTriggerOperator,
-            "hook",
-            new_callable=mocker.PropertyMock,
-        ) as hook_mocked, patch(
-            "airflow.providers.jenkins.operators.jenkins_job_trigger.jenkins_request_with_headers",
-        ) as mock_make_request:
+        with (
+            patch.object(
+                JenkinsJobTriggerOperator,
+                "hook",
+                new_callable=mocker.PropertyMock,
+            ) as hook_mocked,
+            patch(
+                "airflow.providers.jenkins.operators.jenkins_job_trigger.jenkins_request_with_headers",
+            ) as mock_make_request,
+        ):
             mock_make_request.side_effect = [
                 {"body": "", "headers": {"Location": "http://what-a-strange.url/18"}},
                 {"body": '{"executable":{"number":"1"}}', "headers": {}},
@@ -246,13 +258,16 @@ def test_allowed_jenkins_states_failure(self, state, allowed_jenkins_states, moc
         hook_mock = Mock(spec=JenkinsHook)
         hook_mock.get_jenkins_server.return_value = jenkins_mock

-        with patch.object(
-            JenkinsJobTriggerOperator,
-            "hook",
-            new_callable=mocker.PropertyMock,
-        ) as hook_mocked, patch(
-            "airflow.providers.jenkins.operators.jenkins_job_trigger.jenkins_request_with_headers"
-        ) as mock_make_request:
+        with (
+            patch.object(
+                JenkinsJobTriggerOperator,
+                "hook",
+                new_callable=mocker.PropertyMock,
+            ) as hook_mocked,
+            patch(
+                "airflow.providers.jenkins.operators.jenkins_job_trigger.jenkins_request_with_headers"
+            ) as mock_make_request,
+        ):
             mock_make_request.side_effect = [
                 {"body": "", "headers": {"Location": "http://what-a-strange.url/18"}},
                 {"body": '{"executable":{"number":"1"}}', "headers": {}},
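The Jenkins hunks above lean on two mock idioms at once: the operator's hook is a property, so it must be patched with new_callable=PropertyMock rather than a plain Mock, and successive HTTP responses are scripted through side_effect, where a list makes each call return the next item. A self-contained sketch with hypothetical class names (the response dicts are copied from the test above):

    from unittest.mock import Mock, PropertyMock, patch

    class TriggerOperator:
        @property
        def hook(self):  # in real code this would build a Jenkins client
            raise RuntimeError("no server available in unit tests")

    request = Mock()
    # Each call consumes the next scripted response: first "job queued" with a
    # Location header, then the poll result carrying the build number.
    request.side_effect = [
        {"body": "", "headers": {"Location": "http://what-a-strange.url/18"}},
        {"body": '{"executable":{"number":"1"}}', "headers": {}},
    ]

    with patch.object(TriggerOperator, "hook", new_callable=PropertyMock) as hook_mocked:
        hook_mocked.return_value = "fake-hook"
        assert TriggerOperator().hook == "fake-hook"
        assert request("trigger")["headers"]["Location"].endswith("/18")
        assert request("poll")["body"] == '{"executable":{"number":"1"}}'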
diff --git a/tests/providers/jenkins/__init__.py b/providers/tests/jenkins/sensors/__init__.py
similarity index 100%
rename from tests/providers/jenkins/__init__.py
rename to providers/tests/jenkins/sensors/__init__.py
diff --git a/tests/providers/jenkins/sensors/test_jenkins.py b/providers/tests/jenkins/sensors/test_jenkins.py
similarity index 100%
rename from tests/providers/jenkins/sensors/test_jenkins.py
rename to providers/tests/jenkins/sensors/test_jenkins.py
diff --git a/tests/providers/jenkins/operators/__init__.py b/providers/tests/microsoft/__init__.py
similarity index 100%
rename from tests/providers/jenkins/operators/__init__.py
rename to providers/tests/microsoft/__init__.py
diff --git a/tests/providers/jenkins/sensors/__init__.py b/providers/tests/microsoft/azure/__init__.py
similarity index 100%
rename from tests/providers/jenkins/sensors/__init__.py
rename to providers/tests/microsoft/azure/__init__.py
diff --git a/tests/providers/microsoft/azure/base.py b/providers/tests/microsoft/azure/base.py
similarity index 91%
rename from tests/providers/microsoft/azure/base.py
rename to providers/tests/microsoft/azure/base.py
index cad6c1449fdc..98c0a59867ea 100644
--- a/tests/providers/microsoft/azure/base.py
+++ b/providers/tests/microsoft/azure/base.py
@@ -26,7 +26,8 @@
 from airflow.exceptions import TaskDeferred
 from airflow.providers.microsoft.azure.hooks.msgraph import KiotaRequestAdapterHook
-from tests.providers.microsoft.conftest import get_airflow_connection, mock_context
+
+from providers.tests.microsoft.conftest import get_airflow_connection, mock_context

 if TYPE_CHECKING:
     from airflow.models import Operator
@@ -39,9 +40,10 @@ def teardown_method(self, method):

     @contextmanager
     def patch_hook_and_request_adapter(self, response):
-        with patch(
-            "airflow.hooks.base.BaseHook.get_connection", side_effect=get_airflow_connection
-        ), patch.object(HttpxRequestAdapter, "get_http_response_message") as mock_get_http_response:
+        with (
+            patch("airflow.hooks.base.BaseHook.get_connection", side_effect=get_airflow_connection),
+            patch.object(HttpxRequestAdapter, "get_http_response_message") as mock_get_http_response,
+        ):
             if isinstance(response, Exception):
                 mock_get_http_response.side_effect = response
             else:
diff --git a/tests/providers/microsoft/__init__.py b/providers/tests/microsoft/azure/fs/__init__.py
similarity index 100%
rename from tests/providers/microsoft/__init__.py
rename to providers/tests/microsoft/azure/fs/__init__.py
diff --git a/tests/providers/microsoft/azure/fs/test_adls.py b/providers/tests/microsoft/azure/fs/test_adls.py
similarity index 100%
rename from tests/providers/microsoft/azure/fs/test_adls.py
rename to providers/tests/microsoft/azure/fs/test_adls.py
diff --git a/tests/providers/microsoft/azure/__init__.py b/providers/tests/microsoft/azure/hooks/__init__.py
similarity index 100%
rename from tests/providers/microsoft/azure/__init__.py
rename to providers/tests/microsoft/azure/hooks/__init__.py
diff --git a/tests/providers/microsoft/azure/hooks/test_adx.py b/providers/tests/microsoft/azure/hooks/test_adx.py
similarity index 100%
rename from tests/providers/microsoft/azure/hooks/test_adx.py
rename to providers/tests/microsoft/azure/hooks/test_adx.py
diff --git a/tests/providers/microsoft/azure/hooks/test_asb.py b/providers/tests/microsoft/azure/hooks/test_asb.py
similarity index 100%
rename from tests/providers/microsoft/azure/hooks/test_asb.py
rename to providers/tests/microsoft/azure/hooks/test_asb.py
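base.py's patch_hook_and_request_adapter above wraps two patches in a reusable @contextmanager so each test can stub the connection lookup and the HTTP response in a single line, routing exceptions to side_effect and plain values to return_value. A stripped-down sketch of the same shape (stand-in patch targets instead of the msgraph ones; Python 3.10+ for the with-parentheses):

    import os
    from contextlib import contextmanager
    from unittest.mock import patch

    @contextmanager
    def patch_cwd_and_cpus(response):
        with (
            patch("os.getcwd", return_value="/stubbed"),
            patch("os.cpu_count") as mock_cpu_count,
        ):
            # Mirror the helper above: an Exception is raised when the stub is
            # called, anything else is returned as the stubbed value.
            if isinstance(response, Exception):
                mock_cpu_count.side_effect = response
            else:
                mock_cpu_count.return_value = response
            yield mock_cpu_count

    with patch_cwd_and_cpus(8):
        assert os.cpu_count() == 8
        assert os.getcwd() == "/stubbed"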
diff --git a/tests/providers/microsoft/azure/hooks/test_base_azure.py b/providers/tests/microsoft/azure/hooks/test_base_azure.py
similarity index 100%
rename from tests/providers/microsoft/azure/hooks/test_base_azure.py
rename to providers/tests/microsoft/azure/hooks/test_base_azure.py
diff --git a/tests/providers/microsoft/azure/hooks/test_batch.py b/providers/tests/microsoft/azure/hooks/test_batch.py
similarity index 100%
rename from tests/providers/microsoft/azure/hooks/test_batch.py
rename to providers/tests/microsoft/azure/hooks/test_batch.py
diff --git a/tests/providers/microsoft/azure/hooks/test_container_instance.py b/providers/tests/microsoft/azure/hooks/test_container_instance.py
similarity index 95%
rename from tests/providers/microsoft/azure/hooks/test_container_instance.py
rename to providers/tests/microsoft/azure/hooks/test_container_instance.py
index 09dfc167883b..38b2c743d57f 100644
--- a/tests/providers/microsoft/azure/hooks/test_container_instance.py
+++ b/providers/tests/microsoft/azure/hooks/test_container_instance.py
@@ -60,10 +60,13 @@ def setup_test_cases(self, create_mock_connection):
         )
         self.resources = ResourceRequirements(requests=ResourceRequests(memory_in_gb="4", cpu="1"))
         self.hook = AzureContainerInstanceHook(azure_conn_id=mock_connection.conn_id)
-        with patch("azure.mgmt.containerinstance.ContainerInstanceManagementClient"), patch(
-            "azure.common.credentials.ServicePrincipalCredentials.__init__",
-            autospec=True,
-            return_value=None,
+        with (
+            patch("azure.mgmt.containerinstance.ContainerInstanceManagementClient"),
+            patch(
+                "azure.common.credentials.ServicePrincipalCredentials.__init__",
+                autospec=True,
+                return_value=None,
+            ),
         ):
             yield
diff --git a/tests/providers/microsoft/azure/hooks/test_container_registry.py b/providers/tests/microsoft/azure/hooks/test_container_registry.py
similarity index 100%
rename from tests/providers/microsoft/azure/hooks/test_container_registry.py
rename to providers/tests/microsoft/azure/hooks/test_container_registry.py
diff --git a/tests/providers/microsoft/azure/hooks/test_container_volume.py b/providers/tests/microsoft/azure/hooks/test_container_volume.py
similarity index 100%
rename from tests/providers/microsoft/azure/hooks/test_container_volume.py
rename to providers/tests/microsoft/azure/hooks/test_container_volume.py
diff --git a/tests/providers/microsoft/azure/hooks/test_cosmos.py b/providers/tests/microsoft/azure/hooks/test_cosmos.py
similarity index 100%
rename from tests/providers/microsoft/azure/hooks/test_cosmos.py
rename to providers/tests/microsoft/azure/hooks/test_cosmos.py
diff --git a/tests/providers/microsoft/azure/hooks/test_data_factory.py b/providers/tests/microsoft/azure/hooks/test_data_factory.py
similarity index 100%
rename from tests/providers/microsoft/azure/hooks/test_data_factory.py
rename to providers/tests/microsoft/azure/hooks/test_data_factory.py
diff --git a/tests/providers/microsoft/azure/hooks/test_data_lake.py b/providers/tests/microsoft/azure/hooks/test_data_lake.py
similarity index 100%
rename from tests/providers/microsoft/azure/hooks/test_data_lake.py
rename to providers/tests/microsoft/azure/hooks/test_data_lake.py
diff --git a/tests/providers/microsoft/azure/hooks/test_fileshare.py b/providers/tests/microsoft/azure/hooks/test_fileshare.py
similarity index 100%
rename from tests/providers/microsoft/azure/hooks/test_fileshare.py
rename to providers/tests/microsoft/azure/hooks/test_fileshare.py
diff --git a/tests/providers/microsoft/azure/hooks/test_msgraph.py b/providers/tests/microsoft/azure/hooks/test_msgraph.py
similarity index 99%
rename from tests/providers/microsoft/azure/hooks/test_msgraph.py
rename to providers/tests/microsoft/azure/hooks/test_msgraph.py
index 04e85525616b..0ecad98548b5 100644
--- a/tests/providers/microsoft/azure/hooks/test_msgraph.py
+++ b/providers/tests/microsoft/azure/hooks/test_msgraph.py
@@ -30,7 +30,8 @@
     DefaultResponseHandler,
     KiotaRequestAdapterHook,
 )
-from tests.providers.microsoft.conftest import (
+
+from providers.tests.microsoft.conftest import (
     get_airflow_connection,
     load_file,
     load_json,
diff --git a/tests/providers/microsoft/azure/hooks/test_powerbi.py b/providers/tests/microsoft/azure/hooks/test_powerbi.py
similarity index 100%
rename from tests/providers/microsoft/azure/hooks/test_powerbi.py
rename to providers/tests/microsoft/azure/hooks/test_powerbi.py
diff --git a/tests/providers/microsoft/azure/hooks/test_synapse.py b/providers/tests/microsoft/azure/hooks/test_synapse.py
similarity index 100%
rename from tests/providers/microsoft/azure/hooks/test_synapse.py
rename to providers/tests/microsoft/azure/hooks/test_synapse.py
diff --git a/tests/providers/microsoft/azure/hooks/test_synapse_pipeline.py b/providers/tests/microsoft/azure/hooks/test_synapse_pipeline.py
similarity index 100%
rename from tests/providers/microsoft/azure/hooks/test_synapse_pipeline.py
rename to providers/tests/microsoft/azure/hooks/test_synapse_pipeline.py
diff --git a/tests/providers/microsoft/azure/hooks/test_wasb.py b/providers/tests/microsoft/azure/hooks/test_wasb.py
similarity index 100%
rename from tests/providers/microsoft/azure/hooks/test_wasb.py
rename to providers/tests/microsoft/azure/hooks/test_wasb.py
diff --git a/tests/providers/microsoft/azure/resources/__init__.py b/providers/tests/microsoft/azure/log/__init__.py
similarity index 100%
rename from tests/providers/microsoft/azure/resources/__init__.py
rename to providers/tests/microsoft/azure/log/__init__.py
diff --git a/tests/providers/microsoft/azure/log/test_wasb_task_handler.py b/providers/tests/microsoft/azure/log/test_wasb_task_handler.py
similarity index 98%
rename from tests/providers/microsoft/azure/log/test_wasb_task_handler.py
rename to providers/tests/microsoft/azure/log/test_wasb_task_handler.py
index 7de68ec63a79..224961efe433 100644
--- a/tests/providers/microsoft/azure/log/test_wasb_task_handler.py
+++ b/providers/tests/microsoft/azure/log/test_wasb_task_handler.py
@@ -29,8 +29,9 @@
 from airflow.providers.microsoft.azure.log.wasb_task_handler import WasbTaskHandler
 from airflow.utils.state import TaskInstanceState
 from airflow.utils.timezone import datetime
-from tests.test_utils.config import conf_vars
-from tests.test_utils.db import clear_db_dags, clear_db_runs
+
+from dev.tests_common.test_utils.config import conf_vars
+from dev.tests_common.test_utils.db import clear_db_dags, clear_db_runs

 pytestmark = pytest.mark.db_test
diff --git a/tests/providers/microsoft/azure/fs/__init__.py b/providers/tests/microsoft/azure/operators/__init__.py
similarity index 100%
rename from tests/providers/microsoft/azure/fs/__init__.py
rename to providers/tests/microsoft/azure/operators/__init__.py
diff --git a/tests/providers/microsoft/azure/operators/test_adls_create.py b/providers/tests/microsoft/azure/operators/test_adls_create.py
similarity index 100%
rename from tests/providers/microsoft/azure/operators/test_adls_create.py
rename to providers/tests/microsoft/azure/operators/test_adls_create.py
diff --git a/tests/providers/microsoft/azure/operators/test_adls_delete.py b/providers/tests/microsoft/azure/operators/test_adls_delete.py
similarity index 100%
rename from tests/providers/microsoft/azure/operators/test_adls_delete.py
rename to providers/tests/microsoft/azure/operators/test_adls_delete.py
diff --git a/tests/providers/microsoft/azure/operators/test_adls_list.py b/providers/tests/microsoft/azure/operators/test_adls_list.py
similarity index 100%
rename from tests/providers/microsoft/azure/operators/test_adls_list.py
rename to providers/tests/microsoft/azure/operators/test_adls_list.py
diff --git a/tests/providers/microsoft/azure/operators/test_adx.py b/providers/tests/microsoft/azure/operators/test_adx.py
similarity index 100%
rename from tests/providers/microsoft/azure/operators/test_adx.py
rename to providers/tests/microsoft/azure/operators/test_adx.py
diff --git a/tests/providers/microsoft/azure/operators/test_asb.py b/providers/tests/microsoft/azure/operators/test_asb.py
similarity index 100%
rename from tests/providers/microsoft/azure/operators/test_asb.py
rename to providers/tests/microsoft/azure/operators/test_asb.py
diff --git a/tests/providers/microsoft/azure/operators/test_batch.py b/providers/tests/microsoft/azure/operators/test_batch.py
similarity index 100%
rename from tests/providers/microsoft/azure/operators/test_batch.py
rename to providers/tests/microsoft/azure/operators/test_batch.py
diff --git a/tests/providers/microsoft/azure/operators/test_container_instances.py b/providers/tests/microsoft/azure/operators/test_container_instances.py
similarity index 100%
rename from tests/providers/microsoft/azure/operators/test_container_instances.py
rename to providers/tests/microsoft/azure/operators/test_container_instances.py
diff --git a/tests/providers/microsoft/azure/operators/test_cosmos.py b/providers/tests/microsoft/azure/operators/test_cosmos.py
similarity index 100%
rename from tests/providers/microsoft/azure/operators/test_cosmos.py
rename to providers/tests/microsoft/azure/operators/test_cosmos.py
diff --git a/tests/providers/microsoft/azure/operators/test_data_factory.py b/providers/tests/microsoft/azure/operators/test_data_factory.py
similarity index 100%
rename from tests/providers/microsoft/azure/operators/test_data_factory.py
rename to providers/tests/microsoft/azure/operators/test_data_factory.py
diff --git a/tests/providers/microsoft/azure/operators/test_msgraph.py b/providers/tests/microsoft/azure/operators/test_msgraph.py
similarity index 98%
rename from tests/providers/microsoft/azure/operators/test_msgraph.py
rename to providers/tests/microsoft/azure/operators/test_msgraph.py
index 754b653ccdaf..372152fe979e 100644
--- a/tests/providers/microsoft/azure/operators/test_msgraph.py
+++ b/providers/tests/microsoft/azure/operators/test_msgraph.py
@@ -25,8 +25,9 @@
 from airflow.exceptions import AirflowException
 from airflow.providers.microsoft.azure.operators.msgraph import MSGraphAsyncOperator
 from airflow.triggers.base import TriggerEvent
-from tests.providers.microsoft.azure.base import Base
-from tests.providers.microsoft.conftest import (
+
+from providers.tests.microsoft.azure.base import Base
+from providers.tests.microsoft.conftest import (
     load_file,
     load_json,
     mock_context,
diff --git a/tests/providers/microsoft/azure/operators/test_powerbi.py b/providers/tests/microsoft/azure/operators/test_powerbi.py
similarity index 97%
rename from tests/providers/microsoft/azure/operators/test_powerbi.py
rename to providers/tests/microsoft/azure/operators/test_powerbi.py
index 35bb76f782ce..a9171cff39a6 100644
--- a/tests/providers/microsoft/azure/operators/test_powerbi.py
+++ b/providers/tests/microsoft/azure/operators/test_powerbi.py
@@ -30,8 +30,9 @@
 from airflow.providers.microsoft.azure.operators.powerbi import PowerBIDatasetRefreshOperator
 from airflow.providers.microsoft.azure.triggers.powerbi import PowerBITrigger
 from airflow.utils import timezone
-from tests.providers.microsoft.azure.base import Base
-from tests.providers.microsoft.conftest import get_airflow_connection, mock_context
+
+from providers.tests.microsoft.azure.base import Base
+from providers.tests.microsoft.conftest import get_airflow_connection, mock_context

 DEFAULT_CONNECTION_CLIENT_SECRET = "powerbi_conn_id"
 TASK_ID = "run_powerbi_operator"
diff --git a/tests/providers/microsoft/azure/operators/test_synapse.py b/providers/tests/microsoft/azure/operators/test_synapse.py
similarity index 100%
rename from tests/providers/microsoft/azure/operators/test_synapse.py
rename to providers/tests/microsoft/azure/operators/test_synapse.py
diff --git a/tests/providers/microsoft/azure/operators/test_wasb_delete_blob.py b/providers/tests/microsoft/azure/operators/test_wasb_delete_blob.py
similarity index 100%
rename from tests/providers/microsoft/azure/operators/test_wasb_delete_blob.py
rename to providers/tests/microsoft/azure/operators/test_wasb_delete_blob.py
diff --git a/tests/providers/microsoft/azure/secrets/__init__.py b/providers/tests/microsoft/azure/resources/__init__.py
similarity index 100%
rename from tests/providers/microsoft/azure/secrets/__init__.py
rename to providers/tests/microsoft/azure/resources/__init__.py
diff --git a/tests/providers/microsoft/azure/resources/dummy.pdf b/providers/tests/microsoft/azure/resources/dummy.pdf
similarity index 100%
rename from tests/providers/microsoft/azure/resources/dummy.pdf
rename to providers/tests/microsoft/azure/resources/dummy.pdf
diff --git a/tests/providers/microsoft/azure/resources/next_users.json b/providers/tests/microsoft/azure/resources/next_users.json
similarity index 100%
rename from tests/providers/microsoft/azure/resources/next_users.json
rename to providers/tests/microsoft/azure/resources/next_users.json
diff --git a/tests/providers/microsoft/azure/resources/status.json b/providers/tests/microsoft/azure/resources/status.json
similarity index 100%
rename from tests/providers/microsoft/azure/resources/status.json
rename to providers/tests/microsoft/azure/resources/status.json
diff --git a/tests/providers/microsoft/azure/resources/users.json b/providers/tests/microsoft/azure/resources/users.json
similarity index 100%
rename from tests/providers/microsoft/azure/resources/users.json
rename to providers/tests/microsoft/azure/resources/users.json
diff --git a/tests/providers/microsoft/azure/transfers/__init__.py b/providers/tests/microsoft/azure/secrets/__init__.py
similarity index 100%
rename from tests/providers/microsoft/azure/transfers/__init__.py
rename to providers/tests/microsoft/azure/secrets/__init__.py
diff --git a/tests/providers/microsoft/azure/secrets/test_key_vault.py b/providers/tests/microsoft/azure/secrets/test_key_vault.py
similarity index 100%
rename from tests/providers/microsoft/azure/secrets/test_key_vault.py
rename to providers/tests/microsoft/azure/secrets/test_key_vault.py
diff --git a/tests/providers/microsoft/azure/hooks/__init__.py b/providers/tests/microsoft/azure/sensors/__init__.py
similarity index 100%
rename from tests/providers/microsoft/azure/hooks/__init__.py
rename to providers/tests/microsoft/azure/sensors/__init__.py
diff --git a/tests/providers/microsoft/azure/sensors/test_cosmos.py b/providers/tests/microsoft/azure/sensors/test_cosmos.py
similarity index 100%
rename from tests/providers/microsoft/azure/sensors/test_cosmos.py
rename to providers/tests/microsoft/azure/sensors/test_cosmos.py
diff --git a/tests/providers/microsoft/azure/sensors/test_data_factory.py b/providers/tests/microsoft/azure/sensors/test_data_factory.py
similarity index 100%
rename from tests/providers/microsoft/azure/sensors/test_data_factory.py
rename to providers/tests/microsoft/azure/sensors/test_data_factory.py
diff --git a/tests/providers/microsoft/azure/sensors/test_msgraph.py b/providers/tests/microsoft/azure/sensors/test_msgraph.py
similarity index 95%
rename from tests/providers/microsoft/azure/sensors/test_msgraph.py
rename to providers/tests/microsoft/azure/sensors/test_msgraph.py
index e257984affb1..ba5ba3547886 100644
--- a/tests/providers/microsoft/azure/sensors/test_msgraph.py
+++ b/providers/tests/microsoft/azure/sensors/test_msgraph.py
@@ -20,8 +20,9 @@
 from airflow.providers.microsoft.azure.sensors.msgraph import MSGraphSensor
 from airflow.triggers.base import TriggerEvent
-from tests.providers.microsoft.azure.base import Base
-from tests.providers.microsoft.conftest import load_json, mock_json_response
+
+from providers.tests.microsoft.azure.base import Base
+from providers.tests.microsoft.conftest import load_json, mock_json_response

 class TestMSGraphSensor(Base):
diff --git a/tests/providers/microsoft/azure/sensors/test_wasb.py b/providers/tests/microsoft/azure/sensors/test_wasb.py
similarity index 100%
rename from tests/providers/microsoft/azure/sensors/test_wasb.py
rename to providers/tests/microsoft/azure/sensors/test_wasb.py
diff --git a/tests/providers/microsoft/azure/test_utils.py b/providers/tests/microsoft/azure/test_utils.py
similarity index 100%
rename from tests/providers/microsoft/azure/test_utils.py
rename to providers/tests/microsoft/azure/test_utils.py
diff --git a/tests/providers/microsoft/azure/triggers/__init__.py b/providers/tests/microsoft/azure/transfers/__init__.py
similarity index 100%
rename from tests/providers/microsoft/azure/triggers/__init__.py
rename to providers/tests/microsoft/azure/transfers/__init__.py
diff --git a/tests/providers/microsoft/azure/transfers/test_local_to_adls.py b/providers/tests/microsoft/azure/transfers/test_local_to_adls.py
similarity index 100%
rename from tests/providers/microsoft/azure/transfers/test_local_to_adls.py
rename to providers/tests/microsoft/azure/transfers/test_local_to_adls.py
diff --git a/tests/providers/microsoft/azure/transfers/test_local_to_wasb.py b/providers/tests/microsoft/azure/transfers/test_local_to_wasb.py
similarity index 100%
rename from tests/providers/microsoft/azure/transfers/test_local_to_wasb.py
rename to providers/tests/microsoft/azure/transfers/test_local_to_wasb.py
diff --git a/tests/providers/microsoft/azure/transfers/test_oracle_to_azure_data_lake.py b/providers/tests/microsoft/azure/transfers/test_oracle_to_azure_data_lake.py
similarity index 100%
rename from tests/providers/microsoft/azure/transfers/test_oracle_to_azure_data_lake.py
rename to providers/tests/microsoft/azure/transfers/test_oracle_to_azure_data_lake.py
diff --git a/tests/providers/microsoft/azure/transfers/test_s3_to_wasb.py b/providers/tests/microsoft/azure/transfers/test_s3_to_wasb.py
similarity index 100%
rename from tests/providers/microsoft/azure/transfers/test_s3_to_wasb.py
rename to providers/tests/microsoft/azure/transfers/test_s3_to_wasb.py
diff --git a/tests/providers/microsoft/azure/transfers/test_sftp_to_wasb.py b/providers/tests/microsoft/azure/transfers/test_sftp_to_wasb.py
similarity index 100%
rename from
tests/providers/microsoft/azure/transfers/test_sftp_to_wasb.py rename to providers/tests/microsoft/azure/transfers/test_sftp_to_wasb.py diff --git a/tests/providers/microsoft/mssql/operators/__init__.py b/providers/tests/microsoft/azure/triggers/__init__.py similarity index 100% rename from tests/providers/microsoft/mssql/operators/__init__.py rename to providers/tests/microsoft/azure/triggers/__init__.py diff --git a/tests/providers/microsoft/azure/triggers/test_data_factory.py b/providers/tests/microsoft/azure/triggers/test_data_factory.py similarity index 100% rename from tests/providers/microsoft/azure/triggers/test_data_factory.py rename to providers/tests/microsoft/azure/triggers/test_data_factory.py diff --git a/tests/providers/microsoft/azure/triggers/test_msgraph.py b/providers/tests/microsoft/azure/triggers/test_msgraph.py similarity index 98% rename from tests/providers/microsoft/azure/triggers/test_msgraph.py rename to providers/tests/microsoft/azure/triggers/test_msgraph.py index 23085563cf8f..0784d8d83177 100644 --- a/tests/providers/microsoft/azure/triggers/test_msgraph.py +++ b/providers/tests/microsoft/azure/triggers/test_msgraph.py @@ -31,8 +31,9 @@ ResponseSerializer, ) from airflow.triggers.base import TriggerEvent -from tests.providers.microsoft.azure.base import Base -from tests.providers.microsoft.conftest import ( + +from providers.tests.microsoft.azure.base import Base +from providers.tests.microsoft.conftest import ( get_airflow_connection, load_file, load_json, diff --git a/tests/providers/microsoft/azure/triggers/test_powerbi.py b/providers/tests/microsoft/azure/triggers/test_powerbi.py similarity index 99% rename from tests/providers/microsoft/azure/triggers/test_powerbi.py rename to providers/tests/microsoft/azure/triggers/test_powerbi.py index c3276e258b3d..8f5a3e945ffc 100644 --- a/tests/providers/microsoft/azure/triggers/test_powerbi.py +++ b/providers/tests/microsoft/azure/triggers/test_powerbi.py @@ -25,7 +25,8 @@ from airflow.providers.microsoft.azure.hooks.powerbi import PowerBIDatasetRefreshStatus from airflow.providers.microsoft.azure.triggers.powerbi import PowerBITrigger from airflow.triggers.base import TriggerEvent -from tests.providers.microsoft.conftest import get_airflow_connection + +from providers.tests.microsoft.conftest import get_airflow_connection POWERBI_CONN_ID = "powerbi_default" DATASET_ID = "dataset_id" diff --git a/tests/providers/microsoft/azure/triggers/test_wasb.py b/providers/tests/microsoft/azure/triggers/test_wasb.py similarity index 100% rename from tests/providers/microsoft/azure/triggers/test_wasb.py rename to providers/tests/microsoft/azure/triggers/test_wasb.py diff --git a/tests/providers/microsoft/conftest.py b/providers/tests/microsoft/conftest.py similarity index 100% rename from tests/providers/microsoft/conftest.py rename to providers/tests/microsoft/conftest.py diff --git a/tests/providers/microsoft/azure/operators/__init__.py b/providers/tests/microsoft/mssql/__init__.py similarity index 100% rename from tests/providers/microsoft/azure/operators/__init__.py rename to providers/tests/microsoft/mssql/__init__.py diff --git a/tests/providers/microsoft/azure/sensors/__init__.py b/providers/tests/microsoft/mssql/hooks/__init__.py similarity index 100% rename from tests/providers/microsoft/azure/sensors/__init__.py rename to providers/tests/microsoft/mssql/hooks/__init__.py diff --git a/tests/providers/microsoft/mssql/hooks/test_mssql.py b/providers/tests/microsoft/mssql/hooks/test_mssql.py similarity index 99% rename 
from tests/providers/microsoft/mssql/hooks/test_mssql.py rename to providers/tests/microsoft/mssql/hooks/test_mssql.py index c629bf7b0d9a..1b43bb787835 100644 --- a/tests/providers/microsoft/mssql/hooks/test_mssql.py +++ b/providers/tests/microsoft/mssql/hooks/test_mssql.py @@ -23,7 +23,8 @@ import pytest from airflow.models import Connection -from tests.providers.microsoft.conftest import load_file + +from providers.tests.microsoft.conftest import load_file try: from airflow.providers.microsoft.mssql.hooks.mssql import MsSqlHook diff --git a/tests/providers/mysql/assets/__init__.py b/providers/tests/microsoft/mssql/operators/__init__.py similarity index 100% rename from tests/providers/mysql/assets/__init__.py rename to providers/tests/microsoft/mssql/operators/__init__.py diff --git a/tests/providers/microsoft/mssql/operators/test_mssql.py b/providers/tests/microsoft/mssql/operators/test_mssql.py similarity index 100% rename from tests/providers/microsoft/mssql/operators/test_mssql.py rename to providers/tests/microsoft/mssql/operators/test_mssql.py diff --git a/tests/providers/microsoft/mssql/__init__.py b/providers/tests/microsoft/mssql/resources/__init__.py similarity index 100% rename from tests/providers/microsoft/mssql/__init__.py rename to providers/tests/microsoft/mssql/resources/__init__.py diff --git a/tests/providers/microsoft/mssql/resources/replace.sql b/providers/tests/microsoft/mssql/resources/replace.sql similarity index 100% rename from tests/providers/microsoft/mssql/resources/replace.sql rename to providers/tests/microsoft/mssql/resources/replace.sql diff --git a/tests/providers/microsoft/mssql/hooks/__init__.py b/providers/tests/microsoft/psrp/__init__.py similarity index 100% rename from tests/providers/microsoft/mssql/hooks/__init__.py rename to providers/tests/microsoft/psrp/__init__.py diff --git a/tests/providers/microsoft/mssql/resources/__init__.py b/providers/tests/microsoft/psrp/hooks/__init__.py similarity index 100% rename from tests/providers/microsoft/mssql/resources/__init__.py rename to providers/tests/microsoft/psrp/hooks/__init__.py diff --git a/tests/providers/microsoft/psrp/hooks/test_psrp.py b/providers/tests/microsoft/psrp/hooks/test_psrp.py similarity index 96% rename from tests/providers/microsoft/psrp/hooks/test_psrp.py rename to providers/tests/microsoft/psrp/hooks/test_psrp.py index fe271f5c39e5..1f56e67a7a68 100644 --- a/tests/providers/microsoft/psrp/hooks/test_psrp.py +++ b/providers/tests/microsoft/psrp/hooks/test_psrp.py @@ -139,13 +139,16 @@ def test_invoke(self, runspace_pool, powershell, ws_man, logging_level): on_output_callback = Mock() - with PsrpHook( - CONNECTION_ID, - runspace_options=runspace_options, - wsman_options=wsman_options, - on_output_callback=on_output_callback, - **options, - ) as hook, patch.object(type(hook), "log") as logger: + with ( + PsrpHook( + CONNECTION_ID, + runspace_options=runspace_options, + wsman_options=wsman_options, + on_output_callback=on_output_callback, + **options, + ) as hook, + patch.object(type(hook), "log") as logger, + ): error_match = "Process had one or more errors" with pytest.raises(AirflowException, match=error_match): # noqa: PT012 error happen on context exit with hook.invoke() as ps: diff --git a/tests/providers/microsoft/psrp/__init__.py b/providers/tests/microsoft/psrp/operators/__init__.py similarity index 100% rename from tests/providers/microsoft/psrp/__init__.py rename to providers/tests/microsoft/psrp/operators/__init__.py diff --git 
diff --git a/tests/providers/microsoft/psrp/operators/test_psrp.py b/providers/tests/microsoft/psrp/operators/test_psrp.py
similarity index 100%
rename from tests/providers/microsoft/psrp/operators/test_psrp.py
rename to providers/tests/microsoft/psrp/operators/test_psrp.py
diff --git a/tests/providers/microsoft/psrp/hooks/__init__.py b/providers/tests/microsoft/winrm/__init__.py
similarity index 100%
rename from tests/providers/microsoft/psrp/hooks/__init__.py
rename to providers/tests/microsoft/winrm/__init__.py
diff --git a/tests/providers/microsoft/psrp/operators/__init__.py b/providers/tests/microsoft/winrm/hooks/__init__.py
similarity index 100%
rename from tests/providers/microsoft/psrp/operators/__init__.py
rename to providers/tests/microsoft/winrm/hooks/__init__.py
diff --git a/tests/providers/microsoft/winrm/hooks/test_winrm.py b/providers/tests/microsoft/winrm/hooks/test_winrm.py
similarity index 100%
rename from tests/providers/microsoft/winrm/hooks/test_winrm.py
rename to providers/tests/microsoft/winrm/hooks/test_winrm.py
diff --git a/tests/providers/microsoft/winrm/__init__.py b/providers/tests/microsoft/winrm/operators/__init__.py
similarity index 100%
rename from tests/providers/microsoft/winrm/__init__.py
rename to providers/tests/microsoft/winrm/operators/__init__.py
diff --git a/tests/providers/microsoft/winrm/operators/test_winrm.py b/providers/tests/microsoft/winrm/operators/test_winrm.py
similarity index 100%
rename from tests/providers/microsoft/winrm/operators/test_winrm.py
rename to providers/tests/microsoft/winrm/operators/test_winrm.py
diff --git a/tests/providers/microsoft/winrm/hooks/__init__.py b/providers/tests/mongo/__init__.py
similarity index 100%
rename from tests/providers/microsoft/winrm/hooks/__init__.py
rename to providers/tests/mongo/__init__.py
diff --git a/tests/providers/microsoft/winrm/operators/__init__.py b/providers/tests/mongo/hooks/__init__.py
similarity index 100%
rename from tests/providers/microsoft/winrm/operators/__init__.py
rename to providers/tests/mongo/hooks/__init__.py
diff --git a/tests/providers/mongo/hooks/test_mongo.py b/providers/tests/mongo/hooks/test_mongo.py
similarity index 99%
rename from tests/providers/mongo/hooks/test_mongo.py
rename to providers/tests/mongo/hooks/test_mongo.py
index 86b49535fa49..78756b0cc489 100644
--- a/tests/providers/mongo/hooks/test_mongo.py
+++ b/providers/tests/mongo/hooks/test_mongo.py
@@ -27,7 +27,8 @@
 from airflow.exceptions import AirflowConfigException, AirflowProviderDeprecationWarning
 from airflow.models import Connection
 from airflow.providers.mongo.hooks.mongo import MongoHook
-from tests.test_utils.compat import connection_as_json
+
+from dev.tests_common.test_utils.compat import connection_as_json
 
 pytestmark = pytest.mark.db_test
diff --git a/tests/providers/mongo/__init__.py b/providers/tests/mongo/sensors/__init__.py
similarity index 100%
rename from tests/providers/mongo/__init__.py
rename to providers/tests/mongo/sensors/__init__.py
diff --git a/tests/providers/mongo/sensors/test_mongo.py b/providers/tests/mongo/sensors/test_mongo.py
similarity index 100%
rename from tests/providers/mongo/sensors/test_mongo.py
rename to providers/tests/mongo/sensors/test_mongo.py
diff --git a/tests/providers/mongo/hooks/__init__.py b/providers/tests/mysql/__init__.py
similarity index 100%
rename from tests/providers/mongo/hooks/__init__.py
rename to providers/tests/mysql/__init__.py
diff --git a/tests/providers/mysql/transfers/__init__.py b/providers/tests/mysql/assets/__init__.py
similarity index 100%
rename from tests/providers/mysql/transfers/__init__.py
rename to providers/tests/mysql/assets/__init__.py
diff --git a/tests/providers/mysql/assets/test_mysql.py b/providers/tests/mysql/assets/test_mysql.py
similarity index 100%
rename from tests/providers/mysql/assets/test_mysql.py
rename to providers/tests/mysql/assets/test_mysql.py
diff --git a/tests/providers/mongo/sensors/__init__.py b/providers/tests/mysql/hooks/__init__.py
similarity index 100%
rename from tests/providers/mongo/sensors/__init__.py
rename to providers/tests/mysql/hooks/__init__.py
diff --git a/tests/providers/mysql/hooks/test_mysql.py b/providers/tests/mysql/hooks/test_mysql.py
similarity index 99%
rename from tests/providers/mysql/hooks/test_mysql.py
rename to providers/tests/mysql/hooks/test_mysql.py
index 48fc62fe2c22..23748ae9cac1 100644
--- a/tests/providers/mysql/hooks/test_mysql.py
+++ b/providers/tests/mysql/hooks/test_mysql.py
@@ -37,7 +37,8 @@
 
 from airflow.utils import timezone
-from tests.test_utils.asserts import assert_equal_ignore_multiple_spaces
+
+from dev.tests_common.test_utils.asserts import assert_equal_ignore_multiple_spaces
 
 SSL_DICT = {"cert": "/tmp/client-cert.pem", "ca": "/tmp/server-ca.pem", "key": "/tmp/client-key.pem"}
diff --git a/tests/providers/mysql/hooks/test_mysql_connector_python.py b/providers/tests/mysql/hooks/test_mysql_connector_python.py
similarity index 100%
rename from tests/providers/mysql/hooks/test_mysql_connector_python.py
rename to providers/tests/mysql/hooks/test_mysql_connector_python.py
diff --git a/tests/providers/mysql/__init__.py b/providers/tests/mysql/operators/__init__.py
similarity index 100%
rename from tests/providers/mysql/__init__.py
rename to providers/tests/mysql/operators/__init__.py
diff --git a/tests/providers/mysql/operators/test_mysql.py b/providers/tests/mysql/operators/test_mysql.py
similarity index 99%
rename from tests/providers/mysql/operators/test_mysql.py
rename to providers/tests/mysql/operators/test_mysql.py
index 10a1fcc151a8..75f0aed2935f 100644
--- a/tests/providers/mysql/operators/test_mysql.py
+++ b/providers/tests/mysql/operators/test_mysql.py
@@ -34,7 +34,8 @@
 from airflow.providers.common.sql.operators.sql import SQLExecuteQueryOperator
 from airflow.providers.mysql.hooks.mysql import MySqlHook
 from airflow.utils import timezone
-from tests.providers.mysql.hooks.test_mysql import MySqlContext
+
+from providers.tests.mysql.hooks.test_mysql import MySqlContext
 
 DEFAULT_DATE = timezone.datetime(2015, 1, 1)
 DEFAULT_DATE_ISO = DEFAULT_DATE.isoformat()
diff --git a/tests/providers/odbc/__init__.py b/providers/tests/mysql/transfers/__init__.py
similarity index 100%
rename from tests/providers/odbc/__init__.py
rename to providers/tests/mysql/transfers/__init__.py
diff --git a/tests/providers/mysql/transfers/test_presto_to_mysql.py b/providers/tests/mysql/transfers/test_presto_to_mysql.py
similarity index 100%
rename from tests/providers/mysql/transfers/test_presto_to_mysql.py
rename to providers/tests/mysql/transfers/test_presto_to_mysql.py
diff --git a/tests/providers/mysql/transfers/test_s3_to_mysql.py b/providers/tests/mysql/transfers/test_s3_to_mysql.py
similarity index 100%
rename from tests/providers/mysql/transfers/test_s3_to_mysql.py
rename to providers/tests/mysql/transfers/test_s3_to_mysql.py
diff --git a/tests/providers/mysql/transfers/test_trino_to_mysql.py b/providers/tests/mysql/transfers/test_trino_to_mysql.py
similarity index 100%
rename from tests/providers/mysql/transfers/test_trino_to_mysql.py
rename to providers/tests/mysql/transfers/test_trino_to_mysql.py
diff --git a/tests/providers/mysql/transfers/test_vertica_to_mysql.py b/providers/tests/mysql/transfers/test_vertica_to_mysql.py
similarity index 100%
rename from tests/providers/mysql/transfers/test_vertica_to_mysql.py
rename to providers/tests/mysql/transfers/test_vertica_to_mysql.py
diff --git a/tests/providers/mysql/hooks/__init__.py b/providers/tests/neo4j/__init__.py
similarity index 100%
rename from tests/providers/mysql/hooks/__init__.py
rename to providers/tests/neo4j/__init__.py
diff --git a/tests/providers/mysql/operators/__init__.py b/providers/tests/neo4j/hooks/__init__.py
similarity index 100%
rename from tests/providers/mysql/operators/__init__.py
rename to providers/tests/neo4j/hooks/__init__.py
diff --git a/tests/providers/neo4j/hooks/test_neo4j.py b/providers/tests/neo4j/hooks/test_neo4j.py
similarity index 100%
rename from tests/providers/neo4j/hooks/test_neo4j.py
rename to providers/tests/neo4j/hooks/test_neo4j.py
diff --git a/tests/providers/neo4j/__init__.py b/providers/tests/neo4j/operators/__init__.py
similarity index 100%
rename from tests/providers/neo4j/__init__.py
rename to providers/tests/neo4j/operators/__init__.py
diff --git a/tests/providers/neo4j/operators/test_neo4j.py b/providers/tests/neo4j/operators/test_neo4j.py
similarity index 100%
rename from tests/providers/neo4j/operators/test_neo4j.py
rename to providers/tests/neo4j/operators/test_neo4j.py
diff --git a/tests/providers/odbc/hooks/__init__.py b/providers/tests/odbc/__init__.py
similarity index 100%
rename from tests/providers/odbc/hooks/__init__.py
rename to providers/tests/odbc/__init__.py
diff --git a/tests/providers/openai/__init__.py b/providers/tests/odbc/hooks/__init__.py
similarity index 100%
rename from tests/providers/openai/__init__.py
rename to providers/tests/odbc/hooks/__init__.py
diff --git a/tests/providers/odbc/hooks/test_odbc.py b/providers/tests/odbc/hooks/test_odbc.py
similarity index 99%
rename from tests/providers/odbc/hooks/test_odbc.py
rename to providers/tests/odbc/hooks/test_odbc.py
index bddd2ffd996b..8f749aa4f765 100644
--- a/tests/providers/odbc/hooks/test_odbc.py
+++ b/providers/tests/odbc/hooks/test_odbc.py
@@ -28,7 +28,8 @@
 import pytest
 
 from airflow.providers.odbc.hooks.odbc import OdbcHook
-from tests.providers.common.sql.test_utils import mock_hook
+
+from providers.tests.common.sql.test_utils import mock_hook
 
 
 @pytest.fixture
diff --git a/tests/providers/openai/hooks/__init__.py b/providers/tests/openai/__init__.py
similarity index 100%
rename from tests/providers/openai/hooks/__init__.py
rename to providers/tests/openai/__init__.py
diff --git a/tests/providers/openai/operators/__init__.py b/providers/tests/openai/hooks/__init__.py
similarity index 100%
rename from tests/providers/openai/operators/__init__.py
rename to providers/tests/openai/hooks/__init__.py
diff --git a/tests/providers/openai/hooks/test_openai.py b/providers/tests/openai/hooks/test_openai.py
similarity index 100%
rename from tests/providers/openai/hooks/test_openai.py
rename to providers/tests/openai/hooks/test_openai.py
diff --git a/tests/providers/openai/triggers/__init__.py b/providers/tests/openai/operators/__init__.py
similarity index 100%
rename from tests/providers/openai/triggers/__init__.py
rename to providers/tests/openai/operators/__init__.py
diff --git a/tests/providers/openai/operators/test_openai.py b/providers/tests/openai/operators/test_openai.py
similarity index 100%
rename from tests/providers/openai/operators/test_openai.py
rename to providers/tests/openai/operators/test_openai.py
diff --git a/tests/providers/openai/test_exceptions.py b/providers/tests/openai/test_exceptions.py
similarity index 100%
rename from tests/providers/openai/test_exceptions.py
rename to providers/tests/openai/test_exceptions.py
diff --git a/tests/providers/openlineage/__init__.py b/providers/tests/openai/triggers/__init__.py
similarity index 100%
rename from tests/providers/openlineage/__init__.py
rename to providers/tests/openai/triggers/__init__.py
diff --git a/tests/providers/openai/triggers/test_openai.py b/providers/tests/openai/triggers/test_openai.py
similarity index 100%
rename from tests/providers/openai/triggers/test_openai.py
rename to providers/tests/openai/triggers/test_openai.py
diff --git a/tests/providers/neo4j/hooks/__init__.py b/providers/tests/openfaas/__init__.py
similarity index 100%
rename from tests/providers/neo4j/hooks/__init__.py
rename to providers/tests/openfaas/__init__.py
diff --git a/tests/providers/neo4j/operators/__init__.py b/providers/tests/openfaas/hooks/__init__.py
similarity index 100%
rename from tests/providers/neo4j/operators/__init__.py
rename to providers/tests/openfaas/hooks/__init__.py
diff --git a/tests/providers/openfaas/hooks/test_openfaas.py b/providers/tests/openfaas/hooks/test_openfaas.py
similarity index 100%
rename from tests/providers/openfaas/hooks/test_openfaas.py
rename to providers/tests/openfaas/hooks/test_openfaas.py
diff --git a/tests/providers/openlineage/extractors/__init__.py b/providers/tests/openlineage/__init__.py
similarity index 100%
rename from tests/providers/openlineage/extractors/__init__.py
rename to providers/tests/openlineage/__init__.py
diff --git a/tests/providers/openlineage/plugins/__init__.py b/providers/tests/openlineage/extractors/__init__.py
similarity index 100%
rename from tests/providers/openlineage/plugins/__init__.py
rename to providers/tests/openlineage/extractors/__init__.py
diff --git a/tests/providers/openlineage/extractors/test_base.py b/providers/tests/openlineage/extractors/test_base.py
similarity index 99%
rename from tests/providers/openlineage/extractors/test_base.py
rename to providers/tests/openlineage/extractors/test_base.py
index 88234d317472..15c96ac67553 100644
--- a/tests/providers/openlineage/extractors/test_base.py
+++ b/providers/tests/openlineage/extractors/test_base.py
@@ -277,7 +277,7 @@ def test_extract_on_failure(task_state, is_airflow_2_10_or_higher, should_call_o
 @mock.patch("airflow.providers.openlineage.conf.custom_extractors")
 def test_extractors_env_var(custom_extractors):
-    custom_extractors.return_value = {"tests.providers.openlineage.extractors.test_base.ExampleExtractor"}
+    custom_extractors.return_value = {"providers.tests.openlineage.extractors.test_base.ExampleExtractor"}
     extractor = ExtractorManager().get_extractor_class(ExampleOperator(task_id="example"))
     assert extractor is ExampleExtractor
diff --git a/tests/providers/openlineage/extractors/test_bash.py b/providers/tests/openlineage/extractors/test_bash.py
similarity index 98%
rename from tests/providers/openlineage/extractors/test_bash.py
rename to providers/tests/openlineage/extractors/test_bash.py
index fc862e5ee30b..d4fcdf7af2b7 100644
--- a/tests/providers/openlineage/extractors/test_bash.py
+++ b/providers/tests/openlineage/extractors/test_bash.py
@@ -27,7 +27,8 @@
 from airflow import DAG
 from airflow.exceptions import AirflowProviderDeprecationWarning
 from airflow.providers.openlineage.extractors.bash import BashExtractor
-from tests.test_utils.compat import BashOperator
+
+from dev.tests_common.test_utils.compat import BashOperator
 
 pytestmark = pytest.mark.db_test
diff --git a/tests/providers/openlineage/extractors/test_manager.py b/providers/tests/openlineage/extractors/test_manager.py
similarity index 99%
rename from tests/providers/openlineage/extractors/test_manager.py
rename to providers/tests/openlineage/extractors/test_manager.py
index 601a45660484..6bbf303e3285 100644
--- a/tests/providers/openlineage/extractors/test_manager.py
+++ b/providers/tests/openlineage/extractors/test_manager.py
@@ -34,7 +34,8 @@
 from airflow.providers.openlineage.extractors.manager import ExtractorManager
 from airflow.providers.openlineage.utils.utils import Asset
 from airflow.utils.state import State
-from tests.test_utils.compat import AIRFLOW_V_2_10_PLUS
+
+from dev.tests_common.test_utils.compat import AIRFLOW_V_2_10_PLUS
 
 if TYPE_CHECKING:
     from airflow.utils.context import Context
diff --git a/tests/providers/openlineage/extractors/test_python.py b/providers/tests/openlineage/extractors/test_python.py
similarity index 98%
rename from tests/providers/openlineage/extractors/test_python.py
rename to providers/tests/openlineage/extractors/test_python.py
index 44c5503b712d..ef4fc0b861e7 100644
--- a/tests/providers/openlineage/extractors/test_python.py
+++ b/providers/tests/openlineage/extractors/test_python.py
@@ -30,7 +30,8 @@
 from airflow.exceptions import AirflowProviderDeprecationWarning
 from airflow.operators.python import PythonOperator
 from airflow.providers.openlineage.extractors.python import PythonExtractor
-from tests.test_utils.compat import BashOperator
+
+from dev.tests_common.test_utils.compat import BashOperator
 
 pytestmark = pytest.mark.db_test
diff --git a/tests/providers/openlineage/log_config.py b/providers/tests/openlineage/log_config.py
similarity index 100%
rename from tests/providers/openlineage/log_config.py
rename to providers/tests/openlineage/log_config.py
diff --git a/tests/providers/openlineage/plugins/openlineage_configs/__init__.py b/providers/tests/openlineage/plugins/__init__.py
similarity index 100%
rename from tests/providers/openlineage/plugins/openlineage_configs/__init__.py
rename to providers/tests/openlineage/plugins/__init__.py
diff --git a/tests/providers/openlineage/utils/__init__.py b/providers/tests/openlineage/plugins/openlineage_configs/__init__.py
similarity index 100%
rename from tests/providers/openlineage/utils/__init__.py
rename to providers/tests/openlineage/plugins/openlineage_configs/__init__.py
diff --git a/tests/providers/openlineage/plugins/openlineage_configs/http.yaml b/providers/tests/openlineage/plugins/openlineage_configs/http.yaml
similarity index 100%
rename from tests/providers/openlineage/plugins/openlineage_configs/http.yaml
rename to providers/tests/openlineage/plugins/openlineage_configs/http.yaml
diff --git a/tests/providers/openlineage/plugins/test_adapter.py b/providers/tests/openlineage/plugins/test_adapter.py
similarity index 99%
rename from tests/providers/openlineage/plugins/test_adapter.py
rename to providers/tests/openlineage/plugins/test_adapter.py
index b01fe46fdca1..88f2250c638e 100644
--- a/tests/providers/openlineage/plugins/test_adapter.py
+++ b/providers/tests/openlineage/plugins/test_adapter.py
@@ -51,8 +51,9 @@
 )
 from airflow.providers.openlineage.utils.utils import get_airflow_job_facet
 from airflow.utils.task_group import TaskGroup
-from tests.test_utils.compat import BashOperator
-from tests.test_utils.config import conf_vars
+
+from dev.tests_common.test_utils.compat import BashOperator
+from dev.tests_common.test_utils.config import conf_vars
 
 pytestmark = pytest.mark.db_test
diff --git a/tests/providers/openlineage/plugins/test_execution.py b/providers/tests/openlineage/plugins/test_execution.py
similarity index 98%
rename from tests/providers/openlineage/plugins/test_execution.py
rename to providers/tests/openlineage/plugins/test_execution.py
index 8c0bdd55a1f9..c308047b1bc0 100644
--- a/tests/providers/openlineage/plugins/test_execution.py
+++ b/providers/tests/openlineage/plugins/test_execution.py
@@ -36,8 +36,9 @@
 from airflow.task.task_runner.standard_task_runner import StandardTaskRunner
 from airflow.utils import timezone
 from airflow.utils.state import State
-from tests.test_utils.compat import AIRFLOW_V_2_10_PLUS, AIRFLOW_V_3_0_PLUS
-from tests.test_utils.config import conf_vars
+
+from dev.tests_common.test_utils.compat import AIRFLOW_V_2_10_PLUS, AIRFLOW_V_3_0_PLUS
+from dev.tests_common.test_utils.config import conf_vars
 
 if AIRFLOW_V_3_0_PLUS:
     from airflow.utils.types import DagRunTriggeredByType
diff --git a/tests/providers/openlineage/plugins/test_facets.py b/providers/tests/openlineage/plugins/test_facets.py
similarity index 100%
rename from tests/providers/openlineage/plugins/test_facets.py
rename to providers/tests/openlineage/plugins/test_facets.py
diff --git a/tests/providers/openlineage/plugins/test_listener.py b/providers/tests/openlineage/plugins/test_listener.py
similarity index 99%
rename from tests/providers/openlineage/plugins/test_listener.py
rename to providers/tests/openlineage/plugins/test_listener.py
index 57c0134f79d8..0c1651ccf027 100644
--- a/tests/providers/openlineage/plugins/test_listener.py
+++ b/providers/tests/openlineage/plugins/test_listener.py
@@ -38,8 +38,9 @@
 from airflow.providers.openlineage.plugins.listener import OpenLineageListener
 from airflow.providers.openlineage.utils.selective_enable import disable_lineage, enable_lineage
 from airflow.utils.state import DagRunState, State
-from tests.test_utils.compat import AIRFLOW_V_2_10_PLUS, AIRFLOW_V_3_0_PLUS
-from tests.test_utils.config import conf_vars
+
+from dev.tests_common.test_utils.compat import AIRFLOW_V_2_10_PLUS, AIRFLOW_V_3_0_PLUS
+from dev.tests_common.test_utils.config import conf_vars
 
 if AIRFLOW_V_3_0_PLUS:
     from airflow.utils.types import DagRunTriggeredByType
diff --git a/tests/providers/openlineage/plugins/test_macros.py b/providers/tests/openlineage/plugins/test_macros.py
similarity index 100%
rename from tests/providers/openlineage/plugins/test_macros.py
rename to providers/tests/openlineage/plugins/test_macros.py
diff --git a/tests/providers/openlineage/plugins/test_openlineage.py b/providers/tests/openlineage/plugins/test_openlineage.py
similarity index 97%
rename from tests/providers/openlineage/plugins/test_openlineage.py
rename to providers/tests/openlineage/plugins/test_openlineage.py
index dcb8198cecca..8685326a2943 100644
--- a/tests/providers/openlineage/plugins/test_openlineage.py
+++ b/providers/tests/openlineage/plugins/test_openlineage.py
@@ -23,8 +23,8 @@
 
 import pytest
 
-from tests.conftest import RUNNING_TESTS_AGAINST_AIRFLOW_PACKAGES
-from tests.test_utils.config import conf_vars
+from dev.tests_common import RUNNING_TESTS_AGAINST_AIRFLOW_PACKAGES
+from dev.tests_common.test_utils.config import conf_vars
 
 
 @pytest.mark.skipif(
diff --git a/tests/providers/openlineage/plugins/test_utils.py b/providers/tests/openlineage/plugins/test_utils.py
similarity index 99%
rename from tests/providers/openlineage/plugins/test_utils.py
rename to providers/tests/openlineage/plugins/test_utils.py
index 65874a5ecebf..444839ccaef3 100644
--- a/tests/providers/openlineage/plugins/test_utils.py
+++ b/providers/tests/openlineage/plugins/test_utils.py
@@ -43,7 +43,8 @@
 from airflow.utils import timezone
 from airflow.utils.log.secrets_masker import _secrets_masker
 from airflow.utils.state import State
-from tests.test_utils.compat import AIRFLOW_V_2_10_PLUS, AIRFLOW_V_3_0_PLUS, BashOperator
+
+from dev.tests_common.test_utils.compat import AIRFLOW_V_2_10_PLUS, AIRFLOW_V_3_0_PLUS, BashOperator
 
 if AIRFLOW_V_3_0_PLUS:
     from airflow.utils.types import DagRunTriggeredByType
diff --git a/tests/providers/openlineage/test_conf.py b/providers/tests/openlineage/test_conf.py
similarity index 99%
rename from tests/providers/openlineage/test_conf.py
rename to providers/tests/openlineage/test_conf.py
index f3c483d4ce14..7f78a6a4c2ee 100644
--- a/tests/providers/openlineage/test_conf.py
+++ b/providers/tests/openlineage/test_conf.py
@@ -38,7 +38,8 @@
     selective_enable,
     transport,
 )
-from tests.test_utils.config import conf_vars, env_vars
+
+from dev.tests_common.test_utils.config import conf_vars, env_vars
 
 _CONFIG_SECTION = "openlineage"
 _VAR_CONFIG_PATH = "OPENLINEAGE_CONFIG"
diff --git a/tests/providers/openlineage/test_sqlparser.py b/providers/tests/openlineage/test_sqlparser.py
similarity index 100%
rename from tests/providers/openlineage/test_sqlparser.py
rename to providers/tests/openlineage/test_sqlparser.py
diff --git a/tests/providers/opensearch/__init__.py b/providers/tests/openlineage/utils/__init__.py
similarity index 100%
rename from tests/providers/opensearch/__init__.py
rename to providers/tests/openlineage/utils/__init__.py
diff --git a/tests/providers/openlineage/utils/custom_facet_fixture.py b/providers/tests/openlineage/utils/custom_facet_fixture.py
similarity index 100%
rename from tests/providers/openlineage/utils/custom_facet_fixture.py
rename to providers/tests/openlineage/utils/custom_facet_fixture.py
diff --git a/tests/providers/openlineage/utils/test_selective_enable.py b/providers/tests/openlineage/utils/test_selective_enable.py
similarity index 100%
rename from tests/providers/openlineage/utils/test_selective_enable.py
rename to providers/tests/openlineage/utils/test_selective_enable.py
diff --git a/tests/providers/openlineage/utils/test_sql.py b/providers/tests/openlineage/utils/test_sql.py
similarity index 100%
rename from tests/providers/openlineage/utils/test_sql.py
rename to providers/tests/openlineage/utils/test_sql.py
diff --git a/tests/providers/openlineage/utils/test_utils.py b/providers/tests/openlineage/utils/test_utils.py
similarity index 96%
rename from tests/providers/openlineage/utils/test_utils.py
rename to providers/tests/openlineage/utils/test_utils.py
index 20eba76adeb1..6cf7904546ab 100644
--- a/tests/providers/openlineage/utils/test_utils.py
+++ b/providers/tests/openlineage/utils/test_utils.py
@@ -43,8 +43,9 @@
 from airflow.serialization.serialized_objects import SerializedBaseOperator
 from airflow.utils.task_group import TaskGroup
 from airflow.utils.types import DagRunType
-from tests.test_utils.compat import AIRFLOW_V_2_10_PLUS, BashOperator
-from tests.test_utils.mock_operators import MockOperator
+
+from dev.tests_common.test_utils.compat import AIRFLOW_V_2_10_PLUS, BashOperator
+from dev.tests_common.test_utils.mock_operators import MockOperator
 
 BASH_OPERATOR_PATH = "airflow.providers.standard.operators.bash"
 if not AIRFLOW_V_2_10_PLUS:
@@ -190,7 +191,7 @@ def test_get_fully_qualified_class_name_mapped_operator():
     mapped = MockOperator.partial(task_id="task_2").expand(arg2=["a", "b", "c"])
     assert isinstance(mapped, MappedOperator)
     mapped_op_path = get_fully_qualified_class_name(mapped)
-    assert mapped_op_path == "tests.test_utils.mock_operators.MockOperator"
+    assert mapped_op_path == "dev.tests_common.test_utils.mock_operators.MockOperator"
 
 
 def test_get_fully_qualified_class_name_bash_operator():
@@ -309,7 +310,7 @@ def sum_values(values: list[int]) -> int:
                 "downstream_task_ids": [],
             },
             "task": {
-                "operator": "tests.providers.openlineage.utils.test_utils.CustomOperatorForTest",
+                "operator": "providers.tests.openlineage.utils.test_utils.CustomOperatorForTest",
                 "task_group": None,
                 "emits_ol_events": True,
                 "ui_color": CustomOperatorForTest.ui_color,
@@ -337,7 +338,7 @@ def sum_values(values: list[int]) -> int:
                 ],
             },
             "task_1": {
-                "operator": "tests.providers.openlineage.utils.test_utils.CustomOperatorFromEmpty",
+                "operator": "providers.tests.openlineage.utils.test_utils.CustomOperatorFromEmpty",
                 "task_group": None,
                 "emits_ol_events": False,
                 "ui_color": CustomOperatorFromEmpty.ui_color,
@@ -406,7 +407,7 @@ def sum_values(values: list[int]) -> int:
             "emits_ol_events": True,
             "is_setup": False,
             "is_teardown": False,
-            "operator": "tests.providers.openlineage.utils.test_utils.TestMappedOperator",
+            "operator": "providers.tests.openlineage.utils.test_utils.TestMappedOperator",
             "task_group": None,
             "ui_color": "#fff",
             "ui_fgcolor": "#000",
@@ -597,7 +598,7 @@ def test_get_user_provided_run_facets_with_no_function_definition(mock_custom_fa
 @patch(
     "airflow.providers.openlineage.conf.custom_run_facets",
-    return_value={"tests.providers.openlineage.utils.custom_facet_fixture.get_additional_test_facet"},
+    return_value={"providers.tests.openlineage.utils.custom_facet_fixture.get_additional_test_facet"},
 )
 def test_get_user_provided_run_facets_with_function_definition(mock_custom_facet_funcs):
     sample_ti = TaskInstance(
@@ -615,7 +616,7 @@ def test_get_user_provided_run_facets_with_function_definition(mock_custom_facet
 @patch(
     "airflow.providers.openlineage.conf.custom_run_facets",
     return_value={
-        "tests.providers.openlineage.utils.custom_facet_fixture.get_additional_test_facet",
+        "providers.tests.openlineage.utils.custom_facet_fixture.get_additional_test_facet",
     },
 )
 def test_get_user_provided_run_facets_with_return_value_as_none(mock_custom_facet_funcs):
@@ -635,9 +636,9 @@ def test_get_user_provided_run_facets_with_return_value_as_none(mock_custom_face
     "airflow.providers.openlineage.conf.custom_run_facets",
     return_value={
         "invalid_function",
-        "tests.providers.openlineage.utils.custom_facet_fixture.get_additional_test_facet",
-        "tests.providers.openlineage.utils.custom_facet_fixture.return_type_is_not_dict",
-        "tests.providers.openlineage.utils.custom_facet_fixture.get_another_test_facet",
+        "providers.tests.openlineage.utils.custom_facet_fixture.get_additional_test_facet",
+        "providers.tests.openlineage.utils.custom_facet_fixture.return_type_is_not_dict",
+        "providers.tests.openlineage.utils.custom_facet_fixture.get_another_test_facet",
     },
 )
 def test_get_user_provided_run_facets_with_multiple_function_definition(mock_custom_facet_funcs):
@@ -657,8 +658,8 @@ def test_get_user_provided_run_facets_with_multiple_function_definition(mock_cus
 @patch(
     "airflow.providers.openlineage.conf.custom_run_facets",
     return_value={
-        "tests.providers.openlineage.utils.custom_facet_fixture.get_additional_test_facet",
-        "tests.providers.openlineage.utils.custom_facet_fixture.get_duplicate_test_facet_key",
+        "providers.tests.openlineage.utils.custom_facet_fixture.get_additional_test_facet",
+        "providers.tests.openlineage.utils.custom_facet_fixture.get_duplicate_test_facet_key",
     },
 )
 def test_get_user_provided_run_facets_with_duplicate_facet_keys(mock_custom_facet_funcs):
@@ -691,7 +692,7 @@ def test_get_user_provided_run_facets_with_invalid_function_definition(mock_cust
 @patch(
     "airflow.providers.openlineage.conf.custom_run_facets",
-    return_value={"tests.providers.openlineage.utils.custom_facet_fixture.return_type_is_not_dict"},
+    return_value={"providers.tests.openlineage.utils.custom_facet_fixture.return_type_is_not_dict"},
 )
 def test_get_user_provided_run_facets_with_wrong_return_type_function(mock_custom_facet_funcs):
     sample_ti = TaskInstance(
@@ -706,7 +707,7 @@ def test_get_user_provided_run_facets_with_wrong_return_type_function(mock_custo
 @patch(
     "airflow.providers.openlineage.conf.custom_run_facets",
-    return_value={"tests.providers.openlineage.utils.custom_facet_fixture.get_custom_facet_throws_exception"},
+    return_value={"providers.tests.openlineage.utils.custom_facet_fixture.get_custom_facet_throws_exception"},
 )
 def test_get_user_provided_run_facets_with_exception(mock_custom_facet_funcs):
     sample_ti = TaskInstance(
diff --git a/tests/providers/opensearch/hooks/__init__.py b/providers/tests/opensearch/__init__.py
similarity index 100%
rename from tests/providers/opensearch/hooks/__init__.py
rename to providers/tests/opensearch/__init__.py
diff --git a/tests/providers/opensearch/conftest.py b/providers/tests/opensearch/conftest.py
similarity index 100%
rename from tests/providers/opensearch/conftest.py
rename to providers/tests/opensearch/conftest.py
diff --git a/tests/providers/opensearch/log/__init__.py b/providers/tests/opensearch/hooks/__init__.py
similarity index 100%
rename from tests/providers/opensearch/log/__init__.py
rename to providers/tests/opensearch/hooks/__init__.py
diff --git a/tests/providers/opensearch/hooks/test_opensearch.py b/providers/tests/opensearch/hooks/test_opensearch.py
similarity index 100%
rename from tests/providers/opensearch/hooks/test_opensearch.py
rename to providers/tests/opensearch/hooks/test_opensearch.py
diff --git a/tests/providers/opensearch/operators/__init__.py b/providers/tests/opensearch/log/__init__.py
similarity index 100%
rename from tests/providers/opensearch/operators/__init__.py
rename to providers/tests/opensearch/log/__init__.py
diff --git a/tests/providers/opensearch/log/test_os_json_formatter.py b/providers/tests/opensearch/log/test_os_json_formatter.py
similarity index 100%
rename from tests/providers/opensearch/log/test_os_json_formatter.py
rename to providers/tests/opensearch/log/test_os_json_formatter.py
diff --git a/tests/providers/opensearch/log/test_os_response.py b/providers/tests/opensearch/log/test_os_response.py
similarity index 100%
rename from tests/providers/opensearch/log/test_os_response.py
rename to providers/tests/opensearch/log/test_os_response.py
diff --git a/tests/providers/opensearch/log/test_os_task_handler.py b/providers/tests/opensearch/log/test_os_task_handler.py
similarity index 99%
rename from tests/providers/opensearch/log/test_os_task_handler.py
rename to providers/tests/opensearch/log/test_os_task_handler.py
index d23249ba9e6d..1ffa0f525119 100644
--- a/tests/providers/opensearch/log/test_os_task_handler.py
+++ b/providers/tests/opensearch/log/test_os_task_handler.py
@@ -43,9 +43,10 @@
 from airflow.utils import timezone
 from airflow.utils.state import DagRunState, TaskInstanceState
 from airflow.utils.timezone import datetime
-from tests.providers.opensearch.conftest import MockClient
-from tests.test_utils.config import conf_vars
-from tests.test_utils.db import clear_db_dags, clear_db_runs
+
+from dev.tests_common.test_utils.config import conf_vars
+from dev.tests_common.test_utils.db import clear_db_dags, clear_db_runs
+from providers.tests.opensearch.conftest import MockClient
 
 pytestmark = pytest.mark.db_test
diff --git a/tests/providers/opsgenie/notifications/__init__.py b/providers/tests/opensearch/operators/__init__.py
similarity index 100%
rename from tests/providers/opsgenie/notifications/__init__.py
rename to providers/tests/opensearch/operators/__init__.py
diff --git a/tests/providers/opensearch/operators/test_opensearch.py b/providers/tests/opensearch/operators/test_opensearch.py
similarity index 100%
rename from tests/providers/opensearch/operators/test_opensearch.py
rename to providers/tests/opensearch/operators/test_opensearch.py
diff --git a/tests/providers/openfaas/__init__.py b/providers/tests/opsgenie/__init__.py
similarity index 100%
rename from tests/providers/openfaas/__init__.py
rename to providers/tests/opsgenie/__init__.py
diff --git a/tests/providers/openfaas/hooks/__init__.py b/providers/tests/opsgenie/hooks/__init__.py
similarity index 100%
rename from tests/providers/openfaas/hooks/__init__.py
rename to providers/tests/opsgenie/hooks/__init__.py
diff --git a/tests/providers/opsgenie/hooks/test_opsgenie.py b/providers/tests/opsgenie/hooks/test_opsgenie.py
similarity index 100%
rename from tests/providers/opsgenie/hooks/test_opsgenie.py
rename to providers/tests/opsgenie/hooks/test_opsgenie.py
diff --git a/tests/providers/opsgenie/typing/__init__.py b/providers/tests/opsgenie/notifications/__init__.py
similarity index 100%
rename from tests/providers/opsgenie/typing/__init__.py
rename to providers/tests/opsgenie/notifications/__init__.py
diff --git a/tests/providers/opsgenie/notifications/test_opsgenie.py b/providers/tests/opsgenie/notifications/test_opsgenie.py
similarity index 100%
rename from tests/providers/opsgenie/notifications/test_opsgenie.py
rename to providers/tests/opsgenie/notifications/test_opsgenie.py
diff --git a/tests/providers/opsgenie/__init__.py b/providers/tests/opsgenie/operators/__init__.py
similarity index 100%
rename from tests/providers/opsgenie/__init__.py
rename to providers/tests/opsgenie/operators/__init__.py
diff --git a/tests/providers/opsgenie/operators/test_opsgenie.py b/providers/tests/opsgenie/operators/test_opsgenie.py
similarity index 100%
rename from tests/providers/opsgenie/operators/test_opsgenie.py
rename to providers/tests/opsgenie/operators/test_opsgenie.py
diff --git a/tests/providers/oracle/operators/__init__.py b/providers/tests/opsgenie/typing/__init__.py
similarity index 100%
rename from tests/providers/oracle/operators/__init__.py
rename to providers/tests/opsgenie/typing/__init__.py
diff --git a/tests/providers/opsgenie/typing/test_opsgenie.py b/providers/tests/opsgenie/typing/test_opsgenie.py
similarity index 100%
rename from tests/providers/opsgenie/typing/test_opsgenie.py
rename to providers/tests/opsgenie/typing/test_opsgenie.py
diff --git a/tests/providers/opsgenie/hooks/__init__.py b/providers/tests/oracle/__init__.py
similarity index 100%
rename from tests/providers/opsgenie/hooks/__init__.py
rename to providers/tests/oracle/__init__.py
diff --git a/tests/providers/opsgenie/operators/__init__.py b/providers/tests/oracle/hooks/__init__.py
similarity index 100%
rename from tests/providers/opsgenie/operators/__init__.py
rename to providers/tests/oracle/hooks/__init__.py
diff --git a/tests/providers/oracle/hooks/test_oracle.py b/providers/tests/oracle/hooks/test_oracle.py
similarity index 100%
rename from tests/providers/oracle/hooks/test_oracle.py
rename to providers/tests/oracle/hooks/test_oracle.py
diff --git a/tests/providers/pagerduty/notifications/__init__.py b/providers/tests/oracle/operators/__init__.py
similarity index 100%
rename from tests/providers/pagerduty/notifications/__init__.py
rename to providers/tests/oracle/operators/__init__.py
diff --git a/tests/providers/oracle/operators/test_oracle.py b/providers/tests/oracle/operators/test_oracle.py
similarity index 100%
rename from tests/providers/oracle/operators/test_oracle.py
rename to providers/tests/oracle/operators/test_oracle.py
diff --git a/tests/providers/oracle/__init__.py b/providers/tests/oracle/transfers/__init__.py
similarity index 100%
rename from tests/providers/oracle/__init__.py
rename to providers/tests/oracle/transfers/__init__.py
diff --git a/tests/providers/oracle/transfers/test_oracle_to_oracle.py b/providers/tests/oracle/transfers/test_oracle_to_oracle.py
similarity index 100%
rename from tests/providers/oracle/transfers/test_oracle_to_oracle.py
rename to providers/tests/oracle/transfers/test_oracle_to_oracle.py
diff --git a/tests/providers/oracle/hooks/__init__.py b/providers/tests/pagerduty/__init__.py
similarity index 100%
rename from tests/providers/oracle/hooks/__init__.py
rename to providers/tests/pagerduty/__init__.py
diff --git a/tests/providers/oracle/transfers/__init__.py b/providers/tests/pagerduty/hooks/__init__.py
similarity index 100%
rename from tests/providers/oracle/transfers/__init__.py
rename to providers/tests/pagerduty/hooks/__init__.py
diff --git a/tests/providers/pagerduty/hooks/test_pagerduty.py b/providers/tests/pagerduty/hooks/test_pagerduty.py
similarity index 100%
rename from tests/providers/pagerduty/hooks/test_pagerduty.py
rename to providers/tests/pagerduty/hooks/test_pagerduty.py
diff --git a/tests/providers/pagerduty/hooks/test_pagerduty_events.py b/providers/tests/pagerduty/hooks/test_pagerduty_events.py
similarity index 100%
rename from tests/providers/pagerduty/hooks/test_pagerduty_events.py
rename to providers/tests/pagerduty/hooks/test_pagerduty_events.py
diff --git a/tests/providers/papermill/hooks/__init__.py b/providers/tests/pagerduty/notifications/__init__.py
similarity index 100%
rename from tests/providers/papermill/hooks/__init__.py
rename to providers/tests/pagerduty/notifications/__init__.py
diff --git a/tests/providers/pagerduty/notifications/test_pagerduty.py b/providers/tests/pagerduty/notifications/test_pagerduty.py
similarity index 100%
rename from tests/providers/pagerduty/notifications/test_pagerduty.py
rename to providers/tests/pagerduty/notifications/test_pagerduty.py
diff --git a/tests/providers/pagerduty/__init__.py b/providers/tests/papermill/__init__.py
similarity index 100%
rename from tests/providers/pagerduty/__init__.py
rename to providers/tests/papermill/__init__.py
diff --git a/tests/providers/pgvector/__init__.py b/providers/tests/papermill/hooks/__init__.py
similarity index 100%
rename from tests/providers/pgvector/__init__.py
rename to providers/tests/papermill/hooks/__init__.py
diff --git a/tests/providers/papermill/hooks/test_kernel.py b/providers/tests/papermill/hooks/test_kernel.py
similarity index 100%
rename from tests/providers/papermill/hooks/test_kernel.py
rename to providers/tests/papermill/hooks/test_kernel.py
diff --git a/tests/providers/pagerduty/hooks/__init__.py b/providers/tests/papermill/operators/__init__.py
similarity index 100%
rename from tests/providers/pagerduty/hooks/__init__.py
rename to providers/tests/papermill/operators/__init__.py
diff --git a/tests/providers/papermill/operators/test_papermill.py b/providers/tests/papermill/operators/test_papermill.py
similarity index 100%
rename from tests/providers/papermill/operators/test_papermill.py
rename to providers/tests/papermill/operators/test_papermill.py
diff --git a/tests/providers/pgvector/hooks/__init__.py b/providers/tests/pgvector/__init__.py
similarity index 100%
rename from tests/providers/pgvector/hooks/__init__.py
rename to providers/tests/pgvector/__init__.py
diff --git a/tests/providers/pgvector/operators/__init__.py b/providers/tests/pgvector/hooks/__init__.py
similarity index 100%
rename from tests/providers/pgvector/operators/__init__.py
rename to providers/tests/pgvector/hooks/__init__.py
diff --git a/tests/providers/pgvector/hooks/test_pgvector.py b/providers/tests/pgvector/hooks/test_pgvector.py
similarity index 100%
rename from tests/providers/pgvector/hooks/test_pgvector.py
rename to providers/tests/pgvector/hooks/test_pgvector.py
diff --git a/tests/providers/pinecone/__init__.py b/providers/tests/pgvector/operators/__init__.py
similarity index 100%
rename from tests/providers/pinecone/__init__.py
rename to providers/tests/pgvector/operators/__init__.py
diff --git a/tests/providers/pgvector/operators/test_pgvector.py b/providers/tests/pgvector/operators/test_pgvector.py
similarity index 100%
rename from tests/providers/pgvector/operators/test_pgvector.py
rename to providers/tests/pgvector/operators/test_pgvector.py
diff --git a/tests/providers/pinecone/hooks/__init__.py b/providers/tests/pinecone/__init__.py
similarity index 100%
rename from tests/providers/pinecone/hooks/__init__.py
rename to providers/tests/pinecone/__init__.py
diff --git a/tests/providers/pinecone/operators/__init__.py b/providers/tests/pinecone/hooks/__init__.py
similarity index 100%
rename from tests/providers/pinecone/operators/__init__.py
rename to providers/tests/pinecone/hooks/__init__.py
diff --git a/tests/providers/pinecone/hooks/test_pinecone.py b/providers/tests/pinecone/hooks/test_pinecone.py
similarity index 100%
rename from tests/providers/pinecone/hooks/test_pinecone.py
rename to providers/tests/pinecone/hooks/test_pinecone.py
diff --git a/tests/providers/postgres/assets/__init__.py b/providers/tests/pinecone/operators/__init__.py
similarity index 100%
rename from tests/providers/postgres/assets/__init__.py
rename to providers/tests/pinecone/operators/__init__.py
diff --git a/tests/providers/pinecone/operators/test_pinecone.py b/providers/tests/pinecone/operators/test_pinecone.py
similarity index 100%
rename from tests/providers/pinecone/operators/test_pinecone.py
rename to providers/tests/pinecone/operators/test_pinecone.py
diff --git a/tests/providers/papermill/__init__.py b/providers/tests/postgres/__init__.py
similarity index 100%
rename from tests/providers/papermill/__init__.py
rename to providers/tests/postgres/__init__.py
diff --git a/tests/providers/qdrant/__init__.py b/providers/tests/postgres/assets/__init__.py
similarity index 100%
rename from tests/providers/qdrant/__init__.py
rename to providers/tests/postgres/assets/__init__.py
diff --git a/tests/providers/postgres/assets/test_postgres.py b/providers/tests/postgres/assets/test_postgres.py
similarity index 100%
rename from tests/providers/postgres/assets/test_postgres.py
rename to providers/tests/postgres/assets/test_postgres.py
diff --git a/tests/providers/papermill/operators/__init__.py b/providers/tests/postgres/hooks/__init__.py
similarity index 100%
rename from tests/providers/papermill/operators/__init__.py
rename to providers/tests/postgres/hooks/__init__.py
diff --git a/tests/providers/postgres/hooks/test_postgres.py b/providers/tests/postgres/hooks/test_postgres.py
similarity index 100%
rename from tests/providers/postgres/hooks/test_postgres.py
rename to providers/tests/postgres/hooks/test_postgres.py
diff --git a/tests/providers/postgres/__init__.py b/providers/tests/postgres/operators/__init__.py
similarity index 100%
rename from tests/providers/postgres/__init__.py
rename to providers/tests/postgres/operators/__init__.py
diff --git a/tests/providers/postgres/operators/test_postgres.py b/providers/tests/postgres/operators/test_postgres.py
similarity index 100%
rename from tests/providers/postgres/operators/test_postgres.py
rename to providers/tests/postgres/operators/test_postgres.py
diff --git a/tests/providers/postgres/hooks/__init__.py b/providers/tests/presto/__init__.py
similarity index 100%
rename from tests/providers/postgres/hooks/__init__.py
rename to providers/tests/presto/__init__.py
diff --git a/tests/providers/postgres/operators/__init__.py b/providers/tests/presto/hooks/__init__.py
similarity index 100%
rename from tests/providers/postgres/operators/__init__.py
rename to providers/tests/presto/hooks/__init__.py
diff --git a/tests/providers/presto/hooks/test_presto.py b/providers/tests/presto/hooks/test_presto.py
similarity index 100%
rename from tests/providers/presto/hooks/test_presto.py
rename to providers/tests/presto/hooks/test_presto.py
diff --git a/tests/providers/presto/__init__.py b/providers/tests/presto/transfers/__init__.py
similarity index 100%
rename from tests/providers/presto/__init__.py
rename to providers/tests/presto/transfers/__init__.py
diff --git a/tests/providers/presto/transfers/test_gcs_to_presto.py b/providers/tests/presto/transfers/test_gcs_to_presto.py
similarity index 100%
rename from tests/providers/presto/transfers/test_gcs_to_presto.py
rename to providers/tests/presto/transfers/test_gcs_to_presto.py
diff --git a/tests/providers/qdrant/hooks/__init__.py b/providers/tests/qdrant/__init__.py
similarity index 100%
rename from tests/providers/qdrant/hooks/__init__.py
rename to providers/tests/qdrant/__init__.py
diff --git a/tests/providers/qdrant/operators/__init__.py b/providers/tests/qdrant/hooks/__init__.py
similarity index 100%
rename from tests/providers/qdrant/operators/__init__.py
rename to providers/tests/qdrant/hooks/__init__.py
diff --git a/tests/providers/qdrant/hooks/test_qdrant.py b/providers/tests/qdrant/hooks/test_qdrant.py
similarity index 100%
rename from tests/providers/qdrant/hooks/test_qdrant.py
rename to providers/tests/qdrant/hooks/test_qdrant.py
diff --git a/tests/providers/salesforce/operators/__init__.py b/providers/tests/qdrant/operators/__init__.py
similarity index 100%
rename from tests/providers/salesforce/operators/__init__.py
rename to providers/tests/qdrant/operators/__init__.py
diff --git a/tests/providers/qdrant/operators/test_qdrant.py b/providers/tests/qdrant/operators/test_qdrant.py
similarity index 100%
rename from tests/providers/qdrant/operators/test_qdrant.py
rename to providers/tests/qdrant/operators/test_qdrant.py
diff --git a/tests/providers/presto/hooks/__init__.py b/providers/tests/redis/__init__.py
similarity index 100%
rename from tests/providers/presto/hooks/__init__.py
rename to providers/tests/redis/__init__.py
diff --git a/tests/providers/presto/transfers/__init__.py b/providers/tests/redis/hooks/__init__.py
similarity index 100%
rename from tests/providers/presto/transfers/__init__.py
rename to providers/tests/redis/hooks/__init__.py
diff --git a/tests/providers/redis/hooks/test_redis.py b/providers/tests/redis/hooks/test_redis.py
similarity index 100%
rename from tests/providers/redis/hooks/test_redis.py
rename to providers/tests/redis/hooks/test_redis.py
diff --git a/tests/providers/redis/__init__.py b/providers/tests/redis/log/__init__.py
similarity index 100%
rename from tests/providers/redis/__init__.py
rename to providers/tests/redis/log/__init__.py
diff --git a/tests/providers/redis/log/test_redis_task_handler.py b/providers/tests/redis/log/test_redis_task_handler.py
similarity index 98%
rename from tests/providers/redis/log/test_redis_task_handler.py
rename to providers/tests/redis/log/test_redis_task_handler.py
index f4ded2fa586a..bc7345df9a53 100644
--- a/tests/providers/redis/log/test_redis_task_handler.py
+++ b/providers/tests/redis/log/test_redis_task_handler.py
@@ -28,7 +28,8 @@
 from airflow.utils.session import create_session
 from airflow.utils.state import State
 from airflow.utils.timezone import datetime
-from tests.test_utils.config import conf_vars
+
+from dev.tests_common.test_utils.config import conf_vars
 
 pytestmark = pytest.mark.db_test
diff --git a/tests/providers/redis/hooks/__init__.py b/providers/tests/redis/operators/__init__.py
similarity index 100%
rename from tests/providers/redis/hooks/__init__.py
rename to providers/tests/redis/operators/__init__.py
diff --git a/tests/providers/redis/operators/test_redis_publish.py b/providers/tests/redis/operators/test_redis_publish.py
similarity index 100%
rename from tests/providers/redis/operators/test_redis_publish.py
rename to providers/tests/redis/operators/test_redis_publish.py
diff --git a/tests/providers/redis/log/__init__.py b/providers/tests/redis/sensors/__init__.py
similarity index 100%
rename from tests/providers/redis/log/__init__.py
rename to providers/tests/redis/sensors/__init__.py
diff --git a/tests/providers/redis/sensors/test_redis_key.py b/providers/tests/redis/sensors/test_redis_key.py
similarity index 100%
rename from tests/providers/redis/sensors/test_redis_key.py
rename to providers/tests/redis/sensors/test_redis_key.py
diff --git a/tests/providers/redis/sensors/test_redis_pub_sub.py b/providers/tests/redis/sensors/test_redis_pub_sub.py
similarity index 100%
rename from tests/providers/redis/sensors/test_redis_pub_sub.py
rename to providers/tests/redis/sensors/test_redis_pub_sub.py
diff --git a/tests/providers/redis/operators/__init__.py b/providers/tests/salesforce/__init__.py
similarity index 100%
rename from tests/providers/redis/operators/__init__.py
rename to providers/tests/salesforce/__init__.py
diff --git a/tests/providers/redis/sensors/__init__.py b/providers/tests/salesforce/hooks/__init__.py
similarity index 100%
rename from tests/providers/redis/sensors/__init__.py
rename to providers/tests/salesforce/hooks/__init__.py
diff --git a/tests/providers/salesforce/hooks/test_salesforce.py b/providers/tests/salesforce/hooks/test_salesforce.py
similarity index 100%
rename from tests/providers/salesforce/hooks/test_salesforce.py
rename to providers/tests/salesforce/hooks/test_salesforce.py
diff --git a/tests/providers/sendgrid/__init__.py b/providers/tests/salesforce/operators/__init__.py
similarity index 100%
rename from tests/providers/sendgrid/__init__.py
rename to providers/tests/salesforce/operators/__init__.py
diff --git a/tests/providers/salesforce/operators/test_bulk.py b/providers/tests/salesforce/operators/test_bulk.py
similarity index 100%
rename from tests/providers/salesforce/operators/test_bulk.py
rename to providers/tests/salesforce/operators/test_bulk.py
diff --git a/tests/providers/salesforce/operators/test_salesforce_apex_rest.py b/providers/tests/salesforce/operators/test_salesforce_apex_rest.py
similarity index 100%
rename from tests/providers/salesforce/operators/test_salesforce_apex_rest.py
rename to providers/tests/salesforce/operators/test_salesforce_apex_rest.py
diff --git a/tests/providers/salesforce/__init__.py b/providers/tests/samba/__init__.py
similarity index 100%
rename from tests/providers/salesforce/__init__.py
rename to providers/tests/samba/__init__.py
diff --git a/tests/providers/salesforce/hooks/__init__.py b/providers/tests/samba/hooks/__init__.py
similarity index 100%
rename from tests/providers/salesforce/hooks/__init__.py
rename to providers/tests/samba/hooks/__init__.py
diff --git a/tests/providers/samba/hooks/test_samba.py b/providers/tests/samba/hooks/test_samba.py
similarity index 100%
rename from tests/providers/samba/hooks/test_samba.py
rename to providers/tests/samba/hooks/test_samba.py
diff --git a/tests/providers/samba/__init__.py b/providers/tests/samba/transfers/__init__.py
similarity index 100%
rename from tests/providers/samba/__init__.py
rename to providers/tests/samba/transfers/__init__.py
diff --git a/tests/providers/samba/transfers/test_gcs_to_samba.py b/providers/tests/samba/transfers/test_gcs_to_samba.py
similarity index 100%
rename from tests/providers/samba/transfers/test_gcs_to_samba.py
rename to providers/tests/samba/transfers/test_gcs_to_samba.py
diff --git a/tests/providers/samba/hooks/__init__.py b/providers/tests/segment/__init__.py
similarity index 100%
rename from tests/providers/samba/hooks/__init__.py
rename to providers/tests/segment/__init__.py
diff --git a/tests/providers/samba/transfers/__init__.py b/providers/tests/segment/hooks/__init__.py
similarity index 100%
rename from tests/providers/samba/transfers/__init__.py
rename to providers/tests/segment/hooks/__init__.py
diff --git a/tests/providers/segment/hooks/test_segment.py b/providers/tests/segment/hooks/test_segment.py
similarity index 100%
rename from tests/providers/segment/hooks/test_segment.py
rename to providers/tests/segment/hooks/test_segment.py
diff --git a/tests/providers/segment/__init__.py b/providers/tests/segment/operators/__init__.py
similarity index 100%
rename from tests/providers/segment/__init__.py
rename to providers/tests/segment/operators/__init__.py
diff --git a/tests/providers/segment/operators/test_segment_track_event.py b/providers/tests/segment/operators/test_segment_track_event.py
similarity index 100%
rename from tests/providers/segment/operators/test_segment_track_event.py
rename to providers/tests/segment/operators/test_segment_track_event.py
diff --git a/tests/providers/sendgrid/utils/__init__.py b/providers/tests/sendgrid/__init__.py
similarity index 100%
rename from tests/providers/sendgrid/utils/__init__.py
rename to providers/tests/sendgrid/__init__.py
diff --git a/tests/providers/sftp/__init__.py b/providers/tests/sendgrid/utils/__init__.py
similarity index 100%
rename from tests/providers/sftp/__init__.py
rename to
providers/tests/sendgrid/utils/__init__.py diff --git a/tests/providers/sendgrid/utils/test_emailer.py b/providers/tests/sendgrid/utils/test_emailer.py similarity index 100% rename from tests/providers/sendgrid/utils/test_emailer.py rename to providers/tests/sendgrid/utils/test_emailer.py diff --git a/tests/providers/sftp/decorators/__init__.py b/providers/tests/sftp/__init__.py similarity index 100% rename from tests/providers/sftp/decorators/__init__.py rename to providers/tests/sftp/__init__.py diff --git a/tests/providers/sftp/decorators/sensors/__init__.py b/providers/tests/sftp/decorators/__init__.py similarity index 100% rename from tests/providers/sftp/decorators/sensors/__init__.py rename to providers/tests/sftp/decorators/__init__.py diff --git a/tests/providers/sftp/hooks/__init__.py b/providers/tests/sftp/decorators/sensors/__init__.py similarity index 100% rename from tests/providers/sftp/hooks/__init__.py rename to providers/tests/sftp/decorators/sensors/__init__.py diff --git a/tests/providers/sftp/decorators/sensors/test_sftp.py b/providers/tests/sftp/decorators/sensors/test_sftp.py similarity index 100% rename from tests/providers/sftp/decorators/sensors/test_sftp.py rename to providers/tests/sftp/decorators/sensors/test_sftp.py diff --git a/tests/providers/sftp/operators/__init__.py b/providers/tests/sftp/hooks/__init__.py similarity index 100% rename from tests/providers/sftp/operators/__init__.py rename to providers/tests/sftp/hooks/__init__.py diff --git a/tests/providers/sftp/hooks/test_sftp.py b/providers/tests/sftp/hooks/test_sftp.py similarity index 99% rename from tests/providers/sftp/hooks/test_sftp.py rename to providers/tests/sftp/hooks/test_sftp.py index 3305f9cb761a..7a7a2991a703 100644 --- a/tests/providers/sftp/hooks/test_sftp.py +++ b/providers/tests/sftp/hooks/test_sftp.py @@ -406,8 +406,9 @@ def test_deprecation_ftp_conn_id(self, mock_get_connection): def test_invalid_ssh_hook(self, mock_get_connection): connection = Connection(conn_id="sftp_default", login="root", host="localhost") mock_get_connection.return_value = connection - with pytest.raises(AirflowException, match="ssh_hook must be an instance of SSHHook"), pytest.warns( - AirflowProviderDeprecationWarning, match=r"Parameter `ssh_hook` is deprecated.*" + with ( + pytest.raises(AirflowException, match="ssh_hook must be an instance of SSHHook"), + pytest.warns(AirflowProviderDeprecationWarning, match=r"Parameter `ssh_hook` is deprecated.*"), ): SFTPHook(ssh_hook="invalid_hook") diff --git a/tests/providers/sftp/sensors/__init__.py b/providers/tests/sftp/operators/__init__.py similarity index 100% rename from tests/providers/sftp/sensors/__init__.py rename to providers/tests/sftp/operators/__init__.py diff --git a/tests/providers/sftp/operators/test_sftp.py b/providers/tests/sftp/operators/test_sftp.py similarity index 97% rename from tests/providers/sftp/operators/test_sftp.py rename to providers/tests/sftp/operators/test_sftp.py index a6835675da26..e6c90c2c74f5 100644 --- a/tests/providers/sftp/operators/test_sftp.py +++ b/providers/tests/sftp/operators/test_sftp.py @@ -35,7 +35,8 @@ from airflow.providers.ssh.operators.ssh import SSHOperator from airflow.utils import timezone from airflow.utils.timezone import datetime -from tests.test_utils.config import conf_vars + +from dev.tests_common.test_utils.config import conf_vars pytestmark = pytest.mark.db_test @@ -151,8 +152,9 @@ def test_file_transfer_no_intermediate_dir_error_put(self, create_task_instance_ operation=SFTPOperation.PUT, 
             create_intermediate_dirs=False,
         )
-        with pytest.raises(AirflowException) as ctx, pytest.warns(
-            AirflowProviderDeprecationWarning, match="Parameter `ssh_hook` is deprecated..*"
+        with (
+            pytest.raises(AirflowException) as ctx,
+            pytest.warns(AirflowProviderDeprecationWarning, match="Parameter `ssh_hook` is deprecated..*"),
         ):
             ti2.run()
         assert "No such file" in str(ctx.value)
@@ -292,8 +294,11 @@ def test_file_transfer_no_intermediate_dir_error_get(self, dag_maker, create_rem
         for ti in dag_maker.create_dagrun(execution_date=timezone.utcnow()).task_instances:
             # This should raise an error with "No such file" as the directory
             # does not exist.
-            with pytest.raises(AirflowException) as ctx, pytest.warns(
-                AirflowProviderDeprecationWarning, match="Parameter `ssh_hook` is deprecated..*"
+            with (
+                pytest.raises(AirflowException) as ctx,
+                pytest.warns(
+                    AirflowProviderDeprecationWarning, match="Parameter `ssh_hook` is deprecated..*"
+                ),
             ):
                 ti.run()
             assert "No such file" in str(ctx.value)
@@ -376,8 +381,9 @@ def test_arg_checking(self):
             operation=SFTPOperation.PUT,
             dag=dag,
         )
-        with contextlib.suppress(Exception), pytest.warns(
-            AirflowProviderDeprecationWarning, match="Parameter `ssh_hook` is deprecated..*"
+        with (
+            contextlib.suppress(Exception),
+            pytest.warns(AirflowProviderDeprecationWarning, match="Parameter `ssh_hook` is deprecated..*"),
         ):
             task_3.execute(None)
         assert task_3.sftp_hook.ssh_conn_id == self.hook.ssh_conn_id
diff --git a/tests/providers/sftp/triggers/__init__.py b/providers/tests/sftp/sensors/__init__.py
similarity index 100%
rename from tests/providers/sftp/triggers/__init__.py
rename to providers/tests/sftp/sensors/__init__.py
diff --git a/tests/providers/sftp/sensors/test_sftp.py b/providers/tests/sftp/sensors/test_sftp.py
similarity index 100%
rename from tests/providers/sftp/sensors/test_sftp.py
rename to providers/tests/sftp/sensors/test_sftp.py
diff --git a/tests/providers/slack/notifications/__init__.py b/providers/tests/sftp/triggers/__init__.py
similarity index 100%
rename from tests/providers/slack/notifications/__init__.py
rename to providers/tests/sftp/triggers/__init__.py
diff --git a/tests/providers/sftp/triggers/test_sftp.py b/providers/tests/sftp/triggers/test_sftp.py
similarity index 100%
rename from tests/providers/sftp/triggers/test_sftp.py
rename to providers/tests/sftp/triggers/test_sftp.py
diff --git a/tests/providers/segment/hooks/__init__.py b/providers/tests/singularity/__init__.py
similarity index 100%
rename from tests/providers/segment/hooks/__init__.py
rename to providers/tests/singularity/__init__.py
diff --git a/tests/providers/segment/operators/__init__.py b/providers/tests/singularity/operators/__init__.py
similarity index 100%
rename from tests/providers/segment/operators/__init__.py
rename to providers/tests/singularity/operators/__init__.py
diff --git a/tests/providers/singularity/operators/test_singularity.py b/providers/tests/singularity/operators/test_singularity.py
similarity index 100%
rename from tests/providers/singularity/operators/test_singularity.py
rename to providers/tests/singularity/operators/test_singularity.py
diff --git a/tests/providers/singularity/__init__.py b/providers/tests/slack/__init__.py
similarity index 100%
rename from tests/providers/singularity/__init__.py
rename to providers/tests/slack/__init__.py
diff --git a/tests/providers/singularity/operators/__init__.py b/providers/tests/slack/hooks/__init__.py
similarity index 100%
rename from tests/providers/singularity/operators/__init__.py
rename to providers/tests/slack/hooks/__init__.py
diff --git a/tests/providers/slack/hooks/test_slack.py b/providers/tests/slack/hooks/test_slack.py
similarity index 100%
rename from tests/providers/slack/hooks/test_slack.py
rename to providers/tests/slack/hooks/test_slack.py
diff --git a/tests/providers/slack/hooks/test_slack_webhook.py b/providers/tests/slack/hooks/test_slack_webhook.py
similarity index 100%
rename from tests/providers/slack/hooks/test_slack_webhook.py
rename to providers/tests/slack/hooks/test_slack_webhook.py
diff --git a/tests/providers/slack/utils/__init__.py b/providers/tests/slack/notifications/__init__.py
similarity index 100%
rename from tests/providers/slack/utils/__init__.py
rename to providers/tests/slack/notifications/__init__.py
diff --git a/tests/providers/slack/notifications/test_slack.py b/providers/tests/slack/notifications/test_slack.py
similarity index 100%
rename from tests/providers/slack/notifications/test_slack.py
rename to providers/tests/slack/notifications/test_slack.py
diff --git a/tests/providers/slack/notifications/test_slack_webhook.py b/providers/tests/slack/notifications/test_slack_webhook.py
similarity index 100%
rename from tests/providers/slack/notifications/test_slack_webhook.py
rename to providers/tests/slack/notifications/test_slack_webhook.py
diff --git a/tests/providers/slack/__init__.py b/providers/tests/slack/operators/__init__.py
similarity index 100%
rename from tests/providers/slack/__init__.py
rename to providers/tests/slack/operators/__init__.py
diff --git a/tests/providers/slack/operators/test_slack.py b/providers/tests/slack/operators/test_slack.py
similarity index 98%
rename from tests/providers/slack/operators/test_slack.py
rename to providers/tests/slack/operators/test_slack.py
index 603cef7835de..f276ee43159e 100644
--- a/tests/providers/slack/operators/test_slack.py
+++ b/providers/tests/slack/operators/test_slack.py
@@ -362,7 +362,8 @@ def test_partial_both_channel_parameters(self, channel, channels, dag_maker, ses
         with set_current_task_instance_session(session=session):
             warning_match = r"Argument `channel` is deprecated.*use `channels` instead"
             for ti in tis:
-                with pytest.warns(AirflowProviderDeprecationWarning, match=warning_match), pytest.raises(
-                    ValueError, match="Cannot set both arguments"
+                with (
+                    pytest.warns(AirflowProviderDeprecationWarning, match=warning_match),
+                    pytest.raises(ValueError, match="Cannot set both arguments"),
                 ):
                     ti.render_templates()
diff --git a/tests/providers/slack/operators/test_slack_webhook.py b/providers/tests/slack/operators/test_slack_webhook.py
similarity index 100%
rename from tests/providers/slack/operators/test_slack_webhook.py
rename to providers/tests/slack/operators/test_slack_webhook.py
diff --git a/tests/providers/slack/hooks/__init__.py b/providers/tests/slack/transfers/__init__.py
similarity index 100%
rename from tests/providers/slack/hooks/__init__.py
rename to providers/tests/slack/transfers/__init__.py
diff --git a/tests/providers/slack/transfers/conftest.py b/providers/tests/slack/transfers/conftest.py
similarity index 100%
rename from tests/providers/slack/transfers/conftest.py
rename to providers/tests/slack/transfers/conftest.py
diff --git a/tests/providers/slack/transfers/test_base_sql_to_slack.py b/providers/tests/slack/transfers/test_base_sql_to_slack.py
similarity index 100%
rename from tests/providers/slack/transfers/test_base_sql_to_slack.py
rename to providers/tests/slack/transfers/test_base_sql_to_slack.py
diff --git a/tests/providers/slack/transfers/test_sql_to_slack.py b/providers/tests/slack/transfers/test_sql_to_slack.py
similarity index 100%
rename from tests/providers/slack/transfers/test_sql_to_slack.py
rename to providers/tests/slack/transfers/test_sql_to_slack.py
diff --git a/tests/providers/slack/transfers/test_sql_to_slack_webhook.py b/providers/tests/slack/transfers/test_sql_to_slack_webhook.py
similarity index 96%
rename from tests/providers/slack/transfers/test_sql_to_slack_webhook.py
rename to providers/tests/slack/transfers/test_sql_to_slack_webhook.py
index 2f6ef63bc687..c56f895b7bc7 100644
--- a/tests/providers/slack/transfers/test_sql_to_slack_webhook.py
+++ b/providers/tests/slack/transfers/test_sql_to_slack_webhook.py
@@ -164,8 +164,9 @@ def test_conflicting_conn_id(self):
             "slack_message": "message: {{ ds }}, {{ xxxx }}",
             "sql": "sql {{ ds }}",
         }
-        with pytest.raises(ValueError, match="Conflicting Connection ids provided"), pytest.warns(
-            AirflowProviderDeprecationWarning, match="Parameter `slack_conn_id` is deprecated"
+        with (
+            pytest.raises(ValueError, match="Conflicting Connection ids provided"),
+            pytest.warns(AirflowProviderDeprecationWarning, match="Parameter `slack_conn_id` is deprecated"),
         ):
             self._construct_operator(**operator_args, slack_webhook_conn_id="foo", slack_conn_id="bar")
@@ -312,7 +313,8 @@ def test_partial_ambiguous_slack_connections(self, dag_maker, session):
         with set_current_task_instance_session(session=session):
             warning_match = r"Parameter `slack_conn_id` is deprecated"
             for ti in tis:
-                with pytest.warns(AirflowProviderDeprecationWarning, match=warning_match), pytest.raises(
-                    ValueError, match="Conflicting Connection ids provided"
+                with (
+                    pytest.warns(AirflowProviderDeprecationWarning, match=warning_match),
+                    pytest.raises(ValueError, match="Conflicting Connection ids provided"),
                 ):
                     ti.render_templates()
diff --git a/tests/providers/smtp/notifications/__init__.py b/providers/tests/slack/utils/__init__.py
similarity index 100%
rename from tests/providers/smtp/notifications/__init__.py
rename to providers/tests/slack/utils/__init__.py
diff --git a/tests/providers/slack/utils/test_utils.py b/providers/tests/slack/utils/test_utils.py
similarity index 100%
rename from tests/providers/slack/utils/test_utils.py
rename to providers/tests/slack/utils/test_utils.py
diff --git a/tests/providers/slack/operators/__init__.py b/providers/tests/smtp/__init__.py
similarity index 100%
rename from tests/providers/slack/operators/__init__.py
rename to providers/tests/smtp/__init__.py
diff --git a/tests/providers/slack/transfers/__init__.py b/providers/tests/smtp/hooks/__init__.py
similarity index 100%
rename from tests/providers/slack/transfers/__init__.py
rename to providers/tests/smtp/hooks/__init__.py
diff --git a/tests/providers/smtp/hooks/test_smtp.py b/providers/tests/smtp/hooks/test_smtp.py
similarity index 99%
rename from tests/providers/smtp/hooks/test_smtp.py
rename to providers/tests/smtp/hooks/test_smtp.py
index 505fc303051e..04a20e0ca6f0 100644
--- a/tests/providers/smtp/hooks/test_smtp.py
+++ b/providers/tests/smtp/hooks/test_smtp.py
@@ -30,7 +30,8 @@
 from airflow.providers.smtp.hooks.smtp import SmtpHook
 from airflow.utils import db
 from airflow.utils.session import create_session
-from tests.test_utils.config import conf_vars
+
+from dev.tests_common.test_utils.config import conf_vars
 
 pytestmark = pytest.mark.db_test
 
diff --git a/tests/providers/snowflake/__init__.py b/providers/tests/smtp/notifications/__init__.py
similarity index 100%
rename from tests/providers/snowflake/__init__.py
rename to providers/tests/smtp/notifications/__init__.py
diff --git a/tests/providers/smtp/notifications/test_smtp.py b/providers/tests/smtp/notifications/test_smtp.py
similarity index 96%
rename from tests/providers/smtp/notifications/test_smtp.py
rename to providers/tests/smtp/notifications/test_smtp.py
index 39b51e8e02ce..75e1ebf530fa 100644
--- a/tests/providers/smtp/notifications/test_smtp.py
+++ b/providers/tests/smtp/notifications/test_smtp.py
@@ -31,8 +31,9 @@
     send_smtp_notification,
 )
 from airflow.utils import timezone
-from tests.test_utils.compat import AIRFLOW_V_2_10_PLUS
-from tests.test_utils.config import conf_vars
+
+from dev.tests_common.test_utils.compat import AIRFLOW_V_2_10_PLUS
+from dev.tests_common.test_utils.config import conf_vars
 
 pytestmark = pytest.mark.db_test
 
@@ -182,9 +183,10 @@ def test_notifier_with_nondefault_conf_vars(self, mock_smtphook_hook, create_tas
         ti = create_task_instance(dag_id="dag", task_id="op", execution_date=timezone.datetime(2018, 1, 1))
         context = {"dag": ti.dag_run.dag, "ti": ti}
 
-        with tempfile.NamedTemporaryFile(mode="wt", suffix=".txt") as f_subject, tempfile.NamedTemporaryFile(
-            mode="wt", suffix=".txt"
-        ) as f_content:
+        with (
+            tempfile.NamedTemporaryFile(mode="wt", suffix=".txt") as f_subject,
+            tempfile.NamedTemporaryFile(mode="wt", suffix=".txt") as f_content,
+        ):
             f_subject.write("Task {{ ti.task_id }} failed")
             f_subject.flush()
diff --git a/tests/providers/smtp/__init__.py b/providers/tests/smtp/operators/__init__.py
similarity index 100%
rename from tests/providers/smtp/__init__.py
rename to providers/tests/smtp/operators/__init__.py
diff --git a/tests/providers/smtp/operators/test_smtp.py b/providers/tests/smtp/operators/test_smtp.py
similarity index 100%
rename from tests/providers/smtp/operators/test_smtp.py
rename to providers/tests/smtp/operators/test_smtp.py
diff --git a/tests/providers/snowflake/decorators/__init__.py b/providers/tests/snowflake/__init__.py
similarity index 100%
rename from tests/providers/snowflake/decorators/__init__.py
rename to providers/tests/snowflake/__init__.py
diff --git a/tests/providers/snowflake/hooks/__init__.py b/providers/tests/snowflake/decorators/__init__.py
similarity index 100%
rename from tests/providers/snowflake/hooks/__init__.py
rename to providers/tests/snowflake/decorators/__init__.py
diff --git a/tests/providers/snowflake/decorators/test_snowpark.py b/providers/tests/snowflake/decorators/test_snowpark.py
similarity index 100%
rename from tests/providers/snowflake/decorators/test_snowpark.py
rename to providers/tests/snowflake/decorators/test_snowpark.py
diff --git a/tests/providers/snowflake/operators/__init__.py b/providers/tests/snowflake/hooks/__init__.py
similarity index 100%
rename from tests/providers/snowflake/operators/__init__.py
rename to providers/tests/snowflake/hooks/__init__.py
diff --git a/tests/providers/snowflake/hooks/test_snowflake.py b/providers/tests/snowflake/hooks/test_snowflake.py
similarity index 93%
rename from tests/providers/snowflake/hooks/test_snowflake.py
rename to providers/tests/snowflake/hooks/test_snowflake.py
index 9ef0c4d2a571..b7c9382654be 100644
--- a/tests/providers/snowflake/hooks/test_snowflake.py
+++ b/providers/tests/snowflake/hooks/test_snowflake.py
@@ -394,9 +394,10 @@ def test_get_conn_params_should_support_private_auth_with_unencrypted_key(
         with mock.patch.dict("os.environ", AIRFLOW_CONN_TEST_CONN=Connection(**connection_kwargs).get_uri()):
             assert "private_key" in SnowflakeHook(snowflake_conn_id="test_conn")._get_conn_params
         connection_kwargs["password"] = _PASSWORD
-        with mock.patch.dict(
-            "os.environ", AIRFLOW_CONN_TEST_CONN=Connection(**connection_kwargs).get_uri()
-        ), pytest.raises(TypeError, match="Password was given but private key is not encrypted."):
+        with (
+            mock.patch.dict("os.environ", AIRFLOW_CONN_TEST_CONN=Connection(**connection_kwargs).get_uri()),
+            pytest.raises(TypeError, match="Password was given but private key is not encrypted."),
+        ):
             SnowflakeHook(snowflake_conn_id="test_conn")._get_conn_params
 
     def test_get_conn_params_should_fail_on_invalid_key(self):
@@ -412,9 +413,10 @@ def test_get_conn_params_should_fail_on_invalid_key(self):
                 "private_key_file": "/dev/urandom",
             },
         }
-        with mock.patch.dict(
-            "os.environ", AIRFLOW_CONN_TEST_CONN=Connection(**connection_kwargs).get_uri()
-        ), pytest.raises(ValueError, match="The private_key_file path points to an empty or invalid file."):
+        with (
+            mock.patch.dict("os.environ", AIRFLOW_CONN_TEST_CONN=Connection(**connection_kwargs).get_uri()),
+            pytest.raises(ValueError, match="The private_key_file path points to an empty or invalid file."),
+        ):
             SnowflakeHook(snowflake_conn_id="test_conn").get_conn()
 
     def test_should_add_partner_info(self):
@@ -428,18 +430,24 @@ def test_should_add_partner_info(self):
         )
 
     def test_get_conn_should_call_connect(self):
-        with mock.patch.dict(
-            "os.environ", AIRFLOW_CONN_TEST_CONN=Connection(**BASE_CONNECTION_KWARGS).get_uri()
-        ), mock.patch("airflow.providers.snowflake.hooks.snowflake.connector") as mock_connector:
+        with (
+            mock.patch.dict(
+                "os.environ", AIRFLOW_CONN_TEST_CONN=Connection(**BASE_CONNECTION_KWARGS).get_uri()
+            ),
+            mock.patch("airflow.providers.snowflake.hooks.snowflake.connector") as mock_connector,
+        ):
             hook = SnowflakeHook(snowflake_conn_id="test_conn")
             conn = hook.get_conn()
             mock_connector.connect.assert_called_once_with(**hook._get_conn_params)
             assert mock_connector.connect.return_value == conn
 
     def test_get_sqlalchemy_engine_should_support_pass_auth(self):
-        with mock.patch.dict(
-            "os.environ", AIRFLOW_CONN_TEST_CONN=Connection(**BASE_CONNECTION_KWARGS).get_uri()
-        ), mock.patch("airflow.providers.snowflake.hooks.snowflake.create_engine") as mock_create_engine:
+        with (
+            mock.patch.dict(
+                "os.environ", AIRFLOW_CONN_TEST_CONN=Connection(**BASE_CONNECTION_KWARGS).get_uri()
+            ),
+            mock.patch("airflow.providers.snowflake.hooks.snowflake.create_engine") as mock_create_engine,
+        ):
             hook = SnowflakeHook(snowflake_conn_id="test_conn")
             conn = hook.get_sqlalchemy_engine()
             mock_create_engine.assert_called_once_with(
@@ -452,9 +460,10 @@ def test_get_sqlalchemy_engine_should_support_insecure_mode(self):
         connection_kwargs = deepcopy(BASE_CONNECTION_KWARGS)
         connection_kwargs["extra"]["extra__snowflake__insecure_mode"] = "True"
 
-        with mock.patch.dict(
-            "os.environ", AIRFLOW_CONN_TEST_CONN=Connection(**connection_kwargs).get_uri()
-        ), mock.patch("airflow.providers.snowflake.hooks.snowflake.create_engine") as mock_create_engine:
+        with (
+            mock.patch.dict("os.environ", AIRFLOW_CONN_TEST_CONN=Connection(**connection_kwargs).get_uri()),
+            mock.patch("airflow.providers.snowflake.hooks.snowflake.create_engine") as mock_create_engine,
+        ):
             hook = SnowflakeHook(snowflake_conn_id="test_conn")
             conn = hook.get_sqlalchemy_engine()
             mock_create_engine.assert_called_once_with(
@@ -468,9 +477,10 @@ def test_get_sqlalchemy_engine_should_support_session_parameters(self):
         connection_kwargs = deepcopy(BASE_CONNECTION_KWARGS)
connection_kwargs["extra"]["session_parameters"] = {"TEST_PARAM": "AA", "TEST_PARAM_B": 123} - with mock.patch.dict( - "os.environ", AIRFLOW_CONN_TEST_CONN=Connection(**connection_kwargs).get_uri() - ), mock.patch("airflow.providers.snowflake.hooks.snowflake.create_engine") as mock_create_engine: + with ( + mock.patch.dict("os.environ", AIRFLOW_CONN_TEST_CONN=Connection(**connection_kwargs).get_uri()), + mock.patch("airflow.providers.snowflake.hooks.snowflake.create_engine") as mock_create_engine, + ): hook = SnowflakeHook(snowflake_conn_id="test_conn") conn = hook.get_sqlalchemy_engine() mock_create_engine.assert_called_once_with( @@ -485,9 +495,10 @@ def test_get_sqlalchemy_engine_should_support_private_key_auth(self, non_encrypt connection_kwargs["password"] = "" connection_kwargs["extra"]["private_key_file"] = str(non_encrypted_temporary_private_key) - with mock.patch.dict( - "os.environ", AIRFLOW_CONN_TEST_CONN=Connection(**connection_kwargs).get_uri() - ), mock.patch("airflow.providers.snowflake.hooks.snowflake.create_engine") as mock_create_engine: + with ( + mock.patch.dict("os.environ", AIRFLOW_CONN_TEST_CONN=Connection(**connection_kwargs).get_uri()), + mock.patch("airflow.providers.snowflake.hooks.snowflake.create_engine") as mock_create_engine, + ): hook = SnowflakeHook(snowflake_conn_id="test_conn") conn = hook.get_sqlalchemy_engine() assert "private_key" in mock_create_engine.call_args.kwargs["connect_args"] diff --git a/tests/providers/snowflake/hooks/test_snowflake_sql_api.py b/providers/tests/snowflake/hooks/test_snowflake_sql_api.py similarity index 97% rename from tests/providers/snowflake/hooks/test_snowflake_sql_api.py rename to providers/tests/snowflake/hooks/test_snowflake_sql_api.py index df3f06db2f30..1247e3e03182 100644 --- a/tests/providers/snowflake/hooks/test_snowflake_sql_api.py +++ b/providers/tests/snowflake/hooks/test_snowflake_sql_api.py @@ -442,12 +442,15 @@ def test_get_private_key_raise_exception(self, encrypted_temporary_private_key: }, } hook = SnowflakeSqlApiHook(snowflake_conn_id="test_conn") - with unittest.mock.patch.dict( - "os.environ", AIRFLOW_CONN_TEST_CONN=Connection(**connection_kwargs).get_uri() - ), pytest.raises( - AirflowException, - match="The private_key_file and private_key_content extra fields are mutually " - "exclusive. Please remove one.", + with ( + unittest.mock.patch.dict( + "os.environ", AIRFLOW_CONN_TEST_CONN=Connection(**connection_kwargs).get_uri() + ), + pytest.raises( + AirflowException, + match="The private_key_file and private_key_content extra fields are mutually " + "exclusive. 
Please remove one.", + ), ): hook.get_private_key() @@ -504,9 +507,12 @@ def test_get_private_key_should_support_private_auth_with_unencrypted_key( hook.get_private_key() assert hook.private_key is not None connection_kwargs["password"] = _PASSWORD - with unittest.mock.patch.dict( - "os.environ", AIRFLOW_CONN_TEST_CONN=Connection(**connection_kwargs).get_uri() - ), pytest.raises(TypeError, match="Password was given but private key is not encrypted."): + with ( + unittest.mock.patch.dict( + "os.environ", AIRFLOW_CONN_TEST_CONN=Connection(**connection_kwargs).get_uri() + ), + pytest.raises(TypeError, match="Password was given but private key is not encrypted."), + ): SnowflakeSqlApiHook(snowflake_conn_id="test_conn").get_private_key() @pytest.mark.parametrize( diff --git a/tests/providers/snowflake/hooks/test_sql.py b/providers/tests/snowflake/hooks/test_sql.py similarity index 100% rename from tests/providers/snowflake/hooks/test_sql.py rename to providers/tests/snowflake/hooks/test_sql.py diff --git a/tests/providers/snowflake/transfers/__init__.py b/providers/tests/snowflake/operators/__init__.py similarity index 100% rename from tests/providers/snowflake/transfers/__init__.py rename to providers/tests/snowflake/operators/__init__.py diff --git a/tests/providers/snowflake/operators/test_snowflake.py b/providers/tests/snowflake/operators/test_snowflake.py similarity index 100% rename from tests/providers/snowflake/operators/test_snowflake.py rename to providers/tests/snowflake/operators/test_snowflake.py diff --git a/tests/providers/snowflake/operators/test_snowflake_sql.py b/providers/tests/snowflake/operators/test_snowflake_sql.py similarity index 100% rename from tests/providers/snowflake/operators/test_snowflake_sql.py rename to providers/tests/snowflake/operators/test_snowflake_sql.py diff --git a/tests/providers/snowflake/operators/test_snowpark.py b/providers/tests/snowflake/operators/test_snowpark.py similarity index 100% rename from tests/providers/snowflake/operators/test_snowpark.py rename to providers/tests/snowflake/operators/test_snowpark.py diff --git a/tests/providers/snowflake/triggers/__init__.py b/providers/tests/snowflake/transfers/__init__.py similarity index 100% rename from tests/providers/snowflake/triggers/__init__.py rename to providers/tests/snowflake/transfers/__init__.py diff --git a/tests/providers/snowflake/transfers/test_copy_into_snowflake.py b/providers/tests/snowflake/transfers/test_copy_into_snowflake.py similarity index 100% rename from tests/providers/snowflake/transfers/test_copy_into_snowflake.py rename to providers/tests/snowflake/transfers/test_copy_into_snowflake.py diff --git a/tests/providers/snowflake/utils/__init__.py b/providers/tests/snowflake/triggers/__init__.py similarity index 100% rename from tests/providers/snowflake/utils/__init__.py rename to providers/tests/snowflake/triggers/__init__.py diff --git a/tests/providers/snowflake/triggers/test_snowflake.py b/providers/tests/snowflake/triggers/test_snowflake.py similarity index 100% rename from tests/providers/snowflake/triggers/test_snowflake.py rename to providers/tests/snowflake/triggers/test_snowflake.py diff --git a/tests/providers/standard/__init__.py b/providers/tests/snowflake/utils/__init__.py similarity index 100% rename from tests/providers/standard/__init__.py rename to providers/tests/snowflake/utils/__init__.py diff --git a/tests/providers/snowflake/utils/test_common.py b/providers/tests/snowflake/utils/test_common.py similarity index 100% rename from 
tests/providers/snowflake/utils/test_common.py rename to providers/tests/snowflake/utils/test_common.py diff --git a/tests/providers/snowflake/utils/test_openlineage.py b/providers/tests/snowflake/utils/test_openlineage.py similarity index 100% rename from tests/providers/snowflake/utils/test_openlineage.py rename to providers/tests/snowflake/utils/test_openlineage.py diff --git a/tests/providers/snowflake/utils/test_snowpark.py b/providers/tests/snowflake/utils/test_snowpark.py similarity index 100% rename from tests/providers/snowflake/utils/test_snowpark.py rename to providers/tests/snowflake/utils/test_snowpark.py diff --git a/tests/providers/snowflake/utils/test_sql_api_generate_jwt.py b/providers/tests/snowflake/utils/test_sql_api_generate_jwt.py similarity index 100% rename from tests/providers/snowflake/utils/test_sql_api_generate_jwt.py rename to providers/tests/snowflake/utils/test_sql_api_generate_jwt.py diff --git a/tests/providers/smtp/hooks/__init__.py b/providers/tests/sqlite/__init__.py similarity index 100% rename from tests/providers/smtp/hooks/__init__.py rename to providers/tests/sqlite/__init__.py diff --git a/tests/providers/smtp/operators/__init__.py b/providers/tests/sqlite/hooks/__init__.py similarity index 100% rename from tests/providers/smtp/operators/__init__.py rename to providers/tests/sqlite/hooks/__init__.py diff --git a/tests/providers/sqlite/hooks/test_sqlite.py b/providers/tests/sqlite/hooks/test_sqlite.py similarity index 100% rename from tests/providers/sqlite/hooks/test_sqlite.py rename to providers/tests/sqlite/hooks/test_sqlite.py diff --git a/tests/providers/sqlite/__init__.py b/providers/tests/sqlite/operators/__init__.py similarity index 100% rename from tests/providers/sqlite/__init__.py rename to providers/tests/sqlite/operators/__init__.py diff --git a/tests/providers/sqlite/operators/test_sqlite.py b/providers/tests/sqlite/operators/test_sqlite.py similarity index 100% rename from tests/providers/sqlite/operators/test_sqlite.py rename to providers/tests/sqlite/operators/test_sqlite.py diff --git a/tests/providers/sqlite/hooks/__init__.py b/providers/tests/ssh/__init__.py similarity index 100% rename from tests/providers/sqlite/hooks/__init__.py rename to providers/tests/ssh/__init__.py diff --git a/tests/providers/sqlite/operators/__init__.py b/providers/tests/ssh/hooks/__init__.py similarity index 100% rename from tests/providers/sqlite/operators/__init__.py rename to providers/tests/ssh/hooks/__init__.py diff --git a/tests/providers/ssh/hooks/test_ssh.py b/providers/tests/ssh/hooks/test_ssh.py similarity index 99% rename from tests/providers/ssh/hooks/test_ssh.py rename to providers/tests/ssh/hooks/test_ssh.py index 71661e5b4ec6..6b3e5dcdae11 100644 --- a/tests/providers/ssh/hooks/test_ssh.py +++ b/providers/tests/ssh/hooks/test_ssh.py @@ -534,8 +534,9 @@ def test_tunnel(self): args=["python", "-c", HELLO_SERVER_CMD], stdout=subprocess.PIPE, ) - with subprocess.Popen(**subprocess_kwargs) as server_handle, hook.get_tunnel( - local_port=2135, remote_port=2134 + with ( + subprocess.Popen(**subprocess_kwargs) as server_handle, + hook.get_tunnel(local_port=2135, remote_port=2134), ): server_output = server_handle.stdout.read(5) assert b"ready" == server_output diff --git a/tests/providers/ssh/__init__.py b/providers/tests/ssh/operators/__init__.py similarity index 100% rename from tests/providers/ssh/__init__.py rename to providers/tests/ssh/operators/__init__.py diff --git a/tests/providers/ssh/operators/test_ssh.py 
b/providers/tests/ssh/operators/test_ssh.py similarity index 99% rename from tests/providers/ssh/operators/test_ssh.py rename to providers/tests/ssh/operators/test_ssh.py index 9b4ecbbb20db..b304a24882bb 100644 --- a/tests/providers/ssh/operators/test_ssh.py +++ b/providers/tests/ssh/operators/test_ssh.py @@ -31,7 +31,8 @@ from airflow.providers.ssh.operators.ssh import SSHOperator from airflow.utils.timezone import datetime from airflow.utils.types import NOTSET -from tests.test_utils.config import conf_vars + +from dev.tests_common.test_utils.config import conf_vars pytestmark = pytest.mark.db_test diff --git a/tests/providers/standard/operators/__init__.py b/providers/tests/standard/__init__.py similarity index 100% rename from tests/providers/standard/operators/__init__.py rename to providers/tests/standard/__init__.py diff --git a/tests/providers/standard/sensors/__init__.py b/providers/tests/standard/operators/__init__.py similarity index 100% rename from tests/providers/standard/sensors/__init__.py rename to providers/tests/standard/operators/__init__.py diff --git a/tests/providers/standard/operators/test_bash.py b/providers/tests/standard/operators/test_bash.py similarity index 99% rename from tests/providers/standard/operators/test_bash.py rename to providers/tests/standard/operators/test_bash.py index 2c29a0b96dc9..305651ee596a 100644 --- a/tests/providers/standard/operators/test_bash.py +++ b/providers/tests/standard/operators/test_bash.py @@ -32,7 +32,8 @@ from airflow.utils import timezone from airflow.utils.state import State from airflow.utils.types import DagRunType -from tests.test_utils.compat import AIRFLOW_V_3_0_PLUS + +from dev.tests_common.test_utils.compat import AIRFLOW_V_3_0_PLUS if AIRFLOW_V_3_0_PLUS: from airflow.utils.types import DagRunTriggeredByType diff --git a/tests/providers/standard/operators/test_datetime.py b/providers/tests/standard/operators/test_datetime.py similarity index 99% rename from tests/providers/standard/operators/test_datetime.py rename to providers/tests/standard/operators/test_datetime.py index 530450a963bd..4fbb0863d35a 100644 --- a/tests/providers/standard/operators/test_datetime.py +++ b/providers/tests/standard/operators/test_datetime.py @@ -30,7 +30,8 @@ from airflow.utils import timezone from airflow.utils.session import create_session from airflow.utils.state import State -from tests.test_utils.compat import AIRFLOW_V_3_0_PLUS + +from dev.tests_common.test_utils.compat import AIRFLOW_V_3_0_PLUS if AIRFLOW_V_3_0_PLUS: from airflow.utils.types import DagRunTriggeredByType diff --git a/tests/providers/standard/operators/test_weekday.py b/providers/tests/standard/operators/test_weekday.py similarity index 99% rename from tests/providers/standard/operators/test_weekday.py rename to providers/tests/standard/operators/test_weekday.py index 7ad7415c366f..6c20299d15e7 100644 --- a/tests/providers/standard/operators/test_weekday.py +++ b/providers/tests/standard/operators/test_weekday.py @@ -32,7 +32,8 @@ from airflow.utils.session import create_session from airflow.utils.state import State from airflow.utils.weekday import WeekDay -from tests.test_utils.compat import AIRFLOW_V_3_0_PLUS + +from dev.tests_common.test_utils.compat import AIRFLOW_V_3_0_PLUS if AIRFLOW_V_3_0_PLUS: from airflow.utils.types import DagRunTriggeredByType diff --git a/tests/providers/tableau/__init__.py b/providers/tests/standard/sensors/__init__.py similarity index 100% rename from tests/providers/tableau/__init__.py rename to 
providers/tests/standard/sensors/__init__.py diff --git a/tests/providers/standard/sensors/test_bash.py b/providers/tests/standard/sensors/test_bash.py similarity index 100% rename from tests/providers/standard/sensors/test_bash.py rename to providers/tests/standard/sensors/test_bash.py diff --git a/tests/providers/standard/sensors/test_date_time.py b/providers/tests/standard/sensors/test_date_time.py similarity index 100% rename from tests/providers/standard/sensors/test_date_time.py rename to providers/tests/standard/sensors/test_date_time.py diff --git a/tests/providers/standard/sensors/test_time.py b/providers/tests/standard/sensors/test_time.py similarity index 100% rename from tests/providers/standard/sensors/test_time.py rename to providers/tests/standard/sensors/test_time.py diff --git a/tests/providers/standard/sensors/test_time_delta.py b/providers/tests/standard/sensors/test_time_delta.py similarity index 100% rename from tests/providers/standard/sensors/test_time_delta.py rename to providers/tests/standard/sensors/test_time_delta.py diff --git a/tests/providers/standard/sensors/test_weekday.py b/providers/tests/standard/sensors/test_weekday.py similarity index 99% rename from tests/providers/standard/sensors/test_weekday.py rename to providers/tests/standard/sensors/test_weekday.py index d4a0f04c2290..ef0091a9fdd3 100644 --- a/tests/providers/standard/sensors/test_weekday.py +++ b/providers/tests/standard/sensors/test_weekday.py @@ -27,7 +27,8 @@ from airflow.providers.standard.sensors.weekday import DayOfWeekSensor from airflow.utils.timezone import datetime from airflow.utils.weekday import WeekDay -from tests.test_utils import db + +from dev.tests_common.test_utils import db pytestmark = pytest.mark.db_test diff --git a/tests/providers/tableau/operators/__init__.py b/providers/tests/system/__init__.py similarity index 100% rename from tests/providers/tableau/operators/__init__.py rename to providers/tests/system/__init__.py diff --git a/tests/providers/tableau/sensors/__init__.py b/providers/tests/system/airbyte/__init__.py similarity index 100% rename from tests/providers/tableau/sensors/__init__.py rename to providers/tests/system/airbyte/__init__.py diff --git a/tests/system/providers/airbyte/example_airbyte_trigger_job.py b/providers/tests/system/airbyte/example_airbyte_trigger_job.py similarity index 96% rename from tests/system/providers/airbyte/example_airbyte_trigger_job.py rename to providers/tests/system/airbyte/example_airbyte_trigger_job.py index 4ff2b5e0d48c..1dd3f01d7261 100644 --- a/tests/system/providers/airbyte/example_airbyte_trigger_job.py +++ b/providers/tests/system/airbyte/example_airbyte_trigger_job.py @@ -61,7 +61,7 @@ # Task dependency created via `XComArgs`: # async_source_destination >> airbyte_sensor -from tests.system.utils import get_test_run # noqa: E402 +from dev.tests_common.test_utils.system_tests import get_test_run # noqa: E402 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) test_run = get_test_run(dag) diff --git a/tests/providers/teradata/__init__.py b/providers/tests/system/alibaba/__init__.py similarity index 100% rename from tests/providers/teradata/__init__.py rename to providers/tests/system/alibaba/__init__.py diff --git a/tests/system/providers/alibaba/example_adb_spark_batch.py b/providers/tests/system/alibaba/example_adb_spark_batch.py similarity index 93% rename from tests/system/providers/alibaba/example_adb_spark_batch.py rename to 
providers/tests/system/alibaba/example_adb_spark_batch.py index 9f23693066ae..35e03a088871 100644 --- a/tests/system/providers/alibaba/example_adb_spark_batch.py +++ b/providers/tests/system/alibaba/example_adb_spark_batch.py @@ -50,14 +50,14 @@ spark_pi >> spark_lr - from tests.system.utils.watcher import watcher + from dev.tests_common.test_utils.watcher import watcher # This test needs watcher in order to properly mark success/failure # when "tearDown" task with trigger rule is part of the DAG list(dag.tasks) >> watcher() # [END howto_operator_adb_spark_batch] -from tests.system.utils import get_test_run # noqa: E402 +from dev.tests_common.test_utils.system_tests import get_test_run # noqa: E402 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) test_run = get_test_run(dag) diff --git a/tests/system/providers/alibaba/example_adb_spark_sql.py b/providers/tests/system/alibaba/example_adb_spark_sql.py similarity index 93% rename from tests/system/providers/alibaba/example_adb_spark_sql.py rename to providers/tests/system/alibaba/example_adb_spark_sql.py index fcfe4b896ccb..ac29330d451d 100644 --- a/tests/system/providers/alibaba/example_adb_spark_sql.py +++ b/providers/tests/system/alibaba/example_adb_spark_sql.py @@ -42,14 +42,14 @@ show_databases >> show_tables - from tests.system.utils.watcher import watcher + from dev.tests_common.test_utils.watcher import watcher # This test needs watcher in order to properly mark success/failure # when "tearDown" task with trigger rule is part of the DAG list(dag.tasks) >> watcher() # [END howto_operator_adb_spark_sql] -from tests.system.utils import get_test_run # noqa: E402 +from dev.tests_common.test_utils.system_tests import get_test_run # noqa: E402 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) test_run = get_test_run(dag) diff --git a/tests/system/providers/alibaba/example_oss_bucket.py b/providers/tests/system/alibaba/example_oss_bucket.py similarity index 93% rename from tests/system/providers/alibaba/example_oss_bucket.py rename to providers/tests/system/alibaba/example_oss_bucket.py index 1e39d3eb4503..4870a3b57797 100644 --- a/tests/system/providers/alibaba/example_oss_bucket.py +++ b/providers/tests/system/alibaba/example_oss_bucket.py @@ -41,14 +41,14 @@ create_bucket >> delete_bucket - from tests.system.utils.watcher import watcher + from dev.tests_common.test_utils.watcher import watcher # This test needs watcher in order to properly mark success/failure # when "tearDown" task with trigger rule is part of the DAG list(dag.tasks) >> watcher() # [END howto_operator_oss_bucket] -from tests.system.utils import get_test_run # noqa: E402 +from dev.tests_common.test_utils.system_tests import get_test_run # noqa: E402 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) test_run = get_test_run(dag) diff --git a/tests/system/providers/alibaba/example_oss_object.py b/providers/tests/system/alibaba/example_oss_object.py similarity index 94% rename from tests/system/providers/alibaba/example_oss_object.py rename to providers/tests/system/alibaba/example_oss_object.py index 5b73fb1ba7a6..7305c05bf838 100644 --- a/tests/system/providers/alibaba/example_oss_object.py +++ b/providers/tests/system/alibaba/example_oss_object.py @@ -65,14 +65,14 @@ create_object >> download_object >> delete_object >> delete_batch_object - from tests.system.utils.watcher import watcher + from dev.tests_common.test_utils.watcher import watcher # This test 
needs watcher in order to properly mark success/failure # when "tearDown" task with trigger rule is part of the DAG list(dag.tasks) >> watcher() -from tests.system.utils import get_test_run # noqa: E402 +from dev.tests_common.test_utils.system_tests import get_test_run # noqa: E402 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) test_run = get_test_run(dag) diff --git a/tests/system/providers/amazon/CONTRIBUTING.md b/providers/tests/system/amazon/CONTRIBUTING.md similarity index 99% rename from tests/system/providers/amazon/CONTRIBUTING.md rename to providers/tests/system/amazon/CONTRIBUTING.md index b9aaaa66915c..f12062aa7fb5 100644 --- a/tests/system/providers/amazon/CONTRIBUTING.md +++ b/providers/tests/system/amazon/CONTRIBUTING.md @@ -212,7 +212,7 @@ chain( task2, # task2 has trigger rule "all done" defined ) -from tests.system.utils.watcher import watcher +from dev.tests_common.test_utils.watcher import watcher # This test needs watcher in order to properly mark success/failure # when "tearDown" task with trigger rule is part of the DAG diff --git a/tests/system/providers/amazon/README.md b/providers/tests/system/amazon/README.md similarity index 100% rename from tests/system/providers/amazon/README.md rename to providers/tests/system/amazon/README.md diff --git a/tests/providers/ssh/hooks/__init__.py b/providers/tests/system/amazon/__init__.py similarity index 100% rename from tests/providers/ssh/hooks/__init__.py rename to providers/tests/system/amazon/__init__.py diff --git a/tests/providers/ssh/operators/__init__.py b/providers/tests/system/amazon/aws/__init__.py similarity index 100% rename from tests/providers/ssh/operators/__init__.py rename to providers/tests/system/amazon/aws/__init__.py diff --git a/tests/system/providers/amazon/aws/example_appflow.py b/providers/tests/system/amazon/aws/example_appflow.py similarity index 94% rename from tests/system/providers/amazon/aws/example_appflow.py rename to providers/tests/system/amazon/aws/example_appflow.py index 5ba38533b021..db9dd9a42894 100644 --- a/tests/system/providers/amazon/aws/example_appflow.py +++ b/providers/tests/system/amazon/aws/example_appflow.py @@ -28,7 +28,8 @@ AppflowRunFullOperator, ) from airflow.providers.standard.operators.bash import BashOperator -from tests.system.providers.amazon.aws.utils import SystemTestContextBuilder + +from providers.tests.system.amazon.aws.utils import SystemTestContextBuilder sys_test_context_task = SystemTestContextBuilder().build() @@ -110,13 +111,13 @@ should_be_skipped, ) - from tests.system.utils.watcher import watcher + from dev.tests_common.test_utils.watcher import watcher # This test needs watcher in order to properly mark success/failure # when "tearDown" task with trigger rule is part of the DAG list(dag.tasks) >> watcher() -from tests.system.utils import get_test_run # noqa: E402 +from dev.tests_common.test_utils.system_tests import get_test_run # noqa: E402 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) test_run = get_test_run(dag) diff --git a/tests/system/providers/amazon/aws/example_appflow_run.py b/providers/tests/system/amazon/aws/example_appflow_run.py similarity index 97% rename from tests/system/providers/amazon/aws/example_appflow_run.py rename to providers/tests/system/amazon/aws/example_appflow_run.py index d73a42612780..d1151d1dd5c8 100644 --- a/tests/system/providers/amazon/aws/example_appflow_run.py +++ b/providers/tests/system/amazon/aws/example_appflow_run.py @@ -33,7 
+33,8 @@ S3DeleteBucketOperator, ) from airflow.utils.trigger_rule import TriggerRule -from tests.system.providers.amazon.aws.utils import SystemTestContextBuilder + +from providers.tests.system.amazon.aws.utils import SystemTestContextBuilder sys_test_context_task = SystemTestContextBuilder().build() @@ -192,13 +193,13 @@ def delete_flow(flow_name: str): delete_bucket, ) - from tests.system.utils.watcher import watcher + from dev.tests_common.test_utils.watcher import watcher # This test needs watcher in order to properly mark success/failure # when "tearDown" task with trigger rule is part of the DAG list(dag.tasks) >> watcher() -from tests.system.utils import get_test_run # noqa: E402 +from dev.tests_common.test_utils.system_tests import get_test_run # noqa: E402 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) test_run = get_test_run(dag) diff --git a/tests/system/providers/amazon/aws/example_athena.py b/providers/tests/system/amazon/aws/example_athena.py similarity index 96% rename from tests/system/providers/amazon/aws/example_athena.py rename to providers/tests/system/amazon/aws/example_athena.py index 92c56c24ee7f..2ee1c11ab788 100644 --- a/tests/system/providers/amazon/aws/example_athena.py +++ b/providers/tests/system/amazon/aws/example_athena.py @@ -32,7 +32,8 @@ ) from airflow.providers.amazon.aws.sensors.athena import AthenaSensor from airflow.utils.trigger_rule import TriggerRule -from tests.system.providers.amazon.aws.utils import SystemTestContextBuilder + +from providers.tests.system.amazon.aws.utils import SystemTestContextBuilder sys_test_context_task = SystemTestContextBuilder().build() @@ -173,14 +174,14 @@ def read_results_from_s3(bucket_name, query_execution_id): delete_s3_bucket, ) - from tests.system.utils.watcher import watcher + from dev.tests_common.test_utils.watcher import watcher # This test needs watcher in order to properly mark success/failure # when "tearDown" task with trigger rule is part of the DAG list(dag.tasks) >> watcher() -from tests.system.utils import get_test_run # noqa: E402 +from dev.tests_common.test_utils.system_tests import get_test_run # noqa: E402 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) test_run = get_test_run(dag) diff --git a/tests/system/providers/amazon/aws/example_azure_blob_to_s3.py b/providers/tests/system/amazon/aws/example_azure_blob_to_s3.py similarity index 93% rename from tests/system/providers/amazon/aws/example_azure_blob_to_s3.py rename to providers/tests/system/amazon/aws/example_azure_blob_to_s3.py index 0109b8813063..78a55a6a7f7d 100644 --- a/tests/system/providers/amazon/aws/example_azure_blob_to_s3.py +++ b/providers/tests/system/amazon/aws/example_azure_blob_to_s3.py @@ -23,7 +23,8 @@ from airflow.providers.amazon.aws.operators.s3 import S3CreateBucketOperator, S3DeleteBucketOperator from airflow.providers.amazon.aws.transfers.azure_blob_to_s3 import AzureBlobStorageToS3Operator from airflow.utils.trigger_rule import TriggerRule -from tests.system.providers.amazon.aws.utils import SystemTestContextBuilder + +from providers.tests.system.amazon.aws.utils import SystemTestContextBuilder sys_test_context_task = SystemTestContextBuilder().build() @@ -71,13 +72,13 @@ delete_s3_bucket, ) - from tests.system.utils.watcher import watcher + from dev.tests_common.test_utils.watcher import watcher # This test needs watcher in order to properly mark success/failure # when "tearDown" task with trigger rule is part of the DAG list(dag.tasks) >> 
watcher() -from tests.system.utils import get_test_run # noqa: E402 +from dev.tests_common.test_utils.system_tests import get_test_run # noqa: E402 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) test_run = get_test_run(dag) diff --git a/tests/system/providers/amazon/aws/example_batch.py b/providers/tests/system/amazon/aws/example_batch.py similarity index 97% rename from tests/system/providers/amazon/aws/example_batch.py rename to providers/tests/system/amazon/aws/example_batch.py index b33078407a29..da035050a77b 100644 --- a/tests/system/providers/amazon/aws/example_batch.py +++ b/providers/tests/system/amazon/aws/example_batch.py @@ -32,7 +32,8 @@ BatchSensor, ) from airflow.utils.trigger_rule import TriggerRule -from tests.system.providers.amazon.aws.utils import ( + +from providers.tests.system.amazon.aws.utils import ( ENV_ID_KEY, SystemTestContextBuilder, prune_logs, @@ -282,13 +283,13 @@ def delete_job_queue(job_queue_name): log_cleanup, ) - from tests.system.utils.watcher import watcher + from dev.tests_common.test_utils.watcher import watcher # This test needs watcher in order to properly mark success/failure # when "tearDown" task with trigger rule is part of the DAG list(dag.tasks) >> watcher() -from tests.system.utils import get_test_run # noqa: E402 +from dev.tests_common.test_utils.system_tests import get_test_run # noqa: E402 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) test_run = get_test_run(dag) diff --git a/tests/system/providers/amazon/aws/example_bedrock.py b/providers/tests/system/amazon/aws/example_bedrock.py similarity index 97% rename from tests/system/providers/amazon/aws/example_bedrock.py rename to providers/tests/system/amazon/aws/example_bedrock.py index 3c015496f992..8e67ee4d3880 100644 --- a/tests/system/providers/amazon/aws/example_bedrock.py +++ b/providers/tests/system/amazon/aws/example_bedrock.py @@ -43,7 +43,8 @@ ) from airflow.utils.edgemodifier import Label from airflow.utils.trigger_rule import TriggerRule -from tests.system.providers.amazon.aws.utils import SystemTestContextBuilder + +from providers.tests.system.amazon.aws.utils import SystemTestContextBuilder # Externally fetched variables: ROLE_ARN_KEY = "ROLE_ARN" @@ -217,14 +218,14 @@ def run_or_skip(): delete_bucket, ) - from tests.system.utils.watcher import watcher + from dev.tests_common.test_utils.watcher import watcher # This test needs watcher in order to properly mark success/failure # when "tearDown" task with trigger rule is part of the DAG list(dag.tasks) >> watcher() -from tests.system.utils import get_test_run # noqa: E402 +from dev.tests_common.test_utils.system_tests import get_test_run # noqa: E402 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) test_run = get_test_run(dag) diff --git a/tests/system/providers/amazon/aws/example_bedrock_retrieve_and_generate.py b/providers/tests/system/amazon/aws/example_bedrock_retrieve_and_generate.py similarity index 99% rename from tests/system/providers/amazon/aws/example_bedrock_retrieve_and_generate.py rename to providers/tests/system/amazon/aws/example_bedrock_retrieve_and_generate.py index a1d1211da4c4..0d9eb278460f 100644 --- a/tests/system/providers/amazon/aws/example_bedrock_retrieve_and_generate.py +++ b/providers/tests/system/amazon/aws/example_bedrock_retrieve_and_generate.py @@ -60,7 +60,8 @@ from airflow.providers.amazon.aws.utils import get_botocore_version from airflow.utils.edgemodifier import Label 
 from airflow.utils.trigger_rule import TriggerRule
-from tests.system.providers.amazon.aws.utils import SystemTestContextBuilder
+
+from providers.tests.system.amazon.aws.utils import SystemTestContextBuilder

 #######################################################################
 # NOTE:
@@ -593,14 +594,14 @@ def delete_opensearch_policies(collection_name: str):
         delete_bucket,
     )

-    from tests.system.utils.watcher import watcher
+    from dev.tests_common.test_utils.watcher import watcher

     # This test needs watcher in order to properly mark success/failure
     # when "tearDown" task with trigger rule is part of the DAG
     list(dag.tasks) >> watcher()

-from tests.system.utils import get_test_run  # noqa: E402
+from dev.tests_common.test_utils.system_tests import get_test_run  # noqa: E402

 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest)
 test_run = get_test_run(dag)
diff --git a/tests/system/providers/amazon/aws/example_cloudformation.py b/providers/tests/system/amazon/aws/example_cloudformation.py
similarity index 95%
rename from tests/system/providers/amazon/aws/example_cloudformation.py
rename to providers/tests/system/amazon/aws/example_cloudformation.py
index a376addd3489..38827eebbdb5 100644
--- a/tests/system/providers/amazon/aws/example_cloudformation.py
+++ b/providers/tests/system/amazon/aws/example_cloudformation.py
@@ -30,7 +30,8 @@
     CloudFormationDeleteStackSensor,
 )
 from airflow.utils.trigger_rule import TriggerRule
-from tests.system.providers.amazon.aws.utils import SystemTestContextBuilder
+
+from providers.tests.system.amazon.aws.utils import SystemTestContextBuilder

 sys_test_context_task = SystemTestContextBuilder().build()

@@ -107,14 +108,14 @@
         wait_for_stack_delete,
     )

-    from tests.system.utils.watcher import watcher
+    from dev.tests_common.test_utils.watcher import watcher

     # This test needs watcher in order to properly mark success/failure
     # when "tearDown" task with trigger rule is part of the DAG
     list(dag.tasks) >> watcher()

-from tests.system.utils import get_test_run  # noqa: E402
+from dev.tests_common.test_utils.system_tests import get_test_run  # noqa: E402

 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest)
 test_run = get_test_run(dag)
diff --git a/tests/system/providers/amazon/aws/example_comprehend.py b/providers/tests/system/amazon/aws/example_comprehend.py
similarity index 95%
rename from tests/system/providers/amazon/aws/example_comprehend.py
rename to providers/tests/system/amazon/aws/example_comprehend.py
index 58e34329b67f..d8a1f9af88b6 100644
--- a/tests/system/providers/amazon/aws/example_comprehend.py
+++ b/providers/tests/system/amazon/aws/example_comprehend.py
@@ -32,7 +32,8 @@
     ComprehendStartPiiEntitiesDetectionJobCompletedSensor,
 )
 from airflow.utils.trigger_rule import TriggerRule
-from tests.system.providers.amazon.aws.utils import SystemTestContextBuilder
+
+from providers.tests.system.amazon.aws.utils import SystemTestContextBuilder

 ROLE_ARN_KEY = "ROLE_ARN"
 sys_test_context_task = SystemTestContextBuilder().add_variable(ROLE_ARN_KEY).build()
@@ -125,13 +126,13 @@ def pii_entities_detection_job_workflow():
         delete_bucket,
     )

-    from tests.system.utils.watcher import watcher
+    from dev.tests_common.test_utils.watcher import watcher

     # This test needs watcher in order to properly mark success/failure
     # when "tearDown" task with trigger rule is part of the DAG
     list(dag.tasks) >> watcher()

-from tests.system.utils import get_test_run  # noqa: E402
+from dev.tests_common.test_utils.system_tests import get_test_run  # noqa: E402

 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest)
 test_run = get_test_run(dag)
diff --git a/tests/system/providers/amazon/aws/example_comprehend_document_classifier.py b/providers/tests/system/amazon/aws/example_comprehend_document_classifier.py
similarity index 97%
rename from tests/system/providers/amazon/aws/example_comprehend_document_classifier.py
rename to providers/tests/system/amazon/aws/example_comprehend_document_classifier.py
index 08750da98141..f5308265a83c 100644
--- a/tests/system/providers/amazon/aws/example_comprehend_document_classifier.py
+++ b/providers/tests/system/amazon/aws/example_comprehend_document_classifier.py
@@ -37,7 +37,8 @@
 )
 from airflow.providers.amazon.aws.transfers.http_to_s3 import HttpToS3Operator
 from airflow.utils.trigger_rule import TriggerRule
-from tests.system.providers.amazon.aws.utils import SystemTestContextBuilder
+
+from providers.tests.system.amazon.aws.utils import SystemTestContextBuilder

 ROLE_ARN_KEY = "ROLE_ARN"
 sys_test_context_task = SystemTestContextBuilder().add_variable(ROLE_ARN_KEY).build()
@@ -230,13 +231,13 @@ def delete_connection(conn_id):
         delete_bucket,
     )

-    from tests.system.utils.watcher import watcher
+    from dev.tests_common.test_utils.watcher import watcher

     # This test needs watcher in order to properly mark success/failure
     # when "tearDown" task with trigger rule is part of the DAG
     list(dag.tasks) >> watcher()

-from tests.system.utils import get_test_run  # noqa: E402
+from dev.tests_common.test_utils.system_tests import get_test_run  # noqa: E402

 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest)
 test_run = get_test_run(dag)
diff --git a/tests/system/providers/amazon/aws/example_datasync.py b/providers/tests/system/amazon/aws/example_datasync.py
similarity index 97%
rename from tests/system/providers/amazon/aws/example_datasync.py
rename to providers/tests/system/amazon/aws/example_datasync.py
index 8f57d1c65606..a8363fe81b36 100644
--- a/tests/system/providers/amazon/aws/example_datasync.py
+++ b/providers/tests/system/amazon/aws/example_datasync.py
@@ -26,7 +26,8 @@
 from airflow.providers.amazon.aws.operators.datasync import DataSyncOperator
 from airflow.providers.amazon.aws.operators.s3 import S3CreateBucketOperator, S3DeleteBucketOperator
 from airflow.utils.trigger_rule import TriggerRule
-from tests.system.providers.amazon.aws.utils import ENV_ID_KEY, SystemTestContextBuilder
+
+from providers.tests.system.amazon.aws.utils import ENV_ID_KEY, SystemTestContextBuilder

 DAG_ID = "example_datasync"

@@ -234,13 +235,13 @@ def delete_locations(locations):
         delete_s3_bucket_destination,
     )

-    from tests.system.utils.watcher import watcher
+    from dev.tests_common.test_utils.watcher import watcher

     # This test needs watcher in order to properly mark success/failure
     # when "tearDown" task with trigger rule is part of the DAG
     list(dag.tasks) >> watcher()

-from tests.system.utils import get_test_run  # noqa: E402
+from dev.tests_common.test_utils.system_tests import get_test_run  # noqa: E402

 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest)
 test_run = get_test_run(dag)
diff --git a/tests/system/providers/amazon/aws/example_dms.py b/providers/tests/system/amazon/aws/example_dms.py
similarity index 98%
rename from tests/system/providers/amazon/aws/example_dms.py
rename to providers/tests/system/amazon/aws/example_dms.py
index 32506a0268bf..b2e99daf6290 100644
--- a/tests/system/providers/amazon/aws/example_dms.py
+++ b/providers/tests/system/amazon/aws/example_dms.py
@@ -46,8 +46,9 @@
 from airflow.providers.amazon.aws.operators.s3 import S3CreateBucketOperator, S3DeleteBucketOperator
 from airflow.providers.amazon.aws.sensors.dms import DmsTaskBaseSensor, DmsTaskCompletedSensor
 from airflow.utils.trigger_rule import TriggerRule
-from tests.system.providers.amazon.aws.utils import ENV_ID_KEY, SystemTestContextBuilder
-from tests.system.providers.amazon.aws.utils.ec2 import get_default_vpc_id
+
+from providers.tests.system.amazon.aws.utils import ENV_ID_KEY, SystemTestContextBuilder
+from providers.tests.system.amazon.aws.utils.ec2 import get_default_vpc_id

 DAG_ID = "example_dms"
 ROLE_ARN_KEY = "ROLE_ARN"
@@ -428,13 +429,13 @@ def delete_security_group(security_group_id: str, security_group_name: str):
         delete_s3_bucket,
     )

-    from tests.system.utils.watcher import watcher
+    from dev.tests_common.test_utils.watcher import watcher

     # This test needs watcher in order to properly mark success/failure
     # when "tearDown" task with trigger rule is part of the DAG
     list(dag.tasks) >> watcher()

-from tests.system.utils import get_test_run  # noqa: E402
+from dev.tests_common.test_utils.system_tests import get_test_run  # noqa: E402

 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest)
 test_run = get_test_run(dag)
diff --git a/tests/system/providers/amazon/aws/example_dynamodb.py b/providers/tests/system/amazon/aws/example_dynamodb.py
similarity index 95%
rename from tests/system/providers/amazon/aws/example_dynamodb.py
rename to providers/tests/system/amazon/aws/example_dynamodb.py
index 6c3d770c9ed9..3d709c36b02a 100644
--- a/tests/system/providers/amazon/aws/example_dynamodb.py
+++ b/providers/tests/system/amazon/aws/example_dynamodb.py
@@ -25,7 +25,8 @@
 from airflow.models.dag import DAG
 from airflow.providers.amazon.aws.sensors.dynamodb import DynamoDBValueSensor
 from airflow.utils.trigger_rule import TriggerRule
-from tests.system.providers.amazon.aws.utils import ENV_ID_KEY, SystemTestContextBuilder
+
+from providers.tests.system.amazon.aws.utils import ENV_ID_KEY, SystemTestContextBuilder

 # TODO: FIXME The argument types here seems somewhat tricky to fix
 # mypy: disable-error-code="arg-type"
@@ -116,14 +117,14 @@ def delete_table(table_name: str):
         delete_table,
     )

-    from tests.system.utils.watcher import watcher
+    from dev.tests_common.test_utils.watcher import watcher

     # This test needs watcher in order to properly mark success/failure
     # when "tearDown" task with trigger rule is part of the DAG
     list(dag.tasks) >> watcher()

-from tests.system.utils import get_test_run  # noqa: E402
+from dev.tests_common.test_utils.system_tests import get_test_run  # noqa: E402

 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest)
 test_run = get_test_run(dag)
diff --git a/tests/system/providers/amazon/aws/example_dynamodb_to_s3.py b/providers/tests/system/amazon/aws/example_dynamodb_to_s3.py
similarity index 97%
rename from tests/system/providers/amazon/aws/example_dynamodb_to_s3.py
rename to providers/tests/system/amazon/aws/example_dynamodb_to_s3.py
index e22bc2080d5e..86a5d76c9587 100644
--- a/tests/system/providers/amazon/aws/example_dynamodb_to_s3.py
+++ b/providers/tests/system/amazon/aws/example_dynamodb_to_s3.py
@@ -31,7 +31,8 @@
 from airflow.providers.amazon.aws.transfers.dynamodb_to_s3 import DynamoDBToS3Operator
 from airflow.utils.edgemodifier import Label
 from airflow.utils.trigger_rule import TriggerRule
-from tests.system.providers.amazon.aws.utils import ENV_ID_KEY, SystemTestContextBuilder
+
+from providers.tests.system.amazon.aws.utils import ENV_ID_KEY, SystemTestContextBuilder

 log = logging.getLogger(__name__)

@@ -255,13 +256,13 @@ def skip_incremental_export(start_time: datetime, end_time: datetime):
         delete_table,
         delete_bucket,
     )

-    from tests.system.utils.watcher import watcher
+    from dev.tests_common.test_utils.watcher import watcher

     # This test needs watcher in order to properly mark success/failure
     # when "tearDown" task with trigger rule is part of the DAG
     list(dag.tasks) >> watcher()

-from tests.system.utils import get_test_run  # noqa: E402
+from dev.tests_common.test_utils.system_tests import get_test_run  # noqa: E402

 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest)
 test_run = get_test_run(dag)
diff --git a/tests/system/providers/amazon/aws/example_ec2.py b/providers/tests/system/amazon/aws/example_ec2.py
similarity index 97%
rename from tests/system/providers/amazon/aws/example_ec2.py
rename to providers/tests/system/amazon/aws/example_ec2.py
index 10b9c62338b9..e55adab775ff 100644
--- a/tests/system/providers/amazon/aws/example_ec2.py
+++ b/providers/tests/system/amazon/aws/example_ec2.py
@@ -34,7 +34,8 @@
 )
 from airflow.providers.amazon.aws.sensors.ec2 import EC2InstanceStateSensor
 from airflow.utils.trigger_rule import TriggerRule
-from tests.system.providers.amazon.aws.utils import ENV_ID_KEY, SystemTestContextBuilder
+
+from providers.tests.system.amazon.aws.utils import ENV_ID_KEY, SystemTestContextBuilder

 DAG_ID = "example_ec2"

@@ -201,13 +202,13 @@ def parse_response(instance_ids: list):
         delete_key_pair(key_name),
     )

-    from tests.system.utils.watcher import watcher
+    from dev.tests_common.test_utils.watcher import watcher

     # This test needs watcher in order to properly mark success/failure
     # when "tearDown" task with trigger rule is part of the DAG
     list(dag.tasks) >> watcher()

-from tests.system.utils import get_test_run  # noqa: E402
+from dev.tests_common.test_utils.system_tests import get_test_run  # noqa: E402

 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest)
 test_run = get_test_run(dag)
diff --git a/tests/system/providers/amazon/aws/example_ecs.py b/providers/tests/system/amazon/aws/example_ecs.py
similarity index 97%
rename from tests/system/providers/amazon/aws/example_ecs.py
rename to providers/tests/system/amazon/aws/example_ecs.py
index 98617dcbcb6d..e1aad99b5e7c 100644
--- a/tests/system/providers/amazon/aws/example_ecs.py
+++ b/providers/tests/system/amazon/aws/example_ecs.py
@@ -36,7 +36,8 @@
     EcsTaskDefinitionStateSensor,
 )
 from airflow.utils.trigger_rule import TriggerRule
-from tests.system.providers.amazon.aws.utils import ENV_ID_KEY, SystemTestContextBuilder
+
+from providers.tests.system.amazon.aws.utils import ENV_ID_KEY, SystemTestContextBuilder

 DAG_ID = "example_ecs"

@@ -211,14 +212,14 @@ def clean_logs(group_name: str):
         clean_logs(log_group_name),
     )

-    from tests.system.utils.watcher import watcher
+    from dev.tests_common.test_utils.watcher import watcher

     # This test needs watcher in order to properly mark success/failure
     # when "tearDown" task with trigger rule is part of the DAG
     list(dag.tasks) >> watcher()

-from tests.system.utils import get_test_run  # noqa: E402
+from dev.tests_common.test_utils.system_tests import get_test_run  # noqa: E402

 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest)
 test_run = get_test_run(dag)
diff --git a/tests/system/providers/amazon/aws/example_ecs_fargate.py b/providers/tests/system/amazon/aws/example_ecs_fargate.py
similarity index 96%
rename from tests/system/providers/amazon/aws/example_ecs_fargate.py
rename to providers/tests/system/amazon/aws/example_ecs_fargate.py
index 6a299c788ca1..ca592234aa77 100644
--- a/tests/system/providers/amazon/aws/example_ecs_fargate.py
+++ b/providers/tests/system/amazon/aws/example_ecs_fargate.py
@@ -27,7 +27,8 @@
 from airflow.providers.amazon.aws.operators.ecs import EcsRunTaskOperator
 from airflow.providers.amazon.aws.sensors.ecs import EcsTaskStateSensor
 from airflow.utils.trigger_rule import TriggerRule
-from tests.system.providers.amazon.aws.utils import ENV_ID_KEY, SystemTestContextBuilder
+
+from providers.tests.system.amazon.aws.utils import ENV_ID_KEY, SystemTestContextBuilder

 DAG_ID = "example_ecs_fargate"

@@ -155,13 +156,13 @@ def delete_cluster(cluster_name: str) -> None:
         delete_cluster(cluster_name),
     )

-    from tests.system.utils.watcher import watcher
+    from dev.tests_common.test_utils.watcher import watcher

     # This test needs watcher in order to properly mark success/failure
     # when "tearDown" task with trigger rule is part of the DAG
     list(dag.tasks) >> watcher()

-from tests.system.utils import get_test_run  # noqa: E402
+from dev.tests_common.test_utils.system_tests import get_test_run  # noqa: E402

 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest)
 test_run = get_test_run(dag)
diff --git a/tests/system/providers/amazon/aws/example_eks_templated.py b/providers/tests/system/amazon/aws/example_eks_templated.py
similarity index 96%
rename from tests/system/providers/amazon/aws/example_eks_templated.py
rename to providers/tests/system/amazon/aws/example_eks_templated.py
index 7ad8bc77c2ec..3b1ba5059725 100644
--- a/tests/system/providers/amazon/aws/example_eks_templated.py
+++ b/providers/tests/system/amazon/aws/example_eks_templated.py
@@ -29,7 +29,8 @@
     EksPodOperator,
 )
 from airflow.providers.amazon.aws.sensors.eks import EksClusterStateSensor, EksNodegroupStateSensor
-from tests.system.providers.amazon.aws.utils import SystemTestContextBuilder
+
+from providers.tests.system.amazon.aws.utils import SystemTestContextBuilder

 sys_test_context_task = SystemTestContextBuilder().build()

@@ -147,13 +148,13 @@
         await_delete_cluster,
     )

-    from tests.system.utils.watcher import watcher
+    from dev.tests_common.test_utils.watcher import watcher

     # This test needs watcher in order to properly mark success/failure
     # when "tearDown" task with trigger rule is part of the DAG
     list(dag.tasks) >> watcher()

-from tests.system.utils import get_test_run  # noqa: E402
+from dev.tests_common.test_utils.system_tests import get_test_run  # noqa: E402

 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest)
 test_run = get_test_run(dag)
diff --git a/tests/system/providers/amazon/aws/example_eks_with_fargate_in_one_step.py b/providers/tests/system/amazon/aws/example_eks_with_fargate_in_one_step.py
similarity index 95%
rename from tests/system/providers/amazon/aws/example_eks_with_fargate_in_one_step.py
rename to providers/tests/system/amazon/aws/example_eks_with_fargate_in_one_step.py
index ae67a26588bd..27ee2be0d728 100644
--- a/tests/system/providers/amazon/aws/example_eks_with_fargate_in_one_step.py
+++ b/providers/tests/system/amazon/aws/example_eks_with_fargate_in_one_step.py
@@ -28,8 +28,9 @@
 )
 from airflow.providers.amazon.aws.sensors.eks import EksClusterStateSensor, EksFargateProfileStateSensor
 from airflow.utils.trigger_rule import TriggerRule
-from tests.system.providers.amazon.aws.utils import ENV_ID_KEY, SystemTestContextBuilder
-from tests.system.providers.amazon.aws.utils.k8s import get_describe_pod_operator
+
+from providers.tests.system.amazon.aws.utils import ENV_ID_KEY, SystemTestContextBuilder
+from providers.tests.system.amazon.aws.utils.k8s import get_describe_pod_operator

 DAG_ID = "example_eks_with_fargate_in_one_step"

@@ -140,14 +141,14 @@
         await_delete_cluster,
     )

-    from tests.system.utils.watcher import watcher
+    from dev.tests_common.test_utils.watcher import watcher

     # This test needs watcher in order to properly mark success/failure
     # when "tearDown" task with trigger rule is part of the DAG
     list(dag.tasks) >> watcher()

-from tests.system.utils import get_test_run  # noqa: E402
+from dev.tests_common.test_utils.system_tests import get_test_run  # noqa: E402

 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest)
 test_run = get_test_run(dag)
diff --git a/tests/system/providers/amazon/aws/example_eks_with_fargate_profile.py b/providers/tests/system/amazon/aws/example_eks_with_fargate_profile.py
similarity index 95%
rename from tests/system/providers/amazon/aws/example_eks_with_fargate_profile.py
rename to providers/tests/system/amazon/aws/example_eks_with_fargate_profile.py
index 9cce50b9bac4..34cbed1645fa 100644
--- a/tests/system/providers/amazon/aws/example_eks_with_fargate_profile.py
+++ b/providers/tests/system/amazon/aws/example_eks_with_fargate_profile.py
@@ -30,8 +30,9 @@
 )
 from airflow.providers.amazon.aws.sensors.eks import EksClusterStateSensor, EksFargateProfileStateSensor
 from airflow.utils.trigger_rule import TriggerRule
-from tests.system.providers.amazon.aws.utils import ENV_ID_KEY, SystemTestContextBuilder
-from tests.system.providers.amazon.aws.utils.k8s import get_describe_pod_operator
+
+from providers.tests.system.amazon.aws.utils import ENV_ID_KEY, SystemTestContextBuilder
+from providers.tests.system.amazon.aws.utils.k8s import get_describe_pod_operator

 DAG_ID = "example_eks_with_fargate_profile"

@@ -174,13 +175,13 @@
         await_delete_cluster,
     )

-    from tests.system.utils.watcher import watcher
+    from dev.tests_common.test_utils.watcher import watcher

     # This test needs watcher in order to properly mark success/failure
     # when "tearDown" task with trigger rule is part of the DAG
     list(dag.tasks) >> watcher()

-from tests.system.utils import get_test_run  # noqa: E402
+from dev.tests_common.test_utils.system_tests import get_test_run  # noqa: E402

 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest)
 test_run = get_test_run(dag)
diff --git a/tests/system/providers/amazon/aws/example_eks_with_nodegroup_in_one_step.py b/providers/tests/system/amazon/aws/example_eks_with_nodegroup_in_one_step.py
similarity index 95%
rename from tests/system/providers/amazon/aws/example_eks_with_nodegroup_in_one_step.py
rename to providers/tests/system/amazon/aws/example_eks_with_nodegroup_in_one_step.py
index 9bfce2cc1696..11f843688524 100644
--- a/tests/system/providers/amazon/aws/example_eks_with_nodegroup_in_one_step.py
+++ b/providers/tests/system/amazon/aws/example_eks_with_nodegroup_in_one_step.py
@@ -31,8 +31,9 @@
 )
 from airflow.providers.amazon.aws.sensors.eks import EksClusterStateSensor, EksNodegroupStateSensor
 from airflow.utils.trigger_rule import TriggerRule
-from tests.system.providers.amazon.aws.utils import ENV_ID_KEY, SystemTestContextBuilder
-from tests.system.providers.amazon.aws.utils.k8s import get_describe_pod_operator
+
+from providers.tests.system.amazon.aws.utils import ENV_ID_KEY, SystemTestContextBuilder
+from providers.tests.system.amazon.aws.utils.k8s import get_describe_pod_operator

 DAG_ID = "example_eks_with_nodegroup_in_one_step"

@@ -154,14 +155,14 @@ def delete_launch_template(template_name: str):
         delete_launch_template(launch_template_name),
     )

-    from tests.system.utils.watcher import watcher
+    from dev.tests_common.test_utils.watcher import watcher

     # This test needs watcher in order to properly mark success/failure
     # when "tearDown" task with trigger rule is part of the DAG
     list(dag.tasks) >> watcher()

-from tests.system.utils import get_test_run  # noqa: E402
+from dev.tests_common.test_utils.system_tests import get_test_run  # noqa: E402

 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest)
 test_run = get_test_run(dag)
diff --git a/tests/system/providers/amazon/aws/example_eks_with_nodegroups.py b/providers/tests/system/amazon/aws/example_eks_with_nodegroups.py
similarity index 96%
rename from tests/system/providers/amazon/aws/example_eks_with_nodegroups.py
rename to providers/tests/system/amazon/aws/example_eks_with_nodegroups.py
index a6681ba67f73..8356a8ee4375 100644
--- a/tests/system/providers/amazon/aws/example_eks_with_nodegroups.py
+++ b/providers/tests/system/amazon/aws/example_eks_with_nodegroups.py
@@ -33,8 +33,9 @@
 )
 from airflow.providers.amazon.aws.sensors.eks import EksClusterStateSensor, EksNodegroupStateSensor
 from airflow.utils.trigger_rule import TriggerRule
-from tests.system.providers.amazon.aws.utils import ENV_ID_KEY, SystemTestContextBuilder
-from tests.system.providers.amazon.aws.utils.k8s import get_describe_pod_operator
+
+from providers.tests.system.amazon.aws.utils import ENV_ID_KEY, SystemTestContextBuilder
+from providers.tests.system.amazon.aws.utils.k8s import get_describe_pod_operator

 DAG_ID = "example_eks_with_nodegroups"

@@ -197,14 +198,14 @@ def delete_launch_template(template_name: str):
         delete_launch_template(launch_template_name),
     )

-    from tests.system.utils.watcher import watcher
+    from dev.tests_common.test_utils.watcher import watcher

     # This test needs watcher in order to properly mark success/failure
     # when "tearDown" task with trigger rule is part of the DAG
     list(dag.tasks) >> watcher()

-from tests.system.utils import get_test_run  # noqa: E402
+from dev.tests_common.test_utils.system_tests import get_test_run  # noqa: E402

 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest)
 test_run = get_test_run(dag)
diff --git a/tests/system/providers/amazon/aws/example_emr.py b/providers/tests/system/amazon/aws/example_emr.py
similarity index 97%
rename from tests/system/providers/amazon/aws/example_emr.py
rename to providers/tests/system/amazon/aws/example_emr.py
index 7d62180d07d9..44c28630eb69 100644
--- a/tests/system/providers/amazon/aws/example_emr.py
+++ b/providers/tests/system/amazon/aws/example_emr.py
@@ -36,7 +36,8 @@
 from airflow.providers.amazon.aws.operators.s3 import S3CreateBucketOperator, S3DeleteBucketOperator
 from airflow.providers.amazon.aws.sensors.emr import EmrJobFlowSensor, EmrStepSensor
 from airflow.utils.trigger_rule import TriggerRule
-from tests.system.providers.amazon.aws.utils import ENV_ID_KEY, SystemTestContextBuilder
+
+from providers.tests.system.amazon.aws.utils import ENV_ID_KEY, SystemTestContextBuilder

 DAG_ID = "example_emr"
 CONFIG_NAME = "EMR Runtime Role Security Configuration"
@@ -226,14 +227,14 @@ def get_step_id(step_ids: list):
         ]
     )

-    from tests.system.utils.watcher import watcher
+    from dev.tests_common.test_utils.watcher import watcher

     # This test needs watcher in order to properly mark success/failure
     # when "tearDown" task with trigger rule is part of the DAG
     list(dag.tasks) >> watcher()

-from tests.system.utils import get_test_run  # noqa: E402
+from dev.tests_common.test_utils.system_tests import get_test_run  # noqa: E402

 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest)
 test_run = get_test_run(dag)
diff --git a/tests/system/providers/amazon/aws/example_emr_eks.py b/providers/tests/system/amazon/aws/example_emr_eks.py
similarity index 98%
rename from tests/system/providers/amazon/aws/example_emr_eks.py
rename to providers/tests/system/amazon/aws/example_emr_eks.py
index 8b182d42440b..d95645fd306a 100644
--- a/tests/system/providers/amazon/aws/example_emr_eks.py
+++ b/providers/tests/system/amazon/aws/example_emr_eks.py
@@ -36,7 +36,8 @@
 from airflow.providers.amazon.aws.sensors.eks import EksClusterStateSensor, EksNodegroupStateSensor
 from airflow.providers.amazon.aws.sensors.emr import EmrContainerSensor
 from airflow.utils.trigger_rule import TriggerRule
-from tests.system.providers.amazon.aws.utils import ENV_ID_KEY, SystemTestContextBuilder
+
+from providers.tests.system.amazon.aws.utils import ENV_ID_KEY, SystemTestContextBuilder

 DAG_ID = "example_emr_eks"

@@ -319,13 +320,13 @@ def delete_virtual_cluster(virtual_cluster_id):
         delete_bucket,
     )

-    from tests.system.utils.watcher import watcher
+    from dev.tests_common.test_utils.watcher import watcher

     # This test needs watcher in order to properly mark success/failure
     # when "tearDown" task with trigger rule is part of the DAG
     list(dag.tasks) >> watcher()

-from tests.system.utils import get_test_run  # noqa: E402
+from dev.tests_common.test_utils.system_tests import get_test_run  # noqa: E402

 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest)
 test_run = get_test_run(dag)
diff --git a/tests/system/providers/amazon/aws/example_emr_notebook_execution.py b/providers/tests/system/amazon/aws/example_emr_notebook_execution.py
similarity index 95%
rename from tests/system/providers/amazon/aws/example_emr_notebook_execution.py
rename to providers/tests/system/amazon/aws/example_emr_notebook_execution.py
index 86fc3055a162..48d2b14c9657 100644
--- a/tests/system/providers/amazon/aws/example_emr_notebook_execution.py
+++ b/providers/tests/system/amazon/aws/example_emr_notebook_execution.py
@@ -27,7 +27,8 @@
     EmrStopNotebookExecutionOperator,
 )
 from airflow.providers.amazon.aws.sensors.emr import EmrNotebookExecutionSensor
-from tests.system.providers.amazon.aws.utils import ENV_ID_KEY, SystemTestContextBuilder
+
+from providers.tests.system.amazon.aws.utils import ENV_ID_KEY, SystemTestContextBuilder

 DAG_ID = "example_emr_notebook"
 # Externally fetched variables:
@@ -111,13 +112,13 @@
         wait_for_execution_finish,
     )

-    from tests.system.utils.watcher import watcher
+    from dev.tests_common.test_utils.watcher import watcher

     # This test needs watcher in order to properly mark success/failure
     # when "tearDown" task with trigger rule is part of the DAG
     list(dag.tasks) >> watcher()

-from tests.system.utils import get_test_run  # noqa: E402
+from dev.tests_common.test_utils.system_tests import get_test_run  # noqa: E402

 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest)
 test_run = get_test_run(dag)
diff --git a/tests/system/providers/amazon/aws/example_emr_serverless.py b/providers/tests/system/amazon/aws/example_emr_serverless.py
similarity index 96%
rename from tests/system/providers/amazon/aws/example_emr_serverless.py
rename to providers/tests/system/amazon/aws/example_emr_serverless.py
index 1bca31de5f39..4901fa673605 100644
--- a/tests/system/providers/amazon/aws/example_emr_serverless.py
+++ b/providers/tests/system/amazon/aws/example_emr_serverless.py
@@ -32,7 +32,8 @@
 from airflow.providers.amazon.aws.operators.s3 import S3CreateBucketOperator, S3DeleteBucketOperator
 from airflow.providers.amazon.aws.sensors.emr import EmrServerlessApplicationSensor, EmrServerlessJobSensor
 from airflow.utils.trigger_rule import TriggerRule
-from tests.system.providers.amazon.aws.utils import ENV_ID_KEY, SystemTestContextBuilder
+
+from providers.tests.system.amazon.aws.utils import ENV_ID_KEY, SystemTestContextBuilder

 DAG_ID = "example_emr_serverless"

@@ -154,13 +155,13 @@
         delete_s3_bucket,
     )

-    from tests.system.utils.watcher import watcher
+    from dev.tests_common.test_utils.watcher import watcher

     # This test needs watcher in order to properly mark success/failure
     # when "tearDown" task with trigger rule is part of the DAG
     list(dag.tasks) >> watcher()

-from tests.system.utils import get_test_run  # noqa: E402
+from dev.tests_common.test_utils.system_tests import get_test_run  # noqa: E402

 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest)
 test_run = get_test_run(dag)
diff --git a/tests/system/providers/amazon/aws/example_eventbridge.py b/providers/tests/system/amazon/aws/example_eventbridge.py
similarity index 95%
rename from tests/system/providers/amazon/aws/example_eventbridge.py
rename to providers/tests/system/amazon/aws/example_eventbridge.py
index 9b3bb922e166..5470bd0ca70c 100644
--- a/tests/system/providers/amazon/aws/example_eventbridge.py
+++ b/providers/tests/system/amazon/aws/example_eventbridge.py
@@ -26,7 +26,8 @@
     EventBridgePutEventsOperator,
     EventBridgePutRuleOperator,
 )
-from tests.system.providers.amazon.aws.utils import ENV_ID_KEY, SystemTestContextBuilder
+
+from providers.tests.system.amazon.aws.utils import ENV_ID_KEY, SystemTestContextBuilder

 DAG_ID = "example_eventbridge"
 ENTRIES = [
@@ -80,7 +81,7 @@
     chain(test_context, put_events, put_rule, enable_rule, disable_rule)

-from tests.system.utils import get_test_run  # noqa: E402
+from dev.tests_common.test_utils.system_tests import get_test_run  # noqa: E402

 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest)
 test_run = get_test_run(dag)
diff --git a/tests/system/providers/amazon/aws/example_ftp_to_s3.py b/providers/tests/system/amazon/aws/example_ftp_to_s3.py
similarity index 92%
rename from tests/system/providers/amazon/aws/example_ftp_to_s3.py
rename to providers/tests/system/amazon/aws/example_ftp_to_s3.py
index ef62bdc06a73..98a37e197f8f 100644
--- a/tests/system/providers/amazon/aws/example_ftp_to_s3.py
+++ b/providers/tests/system/amazon/aws/example_ftp_to_s3.py
@@ -23,7 +23,8 @@
 from airflow.providers.amazon.aws.operators.s3 import S3CreateBucketOperator, S3DeleteBucketOperator
 from airflow.providers.amazon.aws.transfers.ftp_to_s3 import FTPToS3Operator
 from airflow.utils.trigger_rule import TriggerRule
-from tests.system.providers.amazon.aws.utils import SystemTestContextBuilder
+
+from providers.tests.system.amazon.aws.utils import SystemTestContextBuilder

 sys_test_context_task = SystemTestContextBuilder().build()

@@ -71,13 +72,13 @@
         delete_s3_bucket,
     )

-    from tests.system.utils.watcher import watcher
+    from dev.tests_common.test_utils.watcher import watcher

     # This test needs watcher in order to properly mark success/failure
     # when "tearDown" task with trigger rule is part of the DAG
     list(dag.tasks) >> watcher()

-from tests.system.utils import get_test_run  # noqa: E402
+from dev.tests_common.test_utils.system_tests import get_test_run  # noqa: E402

 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest)
 test_run = get_test_run(dag)
diff --git a/tests/system/providers/amazon/aws/example_gcs_to_s3.py b/providers/tests/system/amazon/aws/example_gcs_to_s3.py
similarity index 95%
rename from tests/system/providers/amazon/aws/example_gcs_to_s3.py
rename to providers/tests/system/amazon/aws/example_gcs_to_s3.py
index 198f9729feb3..97ed8dfda3a9 100644
--- a/tests/system/providers/amazon/aws/example_gcs_to_s3.py
+++ b/providers/tests/system/amazon/aws/example_gcs_to_s3.py
@@ -32,7 +32,8 @@
     GCSDeleteBucketOperator,
 )
 from airflow.utils.trigger_rule import TriggerRule
-from tests.system.providers.amazon.aws.utils import SystemTestContextBuilder
+
+from providers.tests.system.amazon.aws.utils import SystemTestContextBuilder

 # Externally fetched variables:
 GCP_PROJECT_ID = "GCP_PROJECT_ID"
@@ -114,13 +115,13 @@ def upload_gcs_file(bucket_name: str, object_name: str, user_project: str):
         delete_gcs_bucket,
     )

-    from tests.system.utils.watcher import watcher
+    from dev.tests_common.test_utils.watcher import watcher

     # This test needs watcher in order to properly mark success/failure
     # when "tearDown" task with trigger rule is part of the DAG
     list(dag.tasks) >> watcher()

-from tests.system.utils import get_test_run  # noqa: E402
+from dev.tests_common.test_utils.system_tests import get_test_run  # noqa: E402

 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest)
 test_run = get_test_run(dag)
diff --git a/tests/system/providers/amazon/aws/example_glacier_to_gcs.py b/providers/tests/system/amazon/aws/example_glacier_to_gcs.py
similarity index 95%
rename from tests/system/providers/amazon/aws/example_glacier_to_gcs.py
rename to providers/tests/system/amazon/aws/example_glacier_to_gcs.py
index e57029f33aec..28c061ae057c 100644
--- a/tests/system/providers/amazon/aws/example_glacier_to_gcs.py
+++ b/providers/tests/system/amazon/aws/example_glacier_to_gcs.py
@@ -30,7 +30,8 @@
 from airflow.providers.amazon.aws.sensors.glacier import GlacierJobOperationSensor
 from airflow.providers.amazon.aws.transfers.glacier_to_gcs import GlacierToGCSOperator
 from airflow.utils.trigger_rule import TriggerRule
-from tests.system.providers.amazon.aws.utils import SystemTestContextBuilder
+
+from providers.tests.system.amazon.aws.utils import SystemTestContextBuilder

 sys_test_context_task = SystemTestContextBuilder().build()

@@ -107,13 +108,13 @@ def delete_vault(vault_name):
         delete_vault(vault_name),
     )

-    from tests.system.utils.watcher import watcher
+    from dev.tests_common.test_utils.watcher import watcher

     # This test needs watcher in order to properly mark success/failure
     # when "tearDown" task with trigger rule is part of the DAG
     list(dag.tasks) >> watcher()

-from tests.system.utils import get_test_run  # noqa: E402
+from dev.tests_common.test_utils.system_tests import get_test_run  # noqa: E402

 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest)
 test_run = get_test_run(dag)
diff --git a/tests/system/providers/amazon/aws/example_glue.py b/providers/tests/system/amazon/aws/example_glue.py
similarity index 97%
rename from tests/system/providers/amazon/aws/example_glue.py
rename to providers/tests/system/amazon/aws/example_glue.py
index c16aaf8677c1..5314d26eff80 100644
--- a/tests/system/providers/amazon/aws/example_glue.py
+++ b/providers/tests/system/amazon/aws/example_glue.py
@@ -35,7 +35,8 @@
 from airflow.providers.amazon.aws.sensors.glue_catalog_partition import GlueCatalogPartitionSensor
 from airflow.providers.amazon.aws.sensors.glue_crawler import GlueCrawlerSensor
 from airflow.utils.trigger_rule import TriggerRule
-from tests.system.providers.amazon.aws.utils import ENV_ID_KEY, SystemTestContextBuilder, prune_logs
+
+from providers.tests.system.amazon.aws.utils import ENV_ID_KEY, SystemTestContextBuilder, prune_logs

 if TYPE_CHECKING:
     from botocore.client import BaseClient
@@ -216,13 +217,13 @@ def glue_cleanup(crawler_name: str, job_name: str, db_name: str) -> None:
         log_cleanup,
     )

-    from tests.system.utils.watcher import watcher
+    from dev.tests_common.test_utils.watcher import watcher

     # This test needs watcher in order to properly mark success/failure
     # when "tearDown" task with trigger rule is part of the DAG
     list(dag.tasks) >> watcher()

-from tests.system.utils import get_test_run  # noqa: E402
+from dev.tests_common.test_utils.system_tests import get_test_run  # noqa: E402

 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest)
 test_run = get_test_run(dag)
diff --git a/tests/system/providers/amazon/aws/example_glue_data_quality.py b/providers/tests/system/amazon/aws/example_glue_data_quality.py
similarity index 97%
rename from tests/system/providers/amazon/aws/example_glue_data_quality.py
rename to providers/tests/system/amazon/aws/example_glue_data_quality.py
index e9b8f418e2f6..4d05d6e22b43 100644
--- a/tests/system/providers/amazon/aws/example_glue_data_quality.py
+++ b/providers/tests/system/amazon/aws/example_glue_data_quality.py
@@ -34,7 +34,8 @@
 )
 from airflow.providers.amazon.aws.sensors.glue import GlueDataQualityRuleSetEvaluationRunSensor
 from airflow.utils.trigger_rule import TriggerRule
-from tests.system.providers.amazon.aws.utils import SystemTestContextBuilder
+
+from providers.tests.system.amazon.aws.utils import SystemTestContextBuilder

 ROLE_ARN_KEY = "ROLE_ARN"
 sys_test_context_task = SystemTestContextBuilder().add_variable(ROLE_ARN_KEY).build()
@@ -197,13 +198,13 @@ def delete_ruleset(ruleset_name):
         delete_s3_bucket,
     )

-    from tests.system.utils.watcher import watcher
+    from dev.tests_common.test_utils.watcher import watcher

     # This test needs watcher in order to properly mark success/failure
     # when "tearDown" task with trigger rule is part of the DAG
     list(dag.tasks) >> watcher()

-from tests.system.utils import get_test_run  # noqa: E402
+from dev.tests_common.test_utils.system_tests import get_test_run  # noqa: E402

 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest)
 test_run = get_test_run(dag)
diff --git a/tests/system/providers/amazon/aws/example_glue_data_quality_with_recommendation.py b/providers/tests/system/amazon/aws/example_glue_data_quality_with_recommendation.py
similarity index 97%
rename from tests/system/providers/amazon/aws/example_glue_data_quality_with_recommendation.py
rename to providers/tests/system/amazon/aws/example_glue_data_quality_with_recommendation.py
index 231750e97134..fdb3cb8e63ad 100644
--- a/tests/system/providers/amazon/aws/example_glue_data_quality_with_recommendation.py
+++ b/providers/tests/system/amazon/aws/example_glue_data_quality_with_recommendation.py
@@ -37,7 +37,8 @@
     GlueDataQualityRuleSetEvaluationRunSensor,
 )
 from airflow.utils.trigger_rule import TriggerRule
-from tests.system.providers.amazon.aws.utils import SystemTestContextBuilder
+
+from providers.tests.system.amazon.aws.utils import SystemTestContextBuilder

 ROLE_ARN_KEY = "ROLE_ARN"
 sys_test_context_task = SystemTestContextBuilder().add_variable(ROLE_ARN_KEY).build()
@@ -196,13 +197,13 @@ def delete_ruleset(ruleset_name):
         delete_s3_bucket,
     )

-    from tests.system.utils.watcher import watcher
+    from dev.tests_common.test_utils.watcher import watcher

     # This test needs watcher in order to properly mark success/failure
     # when "tearDown" task with trigger rule is part of the DAG
     list(dag.tasks) >> watcher()

-from tests.system.utils import get_test_run  # noqa: E402
+from dev.tests_common.test_utils.system_tests import get_test_run  # noqa: E402

 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest)
 test_run = get_test_run(dag)
diff --git a/tests/system/providers/amazon/aws/example_glue_databrew.py b/providers/tests/system/amazon/aws/example_glue_databrew.py
similarity index 95%
rename from tests/system/providers/amazon/aws/example_glue_databrew.py
rename to providers/tests/system/amazon/aws/example_glue_databrew.py
index 251c7611b3be..3218b465c290 100644
--- a/tests/system/providers/amazon/aws/example_glue_databrew.py
+++ b/providers/tests/system/amazon/aws/example_glue_databrew.py
@@ -31,7 +31,8 @@
     S3DeleteBucketOperator,
 )
 from airflow.utils.trigger_rule import TriggerRule
-from tests.system.providers.amazon.aws.utils import SystemTestContextBuilder
+
+from providers.tests.system.amazon.aws.utils import SystemTestContextBuilder

 DAG_ID = "example_glue_databrew"

@@ -154,14 +155,14 @@ def delete_job(job_name: str):
         delete_output_bucket,
     )

-    from tests.system.utils.watcher import watcher
+    from dev.tests_common.test_utils.watcher import watcher

     # This test needs watcher in order to properly mark success/failure
     # when "tearDown" task with trigger rule is part of the DAG
     list(dag.tasks) >> watcher()

-from tests.system.utils import get_test_run  # noqa: E402
+from dev.tests_common.test_utils.system_tests import get_test_run  # noqa: E402

 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest)
 test_run = get_test_run(dag)
diff --git a/tests/system/providers/amazon/aws/example_google_api_sheets_to_s3.py b/providers/tests/system/amazon/aws/example_google_api_sheets_to_s3.py
similarity index 94%
rename from tests/system/providers/amazon/aws/example_google_api_sheets_to_s3.py
rename to providers/tests/system/amazon/aws/example_google_api_sheets_to_s3.py
index b736308324f6..7ea7ae9d072f 100644
--- a/tests/system/providers/amazon/aws/example_google_api_sheets_to_s3.py
+++ b/providers/tests/system/amazon/aws/example_google_api_sheets_to_s3.py
@@ -29,7 +29,8 @@
 from airflow.providers.amazon.aws.operators.s3 import S3CreateBucketOperator, S3DeleteBucketOperator
 from airflow.providers.amazon.aws.transfers.google_api_to_s3 import GoogleApiToS3Operator
 from airflow.utils.trigger_rule import TriggerRule
-from tests.system.providers.amazon.aws.utils import SystemTestContextBuilder
+
+from providers.tests.system.amazon.aws.utils import SystemTestContextBuilder

 sys_test_context_task = SystemTestContextBuilder().build()

@@ -82,13 +83,13 @@
         delete_s3_bucket,
     )

-    from tests.system.utils.watcher import watcher
+    from dev.tests_common.test_utils.watcher import watcher

     # This test needs watcher in order to properly mark success/failure
     # when "tearDown" task with trigger rule is part of the DAG
     list(dag.tasks) >> watcher()

-from tests.system.utils import get_test_run  # noqa: E402
+from dev.tests_common.test_utils.system_tests import get_test_run  # noqa: E402

 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest)
 test_run = get_test_run(dag)
diff --git a/tests/system/providers/amazon/aws/example_google_api_youtube_to_s3.py b/providers/tests/system/amazon/aws/example_google_api_youtube_to_s3.py
similarity index 97%
rename from tests/system/providers/amazon/aws/example_google_api_youtube_to_s3.py
rename to providers/tests/system/amazon/aws/example_google_api_youtube_to_s3.py
index f6195f78018d..ae4f721567ce 100644
--- a/tests/system/providers/amazon/aws/example_google_api_youtube_to_s3.py
+++ b/providers/tests/system/amazon/aws/example_google_api_youtube_to_s3.py
@@ -61,7 +61,8 @@
 from airflow.providers.amazon.aws.operators.s3 import S3CreateBucketOperator, S3DeleteBucketOperator
 from airflow.providers.amazon.aws.transfers.google_api_to_s3 import GoogleApiToS3Operator
 from airflow.utils.trigger_rule import TriggerRule
-from tests.system.providers.amazon.aws.utils import ENV_ID_KEY, SystemTestContextBuilder
+
+from providers.tests.system.amazon.aws.utils import ENV_ID_KEY, SystemTestContextBuilder

 DAG_ID = "example_google_api_youtube_to_s3"

@@ -196,13 +197,13 @@ def transform_video_ids(**kwargs):
         delete_s3_bucket,
     )

-    from tests.system.utils.watcher import watcher
+    from dev.tests_common.test_utils.watcher import watcher

     # This test needs watcher in order to properly mark success/failure
     # when "tearDown" task with trigger rule is part of the DAG
     list(dag.tasks) >> watcher()

-from tests.system.utils import get_test_run  # noqa: E402
+from dev.tests_common.test_utils.system_tests import get_test_run  # noqa: E402

 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest)
 test_run = get_test_run(dag)
diff --git a/tests/system/providers/amazon/aws/example_hive_to_dynamodb.py b/providers/tests/system/amazon/aws/example_hive_to_dynamodb.py
similarity index 96%
rename from tests/system/providers/amazon/aws/example_hive_to_dynamodb.py
rename to providers/tests/system/amazon/aws/example_hive_to_dynamodb.py
index 6b35b9dfca8e..5582c13b855f 100644
--- a/tests/system/providers/amazon/aws/example_hive_to_dynamodb.py
+++ b/providers/tests/system/amazon/aws/example_hive_to_dynamodb.py
@@ -32,7 +32,8 @@
 from airflow.providers.amazon.aws.transfers.hive_to_dynamodb import HiveToDynamoDBOperator
 from airflow.utils import db
 from airflow.utils.trigger_rule import TriggerRule
-from tests.system.providers.amazon.aws.utils import SystemTestContextBuilder
+
+from providers.tests.system.amazon.aws.utils import SystemTestContextBuilder

 DAG_ID = "example_hive_to_dynamodb"

@@ -149,13 +150,13 @@ def configure_hive_connection(connection_id, hostname):
         delete_dynamodb_table(dynamodb_table_name),
     )

-    from tests.system.utils.watcher import watcher
+    from dev.tests_common.test_utils.watcher import watcher

     # This test needs watcher in order to properly mark success/failure
     # when "tearDown" task with trigger rule is part of the DAG
     list(dag.tasks) >> watcher()

-from tests.system.utils import get_test_run  # noqa: E402
+from dev.tests_common.test_utils.system_tests import get_test_run  # noqa: E402

 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest)
 test_run = get_test_run(dag)
diff --git a/tests/system/providers/amazon/aws/example_http_to_s3.py b/providers/tests/system/amazon/aws/example_http_to_s3.py
similarity index 94%
rename from tests/system/providers/amazon/aws/example_http_to_s3.py
rename to providers/tests/system/amazon/aws/example_http_to_s3.py
index d6424f980215..cfbc131ee568 100644
--- a/tests/system/providers/amazon/aws/example_http_to_s3.py
+++ b/providers/tests/system/amazon/aws/example_http_to_s3.py
@@ -27,7 +27,8 @@
 from airflow.providers.amazon.aws.transfers.http_to_s3 import HttpToS3Operator
 from airflow.providers.standard.operators.bash import BashOperator
 from airflow.utils.trigger_rule import TriggerRule
-from tests.system.providers.amazon.aws.utils import SystemTestContextBuilder
+
+from providers.tests.system.amazon.aws.utils import SystemTestContextBuilder

 sys_test_context_task = SystemTestContextBuilder().build()

@@ -118,13 +119,13 @@ def create_connection(conn_id_name: str):
         delete_s3_bucket,
     )

-    from tests.system.utils.watcher import watcher
+    from dev.tests_common.test_utils.watcher import watcher

     # This test needs watcher in order to properly mark success/failure
     # when "tearDown" task with trigger rule is part of the DAG
     list(dag.tasks) >> watcher()

-from tests.system.utils import get_test_run  # noqa: E402
+from dev.tests_common.test_utils.system_tests import get_test_run  # noqa: E402

 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest)
 test_run = get_test_run(dag)
diff --git a/tests/system/providers/amazon/aws/example_imap_attachment_to_s3.py b/providers/tests/system/amazon/aws/example_imap_attachment_to_s3.py
similarity index 94%
rename from tests/system/providers/amazon/aws/example_imap_attachment_to_s3.py
rename to providers/tests/system/amazon/aws/example_imap_attachment_to_s3.py
index 7b7fc1bb387d..b43023115cf4 100644
--- a/tests/system/providers/amazon/aws/example_imap_attachment_to_s3.py
+++ b/providers/tests/system/amazon/aws/example_imap_attachment_to_s3.py
@@ -28,7 +28,8 @@
 from airflow.providers.amazon.aws.operators.s3 import S3CreateBucketOperator, S3DeleteBucketOperator
 from airflow.providers.amazon.aws.transfers.imap_attachment_to_s3 import ImapAttachmentToS3Operator
 from airflow.utils.trigger_rule import TriggerRule
-from tests.system.providers.amazon.aws.utils import SystemTestContextBuilder
+
+from providers.tests.system.amazon.aws.utils import SystemTestContextBuilder

 DAG_ID = "example_imap_attachment_to_s3"

@@ -89,13 +90,13 @@
         delete_s3_bucket,
     )

-    from tests.system.utils.watcher import watcher
+    from dev.tests_common.test_utils.watcher import watcher

     # This test needs watcher in order to properly mark success/failure
     # when "tearDown" task with trigger rule is part of the DAG
     list(dag.tasks) >> watcher()

-from tests.system.utils import get_test_run  # noqa: E402
+from dev.tests_common.test_utils.system_tests import get_test_run  # noqa: E402

 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest)
 test_run = get_test_run(dag)
diff --git a/tests/system/providers/amazon/aws/example_kinesis_analytics.py b/providers/tests/system/amazon/aws/example_kinesis_analytics.py
similarity index 97%
rename from tests/system/providers/amazon/aws/example_kinesis_analytics.py
rename to providers/tests/system/amazon/aws/example_kinesis_analytics.py
index 007a35407437..600710cc43b4 100644
--- a/tests/system/providers/amazon/aws/example_kinesis_analytics.py
+++ b/providers/tests/system/amazon/aws/example_kinesis_analytics.py
@@ -43,7 +43,8 @@
 )
 from airflow.providers.amazon.aws.transfers.http_to_s3 import HttpToS3Operator
 from airflow.utils.trigger_rule import TriggerRule
-from tests.system.providers.amazon.aws.utils import SystemTestContextBuilder
+
+from providers.tests.system.amazon.aws.utils import SystemTestContextBuilder

 ROLE_ARN_KEY = "ROLE_ARN"
 sys_test_context_task = SystemTestContextBuilder().add_variable(ROLE_ARN_KEY).build()
@@ -262,13 +263,13 @@ def delete_kinesis_stream(stream: str, region: str):
         delete_bucket,
     )

-    from tests.system.utils.watcher import watcher
+    from dev.tests_common.test_utils.watcher import watcher

     # This test needs watcher in order to properly mark success/failure
     # when "tearDown" task with trigger rule is part of the DAG
     list(dag.tasks) >> watcher()

-from tests.system.utils import get_test_run  # noqa: E402
+from dev.tests_common.test_utils.system_tests import get_test_run  # noqa: E402

 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest)
 test_run = get_test_run(dag)
diff --git a/tests/system/providers/amazon/aws/example_lambda.py b/providers/tests/system/amazon/aws/example_lambda.py
similarity index 95%
rename from tests/system/providers/amazon/aws/example_lambda.py
rename to providers/tests/system/amazon/aws/example_lambda.py
index 4ba74a26d5f2..fd346a34589f 100644
--- a/tests/system/providers/amazon/aws/example_lambda.py
+++ b/providers/tests/system/amazon/aws/example_lambda.py
@@ -32,7 +32,8 @@
 )
 from airflow.providers.amazon.aws.sensors.lambda_function import LambdaFunctionStateSensor
 from airflow.utils.trigger_rule import TriggerRule
-from tests.system.providers.amazon.aws.utils import ENV_ID_KEY, SystemTestContextBuilder, prune_logs
+
+from providers.tests.system.amazon.aws.utils import ENV_ID_KEY, SystemTestContextBuilder, prune_logs

 DAG_ID = "example_lambda"

@@ -128,13 +129,13 @@ def delete_lambda(function_name: str):
         log_cleanup,
     )

-    from tests.system.utils.watcher import watcher
+    from dev.tests_common.test_utils.watcher import watcher

     # This test needs watcher in order to properly mark success/failure
     # when "tearDown" task with trigger rule is part of the DAG
     list(dag.tasks) >> watcher()

-from tests.system.utils import get_test_run  # noqa: E402
+from dev.tests_common.test_utils.system_tests import get_test_run  # noqa: E402

 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest)
 test_run = get_test_run(dag)
diff --git a/tests/system/providers/amazon/aws/example_local_to_s3.py b/providers/tests/system/amazon/aws/example_local_to_s3.py
similarity index 93%
rename from tests/system/providers/amazon/aws/example_local_to_s3.py
rename to providers/tests/system/amazon/aws/example_local_to_s3.py
index da979bcd3785..47344c429b95 100644
--- a/tests/system/providers/amazon/aws/example_local_to_s3.py
+++ b/providers/tests/system/amazon/aws/example_local_to_s3.py
@@ -25,7 +25,8 @@
 from airflow.providers.amazon.aws.operators.s3 import S3CreateBucketOperator, S3DeleteBucketOperator
 from airflow.providers.amazon.aws.transfers.local_to_s3 import LocalFilesystemToS3Operator
 from airflow.utils.trigger_rule import TriggerRule
-from tests.system.providers.amazon.aws.utils import SystemTestContextBuilder
+
+from providers.tests.system.amazon.aws.utils import SystemTestContextBuilder

 sys_test_context_task = SystemTestContextBuilder().build()

@@ -90,14 +91,14 @@ def delete_temp_file():
         delete_temp_file(),
     )

-    from tests.system.utils.watcher import watcher
+    from dev.tests_common.test_utils.watcher import watcher

     # This test needs watcher in order to properly mark success/failure
     # when "tearDown" task with trigger rule is part of the DAG
     list(dag.tasks) >> watcher()

-from tests.system.utils import get_test_run  # noqa: E402
+from dev.tests_common.test_utils.system_tests import get_test_run  # noqa: E402

 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest)
 test_run = get_test_run(dag)
diff --git a/tests/system/providers/amazon/aws/example_mongo_to_s3.py b/providers/tests/system/amazon/aws/example_mongo_to_s3.py
similarity index 93%
rename from tests/system/providers/amazon/aws/example_mongo_to_s3.py
rename to providers/tests/system/amazon/aws/example_mongo_to_s3.py
index 33bcd171c0d8..3e5bc318e5c1 100644
--- a/tests/system/providers/amazon/aws/example_mongo_to_s3.py
+++ b/providers/tests/system/amazon/aws/example_mongo_to_s3.py
@@ -22,7 +22,8 @@
 from airflow.providers.amazon.aws.transfers.mongo_to_s3 import MongoToS3Operator
 from airflow.utils.dates import datetime
 from airflow.utils.trigger_rule import TriggerRule
-from tests.system.providers.amazon.aws.utils import SystemTestContextBuilder
+
+from providers.tests.system.amazon.aws.utils import SystemTestContextBuilder

 DAG_ID = "example_mongo_to_s3"

@@ -82,13 +83,13 @@
         delete_s3_bucket,
     )

-    from tests.system.utils.watcher import watcher
+    from dev.tests_common.test_utils.watcher import watcher

     # This test needs watcher in order to properly mark success/failure
     # when "tearDown" task with trigger rule is part of the DAG
     list(dag.tasks) >> watcher()

-from tests.system.utils import get_test_run  # noqa: E402
+from dev.tests_common.test_utils.system_tests import get_test_run  # noqa: E402

 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest)
 test_run = get_test_run(dag)
diff --git a/tests/system/providers/amazon/aws/example_neptune.py b/providers/tests/system/amazon/aws/example_neptune.py
similarity index 93%
rename from tests/system/providers/amazon/aws/example_neptune.py
rename to providers/tests/system/amazon/aws/example_neptune.py
index 81276c2dcd86..4685840b4259 100644
--- a/tests/system/providers/amazon/aws/example_neptune.py
+++ b/providers/tests/system/amazon/aws/example_neptune.py
@@ -26,7 +26,8 @@
     NeptuneStartDbClusterOperator,
     NeptuneStopDbClusterOperator,
 )
-from tests.system.providers.amazon.aws.utils import SystemTestContextBuilder
+
+from providers.tests.system.amazon.aws.utils import SystemTestContextBuilder

 DAG_ID = "example_neptune"

@@ -76,13 +77,13 @@ def delete_cluster(cluster_id):
         # TEST TEARDOWN
         delete_cluster(cluster_id),
     )

-    from tests.system.utils.watcher import watcher
+    from dev.tests_common.test_utils.watcher import watcher

     # This test needs watcher in order to properly mark success/failure
     # when "tearDown" task with trigger rule is part of the DAG
     list(dag.tasks) >> watcher()

-from tests.system.utils import get_test_run  # noqa: E402
+from dev.tests_common.test_utils.system_tests import get_test_run  # noqa: E402

 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest)
 test_run = get_test_run(dag)
diff --git a/tests/system/providers/amazon/aws/example_quicksight.py b/providers/tests/system/amazon/aws/example_quicksight.py
similarity index 97%
rename from tests/system/providers/amazon/aws/example_quicksight.py
rename to providers/tests/system/amazon/aws/example_quicksight.py
index ccf4746122f6..ec5cd3800314 100644
--- a/tests/system/providers/amazon/aws/example_quicksight.py
+++ b/providers/tests/system/amazon/aws/example_quicksight.py
@@ -33,7 +33,8 @@
 )
 from airflow.providers.amazon.aws.sensors.quicksight import QuickSightSensor
 from airflow.utils.trigger_rule import TriggerRule
-from tests.system.providers.amazon.aws.utils import ENV_ID_KEY, SystemTestContextBuilder
+
+from providers.tests.system.amazon.aws.utils import ENV_ID_KEY, SystemTestContextBuilder

 """
 Prerequisites:
@@ -217,13 +218,13 @@ def delete_ingestion(aws_account_id: str, dataset_name: str, ingestion_name: str
         delete_bucket,
     )

-    from tests.system.utils.watcher import watcher
+    from dev.tests_common.test_utils.watcher import watcher

     # This test needs watcher in order to properly mark success/failure
     # when "tearDown" task with trigger rule is part of the DAG
     list(dag.tasks) >> watcher()

-from tests.system.utils import get_test_run  # noqa: E402
+from dev.tests_common.test_utils.system_tests import get_test_run  # noqa: E402

 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest)
 test_run = get_test_run(dag)
diff --git a/tests/system/providers/amazon/aws/example_rds_event.py b/providers/tests/system/amazon/aws/example_rds_event.py
similarity index 95%
rename from tests/system/providers/amazon/aws/example_rds_event.py
rename to providers/tests/system/amazon/aws/example_rds_event.py
index 0d397f43066e..79d70c9d394e 100644
--- a/tests/system/providers/amazon/aws/example_rds_event.py
+++ b/providers/tests/system/amazon/aws/example_rds_event.py
@@ -31,7 +31,8 @@
     RdsDeleteEventSubscriptionOperator,
 )
 from airflow.utils.trigger_rule import TriggerRule
-from tests.system.providers.amazon.aws.utils import ENV_ID_KEY, SystemTestContextBuilder
+
+from providers.tests.system.amazon.aws.utils import ENV_ID_KEY, SystemTestContextBuilder

 DAG_ID = "example_rds_event"

@@ -118,14 +119,14 @@ def delete_sns_topic(topic_arn) -> None:
         delete_sns_topic(sns_topic),
     )

-    from tests.system.utils.watcher import watcher
+    from dev.tests_common.test_utils.watcher import watcher

     # This test needs watcher in order to properly mark success/failure
     # when "tearDown" task with trigger rule is part of the DAG
     list(dag.tasks) >> watcher()

-from tests.system.utils import get_test_run  # noqa: E402
+from dev.tests_common.test_utils.system_tests import get_test_run  # noqa: E402

 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest)
 test_run = get_test_run(dag)
diff --git a/tests/system/providers/amazon/aws/example_rds_export.py b/providers/tests/system/amazon/aws/example_rds_export.py
similarity index 96%
rename from tests/system/providers/amazon/aws/example_rds_export.py
rename to providers/tests/system/amazon/aws/example_rds_export.py
index 385a1871e7e2..34638f030f50 100644
--- a/tests/system/providers/amazon/aws/example_rds_export.py
+++ b/providers/tests/system/amazon/aws/example_rds_export.py
@@ -33,7 +33,8 @@
 from airflow.providers.amazon.aws.operators.s3 import S3CreateBucketOperator, S3DeleteBucketOperator
 from airflow.providers.amazon.aws.sensors.rds import RdsExportTaskExistenceSensor, RdsSnapshotExistenceSensor
 from airflow.utils.trigger_rule import TriggerRule
-from tests.system.providers.amazon.aws.utils import ENV_ID_KEY, SystemTestContextBuilder
+
+from providers.tests.system.amazon.aws.utils import ENV_ID_KEY, SystemTestContextBuilder

 DAG_ID = "example_rds_export"

@@ -177,13 +178,13 @@ def get_snapshot_arn(snapshot_name: str) -> str:
         delete_db_instance,
     )

-    from tests.system.utils.watcher import watcher
+    from dev.tests_common.test_utils.watcher import watcher

     # This test needs watcher in order to properly mark success/failure
     # when "tearDown" task with trigger rule is part of the DAG
     list(dag.tasks) >> watcher()

-from tests.system.utils import get_test_run  # noqa: E402
+from dev.tests_common.test_utils.system_tests import get_test_run  # noqa: E402

 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest)
 test_run = get_test_run(dag)
diff --git a/tests/system/providers/amazon/aws/example_rds_instance.py b/providers/tests/system/amazon/aws/example_rds_instance.py
similarity index 95%
rename from tests/system/providers/amazon/aws/example_rds_instance.py
rename to providers/tests/system/amazon/aws/example_rds_instance.py
index f917fb128486..ce49f73c1f4a 100644
--- a/tests/system/providers/amazon/aws/example_rds_instance.py
+++ b/providers/tests/system/amazon/aws/example_rds_instance.py
@@ -28,7 +28,8 @@
 )
 from airflow.providers.amazon.aws.sensors.rds import RdsDbSensor
 from airflow.utils.trigger_rule import TriggerRule
-from tests.system.providers.amazon.aws.utils import ENV_ID_KEY, SystemTestContextBuilder
+
+from providers.tests.system.amazon.aws.utils import ENV_ID_KEY, SystemTestContextBuilder

 sys_test_context_task = SystemTestContextBuilder().build()

@@ -110,14 +111,14 @@
         delete_db_instance,
     )

-    from tests.system.utils.watcher import watcher
+    from dev.tests_common.test_utils.watcher import watcher

     # This test needs watcher in order to properly mark success/failure
     # when "tearDown" task with trigger rule is part of the DAG
     list(dag.tasks) >> watcher()

-from tests.system.utils import get_test_run  # noqa: E402
+from dev.tests_common.test_utils.system_tests import get_test_run  # noqa: E402

 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest)
 test_run = get_test_run(dag)
diff --git a/tests/system/providers/amazon/aws/example_rds_snapshot.py b/providers/tests/system/amazon/aws/example_rds_snapshot.py
similarity index 96%
rename from tests/system/providers/amazon/aws/example_rds_snapshot.py
rename to providers/tests/system/amazon/aws/example_rds_snapshot.py
index 5585f339feec..c58c1db54d53 100644
--- a/tests/system/providers/amazon/aws/example_rds_snapshot.py
+++ b/providers/tests/system/amazon/aws/example_rds_snapshot.py
@@ -29,7 +29,8 @@
 )
 from airflow.providers.amazon.aws.sensors.rds import RdsSnapshotExistenceSensor
 from airflow.utils.trigger_rule import TriggerRule
-from tests.system.providers.amazon.aws.utils import ENV_ID_KEY, SystemTestContextBuilder
+
+from providers.tests.system.amazon.aws.utils import ENV_ID_KEY, SystemTestContextBuilder

 DAG_ID = "example_rds_snapshot"

@@ -137,14 +138,14 @@
         delete_db_instance,
     )

-    from tests.system.utils.watcher import watcher
+    from dev.tests_common.test_utils.watcher import watcher

     # This test needs watcher in order to properly mark success/failure
     # when "tearDown" task with trigger rule is part of the DAG
     list(dag.tasks) >> watcher()

-from tests.system.utils import get_test_run  # noqa: E402
+from dev.tests_common.test_utils.system_tests import get_test_run  # noqa: E402

 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest)
 test_run = get_test_run(dag)
diff --git a/tests/system/providers/amazon/aws/example_redshift.py b/providers/tests/system/amazon/aws/example_redshift.py
similarity index 97%
rename from tests/system/providers/amazon/aws/example_redshift.py
rename to providers/tests/system/amazon/aws/example_redshift.py
index 67b822d41ef5..986bce5a07b5 100644
--- a/tests/system/providers/amazon/aws/example_redshift.py
+++ b/providers/tests/system/amazon/aws/example_redshift.py
@@ -33,7 +33,8 @@
 from airflow.providers.amazon.aws.operators.redshift_data import RedshiftDataOperator
 from airflow.providers.amazon.aws.sensors.redshift_cluster import RedshiftClusterSensor
 from airflow.utils.trigger_rule import TriggerRule
-from tests.system.providers.amazon.aws.utils import ENV_ID_KEY, SystemTestContextBuilder
+
+from providers.tests.system.amazon.aws.utils import ENV_ID_KEY, SystemTestContextBuilder

 DAG_ID = "example_redshift"

@@ -247,13 +248,13 @@
         delete_cluster_snapshot,
     )

-    from tests.system.utils.watcher import watcher
+    from dev.tests_common.test_utils.watcher import watcher

     # This test needs watcher in order to properly mark success/failure
     # when "tearDown" task with trigger rule is part of the DAG
     list(dag.tasks) >> watcher()

-from tests.system.utils import get_test_run  # noqa: E402
+from dev.tests_common.test_utils.system_tests import get_test_run  # noqa: E402

 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest)
 test_run = get_test_run(dag)
diff --git a/tests/system/providers/amazon/aws/example_redshift_s3_transfers.py b/providers/tests/system/amazon/aws/example_redshift_s3_transfers.py
similarity index 98%
rename from tests/system/providers/amazon/aws/example_redshift_s3_transfers.py
rename to providers/tests/system/amazon/aws/example_redshift_s3_transfers.py
index 9fb989ec5369..2a553eae8cb6 100644
--- a/tests/system/providers/amazon/aws/example_redshift_s3_transfers.py
+++ b/providers/tests/system/amazon/aws/example_redshift_s3_transfers.py
@@ -35,7 +35,8 @@
 from airflow.providers.amazon.aws.transfers.redshift_to_s3 import RedshiftToS3Operator
 from airflow.providers.amazon.aws.transfers.s3_to_redshift import S3ToRedshiftOperator
 from airflow.utils.trigger_rule import TriggerRule
-from tests.system.providers.amazon.aws.utils import ENV_ID_KEY, SystemTestContextBuilder
+
+from providers.tests.system.amazon.aws.utils import ENV_ID_KEY, SystemTestContextBuilder

 DAG_ID = "example_redshift_to_s3"

@@ -317,13 +318,13 @@ def _insert_data(table_name: str) -> str:
         delete_cluster,
     )

-    from tests.system.utils.watcher import watcher
+    from dev.tests_common.test_utils.watcher import watcher

     # This test needs watcher in order to properly mark success/failure
     # when "tearDown" task with trigger rule is part of the DAG
     list(dag.tasks) >> watcher()

-from tests.system.utils import get_test_run  # noqa: E402
+from dev.tests_common.test_utils.system_tests import get_test_run  # noqa: E402

 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest)
 test_run = get_test_run(dag)
diff --git a/tests/system/providers/amazon/aws/example_s3.py b/providers/tests/system/amazon/aws/example_s3.py
similarity index 98%
rename from tests/system/providers/amazon/aws/example_s3.py
rename to providers/tests/system/amazon/aws/example_s3.py
index 06f60b4ac99c..d01607602287 100644
--- a/tests/system/providers/amazon/aws/example_s3.py
+++ b/providers/tests/system/amazon/aws/example_s3.py
@@ -36,7 +36,8 @@
 )
 from airflow.providers.amazon.aws.sensors.s3 import S3KeySensor, S3KeysUnchangedSensor
 from airflow.utils.trigger_rule import TriggerRule
-from tests.system.providers.amazon.aws.utils import ENV_ID_KEY, SystemTestContextBuilder
+
+from providers.tests.system.amazon.aws.utils import ENV_ID_KEY, SystemTestContextBuilder

 DAG_ID = "example_s3"

@@ -322,13 +323,13 @@ def check_fn(files: list, **kwargs) -> bool:
         delete_bucket,
         delete_bucket_2,
     )

-    from tests.system.utils.watcher import
watcher + from dev.tests_common.test_utils.watcher import watcher # This test needs watcher in order to properly mark success/failure # when "tearDown" task with trigger rule is part of the DAG list(dag.tasks) >> watcher() -from tests.system.utils import get_test_run # noqa: E402 +from dev.tests_common.test_utils.system_tests import get_test_run # noqa: E402 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) test_run = get_test_run(dag) diff --git a/tests/system/providers/amazon/aws/example_s3_to_dynamodb.py b/providers/tests/system/amazon/aws/example_s3_to_dynamodb.py similarity index 96% rename from tests/system/providers/amazon/aws/example_s3_to_dynamodb.py rename to providers/tests/system/amazon/aws/example_s3_to_dynamodb.py index b415ffad7bdc..efd762cfd93c 100644 --- a/tests/system/providers/amazon/aws/example_s3_to_dynamodb.py +++ b/providers/tests/system/amazon/aws/example_s3_to_dynamodb.py @@ -31,7 +31,8 @@ ) from airflow.providers.amazon.aws.transfers.s3_to_dynamodb import S3ToDynamoDBOperator from airflow.utils.trigger_rule import TriggerRule -from tests.system.providers.amazon.aws.utils import ENV_ID_KEY, SystemTestContextBuilder + +from providers.tests.system.amazon.aws.utils import ENV_ID_KEY, SystemTestContextBuilder log = logging.getLogger(__name__) @@ -180,13 +181,13 @@ def delete_dynamodb_table(table_name: str): delete_new_table, delete_bucket, ) - from tests.system.utils.watcher import watcher + from dev.tests_common.test_utils.watcher import watcher # This test needs watcher in order to properly mark success/failure # when "tearDown" task with trigger rule is part of the DAG list(dag.tasks) >> watcher() -from tests.system.utils import get_test_run # noqa: E402 +from dev.tests_common.test_utils.system_tests import get_test_run # noqa: E402 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) test_run = get_test_run(dag) diff --git a/tests/system/providers/amazon/aws/example_s3_to_ftp.py b/providers/tests/system/amazon/aws/example_s3_to_ftp.py similarity index 92% rename from tests/system/providers/amazon/aws/example_s3_to_ftp.py rename to providers/tests/system/amazon/aws/example_s3_to_ftp.py index bf909ddc8a21..0ffb4722ba86 100644 --- a/tests/system/providers/amazon/aws/example_s3_to_ftp.py +++ b/providers/tests/system/amazon/aws/example_s3_to_ftp.py @@ -23,7 +23,8 @@ from airflow.providers.amazon.aws.operators.s3 import S3CreateBucketOperator, S3DeleteBucketOperator from airflow.providers.amazon.aws.transfers.s3_to_ftp import S3ToFTPOperator from airflow.utils.trigger_rule import TriggerRule -from tests.system.providers.amazon.aws.utils import SystemTestContextBuilder + +from providers.tests.system.amazon.aws.utils import SystemTestContextBuilder sys_test_context_task = SystemTestContextBuilder().build() @@ -70,13 +71,13 @@ delete_s3_bucket, ) - from tests.system.utils.watcher import watcher + from dev.tests_common.test_utils.watcher import watcher # This test needs watcher in order to properly mark success/failure # when "tearDown" task with trigger rule is part of the DAG list(dag.tasks) >> watcher() -from tests.system.utils import get_test_run # noqa: E402 +from dev.tests_common.test_utils.system_tests import get_test_run # noqa: E402 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) test_run = get_test_run(dag) diff --git a/tests/system/providers/amazon/aws/example_s3_to_sftp.py b/providers/tests/system/amazon/aws/example_s3_to_sftp.py similarity index 
92% rename from tests/system/providers/amazon/aws/example_s3_to_sftp.py rename to providers/tests/system/amazon/aws/example_s3_to_sftp.py index 78825d715146..5270d3ec7d4b 100644 --- a/tests/system/providers/amazon/aws/example_s3_to_sftp.py +++ b/providers/tests/system/amazon/aws/example_s3_to_sftp.py @@ -23,7 +23,8 @@ from airflow.providers.amazon.aws.operators.s3 import S3CreateBucketOperator, S3DeleteBucketOperator from airflow.providers.amazon.aws.transfers.s3_to_sftp import S3ToSFTPOperator from airflow.utils.trigger_rule import TriggerRule -from tests.system.providers.amazon.aws.utils import SystemTestContextBuilder + +from providers.tests.system.amazon.aws.utils import SystemTestContextBuilder sys_test_context_task = SystemTestContextBuilder().build() @@ -70,13 +71,13 @@ delete_s3_bucket, ) - from tests.system.utils.watcher import watcher + from dev.tests_common.test_utils.watcher import watcher # This test needs watcher in order to properly mark success/failure # when "tearDown" task with trigger rule is part of the DAG list(dag.tasks) >> watcher() -from tests.system.utils import get_test_run # noqa: E402 +from dev.tests_common.test_utils.system_tests import get_test_run # noqa: E402 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) test_run = get_test_run(dag) diff --git a/tests/system/providers/amazon/aws/example_s3_to_sql.py b/providers/tests/system/amazon/aws/example_s3_to_sql.py similarity index 97% rename from tests/system/providers/amazon/aws/example_s3_to_sql.py rename to providers/tests/system/amazon/aws/example_s3_to_sql.py index 5e5e2ba010e8..e7e8a5e9543c 100644 --- a/tests/system/providers/amazon/aws/example_s3_to_sql.py +++ b/providers/tests/system/amazon/aws/example_s3_to_sql.py @@ -39,8 +39,9 @@ from airflow.providers.amazon.aws.transfers.s3_to_sql import S3ToSqlOperator from airflow.providers.common.sql.operators.sql import SQLTableCheckOperator from airflow.utils.trigger_rule import TriggerRule -from tests.system.providers.amazon.aws.utils import ENV_ID_KEY, SystemTestContextBuilder -from tests.system.utils.watcher import watcher + +from dev.tests_common.test_utils.watcher import watcher +from providers.tests.system.amazon.aws.utils import ENV_ID_KEY, SystemTestContextBuilder # Externally fetched variables: SECURITY_GROUP_KEY = "SECURITY_GROUP" @@ -253,7 +254,7 @@ def parse_csv_to_generator(filepath): list(dag.tasks) >> watcher() -from tests.system.utils import get_test_run # noqa: E402 +from dev.tests_common.test_utils.system_tests import get_test_run # noqa: E402 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) test_run = get_test_run(dag) diff --git a/tests/system/providers/amazon/aws/example_sagemaker.py b/providers/tests/system/amazon/aws/example_sagemaker.py similarity index 99% rename from tests/system/providers/amazon/aws/example_sagemaker.py rename to providers/tests/system/amazon/aws/example_sagemaker.py index 15b756494eac..acb05400b951 100644 --- a/tests/system/providers/amazon/aws/example_sagemaker.py +++ b/providers/tests/system/amazon/aws/example_sagemaker.py @@ -52,7 +52,8 @@ SageMakerTuningSensor, ) from airflow.utils.trigger_rule import TriggerRule -from tests.system.providers.amazon.aws.utils import ENV_ID_KEY, SystemTestContextBuilder, prune_logs + +from providers.tests.system.amazon.aws.utils import ENV_ID_KEY, SystemTestContextBuilder, prune_logs logger = logging.getLogger(__name__) @@ -645,13 +646,13 @@ def delete_docker_image(image_name): log_cleanup, ) - from 
tests.system.utils.watcher import watcher + from dev.tests_common.test_utils.watcher import watcher # This test needs watcher in order to properly mark success/failure # when "tearDown" task with trigger rule is part of the DAG list(dag.tasks) >> watcher() -from tests.system.utils import get_test_run # noqa: E402 +from dev.tests_common.test_utils.system_tests import get_test_run # noqa: E402 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) test_run = get_test_run(dag) diff --git a/tests/system/providers/amazon/aws/example_sagemaker_endpoint.py b/providers/tests/system/amazon/aws/example_sagemaker_endpoint.py similarity index 97% rename from tests/system/providers/amazon/aws/example_sagemaker_endpoint.py rename to providers/tests/system/amazon/aws/example_sagemaker_endpoint.py index 226b690cbf40..fecaa92264b6 100644 --- a/tests/system/providers/amazon/aws/example_sagemaker_endpoint.py +++ b/providers/tests/system/amazon/aws/example_sagemaker_endpoint.py @@ -38,7 +38,8 @@ ) from airflow.providers.amazon.aws.sensors.sagemaker import SageMakerEndpointSensor from airflow.utils.trigger_rule import TriggerRule -from tests.system.providers.amazon.aws.utils import ENV_ID_KEY, SystemTestContextBuilder, prune_logs + +from providers.tests.system.amazon.aws.utils import ENV_ID_KEY, SystemTestContextBuilder, prune_logs DAG_ID = "example_sagemaker_endpoint" @@ -58,7 +59,7 @@ } # For an example of how to obtain the following train and test data, please see -# https://github.com/apache/airflow/blob/main/tests/system/providers/amazon/aws/example_sagemaker.py +# https://github.com/apache/airflow/blob/main/providers/tests/system/amazon/aws/example_sagemaker.py TRAIN_DATA = "0,4.9,2.5,4.5,1.7\n1,7.0,3.2,4.7,1.4\n0,7.3,2.9,6.3,1.8\n2,5.1,3.5,1.4,0.2\n" SAMPLE_TEST_DATA = "6.4,3.2,4.5,1.5" @@ -290,13 +291,13 @@ def set_up(env_id, role_arn, ti=None): archive_logs(f"/aws/sagemaker/Endpoints/{test_setup['endpoint_name']}"), ) - from tests.system.utils.watcher import watcher + from dev.tests_common.test_utils.watcher import watcher # This test needs watcher in order to properly mark success/failure # when "tearDown" task with trigger rule is part of the DAG list(dag.tasks) >> watcher() -from tests.system.utils import get_test_run # noqa: E402 +from dev.tests_common.test_utils.system_tests import get_test_run # noqa: E402 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) test_run = get_test_run(dag) diff --git a/tests/system/providers/amazon/aws/example_sagemaker_notebook.py b/providers/tests/system/amazon/aws/example_sagemaker_notebook.py similarity index 94% rename from tests/system/providers/amazon/aws/example_sagemaker_notebook.py rename to providers/tests/system/amazon/aws/example_sagemaker_notebook.py index b61bfa822bb0..76d35dc2b147 100644 --- a/tests/system/providers/amazon/aws/example_sagemaker_notebook.py +++ b/providers/tests/system/amazon/aws/example_sagemaker_notebook.py @@ -26,7 +26,8 @@ SageMakerStartNoteBookOperator, SageMakerStopNotebookOperator, ) -from tests.system.providers.amazon.aws.utils import ENV_ID_KEY, SystemTestContextBuilder + +from providers.tests.system.amazon.aws.utils import ENV_ID_KEY, SystemTestContextBuilder DAG_ID = "example_sagemaker_notebook" @@ -96,13 +97,13 @@ delete_instance, ) - from tests.system.utils.watcher import watcher + from dev.tests_common.test_utils.watcher import watcher # This test needs watcher in order to properly mark success/failure # when "tearDown" task with trigger rule is part 
of the DAG list(dag.tasks) >> watcher() -from tests.system.utils import get_test_run # noqa: E402 +from dev.tests_common.test_utils.system_tests import get_test_run # noqa: E402 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) test_run = get_test_run(dag) diff --git a/tests/system/providers/amazon/aws/example_sagemaker_pipeline.py b/providers/tests/system/amazon/aws/example_sagemaker_pipeline.py similarity index 97% rename from tests/system/providers/amazon/aws/example_sagemaker_pipeline.py rename to providers/tests/system/amazon/aws/example_sagemaker_pipeline.py index 3f3251170240..614fb5c13cc9 100644 --- a/tests/system/providers/amazon/aws/example_sagemaker_pipeline.py +++ b/providers/tests/system/amazon/aws/example_sagemaker_pipeline.py @@ -31,8 +31,9 @@ SageMakerPipelineSensor, ) from airflow.utils.trigger_rule import TriggerRule -from tests.system.providers.amazon.aws.example_sagemaker import delete_experiments -from tests.system.providers.amazon.aws.utils import ENV_ID_KEY, SystemTestContextBuilder + +from providers.tests.system.amazon.aws.example_sagemaker import delete_experiments +from providers.tests.system.amazon.aws.utils import ENV_ID_KEY, SystemTestContextBuilder DAG_ID = "example_sagemaker_pipeline" @@ -116,13 +117,13 @@ def delete_pipeline(name: str): delete_experiments([pipeline_name]), ) - from tests.system.utils.watcher import watcher + from dev.tests_common.test_utils.watcher import watcher # This test needs watcher in order to properly mark success/failure # when "tearDown" task with trigger rule is part of the DAG list(dag.tasks) >> watcher() -from tests.system.utils import get_test_run # noqa: E402 +from dev.tests_common.test_utils.system_tests import get_test_run # noqa: E402 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) test_run = get_test_run(dag) diff --git a/tests/system/providers/amazon/aws/example_salesforce_to_s3.py b/providers/tests/system/amazon/aws/example_salesforce_to_s3.py similarity index 93% rename from tests/system/providers/amazon/aws/example_salesforce_to_s3.py rename to providers/tests/system/amazon/aws/example_salesforce_to_s3.py index 112be85c5ad2..7ee15f7030be 100644 --- a/tests/system/providers/amazon/aws/example_salesforce_to_s3.py +++ b/providers/tests/system/amazon/aws/example_salesforce_to_s3.py @@ -28,7 +28,8 @@ from airflow.providers.amazon.aws.operators.s3 import S3CreateBucketOperator, S3DeleteBucketOperator from airflow.providers.amazon.aws.transfers.salesforce_to_s3 import SalesforceToS3Operator from airflow.utils.trigger_rule import TriggerRule -from tests.system.providers.amazon.aws.utils import SystemTestContextBuilder + +from providers.tests.system.amazon.aws.utils import SystemTestContextBuilder sys_test_context_task = SystemTestContextBuilder().build() @@ -77,13 +78,13 @@ delete_s3_bucket, ) - from tests.system.utils.watcher import watcher + from dev.tests_common.test_utils.watcher import watcher # This test needs watcher in order to properly mark success/failure # when "tearDown" task with trigger rule is part of the DAG list(dag.tasks) >> watcher() -from tests.system.utils import get_test_run # noqa: E402 +from dev.tests_common.test_utils.system_tests import get_test_run # noqa: E402 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) test_run = get_test_run(dag) diff --git a/tests/system/providers/amazon/aws/example_sftp_to_s3.py b/providers/tests/system/amazon/aws/example_sftp_to_s3.py similarity index 92% 
rename from tests/system/providers/amazon/aws/example_sftp_to_s3.py rename to providers/tests/system/amazon/aws/example_sftp_to_s3.py index c5cae1ebfa68..85dac988cb82 100644 --- a/tests/system/providers/amazon/aws/example_sftp_to_s3.py +++ b/providers/tests/system/amazon/aws/example_sftp_to_s3.py @@ -23,7 +23,8 @@ from airflow.providers.amazon.aws.operators.s3 import S3CreateBucketOperator, S3DeleteBucketOperator from airflow.providers.amazon.aws.transfers.sftp_to_s3 import SFTPToS3Operator from airflow.utils.trigger_rule import TriggerRule -from tests.system.providers.amazon.aws.utils import SystemTestContextBuilder + +from providers.tests.system.amazon.aws.utils import SystemTestContextBuilder sys_test_context_task = SystemTestContextBuilder().build() @@ -70,13 +71,13 @@ delete_s3_bucket, ) - from tests.system.utils.watcher import watcher + from dev.tests_common.test_utils.watcher import watcher # This test needs watcher in order to properly mark success/failure # when "tearDown" task with trigger rule is part of the DAG list(dag.tasks) >> watcher() -from tests.system.utils import get_test_run # noqa: E402 +from dev.tests_common.test_utils.system_tests import get_test_run # noqa: E402 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) test_run = get_test_run(dag) diff --git a/tests/system/providers/amazon/aws/example_sns.py b/providers/tests/system/amazon/aws/example_sns.py similarity index 92% rename from tests/system/providers/amazon/aws/example_sns.py rename to providers/tests/system/amazon/aws/example_sns.py index 4cfb2b89153c..d00227536da0 100644 --- a/tests/system/providers/amazon/aws/example_sns.py +++ b/providers/tests/system/amazon/aws/example_sns.py @@ -25,7 +25,8 @@ from airflow.models.dag import DAG from airflow.providers.amazon.aws.operators.sns import SnsPublishOperator from airflow.utils.trigger_rule import TriggerRule -from tests.system.providers.amazon.aws.utils import SystemTestContextBuilder + +from providers.tests.system.amazon.aws.utils import SystemTestContextBuilder sys_test_context_task = SystemTestContextBuilder().build() @@ -74,14 +75,14 @@ def delete_topic(topic_arn) -> None: delete_topic(create_sns_topic), ) - from tests.system.utils.watcher import watcher + from dev.tests_common.test_utils.watcher import watcher # This test needs watcher in order to properly mark success/failure # when "tearDown" task with trigger rule is part of the DAG list(dag.tasks) >> watcher() -from tests.system.utils import get_test_run # noqa: E402 +from dev.tests_common.test_utils.system_tests import get_test_run # noqa: E402 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) test_run = get_test_run(dag) diff --git a/tests/system/providers/amazon/aws/example_sql_to_s3.py b/providers/tests/system/amazon/aws/example_sql_to_s3.py similarity index 96% rename from tests/system/providers/amazon/aws/example_sql_to_s3.py rename to providers/tests/system/amazon/aws/example_sql_to_s3.py index d52ca3630b9d..dd333073a043 100644 --- a/tests/system/providers/amazon/aws/example_sql_to_s3.py +++ b/providers/tests/system/amazon/aws/example_sql_to_s3.py @@ -35,7 +35,8 @@ from airflow.providers.amazon.aws.sensors.redshift_cluster import RedshiftClusterSensor from airflow.providers.amazon.aws.transfers.sql_to_s3 import SqlToS3Operator from airflow.utils.trigger_rule import TriggerRule -from tests.system.providers.amazon.aws.utils import ENV_ID_KEY, SystemTestContextBuilder + +from providers.tests.system.amazon.aws.utils import 
ENV_ID_KEY, SystemTestContextBuilder DAG_ID = "example_sql_to_s3" @@ -198,14 +199,14 @@ def create_connection(conn_id_name: str, cluster_id: str): delete_cluster, ) - from tests.system.utils.watcher import watcher + from dev.tests_common.test_utils.watcher import watcher # This test needs watcher in order to properly mark success/failure # when "tearDown" task with trigger rule is part of the DAG list(dag.tasks) >> watcher() -from tests.system.utils import get_test_run # noqa: E402 +from dev.tests_common.test_utils.system_tests import get_test_run # noqa: E402 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) test_run = get_test_run(dag) diff --git a/tests/system/providers/amazon/aws/example_sqs.py b/providers/tests/system/amazon/aws/example_sqs.py similarity index 94% rename from tests/system/providers/amazon/aws/example_sqs.py rename to providers/tests/system/amazon/aws/example_sqs.py index 0a14d5c44640..2aba6a580c4d 100644 --- a/tests/system/providers/amazon/aws/example_sqs.py +++ b/providers/tests/system/amazon/aws/example_sqs.py @@ -25,7 +25,8 @@ from airflow.providers.amazon.aws.operators.sqs import SqsPublishOperator from airflow.providers.amazon.aws.sensors.sqs import SqsSensor from airflow.utils.trigger_rule import TriggerRule -from tests.system.providers.amazon.aws.utils import SystemTestContextBuilder + +from providers.tests.system.amazon.aws.utils import SystemTestContextBuilder sys_test_context_task = SystemTestContextBuilder().build() @@ -99,14 +100,14 @@ def delete_queue(queue_url): delete_queue(sqs_queue), ) - from tests.system.utils.watcher import watcher + from dev.tests_common.test_utils.watcher import watcher # This test needs watcher in order to properly mark success/failure # when "tearDown" task with trigger rule is part of the DAG list(dag.tasks) >> watcher() -from tests.system.utils import get_test_run # noqa: E402 +from dev.tests_common.test_utils.system_tests import get_test_run # noqa: E402 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) test_run = get_test_run(dag) diff --git a/tests/system/providers/amazon/aws/example_step_functions.py b/providers/tests/system/amazon/aws/example_step_functions.py similarity index 95% rename from tests/system/providers/amazon/aws/example_step_functions.py rename to providers/tests/system/amazon/aws/example_step_functions.py index 1c0e90979d69..beeb12528c7a 100644 --- a/tests/system/providers/amazon/aws/example_step_functions.py +++ b/providers/tests/system/amazon/aws/example_step_functions.py @@ -28,7 +28,8 @@ StepFunctionStartExecutionOperator, ) from airflow.providers.amazon.aws.sensors.step_function import StepFunctionExecutionSensor -from tests.system.providers.amazon.aws.utils import ENV_ID_KEY, SystemTestContextBuilder + +from providers.tests.system.amazon.aws.utils import ENV_ID_KEY, SystemTestContextBuilder DAG_ID = "example_step_functions" @@ -111,13 +112,13 @@ def delete_state_machine(state_machine_arn): delete_state_machine(state_machine_arn), ) - from tests.system.utils.watcher import watcher + from dev.tests_common.test_utils.watcher import watcher # This test needs watcher in order to properly mark success/failure # when "tearDown" task with trigger rule is part of the DAG list(dag.tasks) >> watcher() -from tests.system.utils import get_test_run # noqa: E402 +from dev.tests_common.test_utils.system_tests import get_test_run # noqa: E402 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) test_run = 
get_test_run(dag) diff --git a/tests/providers/trino/assets/__init__.py b/providers/tests/system/amazon/aws/tests/__init__.py similarity index 100% rename from tests/providers/trino/assets/__init__.py rename to providers/tests/system/amazon/aws/tests/__init__.py diff --git a/tests/system/providers/amazon/aws/tests/test_aws_auth_manager.py b/providers/tests/system/amazon/aws/tests/test_aws_auth_manager.py similarity index 91% rename from tests/system/providers/amazon/aws/tests/test_aws_auth_manager.py rename to providers/tests/system/amazon/aws/tests/test_aws_auth_manager.py index dac7398a1ba8..774aec21b2b6 100644 --- a/tests/system/providers/amazon/aws/tests/test_aws_auth_manager.py +++ b/providers/tests/system/amazon/aws/tests/test_aws_auth_manager.py @@ -23,9 +23,10 @@ import pytest from airflow.www import app as application -from tests.system.providers.amazon.aws.utils import set_env_id -from tests.test_utils.config import conf_vars -from tests.test_utils.www import check_content_in_response + +from dev.tests_common.test_utils.config import conf_vars +from dev.tests_common.test_utils.www import check_content_in_response +from providers.tests.system.amazon.aws.utils import set_env_id SAML_METADATA_URL = "/saml/metadata" SAML_METADATA_PARSED = { @@ -124,11 +125,14 @@ def base_app(region_name, avp_policy_store_id): ("aws_auth_manager", "avp_policy_store_id"): avp_policy_store_id, } ): - with patch( - "airflow.providers.amazon.aws.auth_manager.views.auth.OneLogin_Saml2_IdPMetadataParser" - ) as mock_parser, patch( - "airflow.providers.amazon.aws.auth_manager.views.auth.AwsAuthManagerAuthenticationViews._init_saml_auth" - ) as mock_init_saml_auth: + with ( + patch( + "airflow.providers.amazon.aws.auth_manager.views.auth.OneLogin_Saml2_IdPMetadataParser" + ) as mock_parser, + patch( + "airflow.providers.amazon.aws.auth_manager.views.auth.AwsAuthManagerAuthenticationViews._init_saml_auth" + ) as mock_init_saml_auth, + ): mock_parser.parse_remote.return_value = SAML_METADATA_PARSED yield mock_init_saml_auth diff --git a/tests/system/providers/amazon/aws/utils/__init__.py b/providers/tests/system/amazon/aws/utils/__init__.py similarity index 99% rename from tests/system/providers/amazon/aws/utils/__init__.py rename to providers/tests/system/amazon/aws/utils/__init__.py index 8b4114fc90ad..1b6b5cc47c1d 100644 --- a/tests/system/providers/amazon/aws/utils/__init__.py +++ b/providers/tests/system/amazon/aws/utils/__init__.py @@ -48,7 +48,7 @@ INVALID_ENV_ID_MSG: str = ( "To maximize compatibility, the SYSTEM_TESTS_ENV_ID must be an alphanumeric string " - "which starts with a letter. Please see `tests/system/providers/amazon/README.md`." + "which starts with a letter. Please see `providers/tests/system/amazon/README.md`." 
) LOWERCASE_ENV_ID_MSG: str = ( "The provided Environment ID contains uppercase letters and " diff --git a/tests/system/providers/amazon/aws/utils/ec2.py b/providers/tests/system/amazon/aws/utils/ec2.py similarity index 100% rename from tests/system/providers/amazon/aws/utils/ec2.py rename to providers/tests/system/amazon/aws/utils/ec2.py diff --git a/tests/system/providers/amazon/aws/utils/k8s.py b/providers/tests/system/amazon/aws/utils/k8s.py similarity index 100% rename from tests/system/providers/amazon/aws/utils/k8s.py rename to providers/tests/system/amazon/aws/utils/k8s.py diff --git a/tests/providers/trino/operators/__init__.py b/providers/tests/system/apache/__init__.py similarity index 100% rename from tests/providers/trino/operators/__init__.py rename to providers/tests/system/apache/__init__.py diff --git a/tests/providers/tableau/hooks/__init__.py b/providers/tests/system/apache/beam/__init__.py similarity index 100% rename from tests/providers/tableau/hooks/__init__.py rename to providers/tests/system/apache/beam/__init__.py diff --git a/tests/system/providers/apache/beam/example_beam.py b/providers/tests/system/apache/beam/example_beam.py similarity index 95% rename from tests/system/providers/apache/beam/example_beam.py rename to providers/tests/system/apache/beam/example_beam.py index b0c62bbc00ba..4166caa3477a 100644 --- a/tests/system/providers/apache/beam/example_beam.py +++ b/providers/tests/system/apache/beam/example_beam.py @@ -24,7 +24,8 @@ from airflow import models from airflow.providers.apache.beam.operators.beam import BeamRunJavaPipelineOperator from airflow.providers.google.cloud.transfers.gcs_to_local import GCSToLocalFilesystemOperator -from tests.system.providers.apache.beam.utils import ( + +from providers.tests.system.apache.beam.utils import ( GCS_INPUT, GCS_JAR_DIRECT_RUNNER_BUCKET_NAME, GCS_JAR_DIRECT_RUNNER_OBJECT_NAME, @@ -60,7 +61,7 @@ # [END howto_operator_start_java_direct_runner_pipeline] -from tests.system.utils import get_test_run +from dev.tests_common.test_utils.system_tests import get_test_run # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) test_run = get_test_run(dag) diff --git a/tests/system/providers/apache/beam/example_beam_java_flink.py b/providers/tests/system/apache/beam/example_beam_java_flink.py similarity index 94% rename from tests/system/providers/apache/beam/example_beam_java_flink.py rename to providers/tests/system/apache/beam/example_beam_java_flink.py index 42cfad2bdc8c..a68637d8ef8d 100644 --- a/tests/system/providers/apache/beam/example_beam_java_flink.py +++ b/providers/tests/system/apache/beam/example_beam_java_flink.py @@ -24,7 +24,8 @@ from airflow import models from airflow.providers.apache.beam.operators.beam import BeamRunJavaPipelineOperator from airflow.providers.google.cloud.transfers.gcs_to_local import GCSToLocalFilesystemOperator -from tests.system.providers.apache.beam.utils import ( + +from providers.tests.system.apache.beam.utils import ( GCS_INPUT, GCS_JAR_FLINK_RUNNER_BUCKET_NAME, GCS_JAR_FLINK_RUNNER_OBJECT_NAME, @@ -59,7 +60,7 @@ jar_to_local_flink_runner >> start_java_pipeline_flink_runner -from tests.system.utils import get_test_run +from dev.tests_common.test_utils.system_tests import get_test_run # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) test_run = get_test_run(dag) diff --git a/tests/system/providers/apache/beam/example_beam_java_spark.py b/providers/tests/system/apache/beam/example_beam_java_spark.py 
similarity index 94% rename from tests/system/providers/apache/beam/example_beam_java_spark.py rename to providers/tests/system/apache/beam/example_beam_java_spark.py index 29259dd63a25..eb3fea6d16da 100644 --- a/tests/system/providers/apache/beam/example_beam_java_spark.py +++ b/providers/tests/system/apache/beam/example_beam_java_spark.py @@ -24,7 +24,8 @@ from airflow import models from airflow.providers.apache.beam.operators.beam import BeamRunJavaPipelineOperator from airflow.providers.google.cloud.transfers.gcs_to_local import GCSToLocalFilesystemOperator -from tests.system.providers.apache.beam.utils import ( + +from providers.tests.system.apache.beam.utils import ( GCS_INPUT, GCS_JAR_SPARK_RUNNER_BUCKET_NAME, GCS_JAR_SPARK_RUNNER_OBJECT_NAME, @@ -59,7 +60,7 @@ jar_to_local_spark_runner >> start_java_pipeline_spark_runner -from tests.system.utils import get_test_run +from dev.tests_common.test_utils.system_tests import get_test_run # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) test_run = get_test_run(dag) diff --git a/tests/system/providers/apache/beam/example_go.py b/providers/tests/system/apache/beam/example_go.py similarity index 96% rename from tests/system/providers/apache/beam/example_go.py rename to providers/tests/system/apache/beam/example_go.py index f5fc70ff1e91..792eee088bf6 100644 --- a/tests/system/providers/apache/beam/example_go.py +++ b/providers/tests/system/apache/beam/example_go.py @@ -24,7 +24,8 @@ from airflow import models from airflow.providers.apache.beam.operators.beam import BeamRunGoPipelineOperator from airflow.providers.google.cloud.operators.dataflow import DataflowConfiguration -from tests.system.providers.apache.beam.utils import ( + +from providers.tests.system.apache.beam.utils import ( DEFAULT_ARGS, GCP_PROJECT_ID, GCS_GO, @@ -102,7 +103,7 @@ ) -from tests.system.utils import get_test_run +from dev.tests_common.test_utils.system_tests import get_test_run # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) test_run = get_test_run(dag) diff --git a/tests/system/providers/apache/beam/example_go_dataflow.py b/providers/tests/system/apache/beam/example_go_dataflow.py similarity index 95% rename from tests/system/providers/apache/beam/example_go_dataflow.py rename to providers/tests/system/apache/beam/example_go_dataflow.py index 4cc81d926304..35a63cb56e1e 100644 --- a/tests/system/providers/apache/beam/example_go_dataflow.py +++ b/providers/tests/system/apache/beam/example_go_dataflow.py @@ -26,7 +26,8 @@ from airflow.providers.google.cloud.hooks.dataflow import DataflowJobStatus from airflow.providers.google.cloud.operators.dataflow import DataflowConfiguration from airflow.providers.google.cloud.sensors.dataflow import DataflowJobStatusSensor -from tests.system.providers.apache.beam.utils import ( + +from providers.tests.system.apache.beam.utils import ( DEFAULT_ARGS, GCP_PROJECT_ID, GCS_GO_DATAFLOW_ASYNC, @@ -75,7 +76,7 @@ # [END howto_operator_start_go_dataflow_runner_pipeline_async_gcs_file] -from tests.system.utils import get_test_run +from dev.tests_common.test_utils.system_tests import get_test_run # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) test_run = get_test_run(dag) diff --git a/tests/system/providers/apache/beam/example_java_dataflow.py b/providers/tests/system/apache/beam/example_java_dataflow.py similarity index 95% rename from tests/system/providers/apache/beam/example_java_dataflow.py rename to 
providers/tests/system/apache/beam/example_java_dataflow.py index 0a941eb32a5a..334142dfd315 100644 --- a/tests/system/providers/apache/beam/example_java_dataflow.py +++ b/providers/tests/system/apache/beam/example_java_dataflow.py @@ -24,7 +24,8 @@ from airflow import models from airflow.providers.apache.beam.operators.beam import BeamRunJavaPipelineOperator from airflow.providers.google.cloud.transfers.gcs_to_local import GCSToLocalFilesystemOperator -from tests.system.providers.apache.beam.utils import ( + +from providers.tests.system.apache.beam.utils import ( GCS_JAR_DATAFLOW_RUNNER_BUCKET_NAME, GCS_JAR_DATAFLOW_RUNNER_OBJECT_NAME, GCS_OUTPUT, @@ -65,7 +66,7 @@ # [END howto_operator_start_java_dataflow_runner_pipeline] -from tests.system.utils import get_test_run +from dev.tests_common.test_utils.system_tests import get_test_run # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) test_run = get_test_run(dag) diff --git a/tests/system/providers/apache/beam/example_python.py b/providers/tests/system/apache/beam/example_python.py similarity index 97% rename from tests/system/providers/apache/beam/example_python.py rename to providers/tests/system/apache/beam/example_python.py index 12907df08f5a..d685cb33d2da 100644 --- a/tests/system/providers/apache/beam/example_python.py +++ b/providers/tests/system/apache/beam/example_python.py @@ -24,7 +24,8 @@ from airflow import models from airflow.providers.apache.beam.operators.beam import BeamRunPythonPipelineOperator from airflow.providers.google.cloud.operators.dataflow import DataflowConfiguration -from tests.system.providers.apache.beam.utils import ( + +from providers.tests.system.apache.beam.utils import ( DEFAULT_ARGS, GCP_PROJECT_ID, GCS_OUTPUT, @@ -119,7 +120,7 @@ ) -from tests.system.utils import get_test_run +from dev.tests_common.test_utils.system_tests import get_test_run # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) test_run = get_test_run(dag) diff --git a/tests/system/providers/apache/beam/example_python_async.py b/providers/tests/system/apache/beam/example_python_async.py similarity index 97% rename from tests/system/providers/apache/beam/example_python_async.py rename to providers/tests/system/apache/beam/example_python_async.py index ab2e9a8063ea..8465278b6bf7 100644 --- a/tests/system/providers/apache/beam/example_python_async.py +++ b/providers/tests/system/apache/beam/example_python_async.py @@ -24,7 +24,8 @@ from airflow import models from airflow.providers.apache.beam.operators.beam import BeamRunPythonPipelineOperator from airflow.providers.google.cloud.operators.dataflow import DataflowConfiguration -from tests.system.providers.apache.beam.utils import ( + +from providers.tests.system.apache.beam.utils import ( DEFAULT_ARGS, GCP_PROJECT_ID, GCS_OUTPUT, @@ -128,7 +129,7 @@ ) -from tests.system.utils import get_test_run +from dev.tests_common.test_utils.system_tests import get_test_run # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) test_run = get_test_run(dag) diff --git a/tests/system/providers/apache/beam/example_python_dataflow.py b/providers/tests/system/apache/beam/example_python_dataflow.py similarity index 96% rename from tests/system/providers/apache/beam/example_python_dataflow.py rename to providers/tests/system/apache/beam/example_python_dataflow.py index 9b849e3900d9..36d6b3b8562e 100644 --- a/tests/system/providers/apache/beam/example_python_dataflow.py +++ 
b/providers/tests/system/apache/beam/example_python_dataflow.py @@ -26,7 +26,8 @@ from airflow.providers.google.cloud.hooks.dataflow import DataflowJobStatus from airflow.providers.google.cloud.operators.dataflow import DataflowConfiguration from airflow.providers.google.cloud.sensors.dataflow import DataflowJobStatusSensor -from tests.system.providers.apache.beam.utils import ( + +from providers.tests.system.apache.beam.utils import ( DEFAULT_ARGS, GCP_PROJECT_ID, GCS_OUTPUT, @@ -78,7 +79,7 @@ # [END howto_operator_start_python_dataflow_runner_pipeline_async_gcs_file] -from tests.system.utils import get_test_run +from dev.tests_common.test_utils.system_tests import get_test_run # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) test_run = get_test_run(dag) diff --git a/tests/system/providers/apache/beam/utils.py b/providers/tests/system/apache/beam/utils.py similarity index 100% rename from tests/system/providers/apache/beam/utils.py rename to providers/tests/system/apache/beam/utils.py diff --git a/tests/providers/weaviate/__init__.py b/providers/tests/system/apache/cassandra/__init__.py similarity index 100% rename from tests/providers/weaviate/__init__.py rename to providers/tests/system/apache/cassandra/__init__.py diff --git a/tests/system/providers/apache/cassandra/example_cassandra_dag.py b/providers/tests/system/apache/cassandra/example_cassandra_dag.py similarity index 96% rename from tests/system/providers/apache/cassandra/example_cassandra_dag.py rename to providers/tests/system/apache/cassandra/example_cassandra_dag.py index 79e9918b446b..0b25fa2975db 100644 --- a/tests/system/providers/apache/cassandra/example_cassandra_dag.py +++ b/providers/tests/system/apache/cassandra/example_cassandra_dag.py @@ -50,7 +50,7 @@ record_sensor = CassandraRecordSensor(task_id="cassandra_record_sensor", keys={"p1": "v1", "p2": "v2"}) # [END howto_operator_cassandra_sensors] -from tests.system.utils import get_test_run # noqa: E402 +from dev.tests_common.test_utils.system_tests import get_test_run # noqa: E402 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) test_run = get_test_run(dag) diff --git a/tests/providers/weaviate/hooks/__init__.py b/providers/tests/system/apache/drill/__init__.py similarity index 100% rename from tests/providers/weaviate/hooks/__init__.py rename to providers/tests/system/apache/drill/__init__.py diff --git a/tests/system/providers/apache/drill/example_drill_dag.py b/providers/tests/system/apache/drill/example_drill_dag.py similarity index 95% rename from tests/system/providers/apache/drill/example_drill_dag.py rename to providers/tests/system/apache/drill/example_drill_dag.py index 62f332f318a8..fdaf376d5a16 100644 --- a/tests/system/providers/apache/drill/example_drill_dag.py +++ b/providers/tests/system/apache/drill/example_drill_dag.py @@ -47,7 +47,7 @@ ) # [END howto_operator_drill] -from tests.system.utils import get_test_run # noqa: E402 +from dev.tests_common.test_utils.system_tests import get_test_run # noqa: E402 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) test_run = get_test_run(dag) diff --git a/tests/providers/weaviate/operators/__init__.py b/providers/tests/system/apache/druid/__init__.py similarity index 100% rename from tests/providers/weaviate/operators/__init__.py rename to providers/tests/system/apache/druid/__init__.py diff --git a/tests/system/providers/apache/druid/example_druid_dag.py 
b/providers/tests/system/apache/druid/example_druid_dag.py similarity index 96% rename from tests/system/providers/apache/druid/example_druid_dag.py rename to providers/tests/system/apache/druid/example_druid_dag.py index 1426a12456ba..5e1bfc446bb0 100644 --- a/tests/system/providers/apache/druid/example_druid_dag.py +++ b/providers/tests/system/apache/druid/example_druid_dag.py @@ -55,7 +55,7 @@ """ # [END howto_operator_druid_submit] -from tests.system.utils import get_test_run # noqa: E402 +from dev.tests_common.test_utils.system_tests import get_test_run # noqa: E402 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) test_run = get_test_run(dag) diff --git a/tests/providers/telegram/__init__.py b/providers/tests/system/apache/hive/__init__.py similarity index 100% rename from tests/providers/telegram/__init__.py rename to providers/tests/system/apache/hive/__init__.py diff --git a/tests/system/providers/apache/hive/example_twitter_README.md b/providers/tests/system/apache/hive/example_twitter_README.md similarity index 100% rename from tests/system/providers/apache/hive/example_twitter_README.md rename to providers/tests/system/apache/hive/example_twitter_README.md diff --git a/tests/system/providers/apache/hive/example_twitter_dag.py b/providers/tests/system/apache/hive/example_twitter_dag.py similarity index 97% rename from tests/system/providers/apache/hive/example_twitter_dag.py rename to providers/tests/system/apache/hive/example_twitter_dag.py index 4ceb119ba551..129a8ebf9f77 100644 --- a/tests/system/providers/apache/hive/example_twitter_dag.py +++ b/providers/tests/system/apache/hive/example_twitter_dag.py @@ -154,13 +154,13 @@ def transfer_to_db(): analyze >> load_to_hdfs >> load_to_hive >> hive_to_mysql - from tests.system.utils.watcher import watcher + from dev.tests_common.test_utils.watcher import watcher # This test needs watcher in order to properly mark success/failure # when "tearDown" task with trigger rule is part of the DAG list(dag.tasks) >> watcher() -from tests.system.utils import get_test_run # noqa: E402 +from dev.tests_common.test_utils.system_tests import get_test_run # noqa: E402 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) test_run = get_test_run(dag) diff --git a/tests/providers/telegram/hooks/__init__.py b/providers/tests/system/apache/iceberg/__init__.py similarity index 100% rename from tests/providers/telegram/hooks/__init__.py rename to providers/tests/system/apache/iceberg/__init__.py diff --git a/tests/system/providers/apache/iceberg/example_iceberg.py b/providers/tests/system/apache/iceberg/example_iceberg.py similarity index 95% rename from tests/system/providers/apache/iceberg/example_iceberg.py rename to providers/tests/system/apache/iceberg/example_iceberg.py index 41e751624b5c..a4708805b678 100644 --- a/tests/system/providers/apache/iceberg/example_iceberg.py +++ b/providers/tests/system/apache/iceberg/example_iceberg.py @@ -49,7 +49,7 @@ ) -from tests.system.utils import get_test_run # noqa: E402 +from dev.tests_common.test_utils.system_tests import get_test_run # noqa: E402 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) test_run = get_test_run(dag) diff --git a/tests/providers/yandex/__init__.py b/providers/tests/system/apache/kafka/__init__.py similarity index 100% rename from tests/providers/yandex/__init__.py rename to providers/tests/system/apache/kafka/__init__.py diff --git 
a/tests/system/providers/apache/kafka/example_dag_event_listener.py b/providers/tests/system/apache/kafka/example_dag_event_listener.py similarity index 97% rename from tests/system/providers/apache/kafka/example_dag_event_listener.py rename to providers/tests/system/apache/kafka/example_dag_event_listener.py index 24d8177ce8ca..eb6866735e83 100644 --- a/tests/system/providers/apache/kafka/example_dag_event_listener.py +++ b/providers/tests/system/apache/kafka/example_dag_event_listener.py @@ -120,7 +120,7 @@ def wait_for_event(message, **context): t0 >> t1 -from tests.system.utils import get_test_run # noqa: E402 +from dev.tests_common.test_utils.system_tests import get_test_run # noqa: E402 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) test_run = get_test_run(dag) diff --git a/tests/system/providers/apache/kafka/example_dag_hello_kafka.py b/providers/tests/system/apache/kafka/example_dag_hello_kafka.py similarity index 98% rename from tests/system/providers/apache/kafka/example_dag_hello_kafka.py rename to providers/tests/system/apache/kafka/example_dag_hello_kafka.py index fc7078c3dc5a..5e70d7324a1a 100644 --- a/tests/system/providers/apache/kafka/example_dag_hello_kafka.py +++ b/providers/tests/system/apache/kafka/example_dag_hello_kafka.py @@ -240,7 +240,7 @@ def hello_kafka(): t0 >> t3 >> [t4, t4b] >> t5 >> t6 -from tests.system.utils import get_test_run # noqa: E402 +from dev.tests_common.test_utils.system_tests import get_test_run # noqa: E402 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) test_run = get_test_run(dag) diff --git a/tests/providers/yandex/hooks/__init__.py b/providers/tests/system/apache/kylin/__init__.py similarity index 100% rename from tests/providers/yandex/hooks/__init__.py rename to providers/tests/system/apache/kylin/__init__.py diff --git a/tests/system/providers/apache/kylin/example_kylin_dag.py b/providers/tests/system/apache/kylin/example_kylin_dag.py similarity index 96% rename from tests/system/providers/apache/kylin/example_kylin_dag.py rename to providers/tests/system/apache/kylin/example_kylin_dag.py index c851ad7c5e3d..1bfec2cd1a7d 100644 --- a/tests/system/providers/apache/kylin/example_kylin_dag.py +++ b/providers/tests/system/apache/kylin/example_kylin_dag.py @@ -109,13 +109,13 @@ def gen_build_time(): # gen_build_time >> refresh_task1 # gen_build_time >> merge_task # gen_build_time >> build_task3 - from tests.system.utils.watcher import watcher + from dev.tests_common.test_utils.watcher import watcher # This test needs watcher in order to properly mark success/failure # when "tearDown" task with trigger rule is part of the DAG list(dag.tasks) >> watcher() -from tests.system.utils import get_test_run # noqa: E402 +from dev.tests_common.test_utils.system_tests import get_test_run # noqa: E402 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) test_run = get_test_run(dag) diff --git a/tests/providers/yandex/links/__init__.py b/providers/tests/system/apache/livy/__init__.py similarity index 100% rename from tests/providers/yandex/links/__init__.py rename to providers/tests/system/apache/livy/__init__.py diff --git a/tests/system/providers/apache/livy/example_livy.py b/providers/tests/system/apache/livy/example_livy.py similarity index 95% rename from tests/system/providers/apache/livy/example_livy.py rename to providers/tests/system/apache/livy/example_livy.py index 507bbaefede4..09f8f7e244e0 100644 --- 
a/tests/system/providers/apache/livy/example_livy.py
+++ b/providers/tests/system/apache/livy/example_livy.py
@@ -73,13 +73,13 @@
     livy_java_task_deferrable >> livy_python_task_deferrable
     # [END create_livy_deferrable]

-    from tests.system.utils.watcher import watcher
+    from dev.tests_common.test_utils.watcher import watcher

     # This test needs watcher in order to properly mark success/failure
     # when "tearDown" task with trigger rule is part of the DAG
     list(dag.tasks) >> watcher()

-from tests.system.utils import get_test_run  # noqa: E402
+from dev.tests_common.test_utils.system_tests import get_test_run  # noqa: E402

 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest)
 test_run = get_test_run(dag)
diff --git a/tests/providers/yandex/operators/__init__.py b/providers/tests/system/apache/pig/__init__.py
similarity index 100%
rename from tests/providers/yandex/operators/__init__.py
rename to providers/tests/system/apache/pig/__init__.py
diff --git a/tests/system/providers/apache/pig/example_pig.py b/providers/tests/system/apache/pig/example_pig.py
similarity index 95%
rename from tests/system/providers/apache/pig/example_pig.py
rename to providers/tests/system/apache/pig/example_pig.py
index 938987f32e78..bb556a85111d 100644
--- a/tests/system/providers/apache/pig/example_pig.py
+++ b/providers/tests/system/apache/pig/example_pig.py
@@ -44,7 +44,7 @@
 # [END create_pig]

-from tests.system.utils import get_test_run  # noqa: E402
+from dev.tests_common.test_utils.system_tests import get_test_run  # noqa: E402

 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest)
 test_run = get_test_run(dag)
diff --git a/tests/providers/yandex/secrets/__init__.py b/providers/tests/system/apache/pinot/__init__.py
similarity index 100%
rename from tests/providers/yandex/secrets/__init__.py
rename to providers/tests/system/apache/pinot/__init__.py
diff --git a/tests/system/providers/apache/pinot/example_pinot_dag.py b/providers/tests/system/apache/pinot/example_pinot_dag.py
similarity index 96%
rename from tests/system/providers/apache/pinot/example_pinot_dag.py
rename to providers/tests/system/apache/pinot/example_pinot_dag.py
index eecd6a4c2abe..d95022f36691 100644
--- a/tests/system/providers/apache/pinot/example_pinot_dag.py
+++ b/providers/tests/system/apache/pinot/example_pinot_dag.py
@@ -52,7 +52,7 @@ def pinot_dbi_api():
     pinot_admin()
     pinot_dbi_api()

-from tests.system.utils import get_test_run
+from dev.tests_common.test_utils.system_tests import get_test_run

 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest)
 test_run = get_test_run(dag)
diff --git a/tests/providers/yandex/utils/__init__.py b/providers/tests/system/apache/spark/__init__.py
similarity index 100%
rename from tests/providers/yandex/utils/__init__.py
rename to providers/tests/system/apache/spark/__init__.py
diff --git a/tests/system/providers/apache/spark/example_pyspark.py b/providers/tests/system/apache/spark/example_pyspark.py
similarity index 96%
rename from tests/system/providers/apache/spark/example_pyspark.py
rename to providers/tests/system/apache/spark/example_pyspark.py
index cc18911a38c7..0ca14a76f48f 100644
--- a/tests/system/providers/apache/spark/example_pyspark.py
+++ b/providers/tests/system/apache/spark/example_pyspark.py
@@ -70,7 +70,7 @@ def print_df(df: pd.DataFrame):
 dag = example_pyspark()  # type: ignore

-from tests.system.utils import get_test_run  # noqa: E402
+from dev.tests_common.test_utils.system_tests import get_test_run  # noqa: E402

 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest)
 test_run = get_test_run(dag)
diff --git a/tests/system/providers/apache/spark/example_spark_dag.py b/providers/tests/system/apache/spark/example_spark_dag.py
similarity index 97%
rename from tests/system/providers/apache/spark/example_spark_dag.py
rename to providers/tests/system/apache/spark/example_spark_dag.py
index 00bb415585c0..b68fc2cf761a 100644
--- a/tests/system/providers/apache/spark/example_spark_dag.py
+++ b/providers/tests/system/apache/spark/example_spark_dag.py
@@ -75,7 +75,7 @@
     )
     # [END howto_operator_spark_sql]

-from tests.system.utils import get_test_run  # noqa: E402
+from dev.tests_common.test_utils.system_tests import get_test_run  # noqa: E402

 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest)
 test_run = get_test_run(dag)
diff --git a/tests/providers/ydb/__init__.py b/providers/tests/system/asana/__init__.py
similarity index 100%
rename from tests/providers/ydb/__init__.py
rename to providers/tests/system/asana/__init__.py
diff --git a/tests/system/providers/asana/example_asana.py b/providers/tests/system/asana/example_asana.py
similarity index 97%
rename from tests/system/providers/asana/example_asana.py
rename to providers/tests/system/asana/example_asana.py
index 79a3636a1cb7..ca68b6cc3dd2 100644
--- a/tests/system/providers/asana/example_asana.py
+++ b/providers/tests/system/asana/example_asana.py
@@ -100,13 +100,13 @@
     create >> find >> update >> delete
     # [END asana_example_dag]

-    from tests.system.utils.watcher import watcher
+    from dev.tests_common.test_utils.watcher import watcher

     # This test needs watcher in order to properly mark success/failure
     # when "tearDown" task with trigger rule is part of the DAG
     list(dag.tasks) >> watcher()

-from tests.system.utils import get_test_run  # noqa: E402
+from dev.tests_common.test_utils.system_tests import get_test_run  # noqa: E402

 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest)
 test_run = get_test_run(dag)
diff --git a/tests/providers/ydb/hooks/__init__.py b/providers/tests/system/cncf/__init__.py
similarity index 100%
rename from tests/providers/ydb/hooks/__init__.py
rename to providers/tests/system/cncf/__init__.py
diff --git a/tests/providers/ydb/operators/__init__.py b/providers/tests/system/cncf/kubernetes/__init__.py
similarity index 100%
rename from tests/providers/ydb/operators/__init__.py
rename to providers/tests/system/cncf/kubernetes/__init__.py
diff --git a/tests/system/providers/cncf/kubernetes/example_kubernetes.py b/providers/tests/system/cncf/kubernetes/example_kubernetes.py
similarity index 97%
rename from tests/system/providers/cncf/kubernetes/example_kubernetes.py
rename to providers/tests/system/cncf/kubernetes/example_kubernetes.py
index 3756d0c4e21d..bed3b9da746b 100644
--- a/tests/system/providers/cncf/kubernetes/example_kubernetes.py
+++ b/providers/tests/system/cncf/kubernetes/example_kubernetes.py
@@ -167,13 +167,13 @@
     write_xcom >> pod_task_xcom_result
     # [END howto_operator_k8s_write_xcom]

-    from tests.system.utils.watcher import watcher
+    from dev.tests_common.test_utils.watcher import watcher

     # This test needs watcher in order to properly mark success/failure
     # when "tearDown" task with trigger rule is part of the DAG
     list(dag.tasks) >> watcher()

-from tests.system.utils import get_test_run  # noqa: E402
+from dev.tests_common.test_utils.system_tests import get_test_run  # noqa: E402

 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest)
 test_run = get_test_run(dag)
diff --git a/tests/system/providers/cncf/kubernetes/example_kubernetes_async.py b/providers/tests/system/cncf/kubernetes/example_kubernetes_async.py
similarity index 98%
rename from tests/system/providers/cncf/kubernetes/example_kubernetes_async.py
rename to providers/tests/system/cncf/kubernetes/example_kubernetes_async.py
index cb3d25a33fcb..43eb0a1f919c 100644
--- a/tests/system/providers/cncf/kubernetes/example_kubernetes_async.py
+++ b/providers/tests/system/cncf/kubernetes/example_kubernetes_async.py
@@ -198,13 +198,13 @@
     write_xcom_async >> pod_task_xcom_result_async
     # [END howto_operator_k8s_write_xcom_async]

-    from tests.system.utils.watcher import watcher
+    from dev.tests_common.test_utils.watcher import watcher

     # This test needs watcher in order to properly mark success/failure
     # when "tearDown" task with trigger rule is part of the DAG
     list(dag.tasks) >> watcher()

-from tests.system.utils import get_test_run  # noqa: E402
+from dev.tests_common.test_utils.system_tests import get_test_run  # noqa: E402

 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest)
 test_run = get_test_run(dag)
diff --git a/tests/system/providers/cncf/kubernetes/example_kubernetes_decorator.py b/providers/tests/system/cncf/kubernetes/example_kubernetes_decorator.py
similarity index 97%
rename from tests/system/providers/cncf/kubernetes/example_kubernetes_decorator.py
rename to providers/tests/system/cncf/kubernetes/example_kubernetes_decorator.py
index 20fd7d5f74fa..088a3fdae2ed 100644
--- a/tests/system/providers/cncf/kubernetes/example_kubernetes_decorator.py
+++ b/providers/tests/system/cncf/kubernetes/example_kubernetes_decorator.py
@@ -63,7 +63,7 @@ def print_pattern():
 # [END howto_operator_kubernetes]

-from tests.system.utils import get_test_run
+from dev.tests_common.test_utils.system_tests import get_test_run

 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest)
 test_run = get_test_run(dag)
diff --git a/tests/system/providers/cncf/kubernetes/example_kubernetes_job.py b/providers/tests/system/cncf/kubernetes/example_kubernetes_job.py
similarity index 95%
rename from tests/system/providers/cncf/kubernetes/example_kubernetes_job.py
rename to providers/tests/system/cncf/kubernetes/example_kubernetes_job.py
index bf017120674b..1a2432c13589 100644
--- a/tests/system/providers/cncf/kubernetes/example_kubernetes_job.py
+++ b/providers/tests/system/cncf/kubernetes/example_kubernetes_job.py
@@ -94,13 +94,13 @@
     k8s_job >> update_job >> delete_job_task
     k8s_job_def >> delete_job_task_def

-    from tests.system.utils.watcher import watcher
+    from dev.tests_common.test_utils.watcher import watcher

     # This test needs watcher in order to properly mark success/failure
     # when "tearDown" task with trigger rule is part of the DAG
     list(dag.tasks) >> watcher()

-from tests.system.utils import get_test_run  # noqa: E402
+from dev.tests_common.test_utils.system_tests import get_test_run  # noqa: E402

 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest)
 test_run = get_test_run(dag)
diff --git a/tests/system/providers/cncf/kubernetes/example_kubernetes_resource.py b/providers/tests/system/cncf/kubernetes/example_kubernetes_resource.py
similarity index 94%
rename from tests/system/providers/cncf/kubernetes/example_kubernetes_resource.py
rename to providers/tests/system/cncf/kubernetes/example_kubernetes_resource.py
index 2133d6e7a79a..1c24a533a85f 100644
--- a/tests/system/providers/cncf/kubernetes/example_kubernetes_resource.py
+++ b/providers/tests/system/cncf/kubernetes/example_kubernetes_resource.py
@@ -72,13 +72,13 @@
     t1 >> t2

-    from tests.system.utils.watcher import watcher
+    from dev.tests_common.test_utils.watcher import watcher

     # This test needs watcher in order to properly mark success/failure
     # when "tearDown" task with trigger rule is part of the DAG
     list(dag.tasks) >> watcher()

-from tests.system.utils import get_test_run  # noqa: E402
+from dev.tests_common.test_utils.system_tests import get_test_run  # noqa: E402

 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest)
 test_run = get_test_run(dag)
diff --git a/tests/system/providers/cncf/kubernetes/example_spark_kubernetes.py b/providers/tests/system/cncf/kubernetes/example_spark_kubernetes.py
similarity index 95%
rename from tests/system/providers/cncf/kubernetes/example_spark_kubernetes.py
rename to providers/tests/system/cncf/kubernetes/example_spark_kubernetes.py
index 4584d653e4fa..072cffeb90f0 100644
--- a/tests/system/providers/cncf/kubernetes/example_spark_kubernetes.py
+++ b/providers/tests/system/cncf/kubernetes/example_spark_kubernetes.py
@@ -76,13 +76,13 @@
     t1 >> t2
     # [END SparkKubernetesOperator_DAG]

-    from tests.system.utils.watcher import watcher
+    from dev.tests_common.test_utils.watcher import watcher

     # This test needs watcher in order to properly mark success/failure
     # when "tearDown" task with trigger rule is part of the DAG
     list(dag.tasks) >> watcher()

-from tests.system.utils import get_test_run  # noqa: E402
+from dev.tests_common.test_utils.system_tests import get_test_run  # noqa: E402

 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest)
 test_run = get_test_run(dag)
diff --git a/tests/system/providers/cncf/kubernetes/example_spark_kubernetes_spark_pi.yaml b/providers/tests/system/cncf/kubernetes/example_spark_kubernetes_spark_pi.yaml
similarity index 100%
rename from tests/system/providers/cncf/kubernetes/example_spark_kubernetes_spark_pi.yaml
rename to providers/tests/system/cncf/kubernetes/example_spark_kubernetes_spark_pi.yaml
diff --git a/tests/system/providers/cncf/kubernetes/spark_job_template.yaml b/providers/tests/system/cncf/kubernetes/spark_job_template.yaml
similarity index 100%
rename from tests/system/providers/cncf/kubernetes/spark_job_template.yaml
rename to providers/tests/system/cncf/kubernetes/spark_job_template.yaml
diff --git a/tests/providers/ydb/utils/__init__.py b/providers/tests/system/cohere/__init__.py
similarity index 100%
rename from tests/providers/ydb/utils/__init__.py
rename to providers/tests/system/cohere/__init__.py
diff --git a/tests/system/providers/cohere/example_cohere_embedding_operator.py b/providers/tests/system/cohere/example_cohere_embedding_operator.py
similarity index 96%
rename from tests/system/providers/cohere/example_cohere_embedding_operator.py
rename to providers/tests/system/cohere/example_cohere_embedding_operator.py
index ec97ee91e57c..9686724f2cea 100644
--- a/tests/system/providers/cohere/example_cohere_embedding_operator.py
+++ b/providers/tests/system/cohere/example_cohere_embedding_operator.py
@@ -34,7 +34,7 @@
 # [END howto_operator_cohere_embedding]

-from tests.system.utils import get_test_run
+from dev.tests_common.test_utils.system_tests import get_test_run

 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest)
 test_run = get_test_run(dag)
diff --git a/tests/providers/zendesk/__init__.py b/providers/tests/system/common/__init__.py
similarity index 100%
rename from tests/providers/zendesk/__init__.py
rename to providers/tests/system/common/__init__.py
diff --git a/tests/providers/zendesk/hooks/__init__.py b/providers/tests/system/common/io/__init__.py
similarity index 100%
rename from tests/providers/zendesk/hooks/__init__.py
rename to providers/tests/system/common/io/__init__.py
diff --git a/tests/system/providers/common/io/example_file_transfer_local_to_s3.py b/providers/tests/system/common/io/example_file_transfer_local_to_s3.py
similarity index 94%
rename from tests/system/providers/common/io/example_file_transfer_local_to_s3.py
rename to providers/tests/system/common/io/example_file_transfer_local_to_s3.py
index 13c495e62097..0b90a75f2e6e 100644
--- a/tests/system/providers/common/io/example_file_transfer_local_to_s3.py
+++ b/providers/tests/system/common/io/example_file_transfer_local_to_s3.py
@@ -76,12 +76,12 @@ def remove_bucket():
     temp_file >> transfer >> remove_bucket() >> delete_temp_file(temp_file_path)

-    from tests.system.utils.watcher import watcher
+    from dev.tests_common.test_utils.watcher import watcher

     list(dag.tasks) >> watcher()

-from tests.system.utils import get_test_run  # noqa: E402
+from dev.tests_common.test_utils.system_tests import get_test_run  # noqa: E402

 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest)
 test_run = get_test_run(dag)
diff --git a/tests/system/providers/airbyte/__init__.py b/providers/tests/system/common/sql/__init__.py
similarity index 100%
rename from tests/system/providers/airbyte/__init__.py
rename to providers/tests/system/common/sql/__init__.py
diff --git a/tests/system/providers/common/sql/example_sql_column_table_check.py b/providers/tests/system/common/sql/example_sql_column_table_check.py
similarity index 97%
rename from tests/system/providers/common/sql/example_sql_column_table_check.py
rename to providers/tests/system/common/sql/example_sql_column_table_check.py
index 2790d092a2b4..c48dc60bfc63 100644
--- a/tests/system/providers/common/sql/example_sql_column_table_check.py
+++ b/providers/tests/system/common/sql/example_sql_column_table_check.py
@@ -79,7 +79,7 @@
     column_check >> row_count_check

-from tests.system.utils import get_test_run  # noqa: E402
+from dev.tests_common.test_utils.system_tests import get_test_run  # noqa: E402

 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest)
 test_run = get_test_run(dag)
diff --git a/tests/system/providers/common/sql/example_sql_execute_query.py b/providers/tests/system/common/sql/example_sql_execute_query.py
similarity index 96%
rename from tests/system/providers/common/sql/example_sql_execute_query.py
rename to providers/tests/system/common/sql/example_sql_execute_query.py
index 694ca3f2ef99..8dced19cef71 100644
--- a/tests/system/providers/common/sql/example_sql_execute_query.py
+++ b/providers/tests/system/common/sql/example_sql_execute_query.py
@@ -56,7 +56,7 @@
     # [END howto_operator_sql_execute_query]

-from tests.system.utils import get_test_run  # noqa: E402
+from dev.tests_common.test_utils.system_tests import get_test_run  # noqa: E402

 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest)
 test_run = get_test_run(dag)
diff --git a/tests/system/providers/alibaba/__init__.py b/providers/tests/system/databricks/__init__.py
similarity index 100%
rename from tests/system/providers/alibaba/__init__.py
rename to providers/tests/system/databricks/__init__.py
diff --git a/tests/system/providers/databricks/example_databricks.py b/providers/tests/system/databricks/example_databricks.py
similarity index 98%
rename from tests/system/providers/databricks/example_databricks.py
rename to providers/tests/system/databricks/example_databricks.py
index 82c5d313421a..00d9969bd99b 100644
--- a/tests/system/providers/databricks/example_databricks.py
+++ b/providers/tests/system/databricks/example_databricks.py
@@ -238,13 +238,13 @@
     )
     # [END howto_operator_databricks_task_sql]

-    from tests.system.utils.watcher import watcher
+    from dev.tests_common.test_utils.watcher import watcher

     # This test needs watcher in order to properly mark success/failure
     # when "tearDown" task with trigger rule is part of the DAG
     list(dag.tasks) >> watcher()

-from tests.system.utils import get_test_run  # noqa: E402
+from dev.tests_common.test_utils.system_tests import get_test_run  # noqa: E402

 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest)
 test_run = get_test_run(dag)
diff --git a/tests/system/providers/databricks/example_databricks_repos.py b/providers/tests/system/databricks/example_databricks_repos.py
similarity index 95%
rename from tests/system/providers/databricks/example_databricks_repos.py
rename to providers/tests/system/databricks/example_databricks_repos.py
index dd583c67b9c9..d07226e76ed0 100644
--- a/tests/system/providers/databricks/example_databricks_repos.py
+++ b/providers/tests/system/databricks/example_databricks_repos.py
@@ -78,13 +78,13 @@
     (create_repo >> update_repo >> notebook_task >> delete_repo)

-    from tests.system.utils.watcher import watcher
+    from dev.tests_common.test_utils.watcher import watcher

     # This test needs watcher in order to properly mark success/failure
     # when "tearDown" task with trigger rule is part of the DAG
     list(dag.tasks) >> watcher()

-from tests.system.utils import get_test_run  # noqa: E402
+from dev.tests_common.test_utils.system_tests import get_test_run  # noqa: E402

 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest)
 test_run = get_test_run(dag)
diff --git a/tests/system/providers/databricks/example_databricks_sensors.py b/providers/tests/system/databricks/example_databricks_sensors.py
similarity index 96%
rename from tests/system/providers/databricks/example_databricks_sensors.py
rename to providers/tests/system/databricks/example_databricks_sensors.py
index fd572a6bd905..ea233a79395b 100644
--- a/tests/system/providers/databricks/example_databricks_sensors.py
+++ b/providers/tests/system/databricks/example_databricks_sensors.py
@@ -88,7 +88,7 @@
     # runs, else all the subsequent DAG tasks and the DAG are marked as failed.
     (sql_sensor >> partition_sensor)

-    from tests.system.utils.watcher import watcher
+    from dev.tests_common.test_utils.watcher import watcher

     # This example does not need a watcher in order to properly mark success/failure
     # since it is a single task, but it is given here as an example for users to
@@ -96,7 +96,7 @@
     # when "tearDown" task with trigger rule is part of the DAG
     list(dag.tasks) >> watcher()

-from tests.system.utils import get_test_run  # noqa: E402
+from dev.tests_common.test_utils.system_tests import get_test_run  # noqa: E402

 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest)
 test_run = get_test_run(dag)
diff --git a/tests/system/providers/databricks/example_databricks_sql.py b/providers/tests/system/databricks/example_databricks_sql.py
similarity index 97%
rename from tests/system/providers/databricks/example_databricks_sql.py
rename to providers/tests/system/databricks/example_databricks_sql.py
index 3f7ed0858c82..f08ce3cd56b6 100644
--- a/tests/system/providers/databricks/example_databricks_sql.py
+++ b/providers/tests/system/databricks/example_databricks_sql.py
@@ -113,13 +113,13 @@
     (create >> create_file >> import_csv >> select >> select_into_file)

-    from tests.system.utils.watcher import watcher
+    from dev.tests_common.test_utils.watcher import watcher

     # This test needs watcher in order to properly mark success/failure
     # when "tearDown" task with trigger rule is part of the DAG
     list(dag.tasks) >> watcher()

-from tests.system.utils import get_test_run  # noqa: E402
+from dev.tests_common.test_utils.system_tests import get_test_run  # noqa: E402

 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest)
 test_run = get_test_run(dag)
diff --git a/tests/system/providers/databricks/example_databricks_workflow.py b/providers/tests/system/databricks/example_databricks_workflow.py
similarity index 97%
rename from tests/system/providers/databricks/example_databricks_workflow.py
rename to providers/tests/system/databricks/example_databricks_workflow.py
index 1cfd81e9c7ec..3a6c829520d8 100644
--- a/tests/system/providers/databricks/example_databricks_workflow.py
+++ b/providers/tests/system/databricks/example_databricks_workflow.py
@@ -143,13 +143,13 @@
     notebook_1 >> notebook_2 >> task_operator_nb_1 >> sql_query
     # [END howto_databricks_workflow_notebook]

-    from tests.system.utils.watcher import watcher
+    from dev.tests_common.test_utils.watcher import watcher

     # This test needs watcher in order to properly mark success/failure
     # when "tearDown" task with trigger rule is part of the DAG
     list(dag.tasks) >> watcher()

-from tests.system.utils import get_test_run  # noqa: E402
+from dev.tests_common.test_utils.system_tests import get_test_run  # noqa: E402

 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest)
 test_run = get_test_run(dag)
diff --git a/tests/system/providers/amazon/aws/tests/__init__.py b/providers/tests/system/dbt/__init__.py
similarity index 100%
rename from tests/system/providers/amazon/aws/tests/__init__.py
rename to providers/tests/system/dbt/__init__.py
diff --git a/tests/system/providers/apache/cassandra/__init__.py b/providers/tests/system/dbt/cloud/__init__.py
similarity index 100%
rename from tests/system/providers/apache/cassandra/__init__.py
rename to providers/tests/system/dbt/cloud/__init__.py
diff --git a/tests/system/providers/dbt/cloud/example_dbt_cloud.py b/providers/tests/system/dbt/cloud/example_dbt_cloud.py
similarity index 94%
rename from tests/system/providers/dbt/cloud/example_dbt_cloud.py
rename to providers/tests/system/dbt/cloud/example_dbt_cloud.py
index 897abb293b5d..17f8a59efea0 100644
--- a/tests/system/providers/dbt/cloud/example_dbt_cloud.py
+++ b/providers/tests/system/dbt/cloud/example_dbt_cloud.py
@@ -27,7 +27,8 @@
 )
 from airflow.providers.dbt.cloud.sensors.dbt import DbtCloudJobRunSensor
 from airflow.utils.edgemodifier import Label
-from tests.system.utils import get_test_env_id
+
+from dev.tests_common.test_utils.system_tests import get_test_env_id

 ENV_ID = get_test_env_id()
 DAG_ID = "example_dbt_cloud"
@@ -91,13 +92,13 @@
     # trigger_job_run2 >> job_run_sensor
     # trigger_job_run2 >> job_run_sensor_deferred

-    from tests.system.utils.watcher import watcher
+    from dev.tests_common.test_utils.watcher import watcher

     # This test needs watcher in order to properly mark success/failure
     # when "tearDown" task with trigger rule is part of the DAG
     list(dag.tasks) >> watcher()

-from tests.system.utils import get_test_run  # noqa: E402
+from dev.tests_common.test_utils.system_tests import get_test_run  # noqa: E402

 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest)
 test_run = get_test_run(dag)
diff --git a/tests/system/providers/apache/drill/__init__.py b/providers/tests/system/dingding/__init__.py
similarity index 100%
rename from tests/system/providers/apache/drill/__init__.py
rename to providers/tests/system/dingding/__init__.py
diff --git a/tests/system/providers/dingding/example_dingding.py b/providers/tests/system/dingding/example_dingding.py
similarity index 98%
rename from tests/system/providers/dingding/example_dingding.py
rename to providers/tests/system/dingding/example_dingding.py
index 240a732805d3..4ecb41c68cc9 100644
--- a/tests/system/providers/dingding/example_dingding.py
+++ b/providers/tests/system/dingding/example_dingding.py
@@ -199,13 +199,13 @@ def failure_callback(context):
         >> msg_failure_callback
     )

-    from tests.system.utils.watcher import watcher
+    from dev.tests_common.test_utils.watcher import watcher

     # This test needs watcher in order to properly mark success/failure
     # when "tearDown" task with trigger rule is part of the DAG
     list(dag.tasks) >> watcher()

-from tests.system.utils import get_test_run  # noqa: E402
+from dev.tests_common.test_utils.system_tests import get_test_run  # noqa: E402

 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest)
 test_run = get_test_run(dag)
diff --git a/tests/system/providers/apache/druid/__init__.py b/providers/tests/system/docker/__init__.py
similarity index 100%
rename from tests/system/providers/apache/druid/__init__.py
rename to providers/tests/system/docker/__init__.py
diff --git a/tests/system/providers/docker/example_docker.py b/providers/tests/system/docker/example_docker.py
similarity index 96%
rename from tests/system/providers/docker/example_docker.py
rename to providers/tests/system/docker/example_docker.py
index 18f7d2f0ea0c..108813236ac6 100644
--- a/tests/system/providers/docker/example_docker.py
+++ b/providers/tests/system/docker/example_docker.py
@@ -57,7 +57,7 @@
         t1 >> [t2, t3] >> t4
     )

-from tests.system.utils import get_test_run  # noqa: E402
+from dev.tests_common.test_utils.system_tests import get_test_run  # noqa: E402

 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest)
 test_run = get_test_run(dag)
diff --git a/tests/system/providers/docker/example_docker_copy_data.py b/providers/tests/system/docker/example_docker_copy_data.py
similarity index 97%
rename from tests/system/providers/docker/example_docker_copy_data.py
rename to providers/tests/system/docker/example_docker_copy_data.py
index 4e4e8466e501..c270408390cf 100644
--- a/tests/system/providers/docker/example_docker_copy_data.py
+++ b/providers/tests/system/docker/example_docker_copy_data.py
@@ -103,7 +103,7 @@
         t_is_data_available >> t_move >> t_print
     )

-from tests.system.utils import get_test_run  # noqa: E402
+from dev.tests_common.test_utils.system_tests import get_test_run  # noqa: E402

 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest)
 test_run = get_test_run(dag)
diff --git a/tests/system/providers/docker/example_docker_swarm.py b/providers/tests/system/docker/example_docker_swarm.py
similarity index 95%
rename from tests/system/providers/docker/example_docker_swarm.py
rename to providers/tests/system/docker/example_docker_swarm.py
index 6546c7d434ac..3a68dcea8167 100644
--- a/tests/system/providers/docker/example_docker_swarm.py
+++ b/providers/tests/system/docker/example_docker_swarm.py
@@ -47,7 +47,7 @@
         t1
     )

-from tests.system.utils import get_test_run  # noqa: E402
+from dev.tests_common.test_utils.system_tests import get_test_run  # noqa: E402

 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest)
 test_run = get_test_run(dag)
diff --git a/tests/system/providers/docker/example_taskflow_api_docker_virtualenv.py b/providers/tests/system/docker/example_taskflow_api_docker_virtualenv.py
similarity index 97%
rename from tests/system/providers/docker/example_taskflow_api_docker_virtualenv.py
rename to providers/tests/system/docker/example_taskflow_api_docker_virtualenv.py
index 95c502a22107..3c8b528606e0 100644
--- a/tests/system/providers/docker/example_taskflow_api_docker_virtualenv.py
+++ b/providers/tests/system/docker/example_taskflow_api_docker_virtualenv.py
@@ -116,7 +116,7 @@ def load(total_order_value: float):
 tutorial_dag = tutorial_taskflow_api_docker_virtualenv()
 # [END dag_invocation]

-from tests.system.utils import get_test_run  # noqa: E402
+from dev.tests_common.test_utils.system_tests import get_test_run  # noqa: E402

 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest)
 test_run = get_test_run(dag)
diff --git a/tests/system/providers/apache/kafka/__init__.py b/providers/tests/system/elasticsearch/__init__.py
similarity index 100%
rename from tests/system/providers/apache/kafka/__init__.py
rename to providers/tests/system/elasticsearch/__init__.py
diff --git a/tests/system/providers/elasticsearch/example_elasticsearch_query.py b/providers/tests/system/elasticsearch/example_elasticsearch_query.py
similarity index 97%
rename from tests/system/providers/elasticsearch/example_elasticsearch_query.py
rename to providers/tests/system/elasticsearch/example_elasticsearch_query.py
index 31e9440e6cbe..93471d874c3c 100644
--- a/tests/system/providers/elasticsearch/example_elasticsearch_query.py
+++ b/providers/tests/system/elasticsearch/example_elasticsearch_query.py
@@ -81,7 +81,7 @@ def use_elasticsearch_hook():
         task_id="print_data_from_elasticsearch", python_callable=use_elasticsearch_hook
     )

-from tests.system.utils import get_test_run  # noqa: E402
+from dev.tests_common.test_utils.system_tests import get_test_run  # noqa: E402

 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest)
 test_run = get_test_run(dag)
diff --git a/tests/system/providers/apache/kylin/__init__.py b/providers/tests/system/ftp/__init__.py
similarity index 100%
rename from tests/system/providers/apache/kylin/__init__.py
rename to providers/tests/system/ftp/__init__.py
diff --git a/tests/system/providers/ftp/example_ftp.py b/providers/tests/system/ftp/example_ftp.py
similarity index 95%
rename from tests/system/providers/ftp/example_ftp.py
rename to providers/tests/system/ftp/example_ftp.py
index bdc9399e4688..8fb2e38806ae 100644
--- a/tests/system/providers/ftp/example_ftp.py
+++ b/providers/tests/system/ftp/example_ftp.py
@@ -87,13 +87,13 @@
     ftp_put >> ftp_get
     ftps_put >> ftps_get

-    from tests.system.utils.watcher import watcher
+    from dev.tests_common.test_utils.watcher import watcher

     # This test needs watcher in order to properly mark success/failure
     # when "tearDown" task with trigger rule is part of the DAG
     list(dag.tasks) >> watcher()

-from tests.system.utils import get_test_run  # noqa: E402
+from dev.tests_common.test_utils.system_tests import get_test_run  # noqa: E402

 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest)
 test_run = get_test_run(dag)
diff --git a/tests/system/providers/apache/livy/__init__.py b/providers/tests/system/github/__init__.py
similarity index 100%
rename from tests/system/providers/apache/livy/__init__.py
rename to providers/tests/system/github/__init__.py
diff --git a/tests/system/providers/github/example_github.py b/providers/tests/system/github/example_github.py
similarity index 97%
rename from tests/system/providers/github/example_github.py
rename to providers/tests/system/github/example_github.py
index 81a458021aa6..70eb8bf902f0 100644
--- a/tests/system/providers/github/example_github.py
+++ b/providers/tests/system/github/example_github.py
@@ -100,7 +100,7 @@ def tag_checker(repo: Any, tag_name: str) -> bool | None:
     # [END howto_operator_list_tags_github]

-from tests.system.utils import get_test_run  # noqa: E402
+from dev.tests_common.test_utils.system_tests import get_test_run  # noqa: E402

 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest)
 test_run = get_test_run(dag)
diff --git a/tests/system/providers/google/README.md b/providers/tests/system/google/README.md
similarity index 100%
rename from tests/system/providers/google/README.md
rename to providers/tests/system/google/README.md
diff --git a/tests/system/providers/google/__init__.py b/providers/tests/system/google/__init__.py
similarity index 100%
rename from tests/system/providers/google/__init__.py
rename to providers/tests/system/google/__init__.py
diff --git a/tests/system/providers/apache/pig/__init__.py b/providers/tests/system/google/ads/__init__.py
similarity index 100%
rename from tests/system/providers/apache/pig/__init__.py
rename to providers/tests/system/google/ads/__init__.py
diff --git a/tests/system/providers/google/ads/example_ads.py b/providers/tests/system/google/ads/example_ads.py
similarity index 95%
rename from tests/system/providers/google/ads/example_ads.py
rename to providers/tests/system/google/ads/example_ads.py
index 0f4e6d3a3f4a..162fd45334f6 100644
--- a/tests/system/providers/google/ads/example_ads.py
+++ b/providers/tests/system/google/ads/example_ads.py
@@ -29,7 +29,8 @@
 from airflow.providers.google.ads.transfers.ads_to_gcs import GoogleAdsToGcsOperator
 from airflow.providers.google.cloud.operators.gcs import GCSCreateBucketOperator, GCSDeleteBucketOperator
 from airflow.utils.trigger_rule import TriggerRule
-from tests.system.providers.google import DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID
+
+from providers.tests.system.google import DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID

 # [START howto_google_ads_env_variables]
 ENV_ID = os.environ.get("SYSTEM_TESTS_ENV_ID", "default")
@@ -116,14 +117,14 @@
         >> delete_bucket
     )

-    from tests.system.utils.watcher import watcher
+    from dev.tests_common.test_utils.watcher import watcher

     # This test needs watcher in order to properly mark success/failure
     # when "tearDown" task with trigger rule is part of the DAG
     list(dag.tasks) >> watcher()

-from tests.system.utils import get_test_run  # noqa: E402
+from dev.tests_common.test_utils.system_tests import get_test_run  # noqa: E402

 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest)
 test_run = get_test_run(dag)
diff --git a/tests/system/providers/apache/pinot/__init__.py b/providers/tests/system/google/cloud/__init__.py
similarity index 100%
rename from tests/system/providers/apache/pinot/__init__.py
rename to providers/tests/system/google/cloud/__init__.py
diff --git a/tests/system/providers/apache/spark/__init__.py b/providers/tests/system/google/cloud/automl/__init__.py
similarity index 100%
rename from tests/system/providers/apache/spark/__init__.py
rename to providers/tests/system/google/cloud/automl/__init__.py
diff --git a/tests/system/providers/google/cloud/automl/example_automl_dataset.py b/providers/tests/system/google/cloud/automl/example_automl_dataset.py
similarity index 97%
rename from tests/system/providers/google/cloud/automl/example_automl_dataset.py
rename to providers/tests/system/google/cloud/automl/example_automl_dataset.py
index f90caf32df0c..49de111ffb35 100644
--- a/tests/system/providers/google/cloud/automl/example_automl_dataset.py
+++ b/providers/tests/system/google/cloud/automl/example_automl_dataset.py
@@ -162,14 +162,14 @@ def upload_updated_csv_file_to_gcs():
         >> delete_bucket
     )

-    from tests.system.utils.watcher import watcher
+    from dev.tests_common.test_utils.watcher import watcher

     # This test needs watcher in order to properly mark success/failure
     # when "tearDown" task with trigger rule is part of the DAG
     list(dag.tasks) >> watcher()

-from tests.system.utils import get_test_run  # noqa: E402
+from dev.tests_common.test_utils.system_tests import get_test_run  # noqa: E402

 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest)
 test_run = get_test_run(dag)
diff --git a/tests/system/providers/google/cloud/automl/example_automl_translation.py b/providers/tests/system/google/cloud/automl/example_automl_translation.py
similarity index 97%
rename from tests/system/providers/google/cloud/automl/example_automl_translation.py
rename to providers/tests/system/google/cloud/automl/example_automl_translation.py
index cda70693fb57..60dbf782f281 100644
--- a/tests/system/providers/google/cloud/automl/example_automl_translation.py
+++ b/providers/tests/system/google/cloud/automl/example_automl_translation.py
@@ -188,14 +188,14 @@ def upload_csv_file_to_gcs():
         >> delete_bucket
     )

-    from tests.system.utils.watcher import watcher
+    from dev.tests_common.test_utils.watcher import watcher

     # This test needs watcher in order to properly mark success/failure
     # when "tearDown" task with trigger rule is part of the DAG
     list(dag.tasks) >> watcher()

-from tests.system.utils import get_test_run  # noqa: E402
+from dev.tests_common.test_utils.system_tests import get_test_run  # noqa: E402

 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest)
 test_run = get_test_run(dag)
diff --git a/tests/system/providers/google/cloud/automl/example_automl_video_classification.py b/providers/tests/system/google/cloud/automl/example_automl_video_classification.py
similarity index 97%
rename from tests/system/providers/google/cloud/automl/example_automl_video_classification.py
rename to providers/tests/system/google/cloud/automl/example_automl_video_classification.py
index 21e4c4623768..36853c382603 100644
--- a/tests/system/providers/google/cloud/automl/example_automl_video_classification.py
+++ b/providers/tests/system/google/cloud/automl/example_automl_video_classification.py
@@ -158,13 +158,13 @@
         >> delete_bucket
     )

-    from tests.system.utils.watcher import watcher
+    from dev.tests_common.test_utils.watcher import watcher

     # This test needs watcher in order to properly mark success/failure
     # when "tearDown" task with trigger rule is part of the DAG
     list(dag.tasks) >> watcher()

-from tests.system.utils import get_test_run  # noqa: E402
+from dev.tests_common.test_utils.system_tests import get_test_run  # noqa: E402

 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest)
 test_run = get_test_run(dag)
diff --git a/tests/system/providers/google/cloud/automl/example_automl_video_tracking.py b/providers/tests/system/google/cloud/automl/example_automl_video_tracking.py
similarity index 97%
rename from tests/system/providers/google/cloud/automl/example_automl_video_tracking.py
rename to providers/tests/system/google/cloud/automl/example_automl_video_tracking.py
index 36133b3674a8..65718f10b769 100644
--- a/tests/system/providers/google/cloud/automl/example_automl_video_tracking.py
+++ b/providers/tests/system/google/cloud/automl/example_automl_video_tracking.py
@@ -158,13 +158,13 @@
         >> delete_bucket
     )

-    from tests.system.utils.watcher import watcher
+    from dev.tests_common.test_utils.watcher import watcher

     # This test needs watcher in order to properly mark success/failure
     # when "tearDown" task with trigger rule is part of the DAG
     list(dag.tasks) >> watcher()

-from tests.system.utils import get_test_run  # noqa: E402
+from dev.tests_common.test_utils.system_tests import get_test_run  # noqa: E402

 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest)
 test_run = get_test_run(dag)
diff --git a/tests/system/providers/google/cloud/automl/example_automl_vision_classification.py b/providers/tests/system/google/cloud/automl/example_automl_vision_classification.py
similarity index 97%
rename from tests/system/providers/google/cloud/automl/example_automl_vision_classification.py
rename to providers/tests/system/google/cloud/automl/example_automl_vision_classification.py
index 2e21f252d818..40e68d3291ce 100644
--- a/tests/system/providers/google/cloud/automl/example_automl_vision_classification.py
+++ b/providers/tests/system/google/cloud/automl/example_automl_vision_classification.py
@@ -132,13 +132,13 @@
         >> delete_image_dataset
     )

-    from tests.system.utils.watcher import watcher
+    from dev.tests_common.test_utils.watcher import watcher

     # This test needs watcher in order to properly mark success/failure
     # when "tearDown" task with trigger rule is part of the DAG
     list(dag.tasks) >> watcher()

-from tests.system.utils import get_test_run  # noqa: E402
+from dev.tests_common.test_utils.system_tests import get_test_run  # noqa: E402

 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest)
 test_run = get_test_run(dag)
diff --git a/tests/system/providers/google/cloud/automl/example_automl_vision_object_detection.py b/providers/tests/system/google/cloud/automl/example_automl_vision_object_detection.py
similarity index 97%
rename from tests/system/providers/google/cloud/automl/example_automl_vision_object_detection.py
rename to providers/tests/system/google/cloud/automl/example_automl_vision_object_detection.py
index 2f92e81d9aaf..334e154cea4d 100644
--- a/tests/system/providers/google/cloud/automl/example_automl_vision_object_detection.py
+++ b/providers/tests/system/google/cloud/automl/example_automl_vision_object_detection.py
@@ -134,13 +134,13 @@
         >> delete_image_dataset
     )

-    from tests.system.utils.watcher import watcher
+    from dev.tests_common.test_utils.watcher import watcher

     # This test needs watcher in order to properly mark success/failure
     # when "tearDown" task with trigger rule is part of the DAG
     list(dag.tasks) >> watcher()

-from tests.system.utils import get_test_run  # noqa: E402
+from dev.tests_common.test_utils.system_tests import get_test_run  # noqa: E402

 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest)
 test_run = get_test_run(dag)
diff --git a/tests/system/providers/asana/__init__.py b/providers/tests/system/google/cloud/automl/resources/__init__.py
similarity index 100%
rename from tests/system/providers/asana/__init__.py
rename to providers/tests/system/google/cloud/automl/resources/__init__.py
diff --git a/tests/system/providers/cncf/__init__.py b/providers/tests/system/google/cloud/azure/__init__.py
similarity index 100%
rename from tests/system/providers/cncf/__init__.py
rename to providers/tests/system/google/cloud/azure/__init__.py
diff --git a/tests/system/providers/google/cloud/azure/example_azure_blob_to_gcs.py b/providers/tests/system/google/cloud/azure/example_azure_blob_to_gcs.py
similarity index 94%
rename from tests/system/providers/google/cloud/azure/example_azure_blob_to_gcs.py
rename to providers/tests/system/google/cloud/azure/example_azure_blob_to_gcs.py
index 265b9e0a4ab8..837cb99e5ffc 100644
--- a/tests/system/providers/google/cloud/azure/example_azure_blob_to_gcs.py
+++ b/providers/tests/system/google/cloud/azure/example_azure_blob_to_gcs.py
@@ -59,13 +59,13 @@
     (wait_for_blob >> transfer_files_to_gcs)

-    from tests.system.utils.watcher import watcher
+    from dev.tests_common.test_utils.watcher import watcher

     # This test needs watcher in order to properly mark success/failure
     # when "tearDown" task with trigger rule is part of the DAG
     list(dag.tasks) >> watcher()

-from tests.system.utils import get_test_run  # noqa: E402
+from dev.tests_common.test_utils.system_tests import get_test_run  # noqa: E402

 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest)
 test_run = get_test_run(dag)
diff --git a/tests/system/providers/google/cloud/azure/example_azure_fileshare_to_gcs.py b/providers/tests/system/google/cloud/azure/example_azure_fileshare_to_gcs.py
similarity index 93%
rename from tests/system/providers/google/cloud/azure/example_azure_fileshare_to_gcs.py
rename to providers/tests/system/google/cloud/azure/example_azure_fileshare_to_gcs.py
index 80eaadd6c6d6..a897b6dc7d3d 100644
--- a/tests/system/providers/google/cloud/azure/example_azure_fileshare_to_gcs.py
+++ b/providers/tests/system/google/cloud/azure/example_azure_fileshare_to_gcs.py
@@ -23,7 +23,8 @@
 from airflow.providers.google.cloud.operators.gcs import GCSCreateBucketOperator, GCSDeleteBucketOperator
 from airflow.providers.google.cloud.transfers.azure_fileshare_to_gcs import AzureFileShareToGCSOperator
 from airflow.utils.trigger_rule import TriggerRule
-from tests.system.providers.google import DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID
+
+from providers.tests.system.google import DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID

 ENV_ID = os.environ.get("SYSTEM_TESTS_ENV_ID", "default")
 PROJECT_ID = os.environ.get("SYSTEM_TESTS_GCP_PROJECT") or DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID
@@ -80,14 +81,14 @@
         >> delete_bucket
     )

-    from tests.system.utils.watcher import watcher
+    from dev.tests_common.test_utils.watcher import watcher

     # This test needs watcher in order to properly mark success/failure
     # when "tearDown" task with trigger rule is part of the DAG
     list(dag.tasks) >> watcher()

-from tests.system.utils import get_test_run  # noqa: E402
+from dev.tests_common.test_utils.system_tests import get_test_run  # noqa: E402

 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest)
 test_run = get_test_run(dag)
diff --git a/tests/system/providers/cncf/kubernetes/__init__.py b/providers/tests/system/google/cloud/bigquery/__init__.py
similarity index 100%
rename from tests/system/providers/cncf/kubernetes/__init__.py
rename to providers/tests/system/google/cloud/bigquery/__init__.py
diff --git a/tests/system/providers/google/cloud/bigquery/example_bigquery_dataset.py b/providers/tests/system/google/cloud/bigquery/example_bigquery_dataset.py
similarity index 95%
rename from tests/system/providers/google/cloud/bigquery/example_bigquery_dataset.py
rename to providers/tests/system/google/cloud/bigquery/example_bigquery_dataset.py
index 004f996975be..87e13a2c2516 100644
--- a/tests/system/providers/google/cloud/bigquery/example_bigquery_dataset.py
+++ b/providers/tests/system/google/cloud/bigquery/example_bigquery_dataset.py
@@ -85,14 +85,14 @@
         >> delete_dataset
     )

-    from tests.system.utils.watcher import watcher
+    from dev.tests_common.test_utils.watcher import watcher

     # This test needs watcher in order to properly mark success/failure
     # when "tearDown" task with trigger rule is part of the DAG
     list(dag.tasks) >> watcher()

-from tests.system.utils import get_test_run  # noqa: E402
+from dev.tests_common.test_utils.system_tests import get_test_run  # noqa: E402

 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest)
 test_run = get_test_run(dag)
diff --git a/tests/system/providers/google/cloud/bigquery/example_bigquery_dts.py b/providers/tests/system/google/cloud/bigquery/example_bigquery_dts.py
similarity index 97%
rename from tests/system/providers/google/cloud/bigquery/example_bigquery_dts.py
rename to providers/tests/system/google/cloud/bigquery/example_bigquery_dts.py
index 3f75d4393513..b387563069a9 100644
--- a/tests/system/providers/google/cloud/bigquery/example_bigquery_dts.py
+++ b/providers/tests/system/google/cloud/bigquery/example_bigquery_dts.py
@@ -44,7 +44,8 @@
 from airflow.providers.google.cloud.sensors.bigquery_dts import BigQueryDataTransferServiceTransferRunSensor
 from airflow.providers.google.cloud.transfers.local_to_gcs import LocalFilesystemToGCSOperator
 from airflow.utils.trigger_rule import TriggerRule
-from tests.system.providers.google import DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID
+
+from providers.tests.system.google import DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID

 ENV_ID = os.environ.get("SYSTEM_TESTS_ENV_ID", "default")
 PROJECT_ID = os.environ.get("SYSTEM_TESTS_GCP_PROJECT") or DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID
@@ -179,14 +180,14 @@
         delete_bucket,
     )

-    from tests.system.utils.watcher import watcher
+    from dev.tests_common.test_utils.watcher import watcher

     # This test needs watcher in order to properly mark success/failure
     # when "tearDown" task with trigger rule is part of the DAG
     list(dag.tasks) >> watcher()

-from tests.system.utils import get_test_run  # noqa: E402
+from dev.tests_common.test_utils.system_tests import get_test_run  # noqa: E402

 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest)
 test_run = get_test_run(dag)
diff --git a/tests/system/providers/google/cloud/bigquery/example_bigquery_operations.py b/providers/tests/system/google/cloud/bigquery/example_bigquery_operations.py
similarity index 96%
rename from tests/system/providers/google/cloud/bigquery/example_bigquery_operations.py
rename to providers/tests/system/google/cloud/bigquery/example_bigquery_operations.py
index a394c882bede..5e487f5a93e2 100644
--- a/tests/system/providers/google/cloud/bigquery/example_bigquery_operations.py
+++ b/providers/tests/system/google/cloud/bigquery/example_bigquery_operations.py
@@ -97,14 +97,14 @@
         >> delete_bucket
     )

-    from tests.system.utils.watcher import watcher
+    from dev.tests_common.test_utils.watcher import watcher

     # This test needs watcher in order to properly mark success/failure
     # when "tearDown" task with trigger rule is part of the DAG
     list(dag.tasks) >> watcher()

-from tests.system.utils import get_test_run  # noqa: E402
+from dev.tests_common.test_utils.system_tests import get_test_run  # noqa: E402

 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest)
 test_run = get_test_run(dag)
diff --git a/tests/system/providers/google/cloud/bigquery/example_bigquery_operations_location.py b/providers/tests/system/google/cloud/bigquery/example_bigquery_operations_location.py
similarity index 95%
rename from tests/system/providers/google/cloud/bigquery/example_bigquery_operations_location.py
rename to providers/tests/system/google/cloud/bigquery/example_bigquery_operations_location.py
index ac5b4dd967b3..2b026070a06b 100644
--- a/tests/system/providers/google/cloud/bigquery/example_bigquery_operations_location.py
+++ b/providers/tests/system/google/cloud/bigquery/example_bigquery_operations_location.py
@@ -77,14 +77,14 @@
         >> delete_dataset_with_location
     )

-    from tests.system.utils.watcher import watcher
+    from dev.tests_common.test_utils.watcher import watcher

     # This test needs watcher in order to properly mark success/failure
     # when "tearDown" task with trigger rule is part of the DAG
     list(dag.tasks) >> watcher()

-from tests.system.utils import get_test_run  # noqa: E402
+from dev.tests_common.test_utils.system_tests import get_test_run  # noqa: E402

 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest)
 test_run = get_test_run(dag)
diff --git a/tests/system/providers/google/cloud/bigquery/example_bigquery_queries.py b/providers/tests/system/google/cloud/bigquery/example_bigquery_queries.py
similarity index 97%
rename from tests/system/providers/google/cloud/bigquery/example_bigquery_queries.py
rename to providers/tests/system/google/cloud/bigquery/example_bigquery_queries.py
index ab7a4b3757b9..cd31ea05bcdc 100644
--- a/tests/system/providers/google/cloud/bigquery/example_bigquery_queries.py
+++ b/providers/tests/system/google/cloud/bigquery/example_bigquery_queries.py
@@ -39,7 +39,8 @@
 )
 from airflow.providers.standard.operators.bash import BashOperator
 from airflow.utils.trigger_rule import TriggerRule
-from tests.system.providers.google import DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID
+
+from providers.tests.system.google import DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID

 ENV_ID = os.environ.get("SYSTEM_TESTS_ENV_ID", "default")
 PROJECT_ID = os.environ.get("SYSTEM_TESTS_GCP_PROJECT") or DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID
@@ -246,7 +247,7 @@
         execute_insert_query >> [check_count, check_value, check_interval] >> delete_dataset
         execute_insert_query >> [column_check, table_check] >> delete_dataset

-        from tests.system.utils.watcher import watcher
+        from dev.tests_common.test_utils.watcher import watcher

         # This test needs watcher in order to properly mark success/failure
         # when "tearDown" task with trigger rule is part of the DAG
@@ -256,7 +257,7 @@
     globals()[DAG_ID] = dag

 for dag in DAGS_LIST:
-    from tests.system.utils import get_test_run
+    from dev.tests_common.test_utils.system_tests import get_test_run

     # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest)
     test_run = get_test_run(dag)
diff --git a/tests/system/providers/google/cloud/bigquery/example_bigquery_queries_async.py b/providers/tests/system/google/cloud/bigquery/example_bigquery_queries_async.py
similarity index 98%
rename from tests/system/providers/google/cloud/bigquery/example_bigquery_queries_async.py
rename to providers/tests/system/google/cloud/bigquery/example_bigquery_queries_async.py
index a007e1cd639c..012eba080d1a 100644
--- a/tests/system/providers/google/cloud/bigquery/example_bigquery_queries_async.py
+++ b/providers/tests/system/google/cloud/bigquery/example_bigquery_queries_async.py
@@ -259,14 +259,14 @@
     insert_query_job >> execute_long_running_query >> check_value >> check_interval
     [check_count, check_interval, bigquery_execute_multi_query, get_data_result] >> delete_dataset

-    from tests.system.utils.watcher import watcher
+    from dev.tests_common.test_utils.watcher import watcher

     # This test needs watcher in order to properly mark success/failure
     # when "tearDown" task with trigger rule is part of the DAG
     list(dag.tasks) >> watcher()

-from tests.system.utils import get_test_run  # noqa: E402
+from dev.tests_common.test_utils.system_tests import get_test_run  # noqa: E402

 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest)
 test_run = get_test_run(dag)
diff --git a/tests/system/providers/google/cloud/bigquery/example_bigquery_sensors.py b/providers/tests/system/google/cloud/bigquery/example_bigquery_sensors.py
similarity index 97%
rename from tests/system/providers/google/cloud/bigquery/example_bigquery_sensors.py
rename to providers/tests/system/google/cloud/bigquery/example_bigquery_sensors.py
index 57cde5e9aa82..aa8d36f5ed89 100644
--- a/tests/system/providers/google/cloud/bigquery/example_bigquery_sensors.py
+++ b/providers/tests/system/google/cloud/bigquery/example_bigquery_sensors.py
@@ -165,14 +165,14 @@
         >> delete_dataset
     )

-    from tests.system.utils.watcher import watcher
+    from dev.tests_common.test_utils.watcher import watcher

     # This test needs watcher in order to properly mark success/failure
     # when "tearDown" task with trigger rule is part of the DAG
     list(dag.tasks) >> watcher()

-from tests.system.utils import get_test_run  # noqa: E402
+from dev.tests_common.test_utils.system_tests import get_test_run  # noqa: E402

 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest)
 test_run = get_test_run(dag)
diff --git a/tests/system/providers/google/cloud/bigquery/example_bigquery_tables.py b/providers/tests/system/google/cloud/bigquery/example_bigquery_tables.py
similarity index 97%
rename from tests/system/providers/google/cloud/bigquery/example_bigquery_tables.py
rename to providers/tests/system/google/cloud/bigquery/example_bigquery_tables.py
index 3539da742a2c..9c1432a0a8da 100644
--- a/tests/system/providers/google/cloud/bigquery/example_bigquery_tables.py
+++ b/providers/tests/system/google/cloud/bigquery/example_bigquery_tables.py
@@ -41,7 +41,8 @@
 from airflow.providers.google.cloud.operators.gcs import GCSCreateBucketOperator, GCSDeleteBucketOperator
 from airflow.providers.google.cloud.transfers.local_to_gcs import LocalFilesystemToGCSOperator
 from airflow.utils.trigger_rule import TriggerRule
-from tests.system.providers.google import DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID
+
+from providers.tests.system.google import DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID

 ENV_ID = os.environ.get("SYSTEM_TESTS_ENV_ID", "default")
 DAG_ID = "bigquery_tables"
@@ -223,14 +224,14 @@
         >> delete_dataset
     )

-    from tests.system.utils.watcher import watcher
+    from dev.tests_common.test_utils.watcher import watcher

     # This test needs watcher in order to properly mark success/failure
     # when "tearDown" task with trigger rule is part of the DAG
     list(dag.tasks) >> watcher()

-from tests.system.utils import get_test_run  # noqa: E402
+from dev.tests_common.test_utils.system_tests import get_test_run  # noqa: E402

 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest)
 test_run = get_test_run(dag)
diff --git a/tests/system/providers/google/cloud/bigquery/example_bigquery_to_bigquery.py b/providers/tests/system/google/cloud/bigquery/example_bigquery_to_bigquery.py
similarity index 96%
rename from tests/system/providers/google/cloud/bigquery/example_bigquery_to_bigquery.py
rename to providers/tests/system/google/cloud/bigquery/example_bigquery_to_bigquery.py
index 67ad5da80256..cb1fa63498da 100644
--- a/tests/system/providers/google/cloud/bigquery/example_bigquery_to_bigquery.py
+++ b/providers/tests/system/google/cloud/bigquery/example_bigquery_to_bigquery.py
@@ -100,14 +100,14 @@
         >> delete_dataset
     )

-    from tests.system.utils.watcher import watcher
+    from dev.tests_common.test_utils.watcher import watcher

     # This test needs watcher in order to properly mark success/failure
     # when "tearDown" task with trigger rule is part of the DAG
     list(dag.tasks) >> watcher()

-from tests.system.utils import get_test_run  # noqa: E402
+from dev.tests_common.test_utils.system_tests import get_test_run  # noqa: E402

 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest)
 test_run = get_test_run(dag)
diff --git a/tests/system/providers/google/cloud/bigquery/example_bigquery_to_gcs.py b/providers/tests/system/google/cloud/bigquery/example_bigquery_to_gcs.py
similarity index 94%
rename from tests/system/providers/google/cloud/bigquery/example_bigquery_to_gcs.py
rename to providers/tests/system/google/cloud/bigquery/example_bigquery_to_gcs.py
index 8edeb1fb9f9d..21acb27f161d 100644
--- a/tests/system/providers/google/cloud/bigquery/example_bigquery_to_gcs.py
+++ b/providers/tests/system/google/cloud/bigquery/example_bigquery_to_gcs.py
@@ -33,7 +33,8 @@
 from airflow.providers.google.cloud.operators.gcs import GCSCreateBucketOperator, GCSDeleteBucketOperator
 from airflow.providers.google.cloud.transfers.bigquery_to_gcs import BigQueryToGCSOperator
 from airflow.utils.trigger_rule import TriggerRule
-from tests.system.providers.google import DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID
+
+from providers.tests.system.google import DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID

 ENV_ID = os.environ.get("SYSTEM_TESTS_ENV_ID", "default")
 DAG_ID = "bigquery_to_gcs"
@@ -97,14 +98,14 @@
         >> [delete_bucket, delete_dataset]
     )

-    from tests.system.utils.watcher import watcher
+    from dev.tests_common.test_utils.watcher import watcher

     # This test needs watcher in order to properly mark success/failure
     # when "tearDown" task with trigger rule is part of the DAG
     list(dag.tasks) >> watcher()

-from tests.system.utils import get_test_run  # noqa: E402
+from dev.tests_common.test_utils.system_tests import get_test_run  # noqa: E402

 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest)
 test_run = get_test_run(dag)
diff --git a/tests/system/providers/google/cloud/bigquery/example_bigquery_to_gcs_async.py b/providers/tests/system/google/cloud/bigquery/example_bigquery_to_gcs_async.py
similarity index 94%
rename from tests/system/providers/google/cloud/bigquery/example_bigquery_to_gcs_async.py
rename to providers/tests/system/google/cloud/bigquery/example_bigquery_to_gcs_async.py
index c2ba4bf287d1..fdf084601c77 100644
--- a/tests/system/providers/google/cloud/bigquery/example_bigquery_to_gcs_async.py
+++ b/providers/tests/system/google/cloud/bigquery/example_bigquery_to_gcs_async.py
@@ -33,7 +33,8 @@
 from airflow.providers.google.cloud.operators.gcs import GCSCreateBucketOperator, GCSDeleteBucketOperator
 from airflow.providers.google.cloud.transfers.bigquery_to_gcs import BigQueryToGCSOperator
 from airflow.utils.trigger_rule import TriggerRule
-from tests.system.providers.google import DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID
+
+from providers.tests.system.google import DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID

 ENV_ID = os.environ.get("SYSTEM_TESTS_ENV_ID", "default")
 DAG_ID = "bigquery_to_gcs_async"
@@ -96,14 +97,14 @@
         >> [delete_bucket, delete_dataset]
     )

-    from tests.system.utils.watcher import watcher
+    from dev.tests_common.test_utils.watcher import watcher

     # This test needs watcher in order to properly mark success/failure
     # when "tearDown" task with trigger rule is part of the DAG
     list(dag.tasks) >> watcher()

-from tests.system.utils import get_test_run  # noqa: E402
+from dev.tests_common.test_utils.system_tests import get_test_run  # noqa: E402

 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest)
 test_run = get_test_run(dag)
diff --git a/tests/system/providers/google/cloud/bigquery/example_bigquery_to_mssql.py b/providers/tests/system/google/cloud/bigquery/example_bigquery_to_mssql.py
similarity index 98%
rename from tests/system/providers/google/cloud/bigquery/example_bigquery_to_mssql.py
rename to providers/tests/system/google/cloud/bigquery/example_bigquery_to_mssql.py
index e9b3269ecfb6..51e713560bc2 100644
--- a/tests/system/providers/google/cloud/bigquery/example_bigquery_to_mssql.py
+++ b/providers/tests/system/google/cloud/bigquery/example_bigquery_to_mssql.py
@@ -326,13 +326,13 @@ def delete_connection(connection_id: str) -> None:
         >> delete_persistent_disk
     )

-    from tests.system.utils.watcher import watcher
+    from dev.tests_common.test_utils.watcher import watcher

     # This test needs watcher in order to properly mark success/failure
     # when "tearDown" task with trigger rule is part of the DAG
     list(dag.tasks) >> watcher()

-from tests.system.utils import get_test_run  # noqa: E402
+from dev.tests_common.test_utils.system_tests import get_test_run  # noqa: E402

 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest)
 test_run = get_test_run(dag)
diff --git a/tests/system/providers/google/cloud/bigquery/example_bigquery_to_mysql.py b/providers/tests/system/google/cloud/bigquery/example_bigquery_to_mysql.py
similarity index 95%
rename from tests/system/providers/google/cloud/bigquery/example_bigquery_to_mysql.py
rename to providers/tests/system/google/cloud/bigquery/example_bigquery_to_mysql.py
index cacd94a42a6b..a3b726098773 100644
--- a/tests/system/providers/google/cloud/bigquery/example_bigquery_to_mysql.py
+++ b/providers/tests/system/google/cloud/bigquery/example_bigquery_to_mysql.py
@@ -88,13 +88,13 @@
         >> delete_dataset
     )

-    from tests.system.utils.watcher import watcher
+    from dev.tests_common.test_utils.watcher import watcher

     # This test needs watcher in order to properly mark success/failure
     # when "tearDown" task with trigger rule is part of the DAG
     list(dag.tasks) >> watcher()

-from tests.system.utils import get_test_run  # noqa: E402
+from dev.tests_common.test_utils.system_tests import get_test_run  # noqa: E402

 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest)
 test_run = get_test_run(dag)
diff --git a/tests/system/providers/google/cloud/bigquery/example_bigquery_to_postgres.py b/providers/tests/system/google/cloud/bigquery/example_bigquery_to_postgres.py
similarity index 98%
rename from tests/system/providers/google/cloud/bigquery/example_bigquery_to_postgres.py
rename to providers/tests/system/google/cloud/bigquery/example_bigquery_to_postgres.py
index 4a3b0386da0f..ca4e3148d0a1 100644
--- a/tests/system/providers/google/cloud/bigquery/example_bigquery_to_postgres.py
+++ b/providers/tests/system/google/cloud/bigquery/example_bigquery_to_postgres.py
@@ -362,13 +362,13 @@ def delete_connection(connection_id: str) -> None:
         >> delete_persistent_disk
     )

-    from tests.system.utils.watcher import watcher
+    from dev.tests_common.test_utils.watcher import watcher

     # This test needs watcher in order to properly mark success/failure
     # when "tearDown" task with trigger rule is part of the DAG
     list(dag.tasks) >> watcher()

-from tests.system.utils import get_test_run  # noqa: E402
+from dev.tests_common.test_utils.system_tests import get_test_run  # noqa: E402

 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest)
 test_run = get_test_run(dag)
diff --git a/tests/system/providers/google/cloud/bigquery/example_bigquery_transfer.py b/providers/tests/system/google/cloud/bigquery/example_bigquery_transfer.py
similarity index 95%
rename from tests/system/providers/google/cloud/bigquery/example_bigquery_transfer.py
rename to providers/tests/system/google/cloud/bigquery/example_bigquery_transfer.py
index e88858ef8c0b..73d55710c9db 100644
--- a/tests/system/providers/google/cloud/bigquery/example_bigquery_transfer.py
+++ b/providers/tests/system/google/cloud/bigquery/example_bigquery_transfer.py
@@ -34,7 +34,8 @@
 from airflow.providers.google.cloud.transfers.bigquery_to_bigquery import BigQueryToBigQueryOperator
 from airflow.providers.google.cloud.transfers.bigquery_to_gcs import BigQueryToGCSOperator
 from airflow.utils.trigger_rule import TriggerRule
-from tests.system.providers.google import DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID
+
+from providers.tests.system.google import DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID

 ENV_ID = os.environ.get("SYSTEM_TESTS_ENV_ID", "default")
 PROJECT_ID = os.environ.get("SYSTEM_TESTS_GCP_PROJECT") or DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID
@@ -116,13 +117,13 @@
         >> delete_bucket
     )

-    from tests.system.utils.watcher import watcher
+    from dev.tests_common.test_utils.watcher import watcher

     # This test needs watcher in order to properly mark success/failure
     # when "tearDown" task with trigger rule is part of the DAG
     list(dag.tasks) >> watcher()

-from tests.system.utils import get_test_run  # noqa: E402
+from dev.tests_common.test_utils.system_tests import get_test_run  # noqa: E402

 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest)
 test_run = get_test_run(dag)
diff --git a/tests/system/providers/google/cloud/bigquery/example_bigquery_value_check.py b/providers/tests/system/google/cloud/bigquery/example_bigquery_value_check.py
similarity index 95%
rename from tests/system/providers/google/cloud/bigquery/example_bigquery_value_check.py
rename to providers/tests/system/google/cloud/bigquery/example_bigquery_value_check.py
index 4a14ac6a810b..3872ca092321 100644
--- a/tests/system/providers/google/cloud/bigquery/example_bigquery_value_check.py
+++ b/providers/tests/system/google/cloud/bigquery/example_bigquery_value_check.py
@@ -131,8 +131,8 @@
         >> delete_dataset
     )

-    from tests.system.utils import get_test_run
-    from tests.system.utils.watcher import watcher
+    from dev.tests_common.test_utils.system_tests import get_test_run
+    from dev.tests_common.test_utils.watcher import watcher

     # This test needs watcher in order to properly mark success/failure
     # when "tearDown" task with trigger rule is part of the DAG
@@ -142,7 +142,7 @@
     test_run = get_test_run(dag)

-from tests.system.utils import get_test_run  # noqa: E402
+from dev.tests_common.test_utils.system_tests import get_test_run  # noqa: E402

 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest)
 test_run = get_test_run(dag)
diff --git a/tests/system/providers/cohere/__init__.py b/providers/tests/system/google/cloud/bigquery/resources/__init__.py
similarity index 100%
rename from tests/system/providers/cohere/__init__.py
rename to providers/tests/system/google/cloud/bigquery/resources/__init__.py
diff --git a/tests/system/providers/google/cloud/bigquery/resources/example_bigquery_query.sql b/providers/tests/system/google/cloud/bigquery/resources/example_bigquery_query.sql
similarity index 100%
rename from tests/system/providers/google/cloud/bigquery/resources/example_bigquery_query.sql
rename to providers/tests/system/google/cloud/bigquery/resources/example_bigquery_query.sql
diff --git a/tests/system/providers/google/cloud/bigquery/resources/update_table_schema.json b/providers/tests/system/google/cloud/bigquery/resources/update_table_schema.json
similarity index 100%
rename from tests/system/providers/google/cloud/bigquery/resources/update_table_schema.json
rename to providers/tests/system/google/cloud/bigquery/resources/update_table_schema.json
diff --git a/tests/system/providers/google/cloud/bigquery/resources/us-states.csv b/providers/tests/system/google/cloud/bigquery/resources/us-states.csv
similarity index 100%
rename from tests/system/providers/google/cloud/bigquery/resources/us-states.csv
rename to providers/tests/system/google/cloud/bigquery/resources/us-states.csv
diff --git a/tests/system/providers/common/__init__.py b/providers/tests/system/google/cloud/bigtable/__init__.py
similarity index 100%
rename from tests/system/providers/common/__init__.py
rename to providers/tests/system/google/cloud/bigtable/__init__.py
diff --git a/tests/system/providers/google/cloud/bigtable/example_bigtable.py b/providers/tests/system/google/cloud/bigtable/example_bigtable.py
similarity index 97%
rename from tests/system/providers/google/cloud/bigtable/example_bigtable.py
rename to providers/tests/system/google/cloud/bigtable/example_bigtable.py
index 1c690f2ddc72..77abc49fae25 100644
--- a/tests/system/providers/google/cloud/bigtable/example_bigtable.py
+++ b/providers/tests/system/google/cloud/bigtable/example_bigtable.py
@@ -61,7 +61,8 @@
 )
 from airflow.providers.google.cloud.sensors.bigtable import BigtableTableReplicationCompletedSensor
 from airflow.utils.trigger_rule import TriggerRule
-from tests.system.providers.google import DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID
+
+from providers.tests.system.google import DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID

 ENV_ID = os.environ.get("SYSTEM_TESTS_ENV_ID", "default")
 PROJECT_ID = os.environ.get("SYSTEM_TESTS_GCP_PROJECT") or DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID
@@ -224,14 +225,14 @@ def update_clusters_and_instance():
         >> [delete_instance_task, delete_instance_task2]
     )

-    from tests.system.utils.watcher import watcher
+    from dev.tests_common.test_utils.watcher import watcher

     # This test needs watcher in order to properly mark success/failure
     # when "tearDown" task with trigger rule is part of the DAG
     list(dag.tasks) >> watcher()

-from tests.system.utils import get_test_run  # noqa: E402
+from dev.tests_common.test_utils.system_tests import get_test_run  # noqa: E402

 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest)
 test_run = get_test_run(dag)
diff --git a/tests/system/providers/common/io/__init__.py b/providers/tests/system/google/cloud/cloud_batch/__init__.py
similarity index 100%
rename from tests/system/providers/common/io/__init__.py
rename to providers/tests/system/google/cloud/cloud_batch/__init__.py
diff --git a/tests/system/providers/google/cloud/cloud_batch/example_cloud_batch.py b/providers/tests/system/google/cloud/cloud_batch/example_cloud_batch.py
similarity index 96%
rename from tests/system/providers/google/cloud/cloud_batch/example_cloud_batch.py
rename to providers/tests/system/google/cloud/cloud_batch/example_cloud_batch.py
index 58d852a286bc..84dc031a5268 100644
--- a/tests/system/providers/google/cloud/cloud_batch/example_cloud_batch.py
+++ b/providers/tests/system/google/cloud/cloud_batch/example_cloud_batch.py
@@ -35,7 +35,8 @@
     CloudBatchSubmitJobOperator,
 )
 from airflow.utils.trigger_rule import TriggerRule
-from tests.system.providers.google import DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID
+
+from providers.tests.system.google import DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID

 PROJECT_ID = os.environ.get("SYSTEM_TESTS_GCP_PROJECT") or DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID
 ENV_ID = os.environ.get("SYSTEM_TESTS_ENV_ID", "default")
@@ -186,13 +187,13 @@ def _create_job():
     ([submit1, submit2] >> list_tasks >> assert_tasks >> list_jobs >> get_name >> [delete_job1, delete_job2])

-    from tests.system.utils.watcher import watcher
+    from dev.tests_common.test_utils.watcher import watcher

     # This test needs watcher in order to properly mark success/failure
     # when "tearDown" task with trigger rule is part of the DAG
     list(dag.tasks) >> watcher()

-from tests.system.utils import get_test_run  # noqa: E402
+from dev.tests_common.test_utils.system_tests import get_test_run  # noqa: E402

 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest)
 test_run = get_test_run(dag)
diff --git a/tests/system/providers/common/sql/__init__.py b/providers/tests/system/google/cloud/cloud_build/__init__.py
similarity index 100%
rename from tests/system/providers/common/sql/__init__.py
rename to providers/tests/system/google/cloud/cloud_build/__init__.py
diff --git
a/tests/system/providers/google/cloud/cloud_build/example_cloud_build.py b/providers/tests/system/google/cloud/cloud_build/example_cloud_build.py similarity index 97% rename from tests/system/providers/google/cloud/cloud_build/example_cloud_build.py rename to providers/tests/system/google/cloud/cloud_build/example_cloud_build.py index cb31a3b4d091..bf2391413a1c 100644 --- a/tests/system/providers/google/cloud/cloud_build/example_cloud_build.py +++ b/providers/tests/system/google/cloud/cloud_build/example_cloud_build.py @@ -39,7 +39,8 @@ CloudBuildRetryBuildOperator, ) from airflow.providers.standard.operators.bash import BashOperator -from tests.system.providers.google import DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID + +from providers.tests.system.google import DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID ENV_ID = os.environ.get("SYSTEM_TESTS_ENV_ID", "default") PROJECT_ID = os.environ.get("SYSTEM_TESTS_GCP_PROJECT") or DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID @@ -51,7 +52,7 @@ # If you'd like to run this system test locally, please # 1. Create Cloud Source Repository # 2. Push into a master branch the following file: -# tests/system/providers/google/cloud/cloud_build/resources/example_cloud_build.yaml +# providers/tests/system/google/cloud/cloud_build/resources/example_cloud_build.yaml GCP_SOURCE_REPOSITORY_NAME = "test-cloud-build-repository" CURRENT_FOLDER = Path(__file__).parent @@ -266,14 +267,14 @@ def no_wait_cancel_retry_get_deferrable(): ] ) - from tests.system.utils.watcher import watcher + from dev.tests_common.test_utils.watcher import watcher # This test needs watcher in order to properly mark success/failure # when "tearDown" task with trigger rule is part of the DAG list(dag.tasks) >> watcher() -from tests.system.utils import get_test_run # noqa: E402 +from dev.tests_common.test_utils.system_tests import get_test_run # noqa: E402 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) test_run = get_test_run(dag) diff --git a/tests/system/providers/google/cloud/cloud_build/example_cloud_build_trigger.py b/providers/tests/system/google/cloud/cloud_build/example_cloud_build_trigger.py similarity index 96% rename from tests/system/providers/google/cloud/cloud_build/example_cloud_build_trigger.py rename to providers/tests/system/google/cloud/cloud_build/example_cloud_build_trigger.py index 6c2c64ba6283..f6873da9c568 100644 --- a/tests/system/providers/google/cloud/cloud_build/example_cloud_build_trigger.py +++ b/providers/tests/system/google/cloud/cloud_build/example_cloud_build_trigger.py @@ -41,7 +41,8 @@ CloudBuildUpdateBuildTriggerOperator, ) from airflow.utils.trigger_rule import TriggerRule -from tests.system.providers.google import DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID + +from providers.tests.system.google import DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID ENV_ID = os.environ.get("SYSTEM_TESTS_ENV_ID", "default") PROJECT_ID = os.environ.get("SYSTEM_TESTS_GCP_PROJECT") or DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID @@ -52,7 +53,7 @@ # If you'd like to run this system test locally, please # 1. Create Cloud Source Repository # 2. 
Push into a master branch the following file: -# tests/system/providers/google/cloud/cloud_build/resources/example_cloud_build.yaml +# providers/tests/system/google/cloud/cloud_build/resources/example_cloud_build.yaml GCP_SOURCE_REPOSITORY_NAME = "test-cloud-build-repository" TRIGGER_NAME = f"cloud-build-trigger-{ENV_ID}".replace("_", "-") @@ -181,14 +182,14 @@ def get_project_number(): >> list_build_triggers ) - from tests.system.utils.watcher import watcher + from dev.tests_common.test_utils.watcher import watcher # This test needs watcher in order to properly mark success/failure # when "tearDown" task with trigger rule is part of the DAG list(dag.tasks) >> watcher() -from tests.system.utils import get_test_run # noqa: E402 +from dev.tests_common.test_utils.system_tests import get_test_run # noqa: E402 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) test_run = get_test_run(dag) diff --git a/tests/system/providers/databricks/__init__.py b/providers/tests/system/google/cloud/cloud_build/resources/__init__.py similarity index 100% rename from tests/system/providers/databricks/__init__.py rename to providers/tests/system/google/cloud/cloud_build/resources/__init__.py diff --git a/tests/system/providers/google/cloud/cloud_build/resources/example_cloud_build.yaml b/providers/tests/system/google/cloud/cloud_build/resources/example_cloud_build.yaml similarity index 100% rename from tests/system/providers/google/cloud/cloud_build/resources/example_cloud_build.yaml rename to providers/tests/system/google/cloud/cloud_build/resources/example_cloud_build.yaml diff --git a/tests/system/providers/dbt/cloud/__init__.py b/providers/tests/system/google/cloud/cloud_functions/__init__.py similarity index 100% rename from tests/system/providers/dbt/cloud/__init__.py rename to providers/tests/system/google/cloud/cloud_functions/__init__.py diff --git a/tests/system/providers/google/cloud/cloud_functions/example_functions.py b/providers/tests/system/google/cloud/cloud_functions/example_functions.py similarity index 95% rename from tests/system/providers/google/cloud/cloud_functions/example_functions.py rename to providers/tests/system/google/cloud/cloud_functions/example_functions.py index 84e31bc74517..fd0b8ea5f5a4 100644 --- a/tests/system/providers/google/cloud/cloud_functions/example_functions.py +++ b/providers/tests/system/google/cloud/cloud_functions/example_functions.py @@ -34,7 +34,8 @@ CloudFunctionDeployFunctionOperator, CloudFunctionInvokeFunctionOperator, ) -from tests.system.providers.google import DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID + +from providers.tests.system.google import DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID PROJECT_ID = os.environ.get("SYSTEM_TESTS_GCP_PROJECT") or DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID ENV_ID = os.environ.get("SYSTEM_TESTS_ENV_ID", "default") @@ -121,14 +122,14 @@ delete_function, ) - from tests.system.utils.watcher import watcher + from dev.tests_common.test_utils.watcher import watcher # This test needs watcher in order to properly mark success/failure # when "tearDown" task with trigger rule is part of the DAG list(dag.tasks) >> watcher() -from tests.system.utils import get_test_run # noqa: E402 +from dev.tests_common.test_utils.system_tests import get_test_run # noqa: E402 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) test_run = get_test_run(dag) diff --git a/tests/system/providers/dingding/__init__.py b/providers/tests/system/google/cloud/cloud_memorystore/__init__.py similarity index 100% 
rename from tests/system/providers/dingding/__init__.py rename to providers/tests/system/google/cloud/cloud_memorystore/__init__.py diff --git a/tests/system/providers/google/cloud/cloud_memorystore/example_cloud_memorystore_memcached.py b/providers/tests/system/google/cloud/cloud_memorystore/example_cloud_memorystore_memcached.py similarity index 98% rename from tests/system/providers/google/cloud/cloud_memorystore/example_cloud_memorystore_memcached.py rename to providers/tests/system/google/cloud/cloud_memorystore/example_cloud_memorystore_memcached.py index 4884122751e0..670a850b04ac 100644 --- a/tests/system/providers/google/cloud/cloud_memorystore/example_cloud_memorystore_memcached.py +++ b/providers/tests/system/google/cloud/cloud_memorystore/example_cloud_memorystore_memcached.py @@ -198,14 +198,14 @@ # ### Everything below this line is not part of example ### # ### Just for system tests purpose ### - from tests.system.utils.watcher import watcher + from dev.tests_common.test_utils.watcher import watcher # This test needs watcher in order to properly mark success/failure # when "tearDown" task with trigger rule is part of the DAG list(dag.tasks) >> watcher() -from tests.system.utils import get_test_run # noqa: E402 +from dev.tests_common.test_utils.system_tests import get_test_run # noqa: E402 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) test_run = get_test_run(dag) diff --git a/tests/system/providers/google/cloud/cloud_memorystore/example_cloud_memorystore_redis.py b/providers/tests/system/google/cloud/cloud_memorystore/example_cloud_memorystore_redis.py similarity index 97% rename from tests/system/providers/google/cloud/cloud_memorystore/example_cloud_memorystore_redis.py rename to providers/tests/system/google/cloud/cloud_memorystore/example_cloud_memorystore_redis.py index c46d966371da..3d8ebf6f287d 100644 --- a/tests/system/providers/google/cloud/cloud_memorystore/example_cloud_memorystore_redis.py +++ b/providers/tests/system/google/cloud/cloud_memorystore/example_cloud_memorystore_redis.py @@ -47,7 +47,8 @@ ) from airflow.providers.standard.operators.bash import BashOperator from airflow.utils.trigger_rule import TriggerRule -from tests.system.providers.google import DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID + +from providers.tests.system.google import DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID ENV_ID = os.environ.get("SYSTEM_TESTS_ENV_ID", "default") ENV_ID_LOWER = ENV_ID.lower() if ENV_ID else "" @@ -265,14 +266,14 @@ # ### Everything below this line is not part of example ### # ### Just for system tests purpose ### - from tests.system.utils.watcher import watcher + from dev.tests_common.test_utils.watcher import watcher # This test needs watcher in order to properly mark success/failure # when "tearDown" task with trigger rule is part of the DAG list(dag.tasks) >> watcher() -from tests.system.utils import get_test_run # noqa: E402 +from dev.tests_common.test_utils.system_tests import get_test_run # noqa: E402 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) test_run = get_test_run(dag) diff --git a/tests/system/providers/docker/__init__.py b/providers/tests/system/google/cloud/cloud_run/__init__.py similarity index 100% rename from tests/system/providers/docker/__init__.py rename to providers/tests/system/google/cloud/cloud_run/__init__.py diff --git a/tests/system/providers/google/cloud/cloud_run/example_cloud_run.py b/providers/tests/system/google/cloud/cloud_run/example_cloud_run.py similarity index 
98% rename from tests/system/providers/google/cloud/cloud_run/example_cloud_run.py rename to providers/tests/system/google/cloud/cloud_run/example_cloud_run.py index 23370d26876b..08145e0336ed 100644 --- a/tests/system/providers/google/cloud/cloud_run/example_cloud_run.py +++ b/providers/tests/system/google/cloud/cloud_run/example_cloud_run.py @@ -367,13 +367,13 @@ def _create_job_instance_with_label(): >> (delete_job1, delete_job2, delete_job3) ) - from tests.system.utils.watcher import watcher + from dev.tests_common.test_utils.watcher import watcher # This test needs watcher in order to properly mark success/failure # when "tearDown" task with trigger rule is part of the DAG list(dag.tasks) >> watcher() -from tests.system.utils import get_test_run # noqa: E402 +from dev.tests_common.test_utils.system_tests import get_test_run # noqa: E402 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) test_run = get_test_run(dag) diff --git a/tests/system/providers/google/cloud/cloud_run/example_cloud_run_service.py b/providers/tests/system/google/cloud/cloud_run/example_cloud_run_service.py similarity index 95% rename from tests/system/providers/google/cloud/cloud_run/example_cloud_run_service.py rename to providers/tests/system/google/cloud/cloud_run/example_cloud_run_service.py index 0c838e6d4e28..43318c4b1430 100644 --- a/tests/system/providers/google/cloud/cloud_run/example_cloud_run_service.py +++ b/providers/tests/system/google/cloud/cloud_run/example_cloud_run_service.py @@ -84,14 +84,14 @@ def _create_service(): >> delete_cloud_run_service ) - from tests.system.utils.watcher import watcher + from dev.tests_common.test_utils.watcher import watcher # This test needs watcher in order to properly mark success/failure # when "tearDown" task with trigger rule is part of the DAG list(dag.tasks) >> watcher() -from tests.system.utils import get_test_run # noqa: E402 +from dev.tests_common.test_utils.system_tests import get_test_run # noqa: E402 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) test_run = get_test_run(dag) diff --git a/tests/system/providers/elasticsearch/__init__.py b/providers/tests/system/google/cloud/cloud_sql/__init__.py similarity index 100% rename from tests/system/providers/elasticsearch/__init__.py rename to providers/tests/system/google/cloud/cloud_sql/__init__.py diff --git a/tests/system/providers/google/cloud/cloud_sql/example_cloud_sql.py b/providers/tests/system/google/cloud/cloud_sql/example_cloud_sql.py similarity index 98% rename from tests/system/providers/google/cloud/cloud_sql/example_cloud_sql.py rename to providers/tests/system/google/cloud/cloud_sql/example_cloud_sql.py index 7a60e3db6171..52414b2784b2 100644 --- a/tests/system/providers/google/cloud/cloud_sql/example_cloud_sql.py +++ b/providers/tests/system/google/cloud/cloud_sql/example_cloud_sql.py @@ -47,7 +47,8 @@ GCSObjectCreateAclEntryOperator, ) from airflow.utils.trigger_rule import TriggerRule -from tests.system.providers.google import DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID + +from providers.tests.system.google import DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID ENV_ID = os.environ.get("SYSTEM_TESTS_ENV_ID", "default") PROJECT_ID = os.environ.get("SYSTEM_TESTS_GCP_PROJECT") or DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID @@ -293,14 +294,14 @@ # ### Everything below this line is not part of example ### # ### Just for system tests purpose ### - from tests.system.utils.watcher import watcher + from dev.tests_common.test_utils.watcher import watcher # 
This test needs watcher in order to properly mark success/failure # when "tearDown" task with trigger rule is part of the DAG list(dag.tasks) >> watcher() -from tests.system.utils import get_test_run # noqa: E402 +from dev.tests_common.test_utils.system_tests import get_test_run # noqa: E402 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) test_run = get_test_run(dag) diff --git a/tests/system/providers/google/cloud/cloud_sql/example_cloud_sql_query.py b/providers/tests/system/google/cloud/cloud_sql/example_cloud_sql_query.py similarity index 98% rename from tests/system/providers/google/cloud/cloud_sql/example_cloud_sql_query.py rename to providers/tests/system/google/cloud/cloud_sql/example_cloud_sql_query.py index f6d588c3156c..6cf7e0c08751 100644 --- a/tests/system/providers/google/cloud/cloud_sql/example_cloud_sql_query.py +++ b/providers/tests/system/google/cloud/cloud_sql/example_cloud_sql_query.py @@ -43,7 +43,8 @@ ) from airflow.settings import Session from airflow.utils.trigger_rule import TriggerRule -from tests.system.providers.google import DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID + +from providers.tests.system.google import DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID ENV_ID = os.environ.get("SYSTEM_TESTS_ENV_ID", "default") PROJECT_ID = os.environ.get("SYSTEM_TESTS_GCP_PROJECT") or DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID @@ -540,13 +541,13 @@ def delete_connection(connection_id: str) -> None: # ### Everything below this line is not part of example ### # ### Just for system tests purpose ### - from tests.system.utils.watcher import watcher + from dev.tests_common.test_utils.watcher import watcher # This test needs watcher in order to properly mark success/failure # when "tearDown" task with trigger rule is part of the DAG list(dag.tasks) >> watcher() -from tests.system.utils import get_test_run # noqa: E402 +from dev.tests_common.test_utils.system_tests import get_test_run # noqa: E402 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) test_run = get_test_run(dag) diff --git a/tests/system/providers/google/cloud/cloud_sql/example_cloud_sql_query_ssl.py b/providers/tests/system/google/cloud/cloud_sql/example_cloud_sql_query_ssl.py similarity index 98% rename from tests/system/providers/google/cloud/cloud_sql/example_cloud_sql_query_ssl.py rename to providers/tests/system/google/cloud/cloud_sql/example_cloud_sql_query_ssl.py index 4808141512ec..db77f18339af 100644 --- a/tests/system/providers/google/cloud/cloud_sql/example_cloud_sql_query_ssl.py +++ b/providers/tests/system/google/cloud/cloud_sql/example_cloud_sql_query_ssl.py @@ -48,7 +48,8 @@ ) from airflow.settings import Session from airflow.utils.trigger_rule import TriggerRule -from tests.system.providers.google import DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID + +from providers.tests.system.google import DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID ENV_ID = os.environ.get("SYSTEM_TESTS_ENV_ID", "default") PROJECT_ID = os.environ.get("SYSTEM_TESTS_GCP_PROJECT") or DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID @@ -490,13 +491,13 @@ def delete_secret(ssl_secret_id, db_type: str) -> None: # ### Everything below this line is not part of example ### # ### Just for system tests purpose ### - from tests.system.utils.watcher import watcher + from dev.tests_common.test_utils.watcher import watcher # This test needs watcher in order to properly mark success/failure # when "tearDown" task with trigger rule is part of the DAG list(dag.tasks) >> watcher() -from tests.system.utils import get_test_run # noqa: 
E402 +from dev.tests_common.test_utils.system_tests import get_test_run # noqa: E402 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) test_run = get_test_run(dag) diff --git a/tests/system/providers/ftp/__init__.py b/providers/tests/system/google/cloud/composer/__init__.py similarity index 100% rename from tests/system/providers/ftp/__init__.py rename to providers/tests/system/google/cloud/composer/__init__.py diff --git a/tests/system/providers/google/cloud/composer/example_cloud_composer.py b/providers/tests/system/google/cloud/composer/example_cloud_composer.py similarity index 98% rename from tests/system/providers/google/cloud/composer/example_cloud_composer.py rename to providers/tests/system/google/cloud/composer/example_cloud_composer.py index 75ee8e90d3aa..266a7e4a444a 100644 --- a/tests/system/providers/google/cloud/composer/example_cloud_composer.py +++ b/providers/tests/system/google/cloud/composer/example_cloud_composer.py @@ -214,14 +214,14 @@ [delete_env, defer_delete_env], ) - from tests.system.utils.watcher import watcher + from dev.tests_common.test_utils.watcher import watcher # This test needs watcher in order to properly mark success/failure # when "teardown" task with trigger rule is part of the DAG list(dag.tasks) >> watcher() -from tests.system.utils import get_test_run # noqa: E402 +from dev.tests_common.test_utils.system_tests import get_test_run # noqa: E402 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) test_run = get_test_run(dag) diff --git a/tests/system/providers/github/__init__.py b/providers/tests/system/google/cloud/compute/__init__.py similarity index 100% rename from tests/system/providers/github/__init__.py rename to providers/tests/system/google/cloud/compute/__init__.py diff --git a/tests/system/providers/google/cloud/compute/example_compute.py b/providers/tests/system/google/cloud/compute/example_compute.py similarity index 97% rename from tests/system/providers/google/cloud/compute/example_compute.py rename to providers/tests/system/google/cloud/compute/example_compute.py index 27e8ae758e70..834392089070 100644 --- a/tests/system/providers/google/cloud/compute/example_compute.py +++ b/providers/tests/system/google/cloud/compute/example_compute.py @@ -39,7 +39,8 @@ ComputeEngineStopInstanceOperator, ) from airflow.utils.trigger_rule import TriggerRule -from tests.system.providers.google import DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID + +from providers.tests.system.google import DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID # [START howto_operator_gce_args_common] ENV_ID = os.environ.get("SYSTEM_TESTS_ENV_ID", "default") @@ -266,14 +267,14 @@ # ### Everything below this line is not part of example ### # ### Just for system tests purpose ### - from tests.system.utils.watcher import watcher + from dev.tests_common.test_utils.watcher import watcher # This test needs watcher in order to properly mark success/failure # when "tearDown" task with trigger rule is part of the DAG list(dag.tasks) >> watcher() -from tests.system.utils import get_test_run # noqa: E402 +from dev.tests_common.test_utils.system_tests import get_test_run # noqa: E402 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) test_run = get_test_run(dag) diff --git a/tests/system/providers/google/cloud/compute/example_compute_igm.py b/providers/tests/system/google/cloud/compute/example_compute_igm.py similarity index 97% rename from 
tests/system/providers/google/cloud/compute/example_compute_igm.py rename to providers/tests/system/google/cloud/compute/example_compute_igm.py index 74072b209944..11357452dfc6 100644 --- a/tests/system/providers/google/cloud/compute/example_compute_igm.py +++ b/providers/tests/system/google/cloud/compute/example_compute_igm.py @@ -38,7 +38,8 @@ ComputeEngineInstanceGroupUpdateManagerTemplateOperator, ) from airflow.utils.trigger_rule import TriggerRule -from tests.system.providers.google import DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID + +from providers.tests.system.google import DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID ENV_ID = os.environ.get("SYSTEM_TESTS_ENV_ID", "default") PROJECT_ID = os.environ.get("SYSTEM_TESTS_GCP_PROJECT") or DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID @@ -235,14 +236,14 @@ # ### Everything below this line is not part of example ### # ### Just for system tests purpose ### - from tests.system.utils.watcher import watcher + from dev.tests_common.test_utils.watcher import watcher # This test needs watcher in order to properly mark success/failure # when "tearDown" task with trigger rule is part of the DAG list(dag.tasks) >> watcher() -from tests.system.utils import get_test_run # noqa: E402 +from dev.tests_common.test_utils.system_tests import get_test_run # noqa: E402 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) test_run = get_test_run(dag) diff --git a/tests/system/providers/google/cloud/compute/example_compute_ssh.py b/providers/tests/system/google/cloud/compute/example_compute_ssh.py similarity index 96% rename from tests/system/providers/google/cloud/compute/example_compute_ssh.py rename to providers/tests/system/google/cloud/compute/example_compute_ssh.py index e0bece195762..5b7d1523017f 100644 --- a/tests/system/providers/google/cloud/compute/example_compute_ssh.py +++ b/providers/tests/system/google/cloud/compute/example_compute_ssh.py @@ -34,7 +34,8 @@ ) from airflow.providers.ssh.operators.ssh import SSHOperator from airflow.utils.trigger_rule import TriggerRule -from tests.system.providers.google import DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID + +from providers.tests.system.google import DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID # [START howto_operator_gce_args_common] ENV_ID = os.environ.get("SYSTEM_TESTS_ENV_ID", "default") @@ -137,14 +138,14 @@ # ### Everything below this line is not part of example ### # ### Just for system tests purpose ### - from tests.system.utils.watcher import watcher + from dev.tests_common.test_utils.watcher import watcher # This test needs watcher in order to properly mark success/failure # when "tearDown" task with trigger rule is part of the DAG list(dag.tasks) >> watcher() -from tests.system.utils import get_test_run # noqa: E402 +from dev.tests_common.test_utils.system_tests import get_test_run # noqa: E402 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) test_run = get_test_run(dag) diff --git a/tests/system/providers/google/cloud/compute/example_compute_ssh_os_login.py b/providers/tests/system/google/cloud/compute/example_compute_ssh_os_login.py similarity index 96% rename from tests/system/providers/google/cloud/compute/example_compute_ssh_os_login.py rename to providers/tests/system/google/cloud/compute/example_compute_ssh_os_login.py index 99e04fa3d3eb..63ed278a08c5 100644 --- a/tests/system/providers/google/cloud/compute/example_compute_ssh_os_login.py +++ b/providers/tests/system/google/cloud/compute/example_compute_ssh_os_login.py @@ -34,7 +34,8 @@ ) from 
airflow.providers.ssh.operators.ssh import SSHOperator from airflow.utils.trigger_rule import TriggerRule -from tests.system.providers.google import DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID + +from providers.tests.system.google import DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID # [START howto_operator_gce_args_common] ENV_ID = os.environ.get("SYSTEM_TESTS_ENV_ID", "default") @@ -145,14 +146,14 @@ # ### Everything below this line is not part of example ### # ### Just for system tests purpose ### - from tests.system.utils.watcher import watcher + from dev.tests_common.test_utils.watcher import watcher # This test needs watcher in order to properly mark success/failure # when "tearDown" task with trigger rule is part of the DAG list(dag.tasks) >> watcher() -from tests.system.utils import get_test_run # noqa: E402 +from dev.tests_common.test_utils.system_tests import get_test_run # noqa: E402 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) test_run = get_test_run(dag) diff --git a/tests/system/providers/google/cloud/compute/example_compute_ssh_parallel.py b/providers/tests/system/google/cloud/compute/example_compute_ssh_parallel.py similarity index 96% rename from tests/system/providers/google/cloud/compute/example_compute_ssh_parallel.py rename to providers/tests/system/google/cloud/compute/example_compute_ssh_parallel.py index b5964eed7dd5..3ab61afc0c25 100644 --- a/tests/system/providers/google/cloud/compute/example_compute_ssh_parallel.py +++ b/providers/tests/system/google/cloud/compute/example_compute_ssh_parallel.py @@ -34,7 +34,8 @@ ) from airflow.providers.ssh.operators.ssh import SSHOperator from airflow.utils.trigger_rule import TriggerRule -from tests.system.providers.google import DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID + +from providers.tests.system.google import DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID # [START howto_operator_gce_args_common] ENV_ID = os.environ.get("SYSTEM_TESTS_ENV_ID", "default") @@ -138,14 +139,14 @@ # ### Everything below this line is not part of example ### # ### Just for system tests purpose ### - from tests.system.utils.watcher import watcher + from dev.tests_common.test_utils.watcher import watcher # This test needs watcher in order to properly mark success/failure # when "tearDown" task with trigger rule is part of the DAG list(dag.tasks) >> watcher() -from tests.system.utils import get_test_run # noqa: E402 +from dev.tests_common.test_utils.system_tests import get_test_run # noqa: E402 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) test_run = get_test_run(dag) diff --git a/tests/system/providers/google/ads/__init__.py b/providers/tests/system/google/cloud/data_loss_prevention/__init__.py similarity index 100% rename from tests/system/providers/google/ads/__init__.py rename to providers/tests/system/google/cloud/data_loss_prevention/__init__.py diff --git a/tests/system/providers/google/cloud/data_loss_prevention/example_dlp_deidentify_content.py b/providers/tests/system/google/cloud/data_loss_prevention/example_dlp_deidentify_content.py similarity index 96% rename from tests/system/providers/google/cloud/data_loss_prevention/example_dlp_deidentify_content.py rename to providers/tests/system/google/cloud/data_loss_prevention/example_dlp_deidentify_content.py index 7acae202dea8..75b1ad93dd47 100644 --- a/tests/system/providers/google/cloud/data_loss_prevention/example_dlp_deidentify_content.py +++ b/providers/tests/system/google/cloud/data_loss_prevention/example_dlp_deidentify_content.py @@ -38,7 
+38,8 @@ CloudDLPUpdateDeidentifyTemplateOperator, ) from airflow.utils.trigger_rule import TriggerRule -from tests.system.providers.google import DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID + +from providers.tests.system.google import DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID DAG_ID = "dlp_deidentify_content" ENV_ID = os.environ.get("SYSTEM_TESTS_ENV_ID", "default") @@ -156,14 +157,14 @@ >> delete_template ) - from tests.system.utils.watcher import watcher + from dev.tests_common.test_utils.watcher import watcher # This test needs watcher in order to properly mark success/failure # when "teardown" task with trigger rule is part of the DAG list(dag.tasks) >> watcher() -from tests.system.utils import get_test_run # noqa: E402 +from dev.tests_common.test_utils.system_tests import get_test_run # noqa: E402 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) test_run = get_test_run(dag) diff --git a/tests/system/providers/google/cloud/data_loss_prevention/example_dlp_info_types.py b/providers/tests/system/google/cloud/data_loss_prevention/example_dlp_info_types.py similarity index 96% rename from tests/system/providers/google/cloud/data_loss_prevention/example_dlp_info_types.py rename to providers/tests/system/google/cloud/data_loss_prevention/example_dlp_info_types.py index 40e5c030d981..fb2198b16793 100644 --- a/tests/system/providers/google/cloud/data_loss_prevention/example_dlp_info_types.py +++ b/providers/tests/system/google/cloud/data_loss_prevention/example_dlp_info_types.py @@ -41,7 +41,8 @@ from airflow.providers.google.cloud.operators.gcs import GCSCreateBucketOperator, GCSDeleteBucketOperator from airflow.providers.google.cloud.transfers.local_to_gcs import LocalFilesystemToGCSOperator from airflow.utils.trigger_rule import TriggerRule -from tests.system.providers.google import DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID + +from providers.tests.system.google import DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID DAG_ID = "dlp_info_types" ENV_ID = os.environ.get("SYSTEM_TESTS_ENV_ID", "default") @@ -153,14 +154,14 @@ >> delete_bucket ) - from tests.system.utils.watcher import watcher + from dev.tests_common.test_utils.watcher import watcher # This test needs watcher in order to properly mark success/failure # when "teardown" task with trigger rule is part of the DAG list(dag.tasks) >> watcher() -from tests.system.utils import get_test_run # noqa: E402 +from dev.tests_common.test_utils.system_tests import get_test_run # noqa: E402 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) test_run = get_test_run(dag) diff --git a/tests/system/providers/google/cloud/data_loss_prevention/example_dlp_inspect_template.py b/providers/tests/system/google/cloud/data_loss_prevention/example_dlp_inspect_template.py similarity index 95% rename from tests/system/providers/google/cloud/data_loss_prevention/example_dlp_inspect_template.py rename to providers/tests/system/google/cloud/data_loss_prevention/example_dlp_inspect_template.py index fb9ca15e2d72..22b259582f4a 100644 --- a/tests/system/providers/google/cloud/data_loss_prevention/example_dlp_inspect_template.py +++ b/providers/tests/system/google/cloud/data_loss_prevention/example_dlp_inspect_template.py @@ -37,7 +37,8 @@ CloudDLPUpdateInspectTemplateOperator, ) from airflow.utils.trigger_rule import TriggerRule -from tests.system.providers.google import DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID + +from providers.tests.system.google import DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID DAG_ID = "dlp_inspect_template" ENV_ID = 
os.environ.get("SYSTEM_TESTS_ENV_ID", "default") @@ -114,14 +115,14 @@ >> delete_template ) - from tests.system.utils.watcher import watcher + from dev.tests_common.test_utils.watcher import watcher # This test needs watcher in order to properly mark success/failure # when "teardown" task with trigger rule is part of the DAG list(dag.tasks) >> watcher() -from tests.system.utils import get_test_run # noqa: E402 +from dev.tests_common.test_utils.system_tests import get_test_run # noqa: E402 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) test_run = get_test_run(dag) diff --git a/tests/system/providers/google/cloud/data_loss_prevention/example_dlp_job.py b/providers/tests/system/google/cloud/data_loss_prevention/example_dlp_job.py similarity index 94% rename from tests/system/providers/google/cloud/data_loss_prevention/example_dlp_job.py rename to providers/tests/system/google/cloud/data_loss_prevention/example_dlp_job.py index 2bea0ce57bce..b7dc2ac1a405 100644 --- a/tests/system/providers/google/cloud/data_loss_prevention/example_dlp_job.py +++ b/providers/tests/system/google/cloud/data_loss_prevention/example_dlp_job.py @@ -36,7 +36,8 @@ CloudDLPListDLPJobsOperator, ) from airflow.utils.trigger_rule import TriggerRule -from tests.system.providers.google import DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID + +from providers.tests.system.google import DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID DAG_ID = "dlp_job" ENV_ID = os.environ.get("SYSTEM_TESTS_ENV_ID", "default") @@ -89,14 +90,14 @@ (create_job >> list_jobs >> get_job >> cancel_job >> delete_job) - from tests.system.utils.watcher import watcher + from dev.tests_common.test_utils.watcher import watcher # This test needs watcher in order to properly mark success/failure # when "teardown" task with trigger rule is part of the DAG list(dag.tasks) >> watcher() -from tests.system.utils import get_test_run # noqa: E402 +from dev.tests_common.test_utils.system_tests import get_test_run # noqa: E402 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) test_run = get_test_run(dag) diff --git a/tests/system/providers/google/cloud/data_loss_prevention/example_dlp_job_trigger.py b/providers/tests/system/google/cloud/data_loss_prevention/example_dlp_job_trigger.py similarity index 94% rename from tests/system/providers/google/cloud/data_loss_prevention/example_dlp_job_trigger.py rename to providers/tests/system/google/cloud/data_loss_prevention/example_dlp_job_trigger.py index c2a1908e87f7..902875f7b592 100644 --- a/tests/system/providers/google/cloud/data_loss_prevention/example_dlp_job_trigger.py +++ b/providers/tests/system/google/cloud/data_loss_prevention/example_dlp_job_trigger.py @@ -34,7 +34,8 @@ CloudDLPUpdateJobTriggerOperator, ) from airflow.utils.trigger_rule import TriggerRule -from tests.system.providers.google import DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID + +from providers.tests.system.google import DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID DAG_ID = "dlp_job_trigger" ENV_ID = os.environ.get("SYSTEM_TESTS_ENV_ID", "default") @@ -94,14 +95,14 @@ (create_trigger >> list_triggers >> get_trigger >> update_trigger >> delete_trigger) - from tests.system.utils.watcher import watcher + from dev.tests_common.test_utils.watcher import watcher # This test needs watcher in order to properly mark success/failure # when "teardown" task with trigger rule is part of the DAG list(dag.tasks) >> watcher() -from tests.system.utils import get_test_run # noqa: E402 +from dev.tests_common.test_utils.system_tests 
import get_test_run # noqa: E402 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) test_run = get_test_run(dag) diff --git a/tests/system/providers/google/cloud/__init__.py b/providers/tests/system/google/cloud/data_loss_prevention/resources/__init__.py similarity index 100% rename from tests/system/providers/google/cloud/__init__.py rename to providers/tests/system/google/cloud/data_loss_prevention/resources/__init__.py diff --git a/tests/system/providers/google/cloud/data_loss_prevention/resources/dictionary.txt b/providers/tests/system/google/cloud/data_loss_prevention/resources/dictionary.txt similarity index 100% rename from tests/system/providers/google/cloud/data_loss_prevention/resources/dictionary.txt rename to providers/tests/system/google/cloud/data_loss_prevention/resources/dictionary.txt diff --git a/tests/system/providers/google/cloud/automl/__init__.py b/providers/tests/system/google/cloud/dataflow/__init__.py similarity index 100% rename from tests/system/providers/google/cloud/automl/__init__.py rename to providers/tests/system/google/cloud/dataflow/__init__.py diff --git a/tests/system/providers/google/cloud/dataflow/example_dataflow_go.py b/providers/tests/system/google/cloud/dataflow/example_dataflow_go.py similarity index 98% rename from tests/system/providers/google/cloud/dataflow/example_dataflow_go.py rename to providers/tests/system/google/cloud/dataflow/example_dataflow_go.py index 14c23a015d2d..57e5941e3ed6 100644 --- a/tests/system/providers/google/cloud/dataflow/example_dataflow_go.py +++ b/providers/tests/system/google/cloud/dataflow/example_dataflow_go.py @@ -149,7 +149,7 @@ def check_autoscaling_event(autoscaling_events: list[dict]) -> bool: ) -from tests.system.utils import get_test_run # noqa: E402 +from dev.tests_common.test_utils.system_tests import get_test_run # noqa: E402 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) test_run = get_test_run(dag) diff --git a/tests/system/providers/google/cloud/dataflow/example_dataflow_native_java.py b/providers/tests/system/google/cloud/dataflow/example_dataflow_native_java.py similarity index 97% rename from tests/system/providers/google/cloud/dataflow/example_dataflow_native_java.py rename to providers/tests/system/google/cloud/dataflow/example_dataflow_native_java.py index 34dad6cdeb29..3629194e6ee9 100644 --- a/tests/system/providers/google/cloud/dataflow/example_dataflow_native_java.py +++ b/providers/tests/system/google/cloud/dataflow/example_dataflow_native_java.py @@ -148,14 +148,14 @@ >> delete_bucket ) - from tests.system.utils.watcher import watcher + from dev.tests_common.test_utils.watcher import watcher # This test needs watcher in order to properly mark success/failure # when "teardown" task with trigger rule is part of the DAG list(dag.tasks) >> watcher() -from tests.system.utils import get_test_run # noqa: E402 +from dev.tests_common.test_utils.system_tests import get_test_run # noqa: E402 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) test_run = get_test_run(dag) diff --git a/tests/system/providers/google/cloud/dataflow/example_dataflow_native_python.py b/providers/tests/system/google/cloud/dataflow/example_dataflow_native_python.py similarity index 96% rename from tests/system/providers/google/cloud/dataflow/example_dataflow_native_python.py rename to providers/tests/system/google/cloud/dataflow/example_dataflow_native_python.py index 181c5cbbe010..229373ed1bf3 100644 --- 
a/tests/system/providers/google/cloud/dataflow/example_dataflow_native_python.py +++ b/providers/tests/system/google/cloud/dataflow/example_dataflow_native_python.py @@ -110,14 +110,14 @@ >> delete_bucket ) - from tests.system.utils.watcher import watcher + from dev.tests_common.test_utils.watcher import watcher # This test needs watcher in order to properly mark success/failure # when "teardown" task with trigger rule is part of the DAG list(dag.tasks) >> watcher() -from tests.system.utils import get_test_run # noqa: E402 +from dev.tests_common.test_utils.system_tests import get_test_run # noqa: E402 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) test_run = get_test_run(dag) diff --git a/tests/system/providers/google/cloud/dataflow/example_dataflow_native_python_async.py b/providers/tests/system/google/cloud/dataflow/example_dataflow_native_python_async.py similarity index 98% rename from tests/system/providers/google/cloud/dataflow/example_dataflow_native_python_async.py rename to providers/tests/system/google/cloud/dataflow/example_dataflow_native_python_async.py index f93378c6db0b..31f1cd026f19 100644 --- a/tests/system/providers/google/cloud/dataflow/example_dataflow_native_python_async.py +++ b/providers/tests/system/google/cloud/dataflow/example_dataflow_native_python_async.py @@ -176,14 +176,14 @@ def check_autoscaling_event(autoscaling_events: list[dict]) -> bool: >> delete_bucket ) - from tests.system.utils.watcher import watcher + from dev.tests_common.test_utils.watcher import watcher # This test needs watcher in order to properly mark success/failure # when "teardown" task with trigger rule is part of the DAG list(dag.tasks) >> watcher() -from tests.system.utils import get_test_run # noqa: E402 +from dev.tests_common.test_utils.system_tests import get_test_run # noqa: E402 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) test_run = get_test_run(dag) diff --git a/tests/system/providers/google/cloud/dataflow/example_dataflow_pipeline.py b/providers/tests/system/google/cloud/dataflow/example_dataflow_pipeline.py similarity index 97% rename from tests/system/providers/google/cloud/dataflow/example_dataflow_pipeline.py rename to providers/tests/system/google/cloud/dataflow/example_dataflow_pipeline.py index 8f41b3ed7fb2..cfd5e06b029b 100644 --- a/tests/system/providers/google/cloud/dataflow/example_dataflow_pipeline.py +++ b/providers/tests/system/google/cloud/dataflow/example_dataflow_pipeline.py @@ -137,14 +137,14 @@ >> delete_bucket ) - from tests.system.utils.watcher import watcher + from dev.tests_common.test_utils.watcher import watcher # This test needs watcher in order to properly mark success/failure # when "teardown" task with trigger rule is part of the DAG list(dag.tasks) >> watcher() -from tests.system.utils import get_test_run # noqa: E402 +from dev.tests_common.test_utils.system_tests import get_test_run # noqa: E402 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) test_run = get_test_run(dag) diff --git a/tests/system/providers/google/cloud/dataflow/example_dataflow_sensors_deferrable.py b/providers/tests/system/google/cloud/dataflow/example_dataflow_sensors_deferrable.py similarity index 98% rename from tests/system/providers/google/cloud/dataflow/example_dataflow_sensors_deferrable.py rename to providers/tests/system/google/cloud/dataflow/example_dataflow_sensors_deferrable.py index 4225b6c9ff80..1a6d0aec3923 100644 --- 
a/tests/system/providers/google/cloud/dataflow/example_dataflow_sensors_deferrable.py +++ b/providers/tests/system/google/cloud/dataflow/example_dataflow_sensors_deferrable.py @@ -177,14 +177,14 @@ def check_autoscaling_event(autoscaling_events: list[dict]) -> bool: >> delete_bucket ) - from tests.system.utils.watcher import watcher + from dev.tests_common.test_utils.watcher import watcher # This test needs watcher in order to properly mark success/failure # when "teardown" task with trigger rule is part of the DAG list(dag.tasks) >> watcher() -from tests.system.utils import get_test_run # noqa: E402 +from dev.tests_common.test_utils.system_tests import get_test_run # noqa: E402 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) test_run = get_test_run(dag) diff --git a/tests/system/providers/google/cloud/dataflow/example_dataflow_sql.py b/providers/tests/system/google/cloud/dataflow/example_dataflow_sql.py similarity index 95% rename from tests/system/providers/google/cloud/dataflow/example_dataflow_sql.py rename to providers/tests/system/google/cloud/dataflow/example_dataflow_sql.py index 5b016fec3ec3..e3b0ee711a92 100644 --- a/tests/system/providers/google/cloud/dataflow/example_dataflow_sql.py +++ b/providers/tests/system/google/cloud/dataflow/example_dataflow_sql.py @@ -34,7 +34,8 @@ ) from airflow.providers.google.cloud.operators.dataflow import DataflowStartSqlJobOperator from airflow.utils.trigger_rule import TriggerRule -from tests.system.providers.google import DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID + +from providers.tests.system.google import DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID PROJECT_ID = os.environ.get("SYSTEM_TESTS_GCP_PROJECT") or DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID ENV_ID = os.environ.get("SYSTEM_TESTS_ENV_ID", "default") @@ -136,13 +137,13 @@ >> delete_bq_dataset ) - from tests.system.utils.watcher import watcher + from dev.tests_common.test_utils.watcher import watcher # This test needs watcher in order to properly mark success/failure # when "tearDown" task with trigger rule is part of the DAG list(dag.tasks) >> watcher() -from tests.system.utils import get_test_run # noqa: E402 +from dev.tests_common.test_utils.system_tests import get_test_run # noqa: E402 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) test_run = get_test_run(dag) diff --git a/tests/system/providers/google/cloud/dataflow/example_dataflow_streaming_python.py b/providers/tests/system/google/cloud/dataflow/example_dataflow_streaming_python.py similarity index 96% rename from tests/system/providers/google/cloud/dataflow/example_dataflow_streaming_python.py rename to providers/tests/system/google/cloud/dataflow/example_dataflow_streaming_python.py index a858ad90aa93..a15dffb3a328 100644 --- a/tests/system/providers/google/cloud/dataflow/example_dataflow_streaming_python.py +++ b/providers/tests/system/google/cloud/dataflow/example_dataflow_streaming_python.py @@ -114,14 +114,14 @@ >> delete_bucket ) - from tests.system.utils.watcher import watcher + from dev.tests_common.test_utils.watcher import watcher # This test needs watcher in order to properly mark success/failure # when "teardown" task with trigger rule is part of the DAG list(dag.tasks) >> watcher() -from tests.system.utils import get_test_run # noqa: E402 +from dev.tests_common.test_utils.system_tests import get_test_run # noqa: E402 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) test_run = get_test_run(dag) diff --git 
a/tests/system/providers/google/cloud/dataflow/example_dataflow_template.py b/providers/tests/system/google/cloud/dataflow/example_dataflow_template.py
similarity index 96%
rename from tests/system/providers/google/cloud/dataflow/example_dataflow_template.py
rename to providers/tests/system/google/cloud/dataflow/example_dataflow_template.py
index 79ddfbef438d..86545514607a 100644
--- a/tests/system/providers/google/cloud/dataflow/example_dataflow_template.py
+++ b/providers/tests/system/google/cloud/dataflow/example_dataflow_template.py
@@ -37,7 +37,8 @@
 from airflow.providers.google.cloud.operators.gcs import GCSCreateBucketOperator, GCSDeleteBucketOperator
 from airflow.providers.google.cloud.transfers.local_to_gcs import LocalFilesystemToGCSOperator
 from airflow.utils.trigger_rule import TriggerRule
-from tests.system.providers.google import DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID
+
+from providers.tests.system.google import DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID

 ENV_ID = os.environ.get("SYSTEM_TESTS_ENV_ID", "default")
 PROJECT_ID = os.environ.get("SYSTEM_TESTS_GCP_PROJECT") or DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID
@@ -157,14 +158,14 @@
         delete_bucket,
     )

-    from tests.system.utils.watcher import watcher
+    from dev.tests_common.test_utils.watcher import watcher

     # This test needs watcher in order to properly mark success/failure
     # when "teardown" task with trigger rule is part of the DAG
     list(dag.tasks) >> watcher()


-from tests.system.utils import get_test_run  # noqa: E402
+from dev.tests_common.test_utils.system_tests import get_test_run  # noqa: E402

 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest)
 test_run = get_test_run(dag)
diff --git a/tests/system/providers/google/cloud/dataflow/example_dataflow_yaml.py b/providers/tests/system/google/cloud/dataflow/example_dataflow_yaml.py
similarity index 96%
rename from tests/system/providers/google/cloud/dataflow/example_dataflow_yaml.py
rename to providers/tests/system/google/cloud/dataflow/example_dataflow_yaml.py
index d5162e8adf1b..2243ad695bdd 100644
--- a/tests/system/providers/google/cloud/dataflow/example_dataflow_yaml.py
+++ b/providers/tests/system/google/cloud/dataflow/example_dataflow_yaml.py
@@ -39,7 +39,8 @@
 )
 from airflow.providers.google.cloud.operators.dataflow import DataflowStartYamlJobOperator
 from airflow.utils.trigger_rule import TriggerRule
-from tests.system.providers.google import DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID
+
+from providers.tests.system.google import DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID

 PROJECT_ID = os.environ.get("SYSTEM_TESTS_GCP_PROJECT") or DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID
 ENV_ID = os.environ.get("SYSTEM_TESTS_ENV_ID", "default")
@@ -161,13 +162,13 @@
         >> delete_bq_dataset
     )

-    from tests.system.utils.watcher import watcher
+    from dev.tests_common.test_utils.watcher import watcher

     # This test needs watcher in order to properly mark success/failure
     # when "tearDown" task with trigger rule is part of the DAG
     list(dag.tasks) >> watcher()

-from tests.system.utils import get_test_run  # noqa: E402
+from dev.tests_common.test_utils.system_tests import get_test_run  # noqa: E402

 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest)
 test_run = get_test_run(dag)
diff --git a/tests/system/providers/google/cloud/automl/resources/__init__.py b/providers/tests/system/google/cloud/dataflow/resources/__init__.py
similarity index 100%
rename from tests/system/providers/google/cloud/automl/resources/__init__.py
rename to providers/tests/system/google/cloud/dataflow/resources/__init__.py
diff --git a/tests/system/providers/google/cloud/dataflow/resources/input.csv b/providers/tests/system/google/cloud/dataflow/resources/input.csv
similarity index 100%
rename from tests/system/providers/google/cloud/dataflow/resources/input.csv
rename to providers/tests/system/google/cloud/dataflow/resources/input.csv
diff --git a/tests/system/providers/google/cloud/dataflow/resources/schema.json b/providers/tests/system/google/cloud/dataflow/resources/schema.json
similarity index 100%
rename from tests/system/providers/google/cloud/dataflow/resources/schema.json
rename to providers/tests/system/google/cloud/dataflow/resources/schema.json
diff --git a/tests/system/providers/google/cloud/dataflow/resources/text.txt b/providers/tests/system/google/cloud/dataflow/resources/text.txt
similarity index 100%
rename from tests/system/providers/google/cloud/dataflow/resources/text.txt
rename to providers/tests/system/google/cloud/dataflow/resources/text.txt
diff --git a/tests/system/providers/google/cloud/dataflow/resources/wordcount.go b/providers/tests/system/google/cloud/dataflow/resources/wordcount.go
similarity index 100%
rename from tests/system/providers/google/cloud/dataflow/resources/wordcount.go
rename to providers/tests/system/google/cloud/dataflow/resources/wordcount.go
diff --git a/tests/system/providers/google/cloud/azure/__init__.py b/providers/tests/system/google/cloud/dataform/__init__.py
similarity index 100%
rename from tests/system/providers/google/cloud/azure/__init__.py
rename to providers/tests/system/google/cloud/dataform/__init__.py
diff --git a/tests/system/providers/google/cloud/dataform/example_dataform.py b/providers/tests/system/google/cloud/dataform/example_dataform.py
similarity index 98%
rename from tests/system/providers/google/cloud/dataform/example_dataform.py
rename to providers/tests/system/google/cloud/dataform/example_dataform.py
index 3f2ad987f233..b61247a87700 100644
--- a/tests/system/providers/google/cloud/dataform/example_dataform.py
+++ b/providers/tests/system/google/cloud/dataform/example_dataform.py
@@ -48,7 +48,8 @@
 from airflow.providers.google.cloud.sensors.dataform import DataformWorkflowInvocationStateSensor
 from airflow.providers.google.cloud.utils.dataform import make_initialization_workspace_flow
 from airflow.utils.trigger_rule import TriggerRule
-from tests.system.providers.google import DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID
+
+from providers.tests.system.google import DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID

 ENV_ID = os.environ.get("SYSTEM_TESTS_ENV_ID", "default")
 PROJECT_ID = os.environ.get("SYSTEM_TESTS_GCP_PROJECT") or DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID
@@ -327,13 +328,13 @@
     # ### Everything below this line is not part of example ###
     # ### Just for system tests purpose ###
-    from tests.system.utils.watcher import watcher
+    from dev.tests_common.test_utils.watcher import watcher

     # This test needs watcher in order to properly mark success/failure
     # when "tearDown" task with trigger rule is part of the DAG
     list(dag.tasks) >> watcher()


-from tests.system.utils import get_test_run  # noqa: E402
+from dev.tests_common.test_utils.system_tests import get_test_run  # noqa: E402

 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest)
 test_run = get_test_run(dag)
diff --git a/tests/system/providers/google/cloud/bigquery/__init__.py b/providers/tests/system/google/cloud/datafusion/__init__.py
similarity index 100%
rename from tests/system/providers/google/cloud/bigquery/__init__.py
rename to providers/tests/system/google/cloud/datafusion/__init__.py
diff --git a/tests/system/providers/google/cloud/datafusion/example_datafusion.py b/providers/tests/system/google/cloud/datafusion/example_datafusion.py
similarity index 98%
rename from tests/system/providers/google/cloud/datafusion/example_datafusion.py
rename to providers/tests/system/google/cloud/datafusion/example_datafusion.py
index d206c2260ee2..a61fbbd01da9 100644
--- a/tests/system/providers/google/cloud/datafusion/example_datafusion.py
+++ b/providers/tests/system/google/cloud/datafusion/example_datafusion.py
@@ -41,7 +41,8 @@
 from airflow.providers.google.cloud.operators.gcs import GCSCreateBucketOperator, GCSDeleteBucketOperator
 from airflow.providers.google.cloud.sensors.datafusion import CloudDataFusionPipelineStateSensor
 from airflow.utils.trigger_rule import TriggerRule
-from tests.system.providers.google import DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID
+
+from providers.tests.system.google import DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID

 # [START howto_data_fusion_env_variables]
 SERVICE_ACCOUNT = os.environ.get("GCP_DATAFUSION_SERVICE_ACCOUNT")
@@ -339,13 +340,13 @@ def get_artifacts_versions(ti=None):
         >> [delete_bucket1, delete_bucket2]
     )

-    from tests.system.utils.watcher import watcher
+    from dev.tests_common.test_utils.watcher import watcher

     # This test needs watcher in order to properly mark success/failure
     # when "tearDown" task with trigger rule is part of the DAG
     list(dag.tasks) >> watcher()

-from tests.system.utils import get_test_run  # noqa: E402
+from dev.tests_common.test_utils.system_tests import get_test_run  # noqa: E402

 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest)
 test_run = get_test_run(dag)
diff --git a/tests/system/providers/google/cloud/bigquery/resources/__init__.py b/providers/tests/system/google/cloud/datapipelines/__init__.py
similarity index 100%
rename from tests/system/providers/google/cloud/bigquery/resources/__init__.py
rename to providers/tests/system/google/cloud/datapipelines/__init__.py
diff --git a/tests/system/providers/google/cloud/datapipelines/example_datapipeline.py b/providers/tests/system/google/cloud/datapipelines/example_datapipeline.py
similarity index 96%
rename from tests/system/providers/google/cloud/datapipelines/example_datapipeline.py
rename to providers/tests/system/google/cloud/datapipelines/example_datapipeline.py
index f13a52a7c673..91fb7eed26d2 100644
--- a/tests/system/providers/google/cloud/datapipelines/example_datapipeline.py
+++ b/providers/tests/system/google/cloud/datapipelines/example_datapipeline.py
@@ -37,7 +37,8 @@
     GCSSynchronizeBucketsOperator,
 )
 from airflow.utils.trigger_rule import TriggerRule
-from tests.system.providers.google import DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID
+
+from providers.tests.system.google import DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID

 DAG_ID = "datapipeline"
 ENV_ID = os.environ.get("SYSTEM_TESTS_ENV_ID", "default")
@@ -137,14 +138,14 @@
         >> delete_bucket
     )

-    from tests.system.utils.watcher import watcher
+    from dev.tests_common.test_utils.watcher import watcher

     # This test needs watcher in order to properly mark success/failure
     # when "teardown" task with trigger rule is part of the DAG
     list(dag.tasks) >> watcher()


-from tests.system.utils import get_test_run  # noqa: E402
+from dev.tests_common.test_utils.system_tests import get_test_run  # noqa: E402

 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest)
 test_run = get_test_run(dag)
diff --git a/tests/system/providers/google/cloud/bigtable/__init__.py b/providers/tests/system/google/cloud/dataplex/__init__.py
similarity index 100%
rename from tests/system/providers/google/cloud/bigtable/__init__.py
rename to providers/tests/system/google/cloud/dataplex/__init__.py
diff --git a/tests/system/providers/google/cloud/dataplex/example_dataplex.py b/providers/tests/system/google/cloud/dataplex/example_dataplex.py
similarity index 97%
rename from tests/system/providers/google/cloud/dataplex/example_dataplex.py
rename to providers/tests/system/google/cloud/dataplex/example_dataplex.py
index a0ac55e07fc9..34e9a0fd05c1 100644
--- a/tests/system/providers/google/cloud/dataplex/example_dataplex.py
+++ b/providers/tests/system/google/cloud/dataplex/example_dataplex.py
@@ -40,7 +40,8 @@
 )
 from airflow.providers.google.cloud.sensors.dataplex import DataplexTaskStateSensor
 from airflow.utils.trigger_rule import TriggerRule
-from tests.system.providers.google import DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID
+
+from providers.tests.system.google import DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID

 ENV_ID = os.environ.get("SYSTEM_TESTS_ENV_ID", "default")
 PROJECT_ID = os.environ.get("SYSTEM_TESTS_GCP_PROJECT") or DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID
@@ -205,14 +206,14 @@
         delete_bucket,
     )

-    from tests.system.utils.watcher import watcher
+    from dev.tests_common.test_utils.watcher import watcher

     # This test needs watcher in order to properly mark success/failure
     # when "tearDown" task with trigger rule is part of the DAG
     list(dag.tasks) >> watcher()


-from tests.system.utils import get_test_run  # noqa: E402
+from dev.tests_common.test_utils.system_tests import get_test_run  # noqa: E402

 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest)
 test_run = get_test_run(dag)
diff --git a/tests/system/providers/google/cloud/dataplex/example_dataplex_dp.py b/providers/tests/system/google/cloud/dataplex/example_dataplex_dp.py
similarity index 98%
rename from tests/system/providers/google/cloud/dataplex/example_dataplex_dp.py
rename to providers/tests/system/google/cloud/dataplex/example_dataplex_dp.py
index 3203a0d0cd3e..44f4af243592 100644
--- a/tests/system/providers/google/cloud/dataplex/example_dataplex_dp.py
+++ b/providers/tests/system/google/cloud/dataplex/example_dataplex_dp.py
@@ -50,7 +50,8 @@
 )
 from airflow.providers.google.cloud.sensors.dataplex import DataplexDataProfileJobStatusSensor
 from airflow.utils.trigger_rule import TriggerRule
-from tests.system.providers.google import DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID
+
+from providers.tests.system.google import DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID

 ENV_ID = os.environ.get("SYSTEM_TESTS_ENV_ID", "default")
 PROJECT_ID = os.environ.get("SYSTEM_TESTS_GCP_PROJECT") or DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID
@@ -328,14 +329,14 @@
         [delete_lake, delete_dataset],
     )

-    from tests.system.utils.watcher import watcher
+    from dev.tests_common.test_utils.watcher import watcher

     # This test needs watcher in order to properly mark success/failure
     # when "tearDown" task with trigger rule is part of the DAG
     list(dag.tasks) >> watcher()


-from tests.system.utils import get_test_run  # noqa: E402
+from dev.tests_common.test_utils.system_tests import get_test_run  # noqa: E402

 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest)
 test_run = get_test_run(dag)
diff --git a/tests/system/providers/google/cloud/dataplex/example_dataplex_dq.py b/providers/tests/system/google/cloud/dataplex/example_dataplex_dq.py
similarity index 98%
rename from tests/system/providers/google/cloud/dataplex/example_dataplex_dq.py
rename to providers/tests/system/google/cloud/dataplex/example_dataplex_dq.py
index 6255f5425f18..d31d574755c9 100644
--- a/tests/system/providers/google/cloud/dataplex/example_dataplex_dq.py
+++ b/providers/tests/system/google/cloud/dataplex/example_dataplex_dq.py
@@ -50,7 +50,8 @@
 )
 from airflow.providers.google.cloud.sensors.dataplex import DataplexDataQualityJobStatusSensor
 from airflow.utils.trigger_rule import TriggerRule
-from tests.system.providers.google import DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID
+
+from providers.tests.system.google import DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID

 ENV_ID = os.environ.get("SYSTEM_TESTS_ENV_ID", "default")
 PROJECT_ID = os.environ.get("SYSTEM_TESTS_GCP_PROJECT") or DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID
@@ -362,14 +363,14 @@
         [delete_lake, delete_dataset],
     )

-    from tests.system.utils.watcher import watcher
+    from dev.tests_common.test_utils.watcher import watcher

     # This test needs watcher in order to properly mark success/failure
     # when "tearDown" task with trigger rule is part of the DAG
     list(dag.tasks) >> watcher()


-from tests.system.utils import get_test_run  # noqa: E402
+from dev.tests_common.test_utils.system_tests import get_test_run  # noqa: E402

 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest)
 test_run = get_test_run(dag)
diff --git a/tests/system/providers/google/cloud/cloud_batch/__init__.py b/providers/tests/system/google/cloud/dataprep/__init__.py
similarity index 100%
rename from tests/system/providers/google/cloud/cloud_batch/__init__.py
rename to providers/tests/system/google/cloud/dataprep/__init__.py
diff --git a/tests/system/providers/google/cloud/dataprep/example_dataprep.py b/providers/tests/system/google/cloud/dataprep/example_dataprep.py
similarity index 98%
rename from tests/system/providers/google/cloud/dataprep/example_dataprep.py
rename to providers/tests/system/google/cloud/dataprep/example_dataprep.py
index 357328505519..9f603f43fb1b 100644
--- a/tests/system/providers/google/cloud/dataprep/example_dataprep.py
+++ b/providers/tests/system/google/cloud/dataprep/example_dataprep.py
@@ -47,7 +47,8 @@
 from airflow.providers.google.cloud.sensors.dataprep import DataprepJobGroupIsFinishedSensor
 from airflow.settings import Session
 from airflow.utils.trigger_rule import TriggerRule
-from tests.system.providers.google import DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID
+
+from providers.tests.system.google import DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID

 ENV_ID = os.environ.get("SYSTEM_TESTS_ENV_ID", "default")
 DAG_ID = "dataprep"
@@ -306,13 +307,13 @@ def delete_connection(connection_id: str) -> None:
         [delete_bucket_task, delete_connection_task],
     )

-    from tests.system.utils.watcher import watcher
+    from dev.tests_common.test_utils.watcher import watcher

     # This test needs watcher in order to properly mark success/failure
     # when "tearDown" task with trigger rule is part of the DAG
     list(dag.tasks) >> watcher()

-from tests.system.utils import get_test_run  # noqa: E402
+from dev.tests_common.test_utils.system_tests import get_test_run  # noqa: E402

 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest)
 test_run = get_test_run(dag)
diff --git a/tests/system/providers/google/cloud/cloud_build/__init__.py b/providers/tests/system/google/cloud/dataproc/__init__.py
similarity index 100%
rename from tests/system/providers/google/cloud/cloud_build/__init__.py
rename to providers/tests/system/google/cloud/dataproc/__init__.py
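Every Python system test in this sweep receives the same two-line substitution at the top of the module; for reference, a minimal sketch of the resulting header (taken directly from the hunks above; the `os` import and environment variables are the ones these example DAGs already use):

import os

# New import location after the move into the providers/ tree
from providers.tests.system.google import DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID

# Resolve the test environment and project exactly as the moved DAGs do
ENV_ID = os.environ.get("SYSTEM_TESTS_ENV_ID", "default")
PROJECT_ID = os.environ.get("SYSTEM_TESTS_GCP_PROJECT") or DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID
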
diff --git a/tests/system/providers/google/cloud/dataproc/example_dataproc_batch.py b/providers/tests/system/google/cloud/dataproc/example_dataproc_batch.py
similarity index 96%
rename from tests/system/providers/google/cloud/dataproc/example_dataproc_batch.py
rename to providers/tests/system/google/cloud/dataproc/example_dataproc_batch.py
index 6771a529d79e..54481f43ca76 100644
--- a/tests/system/providers/google/cloud/dataproc/example_dataproc_batch.py
+++ b/providers/tests/system/google/cloud/dataproc/example_dataproc_batch.py
@@ -35,7 +35,8 @@
 )
 from airflow.providers.google.cloud.sensors.dataproc import DataprocBatchSensor
 from airflow.utils.trigger_rule import TriggerRule
-from tests.system.providers.google import DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID
+
+from providers.tests.system.google import DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID

 ENV_ID = os.environ.get("SYSTEM_TESTS_ENV_ID", "default")
 PROJECT_ID = os.environ.get("SYSTEM_TESTS_GCP_PROJECT") or DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID
@@ -175,13 +176,13 @@
         >> delete_batch_4
     )

-    from tests.system.utils.watcher import watcher
+    from dev.tests_common.test_utils.watcher import watcher

     # This test needs watcher in order to properly mark success/failure
     # when "teardown" task with trigger rule is part of the DAG
     list(dag.tasks) >> watcher()

-from tests.system.utils import get_test_run  # noqa: E402
+from dev.tests_common.test_utils.system_tests import get_test_run  # noqa: E402

 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest)
 test_run = get_test_run(dag)
diff --git a/tests/system/providers/google/cloud/dataproc/example_dataproc_batch_deferrable.py b/providers/tests/system/google/cloud/dataproc/example_dataproc_batch_deferrable.py
similarity index 93%
rename from tests/system/providers/google/cloud/dataproc/example_dataproc_batch_deferrable.py
rename to providers/tests/system/google/cloud/dataproc/example_dataproc_batch_deferrable.py
index 557b8ce49060..79d3766995e3 100644
--- a/tests/system/providers/google/cloud/dataproc/example_dataproc_batch_deferrable.py
+++ b/providers/tests/system/google/cloud/dataproc/example_dataproc_batch_deferrable.py
@@ -34,7 +34,8 @@
     DataprocGetBatchOperator,
 )
 from airflow.utils.trigger_rule import TriggerRule
-from tests.system.providers.google import DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID
+
+from providers.tests.system.google import DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID

 ENV_ID = os.environ.get("SYSTEM_TESTS_ENV_ID", "default")
 DAG_ID = "dataproc_batch_deferrable"
@@ -90,14 +91,14 @@
         >> delete_batch
     )

-    from tests.system.utils.watcher import watcher
+    from dev.tests_common.test_utils.watcher import watcher

     # This test needs watcher in order to properly mark success/failure
     # when "teardown" task with trigger rule is part of the DAG
     list(dag.tasks) >> watcher()


-from tests.system.utils import get_test_run  # noqa: E402
+from dev.tests_common.test_utils.system_tests import get_test_run  # noqa: E402

 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest)
 test_run = get_test_run(dag)
diff --git a/tests/system/providers/google/cloud/dataproc/example_dataproc_batch_persistent.py b/providers/tests/system/google/cloud/dataproc/example_dataproc_batch_persistent.py
similarity index 96%
rename from tests/system/providers/google/cloud/dataproc/example_dataproc_batch_persistent.py
rename to providers/tests/system/google/cloud/dataproc/example_dataproc_batch_persistent.py
index fb592b0cdb6f..a8ed6b13de55 100644
--- a/tests/system/providers/google/cloud/dataproc/example_dataproc_batch_persistent.py
+++ b/providers/tests/system/google/cloud/dataproc/example_dataproc_batch_persistent.py
@@ -35,7 +35,8 @@
 )
 from airflow.providers.google.cloud.operators.gcs import GCSCreateBucketOperator, GCSDeleteBucketOperator
 from airflow.utils.trigger_rule import TriggerRule
-from tests.system.providers.google import DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID
+
+from providers.tests.system.google import DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID

 ENV_ID = os.environ.get("SYSTEM_TESTS_ENV_ID", "default")
 DAG_ID = "dataproc_batch_ps"
@@ -140,14 +141,14 @@
         >> delete_bucket
     )

-    from tests.system.utils.watcher import watcher
+    from dev.tests_common.test_utils.watcher import watcher

     # This test needs watcher in order to properly mark success/failure
     # when "teardown" task with trigger rule is part of the DAG
     list(dag.tasks) >> watcher()


-from tests.system.utils import get_test_run  # noqa: E402
+from dev.tests_common.test_utils.system_tests import get_test_run  # noqa: E402

 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest)
 test_run = get_test_run(dag)
diff --git a/tests/system/providers/google/cloud/dataproc/example_dataproc_cluster_create_existing_stopped_cluster.py b/providers/tests/system/google/cloud/dataproc/example_dataproc_cluster_create_existing_stopped_cluster.py
similarity index 95%
rename from tests/system/providers/google/cloud/dataproc/example_dataproc_cluster_create_existing_stopped_cluster.py
rename to providers/tests/system/google/cloud/dataproc/example_dataproc_cluster_create_existing_stopped_cluster.py
index ef57500639dc..0eb27762c2f9 100644
--- a/tests/system/providers/google/cloud/dataproc/example_dataproc_cluster_create_existing_stopped_cluster.py
+++ b/providers/tests/system/google/cloud/dataproc/example_dataproc_cluster_create_existing_stopped_cluster.py
@@ -34,7 +34,8 @@
     DataprocStopClusterOperator,
 )
 from airflow.utils.trigger_rule import TriggerRule
-from tests.system.providers.google import DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID
+
+from providers.tests.system.google import DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID

 DAG_ID = "dataproc_create_existing_stopped_cluster"

@@ -120,13 +121,13 @@
         >> delete_cluster
     )

-    from tests.system.utils.watcher import watcher
+    from dev.tests_common.test_utils.watcher import watcher

     # This test needs watcher in order to properly mark success/failure
     # when "teardown" task with trigger rule is part of the DAG
     list(dag.tasks) >> watcher()

-from tests.system.utils import get_test_run  # noqa: E402
+from dev.tests_common.test_utils.system_tests import get_test_run  # noqa: E402

 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest)
 test_run = get_test_run(dag)
diff --git a/tests/system/providers/google/cloud/dataproc/example_dataproc_cluster_deferrable.py b/providers/tests/system/google/cloud/dataproc/example_dataproc_cluster_deferrable.py
similarity index 95%
rename from tests/system/providers/google/cloud/dataproc/example_dataproc_cluster_deferrable.py
rename to providers/tests/system/google/cloud/dataproc/example_dataproc_cluster_deferrable.py
index 3ff91d95a95b..8ae262dbf624 100644
--- a/tests/system/providers/google/cloud/dataproc/example_dataproc_cluster_deferrable.py
+++ b/providers/tests/system/google/cloud/dataproc/example_dataproc_cluster_deferrable.py
@@ -33,7 +33,8 @@
     DataprocUpdateClusterOperator,
 )
 from airflow.utils.trigger_rule import TriggerRule
-from tests.system.providers.google import DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID
+
+from providers.tests.system.google import DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID

 ENV_ID = os.environ.get("SYSTEM_TESTS_ENV_ID", "default")
 DAG_ID = "dataproc_cluster_def"
@@ -136,14 +137,14 @@
         >> delete_cluster
     )

-    from tests.system.utils.watcher import watcher
+    from dev.tests_common.test_utils.watcher import watcher

     # This test needs watcher in order to properly mark success/failure
     # when "teardown" task with trigger rule is part of the DAG
     list(dag.tasks) >> watcher()


-from tests.system.utils import get_test_run  # noqa: E402
+from dev.tests_common.test_utils.system_tests import get_test_run  # noqa: E402

 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest)
 test_run = get_test_run(dag)
diff --git a/tests/system/providers/google/cloud/dataproc/example_dataproc_cluster_diagnose.py b/providers/tests/system/google/cloud/dataproc/example_dataproc_cluster_diagnose.py
similarity index 95%
rename from tests/system/providers/google/cloud/dataproc/example_dataproc_cluster_diagnose.py
rename to providers/tests/system/google/cloud/dataproc/example_dataproc_cluster_diagnose.py
index 1d94f688996f..3eacc905bc70 100644
--- a/tests/system/providers/google/cloud/dataproc/example_dataproc_cluster_diagnose.py
+++ b/providers/tests/system/google/cloud/dataproc/example_dataproc_cluster_diagnose.py
@@ -33,7 +33,8 @@
     DataprocDiagnoseClusterOperator,
 )
 from airflow.utils.trigger_rule import TriggerRule
-from tests.system.providers.google import DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID
+
+from providers.tests.system.google import DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID

 ENV_ID = os.environ.get("SYSTEM_TESTS_ENV_ID", "default")
 DAG_ID = "dataproc_diagnose_cluster"
@@ -114,14 +115,14 @@
         >> delete_cluster
     )

-    from tests.system.utils.watcher import watcher
+    from dev.tests_common.test_utils.watcher import watcher

     # This test needs watcher in order to properly mark success/failure
     # when "teardown" task with trigger rule is part of the DAG
     list(dag.tasks) >> watcher()


-from tests.system.utils import get_test_run  # noqa: E402
+from dev.tests_common.test_utils.system_tests import get_test_run  # noqa: E402

 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest)
 test_run = get_test_run(dag)
diff --git a/tests/system/providers/google/cloud/dataproc/example_dataproc_cluster_generator.py b/providers/tests/system/google/cloud/dataproc/example_dataproc_cluster_generator.py
similarity index 95%
rename from tests/system/providers/google/cloud/dataproc/example_dataproc_cluster_generator.py
rename to providers/tests/system/google/cloud/dataproc/example_dataproc_cluster_generator.py
index 19e2300e28ea..f990363c4800 100644
--- a/tests/system/providers/google/cloud/dataproc/example_dataproc_cluster_generator.py
+++ b/providers/tests/system/google/cloud/dataproc/example_dataproc_cluster_generator.py
@@ -39,7 +39,8 @@
     GCSSynchronizeBucketsOperator,
 )
 from airflow.utils.trigger_rule import TriggerRule
-from tests.system.providers.google import DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID
+
+from providers.tests.system.google import DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID

 ENV_ID = os.environ.get("SYSTEM_TESTS_ENV_ID", "default")
 DAG_ID = "dataproc_cluster_generation"
@@ -134,14 +135,14 @@
         >> [delete_cluster, delete_bucket]
     )

-    from tests.system.utils.watcher import watcher
+    from dev.tests_common.test_utils.watcher import watcher

     # This test needs watcher in order to properly mark success/failure
     # when "teardown" task with trigger rule is part of the DAG
     list(dag.tasks) >> watcher()


-from tests.system.utils import get_test_run  # noqa: E402
+from dev.tests_common.test_utils.system_tests import get_test_run  # noqa: E402

 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest)
 test_run = get_test_run(dag)
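The tail of every moved DAG keeps the same watcher/get_test_run boilerplate, only with its imports repointed at dev.tests_common; a representative sketch of that tail assembled from the hunks in this section (it assumes a `dag` object bound by the enclosing `with DAG(...)` block, as in all of these files):

    # New watcher location after the move (old: tests.system.utils.watcher)
    from dev.tests_common.test_utils.watcher import watcher

    # This test needs watcher in order to properly mark success/failure
    # when "teardown" task with trigger rule is part of the DAG
    list(dag.tasks) >> watcher()


# New get_test_run location (old: tests.system.utils)
from dev.tests_common.test_utils.system_tests import get_test_run  # noqa: E402

# Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest)
test_run = get_test_run(dag)
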
test needs watcher in order to properly mark success/failure # when "teardown" task with trigger rule is part of the DAG list(dag.tasks) >> watcher() -from tests.system.utils import get_test_run # noqa: E402 +from dev.tests_common.test_utils.system_tests import get_test_run # noqa: E402 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) test_run = get_test_run(dag) diff --git a/tests/system/providers/google/cloud/dataproc/example_dataproc_cluster_start_stop.py b/providers/tests/system/google/cloud/dataproc/example_dataproc_cluster_start_stop.py similarity index 95% rename from tests/system/providers/google/cloud/dataproc/example_dataproc_cluster_start_stop.py rename to providers/tests/system/google/cloud/dataproc/example_dataproc_cluster_start_stop.py index 7759ddd098f0..2e4b698573bf 100644 --- a/tests/system/providers/google/cloud/dataproc/example_dataproc_cluster_start_stop.py +++ b/providers/tests/system/google/cloud/dataproc/example_dataproc_cluster_start_stop.py @@ -34,7 +34,8 @@ DataprocStopClusterOperator, ) from airflow.utils.trigger_rule import TriggerRule -from tests.system.providers.google import DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID + +from providers.tests.system.google import DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID DAG_ID = "dataproc_cluster_start_stop" @@ -110,13 +111,13 @@ >> delete_cluster ) - from tests.system.utils.watcher import watcher + from dev.tests_common.test_utils.watcher import watcher # This test needs watcher in order to properly mark success/failure # when "teardown" task with trigger rule is part of the DAG list(dag.tasks) >> watcher() -from tests.system.utils import get_test_run # noqa: E402 +from dev.tests_common.test_utils.system_tests import get_test_run # noqa: E402 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) test_run = get_test_run(dag) diff --git a/tests/system/providers/google/cloud/dataproc/example_dataproc_cluster_update.py b/providers/tests/system/google/cloud/dataproc/example_dataproc_cluster_update.py similarity index 95% rename from tests/system/providers/google/cloud/dataproc/example_dataproc_cluster_update.py rename to providers/tests/system/google/cloud/dataproc/example_dataproc_cluster_update.py index ea7725209f80..7ed0061d9947 100644 --- a/tests/system/providers/google/cloud/dataproc/example_dataproc_cluster_update.py +++ b/providers/tests/system/google/cloud/dataproc/example_dataproc_cluster_update.py @@ -33,7 +33,8 @@ DataprocUpdateClusterOperator, ) from airflow.utils.trigger_rule import TriggerRule -from tests.system.providers.google import DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID + +from providers.tests.system.google import DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID ENV_ID = os.environ.get("SYSTEM_TESTS_ENV_ID", "default") DAG_ID = "dataproc_update" @@ -119,14 +120,14 @@ >> delete_cluster ) - from tests.system.utils.watcher import watcher + from dev.tests_common.test_utils.watcher import watcher # This test needs watcher in order to properly mark success/failure # when "teardown" task with trigger rule is part of the DAG list(dag.tasks) >> watcher() -from tests.system.utils import get_test_run # noqa: E402 +from dev.tests_common.test_utils.system_tests import get_test_run # noqa: E402 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) test_run = get_test_run(dag) diff --git a/tests/system/providers/google/cloud/dataproc/example_dataproc_flink.py b/providers/tests/system/google/cloud/dataproc/example_dataproc_flink.py similarity index 95% 
rename from tests/system/providers/google/cloud/dataproc/example_dataproc_flink.py rename to providers/tests/system/google/cloud/dataproc/example_dataproc_flink.py index 2eb6d4c4bdf4..ce1a6fc4451e 100644 --- a/tests/system/providers/google/cloud/dataproc/example_dataproc_flink.py +++ b/providers/tests/system/google/cloud/dataproc/example_dataproc_flink.py @@ -34,7 +34,8 @@ ) from airflow.providers.google.cloud.operators.gcs import GCSCreateBucketOperator, GCSDeleteBucketOperator from airflow.utils.trigger_rule import TriggerRule -from tests.system.providers.google import DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID + +from providers.tests.system.google import DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID ENV_ID = os.environ.get("SYSTEM_TESTS_ENV_ID", "default") DAG_ID = "dataproc_flink" @@ -123,14 +124,14 @@ >> [delete_cluster, delete_bucket] ) - from tests.system.utils.watcher import watcher + from dev.tests_common.test_utils.watcher import watcher # This test needs watcher in order to properly mark success/failure # when "teardown" task with trigger rule is part of the DAG list(dag.tasks) >> watcher() -from tests.system.utils import get_test_run # noqa: E402 +from dev.tests_common.test_utils.system_tests import get_test_run # noqa: E402 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) test_run = get_test_run(dag) diff --git a/tests/system/providers/google/cloud/dataproc/example_dataproc_gke.py b/providers/tests/system/google/cloud/dataproc/example_dataproc_gke.py similarity index 96% rename from tests/system/providers/google/cloud/dataproc/example_dataproc_gke.py rename to providers/tests/system/google/cloud/dataproc/example_dataproc_gke.py index becf273784ab..bb1044da6241 100644 --- a/tests/system/providers/google/cloud/dataproc/example_dataproc_gke.py +++ b/providers/tests/system/google/cloud/dataproc/example_dataproc_gke.py @@ -43,7 +43,8 @@ GKEDeleteClusterOperator, ) from airflow.utils.trigger_rule import TriggerRule -from tests.system.providers.google import DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID + +from providers.tests.system.google import DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID ENV_ID = os.environ.get("SYSTEM_TESTS_ENV_ID", "default") DAG_ID = "dataproc_gke" @@ -143,13 +144,13 @@ >> delete_gke_cluster ) - from tests.system.utils.watcher import watcher + from dev.tests_common.test_utils.watcher import watcher # This test needs watcher in order to properly mark success/failure # when "teardown" task with trigger rule is part of the DAG list(dag.tasks) >> watcher() -from tests.system.utils import get_test_run # noqa: E402 +from dev.tests_common.test_utils.system_tests import get_test_run # noqa: E402 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) test_run = get_test_run(dag) diff --git a/tests/system/providers/google/cloud/dataproc/example_dataproc_hadoop.py b/providers/tests/system/google/cloud/dataproc/example_dataproc_hadoop.py similarity index 95% rename from tests/system/providers/google/cloud/dataproc/example_dataproc_hadoop.py rename to providers/tests/system/google/cloud/dataproc/example_dataproc_hadoop.py index 23ddd6eb581b..c1f5423041fa 100644 --- a/tests/system/providers/google/cloud/dataproc/example_dataproc_hadoop.py +++ b/providers/tests/system/google/cloud/dataproc/example_dataproc_hadoop.py @@ -34,7 +34,8 @@ ) from airflow.providers.google.cloud.operators.gcs import GCSCreateBucketOperator, GCSDeleteBucketOperator from airflow.utils.trigger_rule import TriggerRule -from tests.system.providers.google import 
DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID + +from providers.tests.system.google import DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID ENV_ID = os.environ.get("SYSTEM_TESTS_ENV_ID", "default") DAG_ID = "dataproc_hadoop" @@ -122,14 +123,14 @@ >> [delete_cluster, delete_bucket] ) - from tests.system.utils.watcher import watcher + from dev.tests_common.test_utils.watcher import watcher # This test needs watcher in order to properly mark success/failure # when "teardown" task with trigger rule is part of the DAG list(dag.tasks) >> watcher() -from tests.system.utils import get_test_run # noqa: E402 +from dev.tests_common.test_utils.system_tests import get_test_run # noqa: E402 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) test_run = get_test_run(dag) diff --git a/tests/system/providers/google/cloud/dataproc/example_dataproc_hive.py b/providers/tests/system/google/cloud/dataproc/example_dataproc_hive.py similarity index 95% rename from tests/system/providers/google/cloud/dataproc/example_dataproc_hive.py rename to providers/tests/system/google/cloud/dataproc/example_dataproc_hive.py index 193d790a5282..6aadceb552f6 100644 --- a/tests/system/providers/google/cloud/dataproc/example_dataproc_hive.py +++ b/providers/tests/system/google/cloud/dataproc/example_dataproc_hive.py @@ -33,7 +33,8 @@ DataprocSubmitJobOperator, ) from airflow.utils.trigger_rule import TriggerRule -from tests.system.providers.google import DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID + +from providers.tests.system.google import DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID ENV_ID = os.environ.get("SYSTEM_TESTS_ENV_ID", "default") DAG_ID = "dataproc_hive" @@ -123,14 +124,14 @@ >> delete_cluster ) - from tests.system.utils.watcher import watcher + from dev.tests_common.test_utils.watcher import watcher # This test needs watcher in order to properly mark success/failure # when "teardown" task with trigger rule is part of the DAG list(dag.tasks) >> watcher() -from tests.system.utils import get_test_run # noqa: E402 +from dev.tests_common.test_utils.system_tests import get_test_run # noqa: E402 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) test_run = get_test_run(dag) diff --git a/tests/system/providers/google/cloud/dataproc/example_dataproc_pig.py b/providers/tests/system/google/cloud/dataproc/example_dataproc_pig.py similarity index 94% rename from tests/system/providers/google/cloud/dataproc/example_dataproc_pig.py rename to providers/tests/system/google/cloud/dataproc/example_dataproc_pig.py index 344adaeca2e6..762ca2bc73fd 100644 --- a/tests/system/providers/google/cloud/dataproc/example_dataproc_pig.py +++ b/providers/tests/system/google/cloud/dataproc/example_dataproc_pig.py @@ -33,7 +33,8 @@ DataprocSubmitJobOperator, ) from airflow.utils.trigger_rule import TriggerRule -from tests.system.providers.google import DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID + +from providers.tests.system.google import DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID DAG_ID = "dataproc_pig" ENV_ID = os.environ.get("SYSTEM_TESTS_ENV_ID", "default") @@ -107,14 +108,14 @@ >> delete_cluster ) - from tests.system.utils.watcher import watcher + from dev.tests_common.test_utils.watcher import watcher # This test needs watcher in order to properly mark success/failure # when "teardown" task with trigger rule is part of the DAG list(dag.tasks) >> watcher() -from tests.system.utils import get_test_run # noqa: E402 +from dev.tests_common.test_utils.system_tests import get_test_run # noqa: E402 # Needed to run the example DAG with 
pytest (see: tests/system/README.md#run_via_pytest) test_run = get_test_run(dag) diff --git a/tests/system/providers/google/cloud/dataproc/example_dataproc_presto.py b/providers/tests/system/google/cloud/dataproc/example_dataproc_presto.py similarity index 94% rename from tests/system/providers/google/cloud/dataproc/example_dataproc_presto.py rename to providers/tests/system/google/cloud/dataproc/example_dataproc_presto.py index 224dfc3db5b1..f5bdf25732a5 100644 --- a/tests/system/providers/google/cloud/dataproc/example_dataproc_presto.py +++ b/providers/tests/system/google/cloud/dataproc/example_dataproc_presto.py @@ -33,7 +33,8 @@ DataprocSubmitJobOperator, ) from airflow.utils.trigger_rule import TriggerRule -from tests.system.providers.google import DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID + +from providers.tests.system.google import DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID ENV_ID = os.environ.get("SYSTEM_TESTS_ENV_ID", "default") DAG_ID = "dataproc_presto" @@ -114,14 +115,14 @@ >> delete_cluster ) - from tests.system.utils.watcher import watcher + from dev.tests_common.test_utils.watcher import watcher # This test needs watcher in order to properly mark success/failure # when "teardown" task with trigger rule is part of the DAG list(dag.tasks) >> watcher() -from tests.system.utils import get_test_run # noqa: E402 +from dev.tests_common.test_utils.system_tests import get_test_run # noqa: E402 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) test_run = get_test_run(dag) diff --git a/tests/system/providers/google/cloud/dataproc/example_dataproc_pyspark.py b/providers/tests/system/google/cloud/dataproc/example_dataproc_pyspark.py similarity index 95% rename from tests/system/providers/google/cloud/dataproc/example_dataproc_pyspark.py rename to providers/tests/system/google/cloud/dataproc/example_dataproc_pyspark.py index 44809b283767..71e7245b7d72 100644 --- a/tests/system/providers/google/cloud/dataproc/example_dataproc_pyspark.py +++ b/providers/tests/system/google/cloud/dataproc/example_dataproc_pyspark.py @@ -38,7 +38,8 @@ GCSSynchronizeBucketsOperator, ) from airflow.utils.trigger_rule import TriggerRule -from tests.system.providers.google import DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID + +from providers.tests.system.google import DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID ENV_ID = os.environ.get("SYSTEM_TESTS_ENV_ID", "default") DAG_ID = "dataproc_pyspark" @@ -139,14 +140,14 @@ >> delete_bucket ) - from tests.system.utils.watcher import watcher + from dev.tests_common.test_utils.watcher import watcher # This test needs watcher in order to properly mark success/failure # when "teardown" task with trigger rule is part of the DAG list(dag.tasks) >> watcher() -from tests.system.utils import get_test_run # noqa: E402 +from dev.tests_common.test_utils.system_tests import get_test_run # noqa: E402 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) test_run = get_test_run(dag) diff --git a/tests/system/providers/google/cloud/dataproc/example_dataproc_spark.py b/providers/tests/system/google/cloud/dataproc/example_dataproc_spark.py similarity index 94% rename from tests/system/providers/google/cloud/dataproc/example_dataproc_spark.py rename to providers/tests/system/google/cloud/dataproc/example_dataproc_spark.py index ca76810cb43e..e43a56d230bf 100644 --- a/tests/system/providers/google/cloud/dataproc/example_dataproc_spark.py +++ b/providers/tests/system/google/cloud/dataproc/example_dataproc_spark.py @@ -33,7 +33,8 @@ 
DataprocSubmitJobOperator, ) from airflow.utils.trigger_rule import TriggerRule -from tests.system.providers.google import DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID + +from providers.tests.system.google import DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID ENV_ID = os.environ.get("SYSTEM_TESTS_ENV_ID", "default") DAG_ID = "dataproc_spark" @@ -110,14 +111,14 @@ >> delete_cluster ) - from tests.system.utils.watcher import watcher + from dev.tests_common.test_utils.watcher import watcher # This test needs watcher in order to properly mark success/failure # when "teardown" task with trigger rule is part of the DAG list(dag.tasks) >> watcher() -from tests.system.utils import get_test_run # noqa: E402 +from dev.tests_common.test_utils.system_tests import get_test_run # noqa: E402 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) test_run = get_test_run(dag) diff --git a/tests/system/providers/google/cloud/dataproc/example_dataproc_spark_async.py b/providers/tests/system/google/cloud/dataproc/example_dataproc_spark_async.py similarity index 95% rename from tests/system/providers/google/cloud/dataproc/example_dataproc_spark_async.py rename to providers/tests/system/google/cloud/dataproc/example_dataproc_spark_async.py index 2ba8cc512f73..475e9912e856 100644 --- a/tests/system/providers/google/cloud/dataproc/example_dataproc_spark_async.py +++ b/providers/tests/system/google/cloud/dataproc/example_dataproc_spark_async.py @@ -34,7 +34,8 @@ ) from airflow.providers.google.cloud.sensors.dataproc import DataprocJobSensor from airflow.utils.trigger_rule import TriggerRule -from tests.system.providers.google import DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID + +from providers.tests.system.google import DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID ENV_ID = os.environ.get("SYSTEM_TESTS_ENV_ID", "default") DAG_ID = "dataproc_spark_async" @@ -120,14 +121,14 @@ >> delete_cluster ) - from tests.system.utils.watcher import watcher + from dev.tests_common.test_utils.watcher import watcher # This test needs watcher in order to properly mark success/failure # when "teardown" task with trigger rule is part of the DAG list(dag.tasks) >> watcher() -from tests.system.utils import get_test_run # noqa: E402 +from dev.tests_common.test_utils.system_tests import get_test_run # noqa: E402 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) test_run = get_test_run(dag) diff --git a/tests/system/providers/google/cloud/dataproc/example_dataproc_spark_deferrable.py b/providers/tests/system/google/cloud/dataproc/example_dataproc_spark_deferrable.py similarity index 94% rename from tests/system/providers/google/cloud/dataproc/example_dataproc_spark_deferrable.py rename to providers/tests/system/google/cloud/dataproc/example_dataproc_spark_deferrable.py index cee74bb819b9..5e6d0b773af6 100644 --- a/tests/system/providers/google/cloud/dataproc/example_dataproc_spark_deferrable.py +++ b/providers/tests/system/google/cloud/dataproc/example_dataproc_spark_deferrable.py @@ -34,7 +34,8 @@ DataprocSubmitJobOperator, ) from airflow.utils.trigger_rule import TriggerRule -from tests.system.providers.google import DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID + +from providers.tests.system.google import DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID ENV_ID = os.environ.get("SYSTEM_TESTS_ENV_ID", "default") DAG_ID = "dataproc_spark_deferrable" @@ -111,14 +112,14 @@ >> delete_cluster ) - from tests.system.utils.watcher import watcher + from dev.tests_common.test_utils.watcher import watcher # This test needs watcher in order to 
properly mark success/failure # when "teardown" task with trigger rule is part of the DAG list(dag.tasks) >> watcher() -from tests.system.utils import get_test_run # noqa: E402 +from dev.tests_common.test_utils.system_tests import get_test_run # noqa: E402 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) test_run = get_test_run(dag) diff --git a/tests/system/providers/google/cloud/dataproc/example_dataproc_spark_sql.py b/providers/tests/system/google/cloud/dataproc/example_dataproc_spark_sql.py similarity index 94% rename from tests/system/providers/google/cloud/dataproc/example_dataproc_spark_sql.py rename to providers/tests/system/google/cloud/dataproc/example_dataproc_spark_sql.py index dc446ad332e1..aef860a0e545 100644 --- a/tests/system/providers/google/cloud/dataproc/example_dataproc_spark_sql.py +++ b/providers/tests/system/google/cloud/dataproc/example_dataproc_spark_sql.py @@ -33,7 +33,8 @@ DataprocSubmitJobOperator, ) from airflow.utils.trigger_rule import TriggerRule -from tests.system.providers.google import DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID + +from providers.tests.system.google import DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID ENV_ID = os.environ.get("SYSTEM_TESTS_ENV_ID", "default") DAG_ID = "dataproc_spark_sql" @@ -107,14 +108,14 @@ >> delete_cluster ) - from tests.system.utils.watcher import watcher + from dev.tests_common.test_utils.watcher import watcher # This test needs watcher in order to properly mark success/failure # when "teardown" task with trigger rule is part of the DAG list(dag.tasks) >> watcher() -from tests.system.utils import get_test_run # noqa: E402 +from dev.tests_common.test_utils.system_tests import get_test_run # noqa: E402 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) test_run = get_test_run(dag) diff --git a/tests/system/providers/google/cloud/dataproc/example_dataproc_sparkr.py b/providers/tests/system/google/cloud/dataproc/example_dataproc_sparkr.py similarity index 95% rename from tests/system/providers/google/cloud/dataproc/example_dataproc_sparkr.py rename to providers/tests/system/google/cloud/dataproc/example_dataproc_sparkr.py index 1468edf05ece..7b416ade3864 100644 --- a/tests/system/providers/google/cloud/dataproc/example_dataproc_sparkr.py +++ b/providers/tests/system/google/cloud/dataproc/example_dataproc_sparkr.py @@ -38,7 +38,8 @@ GCSSynchronizeBucketsOperator, ) from airflow.utils.trigger_rule import TriggerRule -from tests.system.providers.google import DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID + +from providers.tests.system.google import DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID ENV_ID = os.environ.get("SYSTEM_TESTS_ENV_ID", "default") DAG_ID = "dataproc_sparkr" @@ -135,14 +136,14 @@ >> delete_bucket ) - from tests.system.utils.watcher import watcher + from dev.tests_common.test_utils.watcher import watcher # This test needs watcher in order to properly mark success/failure # when "teardown" task with trigger rule is part of the DAG list(dag.tasks) >> watcher() -from tests.system.utils import get_test_run # noqa: E402 +from dev.tests_common.test_utils.system_tests import get_test_run # noqa: E402 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) test_run = get_test_run(dag) diff --git a/tests/system/providers/google/cloud/dataproc/example_dataproc_trino.py b/providers/tests/system/google/cloud/dataproc/example_dataproc_trino.py similarity index 94% rename from tests/system/providers/google/cloud/dataproc/example_dataproc_trino.py 
rename to providers/tests/system/google/cloud/dataproc/example_dataproc_trino.py index 4631733cd0b4..d3f7f2a1a3a4 100644 --- a/tests/system/providers/google/cloud/dataproc/example_dataproc_trino.py +++ b/providers/tests/system/google/cloud/dataproc/example_dataproc_trino.py @@ -33,7 +33,8 @@ DataprocSubmitJobOperator, ) from airflow.utils.trigger_rule import TriggerRule -from tests.system.providers.google import DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID + +from providers.tests.system.google import DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID ENV_ID = os.environ.get("SYSTEM_TESTS_ENV_ID", "default") DAG_ID = "dataproc_trino" @@ -116,14 +117,14 @@ >> delete_cluster ) - from tests.system.utils.watcher import watcher + from dev.tests_common.test_utils.watcher import watcher # This test needs watcher in order to properly mark success/failure # when "teardown" task with trigger rule is part of the DAG list(dag.tasks) >> watcher() -from tests.system.utils import get_test_run # noqa: E402 +from dev.tests_common.test_utils.system_tests import get_test_run # noqa: E402 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) test_run = get_test_run(dag) diff --git a/tests/system/providers/google/cloud/dataproc/example_dataproc_workflow.py b/providers/tests/system/google/cloud/dataproc/example_dataproc_workflow.py similarity index 95% rename from tests/system/providers/google/cloud/dataproc/example_dataproc_workflow.py rename to providers/tests/system/google/cloud/dataproc/example_dataproc_workflow.py index 334826b56523..ab465a124d5f 100644 --- a/tests/system/providers/google/cloud/dataproc/example_dataproc_workflow.py +++ b/providers/tests/system/google/cloud/dataproc/example_dataproc_workflow.py @@ -29,7 +29,8 @@ DataprocInstantiateInlineWorkflowTemplateOperator, DataprocInstantiateWorkflowTemplateOperator, ) -from tests.system.providers.google import DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID + +from providers.tests.system.google import DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID ENV_ID = os.environ.get("SYSTEM_TESTS_ENV_ID", "default") DAG_ID = "dataproc_workflow" @@ -103,14 +104,14 @@ >> instantiate_inline_workflow_template ) - from tests.system.utils.watcher import watcher + from dev.tests_common.test_utils.watcher import watcher # This test needs watcher in order to properly mark success/failure # when "teardown" task with trigger rule is part of the DAG list(dag.tasks) >> watcher() -from tests.system.utils import get_test_run # noqa: E402 +from dev.tests_common.test_utils.system_tests import get_test_run # noqa: E402 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) test_run = get_test_run(dag) diff --git a/tests/system/providers/google/cloud/dataproc/example_dataproc_workflow_deferrable.py b/providers/tests/system/google/cloud/dataproc/example_dataproc_workflow_deferrable.py similarity index 95% rename from tests/system/providers/google/cloud/dataproc/example_dataproc_workflow_deferrable.py rename to providers/tests/system/google/cloud/dataproc/example_dataproc_workflow_deferrable.py index ec765250a332..e2319a35a124 100644 --- a/tests/system/providers/google/cloud/dataproc/example_dataproc_workflow_deferrable.py +++ b/providers/tests/system/google/cloud/dataproc/example_dataproc_workflow_deferrable.py @@ -29,7 +29,8 @@ DataprocInstantiateInlineWorkflowTemplateOperator, DataprocInstantiateWorkflowTemplateOperator, ) -from tests.system.providers.google import DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID + +from providers.tests.system.google import 
DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID ENV_ID = os.environ.get("SYSTEM_TESTS_ENV_ID", "default") DAG_ID = "dataproc_workflow_def" @@ -107,14 +108,14 @@ >> instantiate_inline_workflow_template_async ) - from tests.system.utils.watcher import watcher + from dev.tests_common.test_utils.watcher import watcher # This test needs watcher in order to properly mark success/failure # when "teardown" task with trigger rule is part of the DAG list(dag.tasks) >> watcher() -from tests.system.utils import get_test_run # noqa: E402 +from dev.tests_common.test_utils.system_tests import get_test_run # noqa: E402 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) test_run = get_test_run(dag) diff --git a/tests/system/providers/google/cloud/cloud_build/resources/__init__.py b/providers/tests/system/google/cloud/dataproc_metastore/__init__.py similarity index 100% rename from tests/system/providers/google/cloud/cloud_build/resources/__init__.py rename to providers/tests/system/google/cloud/dataproc_metastore/__init__.py diff --git a/tests/system/providers/google/cloud/dataproc_metastore/example_dataproc_metastore.py b/providers/tests/system/google/cloud/dataproc_metastore/example_dataproc_metastore.py similarity index 97% rename from tests/system/providers/google/cloud/dataproc_metastore/example_dataproc_metastore.py rename to providers/tests/system/google/cloud/dataproc_metastore/example_dataproc_metastore.py index 0d0c41f52fd9..ee3b5b70ad51 100644 --- a/tests/system/providers/google/cloud/dataproc_metastore/example_dataproc_metastore.py +++ b/providers/tests/system/google/cloud/dataproc_metastore/example_dataproc_metastore.py @@ -42,7 +42,8 @@ GCSSynchronizeBucketsOperator, ) from airflow.utils.trigger_rule import TriggerRule -from tests.system.providers.google import DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID + +from providers.tests.system.google import DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID DAG_ID = "dataproc_metastore" PROJECT_ID = os.environ.get("SYSTEM_TESTS_GCP_PROJECT") or DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID @@ -191,14 +192,14 @@ >> delete_bucket ) - from tests.system.utils.watcher import watcher + from dev.tests_common.test_utils.watcher import watcher # This test needs watcher in order to properly mark success/failure # when "teardown" task with trigger rule is part of the DAG list(dag.tasks) >> watcher() -from tests.system.utils import get_test_run # noqa: E402 +from dev.tests_common.test_utils.system_tests import get_test_run # noqa: E402 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) test_run = get_test_run(dag) diff --git a/tests/system/providers/google/cloud/dataproc_metastore/example_dataproc_metastore_backup.py b/providers/tests/system/google/cloud/dataproc_metastore/example_dataproc_metastore_backup.py similarity index 95% rename from tests/system/providers/google/cloud/dataproc_metastore/example_dataproc_metastore_backup.py rename to providers/tests/system/google/cloud/dataproc_metastore/example_dataproc_metastore_backup.py index 6a5a7566b342..12af45d21065 100644 --- a/tests/system/providers/google/cloud/dataproc_metastore/example_dataproc_metastore_backup.py +++ b/providers/tests/system/google/cloud/dataproc_metastore/example_dataproc_metastore_backup.py @@ -35,7 +35,8 @@ DataprocMetastoreRestoreServiceOperator, ) from airflow.utils.trigger_rule import TriggerRule -from tests.system.providers.google import DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID + +from providers.tests.system.google import DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID DAG_ID = 
"dataproc_metastore_backup" @@ -125,14 +126,14 @@ ) (create_service >> backup_service >> list_backups >> restore_service >> delete_backup >> delete_service) - from tests.system.utils.watcher import watcher + from dev.tests_common.test_utils.watcher import watcher # This test needs watcher in order to properly mark success/failure # when "teardown" task with trigger rule is part of the DAG list(dag.tasks) >> watcher() -from tests.system.utils import get_test_run # noqa: E402 +from dev.tests_common.test_utils.system_tests import get_test_run # noqa: E402 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) test_run = get_test_run(dag) diff --git a/tests/system/providers/google/cloud/dataproc_metastore/example_dataproc_metastore_hive_partition_sensor.py b/providers/tests/system/google/cloud/dataproc_metastore/example_dataproc_metastore_hive_partition_sensor.py similarity index 97% rename from tests/system/providers/google/cloud/dataproc_metastore/example_dataproc_metastore_hive_partition_sensor.py rename to providers/tests/system/google/cloud/dataproc_metastore/example_dataproc_metastore_hive_partition_sensor.py index 07677e11cbf3..6b06b868c73e 100644 --- a/tests/system/providers/google/cloud/dataproc_metastore/example_dataproc_metastore_hive_partition_sensor.py +++ b/providers/tests/system/google/cloud/dataproc_metastore/example_dataproc_metastore_hive_partition_sensor.py @@ -42,7 +42,8 @@ from airflow.providers.google.cloud.sensors.dataproc_metastore import MetastoreHivePartitionSensor from airflow.providers.google.cloud.transfers.gcs_to_gcs import GCSToGCSOperator from airflow.utils.trigger_rule import TriggerRule -from tests.system.providers.google import DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID + +from providers.tests.system.google import DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID DAG_ID = "hive_partition_sensor" PROJECT_ID = os.environ.get("SYSTEM_TESTS_GCP_PROJECT") or DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID @@ -231,14 +232,14 @@ def get_hive_warehouse_bucket(**kwargs): >> [delete_dataproc_cluster, delete_metastore_service, delete_warehouse_bucket] ) - from tests.system.utils.watcher import watcher + from dev.tests_common.test_utils.watcher import watcher # This test needs watcher in order to properly mark success/failure # when "teardown" task with trigger rule is part of the DAG list(dag.tasks) >> watcher() -from tests.system.utils import get_test_run # noqa: E402 +from dev.tests_common.test_utils.system_tests import get_test_run # noqa: E402 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) test_run = get_test_run(dag) diff --git a/tests/system/providers/google/cloud/cloud_functions/__init__.py b/providers/tests/system/google/cloud/datastore/__init__.py similarity index 100% rename from tests/system/providers/google/cloud/cloud_functions/__init__.py rename to providers/tests/system/google/cloud/datastore/__init__.py diff --git a/tests/system/providers/google/cloud/datastore/example_datastore_commit.py b/providers/tests/system/google/cloud/datastore/example_datastore_commit.py similarity index 96% rename from tests/system/providers/google/cloud/datastore/example_datastore_commit.py rename to providers/tests/system/google/cloud/datastore/example_datastore_commit.py index fc7eabfb183a..430d176ed733 100644 --- a/tests/system/providers/google/cloud/datastore/example_datastore_commit.py +++ b/providers/tests/system/google/cloud/datastore/example_datastore_commit.py @@ -38,7 +38,8 @@ ) from airflow.providers.google.cloud.operators.gcs 
import GCSCreateBucketOperator, GCSDeleteBucketOperator
 from airflow.utils.trigger_rule import TriggerRule
-from tests.system.providers.google import DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID
+
+from providers.tests.system.google import DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID
 
 ENV_ID = os.environ.get("SYSTEM_TESTS_ENV_ID", "default")
 PROJECT_ID = os.environ.get("SYSTEM_TESTS_GCP_PROJECT") or DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID
@@ -159,14 +160,14 @@
         [delete_bucket, delete_export_operation, delete_import_operation],
     )
 
-    from tests.system.utils.watcher import watcher
+    from dev.tests_common.test_utils.watcher import watcher
 
     # This test needs watcher in order to properly mark success/failure
     # when "tearDown" task with trigger rule is part of the DAG
     list(dag.tasks) >> watcher()
 
-from tests.system.utils import get_test_run  # noqa: E402
+from dev.tests_common.test_utils.system_tests import get_test_run  # noqa: E402
 
 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest)
 test_run = get_test_run(dag)
diff --git a/tests/system/providers/google/cloud/datastore/example_datastore_query.py b/providers/tests/system/google/cloud/datastore/example_datastore_query.py
similarity index 93%
rename from tests/system/providers/google/cloud/datastore/example_datastore_query.py
rename to providers/tests/system/google/cloud/datastore/example_datastore_query.py
index 24bb1b85dd98..0b282a1feb34 100644
--- a/tests/system/providers/google/cloud/datastore/example_datastore_query.py
+++ b/providers/tests/system/google/cloud/datastore/example_datastore_query.py
@@ -31,7 +31,8 @@
     CloudDatastoreBeginTransactionOperator,
     CloudDatastoreRunQueryOperator,
 )
-from tests.system.providers.google import DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID
+
+from providers.tests.system.google import DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID
 
 ENV_ID = os.environ.get("SYSTEM_TESTS_ENV_ID", "default")
 PROJECT_ID = os.environ.get("SYSTEM_TESTS_GCP_PROJECT") or DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID
@@ -79,14 +80,14 @@
     allocate_ids >> begin_transaction_query >> run_query
 
-    from tests.system.utils.watcher import watcher
+    from dev.tests_common.test_utils.watcher import watcher
 
     # This test needs watcher in order to properly mark success/failure
     # when "tearDown" task with trigger rule is part of the DAG
     list(dag.tasks) >> watcher()
 
-from tests.system.utils import get_test_run  # noqa: E402
+from dev.tests_common.test_utils.system_tests import get_test_run  # noqa: E402
 
 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest)
 test_run = get_test_run(dag)
diff --git a/tests/system/providers/google/cloud/datastore/example_datastore_rollback.py b/providers/tests/system/google/cloud/datastore/example_datastore_rollback.py
similarity index 91%
rename from tests/system/providers/google/cloud/datastore/example_datastore_rollback.py
rename to providers/tests/system/google/cloud/datastore/example_datastore_rollback.py
index 09661b0ae0c1..44d0dc170941 100644
--- a/tests/system/providers/google/cloud/datastore/example_datastore_rollback.py
+++ b/providers/tests/system/google/cloud/datastore/example_datastore_rollback.py
@@ -30,7 +30,8 @@
     CloudDatastoreBeginTransactionOperator,
     CloudDatastoreRollbackOperator,
 )
-from tests.system.providers.google import DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID
+
+from providers.tests.system.google import DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID
 
 ENV_ID = os.environ.get("SYSTEM_TESTS_ENV_ID", "default")
 PROJECT_ID = os.environ.get("SYSTEM_TESTS_GCP_PROJECT") or DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID
@@ -62,14 +63,14 @@
     begin_transaction_to_rollback >> rollback_transaction
 
-    from tests.system.utils.watcher import watcher
+    from dev.tests_common.test_utils.watcher import watcher
 
     # This test needs watcher in order to properly mark success/failure
     # when "tearDown" task with trigger rule is part of the DAG
     list(dag.tasks) >> watcher()
 
-from tests.system.utils import get_test_run  # noqa: E402
+from dev.tests_common.test_utils.system_tests import get_test_run  # noqa: E402
 
 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest)
 test_run = get_test_run(dag)
diff --git a/tests/system/providers/google/cloud/cloud_memorystore/__init__.py b/providers/tests/system/google/cloud/gcs/__init__.py
similarity index 100%
rename from tests/system/providers/google/cloud/cloud_memorystore/__init__.py
rename to providers/tests/system/google/cloud/gcs/__init__.py
diff --git a/tests/system/providers/google/cloud/gcs/example_calendar_to_gcs.py b/providers/tests/system/google/cloud/gcs/example_calendar_to_gcs.py
similarity index 96%
rename from tests/system/providers/google/cloud/gcs/example_calendar_to_gcs.py
rename to providers/tests/system/google/cloud/gcs/example_calendar_to_gcs.py
index 52023060c7d6..f44e7315e2d1 100644
--- a/tests/system/providers/google/cloud/gcs/example_calendar_to_gcs.py
+++ b/providers/tests/system/google/cloud/gcs/example_calendar_to_gcs.py
@@ -115,13 +115,13 @@ def delete_connection(connection_id: str) -> None:
         >> delete_bucket
     )
 
-    from tests.system.utils.watcher import watcher
+    from dev.tests_common.test_utils.watcher import watcher
 
     # This test needs watcher in order to properly mark success/failure
     # when "tearDown" task with trigger rule is part of the DAG
     list(dag.tasks) >> watcher()
 
-from tests.system.utils import get_test_run  # noqa: E402
+from dev.tests_common.test_utils.system_tests import get_test_run  # noqa: E402
 
 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest)
 test_run = get_test_run(dag)
diff --git a/tests/system/providers/google/cloud/gcs/example_firestore.py b/providers/tests/system/google/cloud/gcs/example_firestore.py
similarity index 97%
rename from tests/system/providers/google/cloud/gcs/example_firestore.py
rename to providers/tests/system/google/cloud/gcs/example_firestore.py
index cd829f0ca79c..89e0e3fe1c6d 100644
--- a/tests/system/providers/google/cloud/gcs/example_firestore.py
+++ b/providers/tests/system/google/cloud/gcs/example_firestore.py
@@ -170,14 +170,14 @@
         >> [delete_dataset, delete_bucket]
     )
 
-    from tests.system.utils.watcher import watcher
+    from dev.tests_common.test_utils.watcher import watcher
 
     # This test needs watcher in order to properly mark success/failure
     # when "tearDown" task with trigger rule is part of the DAG
     list(dag.tasks) >> watcher()
 
-from tests.system.utils import get_test_run  # noqa: E402
+from dev.tests_common.test_utils.system_tests import get_test_run  # noqa: E402
 
 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest)
 test_run = get_test_run(dag)
diff --git a/tests/system/providers/google/cloud/gcs/example_gcs_acl.py b/providers/tests/system/google/cloud/gcs/example_gcs_acl.py
similarity index 94%
rename from tests/system/providers/google/cloud/gcs/example_gcs_acl.py
rename to providers/tests/system/google/cloud/gcs/example_gcs_acl.py
index 0b550b22ba07..7843c0877c1d 100644
--- a/tests/system/providers/google/cloud/gcs/example_gcs_acl.py
+++ b/providers/tests/system/google/cloud/gcs/example_gcs_acl.py
@@ -33,7 +33,8 @@
 )
 from airflow.providers.google.cloud.transfers.gcs_to_gcs import GCSToGCSOperator
 from airflow.utils.trigger_rule import TriggerRule
-from tests.system.providers.google import DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID
+
+from providers.tests.system.google import DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID
 
 ENV_ID = os.environ.get("SYSTEM_TESTS_ENV_ID", "default")
 PROJECT_ID = os.environ.get("SYSTEM_TESTS_GCP_PROJECT") or DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID
@@ -107,14 +108,14 @@
         >> delete_bucket
     )
 
-    from tests.system.utils.watcher import watcher
+    from dev.tests_common.test_utils.watcher import watcher
 
     # This test needs watcher in order to properly mark success/failure
     # when "tearDown" task with trigger rule is part of the DAG
     list(dag.tasks) >> watcher()
 
-from tests.system.utils import get_test_run  # noqa: E402
+from dev.tests_common.test_utils.system_tests import get_test_run  # noqa: E402
 
 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest)
 test_run = get_test_run(dag)
diff --git a/tests/system/providers/google/cloud/gcs/example_gcs_copy_delete.py b/providers/tests/system/google/cloud/gcs/example_gcs_copy_delete.py
similarity index 95%
rename from tests/system/providers/google/cloud/gcs/example_gcs_copy_delete.py
rename to providers/tests/system/google/cloud/gcs/example_gcs_copy_delete.py
index aebb1e3e7ed8..6bbec540df9f 100644
--- a/tests/system/providers/google/cloud/gcs/example_gcs_copy_delete.py
+++ b/providers/tests/system/google/cloud/gcs/example_gcs_copy_delete.py
@@ -36,7 +36,8 @@
 from airflow.providers.google.cloud.transfers.gcs_to_gcs import GCSToGCSOperator
 from airflow.providers.standard.operators.bash import BashOperator
 from airflow.utils.trigger_rule import TriggerRule
-from tests.system.providers.google import DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID
+
+from providers.tests.system.google import DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID
 
 ENV_ID = os.environ.get("SYSTEM_TESTS_ENV_ID", "default")
 PROJECT_ID = os.environ.get("SYSTEM_TESTS_GCP_PROJECT") or DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID
@@ -121,14 +122,14 @@
         [delete_bucket_src, delete_bucket_dst],
     )
 
-    from tests.system.utils.watcher import watcher
+    from dev.tests_common.test_utils.watcher import watcher
 
     # This test needs watcher in order to properly mark success/failure
     # when "tearDown" task with trigger rule is part of the DAG
     list(dag.tasks) >> watcher()
 
-from tests.system.utils import get_test_run  # noqa: E402
+from dev.tests_common.test_utils.system_tests import get_test_run  # noqa: E402
 
 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest)
 test_run = get_test_run(dag)
diff --git a/tests/system/providers/google/cloud/gcs/example_gcs_sensor.py b/providers/tests/system/google/cloud/gcs/example_gcs_sensor.py
similarity index 97%
rename from tests/system/providers/google/cloud/gcs/example_gcs_sensor.py
rename to providers/tests/system/google/cloud/gcs/example_gcs_sensor.py
index 5dc42604ddca..2d4da2887d4e 100644
--- a/tests/system/providers/google/cloud/gcs/example_gcs_sensor.py
+++ b/providers/tests/system/google/cloud/gcs/example_gcs_sensor.py
@@ -36,7 +36,8 @@
 )
 from airflow.providers.google.cloud.transfers.gcs_to_gcs import GCSToGCSOperator
 from airflow.utils.trigger_rule import TriggerRule
-from tests.system.providers.google import DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID
+
+from providers.tests.system.google import DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID
 
 ENV_ID = os.environ.get("SYSTEM_TESTS_ENV_ID", "default")
 PROJECT_ID = os.environ.get("SYSTEM_TESTS_GCP_PROJECT") or DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID
@@ -200,14 +201,14 @@ def mode_setter(self, value):
         delete_bucket,
     )
 
-    from tests.system.utils.watcher import watcher
+    from dev.tests_common.test_utils.watcher import watcher
 
     # This test needs watcher in order to properly mark success/failure
     # when "tearDown" task with trigger rule is part of the DAG
     list(dag.tasks) >> watcher()
 
-from tests.system.utils import get_test_run  # noqa: E402
+from dev.tests_common.test_utils.system_tests import get_test_run  # noqa: E402
 
 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest)
 test_run = get_test_run(dag)
diff --git a/tests/system/providers/google/cloud/gcs/example_gcs_to_bigquery.py b/providers/tests/system/google/cloud/gcs/example_gcs_to_bigquery.py
similarity index 93%
rename from tests/system/providers/google/cloud/gcs/example_gcs_to_bigquery.py
rename to providers/tests/system/google/cloud/gcs/example_gcs_to_bigquery.py
index 94286ade4369..5d7e393bff29 100644
--- a/tests/system/providers/google/cloud/gcs/example_gcs_to_bigquery.py
+++ b/providers/tests/system/google/cloud/gcs/example_gcs_to_bigquery.py
@@ -31,7 +31,8 @@
 )
 from airflow.providers.google.cloud.transfers.gcs_to_bigquery import GCSToBigQueryOperator
 from airflow.utils.trigger_rule import TriggerRule
-from tests.system.providers.google import DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID
+
+from providers.tests.system.google import DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID
 
 ENV_ID = os.environ.get("SYSTEM_TESTS_ENV_ID", "default")
 DAG_ID = "gcs_to_bigquery_operator"
@@ -81,13 +82,13 @@
         >> delete_test_dataset
     )
 
-    from tests.system.utils.watcher import watcher
+    from dev.tests_common.test_utils.watcher import watcher
 
     # This test needs watcher in order to properly mark success/failure
     # when "tearDown" task with trigger rule is part of the DAG
     list(dag.tasks) >> watcher()
 
-from tests.system.utils import get_test_run  # noqa: E402
+from dev.tests_common.test_utils.system_tests import get_test_run  # noqa: E402
 
 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest)
 test_run = get_test_run(dag)
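The same three-part tail recurs in every example DAG this change touches: import the watcher from its new home, fan all tasks into it, then expose a pytest entrypoint. A minimal sketch of that shape under the new layout; the DAG id and body are illustrative placeholders, and the `providers.tests` / `dev.tests_common` roots are the ones introduced by this rename:

from __future__ import annotations

import os
from datetime import datetime

from airflow.models.dag import DAG

from providers.tests.system.google import DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID

ENV_ID = os.environ.get("SYSTEM_TESTS_ENV_ID", "default")
PROJECT_ID = os.environ.get("SYSTEM_TESTS_GCP_PROJECT") or DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID

with DAG(
    "example_boilerplate_sketch",  # hypothetical DAG id, for illustration only
    schedule="@once",
    start_date=datetime(2021, 1, 1),
    catchup=False,
) as dag:
    # ... setup tasks, the operator under test, and a teardown task running
    # with TriggerRule.ALL_DONE would go here ...

    from dev.tests_common.test_utils.watcher import watcher

    # Fan every task into the watcher so a failed teardown (which still runs
    # after upstream failures) marks the whole test run as failed.
    list(dag.tasks) >> watcher()

from dev.tests_common.test_utils.system_tests import get_test_run  # noqa: E402

# Expose the DAG to pytest collection (see tests/system/README.md#run_via_pytest).
test_run = get_test_run(dag)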
diff --git a/tests/system/providers/google/cloud/gcs/example_gcs_to_bigquery_async.py b/providers/tests/system/google/cloud/gcs/example_gcs_to_bigquery_async.py
similarity index 96%
rename from tests/system/providers/google/cloud/gcs/example_gcs_to_bigquery_async.py
rename to providers/tests/system/google/cloud/gcs/example_gcs_to_bigquery_async.py
index 27bef2486fd5..1ca531b81800 100644
--- a/tests/system/providers/google/cloud/gcs/example_gcs_to_bigquery_async.py
+++ b/providers/tests/system/google/cloud/gcs/example_gcs_to_bigquery_async.py
@@ -31,7 +31,8 @@
 )
 from airflow.providers.google.cloud.transfers.gcs_to_bigquery import GCSToBigQueryOperator
 from airflow.utils.trigger_rule import TriggerRule
-from tests.system.providers.google import DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID
+
+from providers.tests.system.google import DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID
 
 ENV_ID = os.environ.get("SYSTEM_TESTS_ENV_ID", "default")
 PROJECT_ID = os.environ.get("SYSTEM_TESTS_GCP_PROJECT") or DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID
@@ -173,13 +174,13 @@
         >> delete_test_dataset_delimiter
     )
 
-    from tests.system.utils.watcher import watcher
+    from dev.tests_common.test_utils.watcher import watcher
 
     # This test needs watcher in order to properly mark success/failure
     # when "tearDown" task with trigger rule is part of the DAG
     list(dag.tasks) >> watcher()
 
-from tests.system.utils import get_test_run  # noqa: E402
+from dev.tests_common.test_utils.system_tests import get_test_run  # noqa: E402
 
 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest)
 test_run = get_test_run(dag)
diff --git a/tests/system/providers/google/cloud/gcs/example_gcs_to_gcs.py b/providers/tests/system/google/cloud/gcs/example_gcs_to_gcs.py
similarity index 97%
rename from tests/system/providers/google/cloud/gcs/example_gcs_to_gcs.py
rename to providers/tests/system/google/cloud/gcs/example_gcs_to_gcs.py
index 55bec85a5056..ac1d70307a53 100644
--- a/tests/system/providers/google/cloud/gcs/example_gcs_to_gcs.py
+++ b/providers/tests/system/google/cloud/gcs/example_gcs_to_gcs.py
@@ -40,7 +40,8 @@
 from airflow.providers.google.cloud.transfers.local_to_gcs import LocalFilesystemToGCSOperator
 from airflow.providers.standard.operators.bash import BashOperator
 from airflow.utils.trigger_rule import TriggerRule
-from tests.system.providers.google import DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID
+
+from providers.tests.system.google import DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID
 
 ENV_ID = os.environ.get("SYSTEM_TESTS_ENV_ID", "default")
 PROJECT_ID = os.environ.get("SYSTEM_TESTS_GCP_PROJECT") or DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID
@@ -274,14 +275,14 @@ def delete_work_dir(create_workdir_result: str) -> None:
         [delete_bucket_src, delete_bucket_dst, delete_work_dir(create_workdir_task)],
     )
 
-    from tests.system.utils.watcher import watcher
+    from dev.tests_common.test_utils.watcher import watcher
 
     # This test needs watcher in order to properly mark success/failure
     # when "tearDown" task with trigger rule is part of the DAG
     list(dag.tasks) >> watcher()
 
-from tests.system.utils import get_test_run  # noqa: E402
+from dev.tests_common.test_utils.system_tests import get_test_run  # noqa: E402
 
 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest)
 test_run = get_test_run(dag)
diff --git a/tests/system/providers/google/cloud/gcs/example_gcs_to_gdrive.py b/providers/tests/system/google/cloud/gcs/example_gcs_to_gdrive.py
similarity index 97%
rename from tests/system/providers/google/cloud/gcs/example_gcs_to_gdrive.py
rename to providers/tests/system/google/cloud/gcs/example_gcs_to_gdrive.py
index e121e63bac97..bfb200a15a63 100644
--- a/tests/system/providers/google/cloud/gcs/example_gcs_to_gdrive.py
+++ b/providers/tests/system/google/cloud/gcs/example_gcs_to_gdrive.py
@@ -39,7 +39,8 @@
 from airflow.providers.google.suite.transfers.gcs_to_gdrive import GCSToGoogleDriveOperator
 from airflow.settings import Session
 from airflow.utils.trigger_rule import TriggerRule
-from tests.system.providers.google import DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID
+
+from providers.tests.system.google import DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID
 
 ENV_ID = os.environ.get("SYSTEM_TESTS_ENV_ID", "default")
 PROJECT_ID = os.environ.get("SYSTEM_TESTS_GCP_PROJECT") or DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID
@@ -203,13 +204,13 @@ def delete_connection(connection_id: str) -> None:
         >> [delete_bucket, delete_connection_task]
     )
 
-    from tests.system.utils.watcher import watcher
+    from dev.tests_common.test_utils.watcher import watcher
 
     # This test needs watcher in order to properly mark success/failure
     # when "tearDown" task with trigger rule is part of the DAG
     list(dag.tasks) >> watcher()
 
-from tests.system.utils import get_test_run  # noqa: E402
+from dev.tests_common.test_utils.system_tests import get_test_run  # noqa: E402
 
 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest)
 test_run = get_test_run(dag)
diff --git a/tests/system/providers/google/cloud/gcs/example_gcs_to_sheets.py b/providers/tests/system/google/cloud/gcs/example_gcs_to_sheets.py
similarity index 97%
rename from tests/system/providers/google/cloud/gcs/example_gcs_to_sheets.py
rename to providers/tests/system/google/cloud/gcs/example_gcs_to_sheets.py
index a25c499ec507..69947698cd7b 100644
--- a/tests/system/providers/google/cloud/gcs/example_gcs_to_sheets.py
+++ b/providers/tests/system/google/cloud/gcs/example_gcs_to_sheets.py
@@ -131,13 +131,13 @@ def delete_connection(connection_id: str) -> None:
         >> [delete_bucket, delete_connection_task]
     )
 
-    from tests.system.utils.watcher import watcher
+    from dev.tests_common.test_utils.watcher import watcher
 
     # This test needs watcher in order to properly mark success/failure
     # when "tearDown" task with trigger rule is part of the DAG
     list(dag.tasks) >> watcher()
 
-from tests.system.utils import get_test_run  # noqa: E402
+from dev.tests_common.test_utils.system_tests import get_test_run  # noqa: E402
 
 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest)
 test_run = get_test_run(dag)
diff --git a/tests/system/providers/google/cloud/gcs/example_gcs_transform.py b/providers/tests/system/google/cloud/gcs/example_gcs_transform.py
similarity index 93%
rename from tests/system/providers/google/cloud/gcs/example_gcs_transform.py
rename to providers/tests/system/google/cloud/gcs/example_gcs_transform.py
index 018076173d28..0b59119c6bc1 100644
--- a/tests/system/providers/google/cloud/gcs/example_gcs_transform.py
+++ b/providers/tests/system/google/cloud/gcs/example_gcs_transform.py
@@ -33,7 +33,8 @@
 )
 from airflow.providers.google.cloud.transfers.gcs_to_gcs import GCSToGCSOperator
 from airflow.utils.trigger_rule import TriggerRule
-from tests.system.providers.google import DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID
+
+from providers.tests.system.google import DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID
 
 ENV_ID = os.environ.get("SYSTEM_TESTS_ENV_ID", "default")
 PROJECT_ID = os.environ.get("SYSTEM_TESTS_GCP_PROJECT") or DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID
@@ -94,14 +95,14 @@
         >> delete_bucket
     )
 
-    from tests.system.utils.watcher import watcher
+    from dev.tests_common.test_utils.watcher import watcher
 
     # This test needs watcher in order to properly mark success/failure
     # when "tearDown" task with trigger rule is part of the DAG
     list(dag.tasks) >> watcher()
 
-from tests.system.utils import get_test_run  # noqa: E402
+from dev.tests_common.test_utils.system_tests import get_test_run  # noqa: E402
 
 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest)
 test_run = get_test_run(dag)
diff --git a/tests/system/providers/google/cloud/gcs/example_gcs_transform_timespan.py b/providers/tests/system/google/cloud/gcs/example_gcs_transform_timespan.py
similarity index 95%
rename from tests/system/providers/google/cloud/gcs/example_gcs_transform_timespan.py
rename to providers/tests/system/google/cloud/gcs/example_gcs_transform_timespan.py
index eaa3d5e5c0ed..3a5b03695ee4 100644
--- a/tests/system/providers/google/cloud/gcs/example_gcs_transform_timespan.py
+++ b/providers/tests/system/google/cloud/gcs/example_gcs_transform_timespan.py
@@ -34,7 +34,8 @@
 )
 from airflow.providers.google.cloud.transfers.gcs_to_gcs import GCSToGCSOperator
 from airflow.utils.trigger_rule import TriggerRule
-from tests.system.providers.google import DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID
+
+from providers.tests.system.google import DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID
 
 ENV_ID = os.environ.get("SYSTEM_TESTS_ENV_ID", "default")
 PROJECT_ID = os.environ.get("SYSTEM_TESTS_GCP_PROJECT") or DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID
@@ -113,14 +114,14 @@
         [delete_bucket_src, delete_bucket_dst],
     )
 
-    from tests.system.utils.watcher import watcher
+    from dev.tests_common.test_utils.watcher import watcher
 
     # This test needs watcher in order to properly mark success/failure
     # when "tearDown" task with trigger rule is part of the DAG
     list(dag.tasks) >> watcher()
 
-from tests.system.utils import get_test_run  # noqa: E402
+from dev.tests_common.test_utils.system_tests import get_test_run  # noqa: E402
 
 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest)
 test_run = get_test_run(dag)
diff --git a/tests/system/providers/google/cloud/gcs/example_gcs_upload_download.py b/providers/tests/system/google/cloud/gcs/example_gcs_upload_download.py
similarity index 94%
rename from tests/system/providers/google/cloud/gcs/example_gcs_upload_download.py
rename to providers/tests/system/google/cloud/gcs/example_gcs_upload_download.py
index ae34aa639f95..6907a29fb538 100644
--- a/tests/system/providers/google/cloud/gcs/example_gcs_upload_download.py
+++ b/providers/tests/system/google/cloud/gcs/example_gcs_upload_download.py
@@ -30,7 +30,8 @@
 from airflow.providers.google.cloud.transfers.gcs_to_local import GCSToLocalFilesystemOperator
 from airflow.providers.google.cloud.transfers.local_to_gcs import LocalFilesystemToGCSOperator
 from airflow.utils.trigger_rule import TriggerRule
-from tests.system.providers.google import DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID
+
+from providers.tests.system.google import DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID
 
 ENV_ID = os.environ.get("SYSTEM_TESTS_ENV_ID", "default")
 PROJECT_ID = os.environ.get("SYSTEM_TESTS_GCP_PROJECT") or DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID
@@ -91,14 +92,14 @@
         >> delete_bucket
     )
 
-    from tests.system.utils.watcher import watcher
+    from dev.tests_common.test_utils.watcher import watcher
 
     # This test needs watcher in order to properly mark success/failure
     # when "tearDown" task with trigger rule is part of the DAG
     list(dag.tasks) >> watcher()
 
-from tests.system.utils import get_test_run  # noqa: E402
+from dev.tests_common.test_utils.system_tests import get_test_run  # noqa: E402
 
 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest)
 test_run = get_test_run(dag)
diff --git a/tests/system/providers/google/cloud/gcs/example_gdrive_to_gcs.py b/providers/tests/system/google/cloud/gcs/example_gdrive_to_gcs.py
similarity index 96%
rename from tests/system/providers/google/cloud/gcs/example_gdrive_to_gcs.py
rename to providers/tests/system/google/cloud/gcs/example_gdrive_to_gcs.py
index a5842ac17a53..6ee188bd957e 100644
--- a/tests/system/providers/google/cloud/gcs/example_gdrive_to_gcs.py
+++ b/providers/tests/system/google/cloud/gcs/example_gdrive_to_gcs.py
@@ -33,7 +33,8 @@
 from airflow.providers.google.suite.transfers.gcs_to_gdrive import GCSToGoogleDriveOperator
 from airflow.settings import Session
 from airflow.utils.trigger_rule import TriggerRule
-from tests.system.providers.google import DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID
+
+from providers.tests.system.google import DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID
 
 ENV_ID = os.environ.get("SYSTEM_TESTS_ENV_ID", "default")
 PROJECT_ID = os.environ.get("SYSTEM_TESTS_GCP_PROJECT") or DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID
@@ -164,13 +165,13 @@ def delete_connection(connection_id: str) -> None:
         >> [delete_bucket, delete_connection_task]
     )
 
-    from tests.system.utils.watcher import watcher
+    from dev.tests_common.test_utils.watcher import watcher
 
     # This test needs watcher in order to properly mark success/failure
     # when "tearDown" task with trigger rule is part of the DAG
     list(dag.tasks) >> watcher()
 
-from tests.system.utils import get_test_run  # noqa: E402
+from dev.tests_common.test_utils.system_tests import get_test_run  # noqa: E402
 
 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest)
 test_run = get_test_run(dag)
diff --git a/tests/system/providers/google/cloud/gcs/example_mssql_to_gcs.py b/providers/tests/system/google/cloud/gcs/example_mssql_to_gcs.py
similarity index 93%
rename from tests/system/providers/google/cloud/gcs/example_mssql_to_gcs.py
rename to providers/tests/system/google/cloud/gcs/example_mssql_to_gcs.py
index 995361813c7e..bc6ac79f1e0b 100644
--- a/tests/system/providers/google/cloud/gcs/example_mssql_to_gcs.py
+++ b/providers/tests/system/google/cloud/gcs/example_mssql_to_gcs.py
@@ -23,7 +23,8 @@
 from airflow.models.dag import DAG
 from airflow.providers.google.cloud.operators.gcs import GCSCreateBucketOperator, GCSDeleteBucketOperator
-from tests.system.providers.google import DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID
+
+from providers.tests.system.google import DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID
 
 try:
     from airflow.providers.google.cloud.transfers.mssql_to_gcs import MSSQLToGCSOperator
@@ -77,14 +78,14 @@
         >> delete_bucket
     )
 
-    from tests.system.utils.watcher import watcher
+    from dev.tests_common.test_utils.watcher import watcher
 
     # This test needs watcher in order to properly mark success/failure
     # when "tearDown" task with trigger rule is part of the DAG
     list(dag.tasks) >> watcher()
 
-from tests.system.utils import get_test_run  # noqa: E402
+from dev.tests_common.test_utils.system_tests import get_test_run  # noqa: E402
 
 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest)
 test_run = get_test_run(dag)
diff --git a/tests/system/providers/google/cloud/gcs/example_mysql_to_gcs.py b/providers/tests/system/google/cloud/gcs/example_mysql_to_gcs.py
similarity index 98%
rename from tests/system/providers/google/cloud/gcs/example_mysql_to_gcs.py
rename to providers/tests/system/google/cloud/gcs/example_mysql_to_gcs.py
index a673ab88f722..cc53337e5693 100644
--- a/tests/system/providers/google/cloud/gcs/example_mysql_to_gcs.py
+++ b/providers/tests/system/google/cloud/gcs/example_mysql_to_gcs.py
@@ -295,13 +295,13 @@ def delete_connection(connection_id: str) -> None:
     mysql_to_gcs >> [delete_gcs_bucket, delete_firewall_rule, delete_gce_instance, delete_connection_task]
     delete_gce_instance >> delete_persistent_disk
 
-    from tests.system.utils.watcher import watcher
+    from dev.tests_common.test_utils.watcher import watcher
 
     # This test needs watcher in order to properly mark success/failure
     # when "tearDown" task with trigger rule is part of the DAG
     list(dag.tasks) >> watcher()
 
-from tests.system.utils import get_test_run  # noqa: E402
+from dev.tests_common.test_utils.system_tests import get_test_run  # noqa: E402
 
 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest)
 test_run = get_test_run(dag)
diff --git a/tests/system/providers/google/cloud/gcs/example_oracle_to_gcs.py b/providers/tests/system/google/cloud/gcs/example_oracle_to_gcs.py
similarity index 92%
rename from tests/system/providers/google/cloud/gcs/example_oracle_to_gcs.py
rename to providers/tests/system/google/cloud/gcs/example_oracle_to_gcs.py
index 8e2b6fb979ef..c727eef1f66f 100644
--- a/tests/system/providers/google/cloud/gcs/example_oracle_to_gcs.py
+++ b/providers/tests/system/google/cloud/gcs/example_oracle_to_gcs.py
@@ -23,7 +23,8 @@
 from airflow.providers.google.cloud.operators.gcs import GCSCreateBucketOperator, GCSDeleteBucketOperator
 from airflow.providers.google.cloud.transfers.oracle_to_gcs import OracleToGCSOperator
 from airflow.utils.trigger_rule import TriggerRule
-from tests.system.providers.google import DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID
+
+from providers.tests.system.google import DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID
 
 ENV_ID = os.environ.get("SYSTEM_TESTS_ENV_ID", "default")
 PROJECT_ID = os.environ.get("SYSTEM_TESTS_GCP_PROJECT") or DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID
@@ -63,13 +64,13 @@
         >> delete_bucket
     )
 
-    from tests.system.utils.watcher import watcher
+    from dev.tests_common.test_utils.watcher import watcher
 
     # This test needs watcher in order to properly mark success/failure
     # when "tearDown" task with trigger rule is part of the DAG
     list(dag.tasks) >> watcher()
 
-from tests.system.utils import get_test_run  # noqa: E402
+from dev.tests_common.test_utils.system_tests import get_test_run  # noqa: E402
 
 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest)
 test_run = get_test_run(dag)
diff --git a/tests/system/providers/google/cloud/gcs/example_s3_to_gcs.py b/providers/tests/system/google/cloud/gcs/example_s3_to_gcs.py
similarity index 95%
rename from tests/system/providers/google/cloud/gcs/example_s3_to_gcs.py
rename to providers/tests/system/google/cloud/gcs/example_s3_to_gcs.py
index 8b53e8e53115..487dbbce552e 100644
--- a/tests/system/providers/google/cloud/gcs/example_s3_to_gcs.py
+++ b/providers/tests/system/google/cloud/gcs/example_s3_to_gcs.py
@@ -27,7 +27,8 @@
 from airflow.providers.google.cloud.operators.gcs import GCSCreateBucketOperator, GCSDeleteBucketOperator
 from airflow.providers.google.cloud.transfers.s3_to_gcs import S3ToGCSOperator
 from airflow.utils.trigger_rule import TriggerRule
-from tests.system.providers.google import DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID
+
+from providers.tests.system.google import DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID
 
 ENV_ID = os.environ.get("SYSTEM_TESTS_ENV_ID", "default")
 GCP_PROJECT_ID = os.environ.get("SYSTEM_TESTS_GCP_PROJECT") or DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID
@@ -109,14 +110,14 @@ def upload_file():
         >> delete_gcs_bucket
     )
 
-    from tests.system.utils.watcher import watcher
+    from dev.tests_common.test_utils.watcher import watcher
 
     # This test needs watcher in order to properly mark success/failure
     # when "tearDown" task with trigger rule is part of the DAG
     list(dag.tasks) >> watcher()
 
-from tests.system.utils import get_test_run  # noqa: E402
+from dev.tests_common.test_utils.system_tests import get_test_run  # noqa: E402
 
 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest)
 test_run = get_test_run(dag)
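For readers tracking the imports: the `watcher` pulled in by each of these hunks is conceptually just a sentinel task that trips when anything upstream failed. A hedged sketch of that idea, not the verbatim implementation in dev.tests_common.test_utils.watcher:

from airflow.decorators import task
from airflow.exceptions import AirflowException
from airflow.utils.trigger_rule import TriggerRule


@task(trigger_rule=TriggerRule.ONE_FAILED, retries=0)
def watcher():
    # Scheduled only when at least one upstream task has failed; raising here
    # turns an otherwise green run (the teardown itself succeeded) into a failure.
    raise AirflowException("Failing task because one or more upstream tasks failed.")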
diff --git a/tests/system/providers/google/cloud/gcs/example_sftp_to_gcs.py b/providers/tests/system/google/cloud/gcs/example_sftp_to_gcs.py
similarity index 95%
rename from tests/system/providers/google/cloud/gcs/example_sftp_to_gcs.py
rename to providers/tests/system/google/cloud/gcs/example_sftp_to_gcs.py
index 2860d8552e10..994e7e8242e1 100644
--- a/tests/system/providers/google/cloud/gcs/example_sftp_to_gcs.py
+++ b/providers/tests/system/google/cloud/gcs/example_sftp_to_gcs.py
@@ -31,7 +31,8 @@
 from airflow.providers.google.cloud.transfers.sftp_to_gcs import SFTPToGCSOperator
 from airflow.providers.standard.operators.bash import BashOperator
 from airflow.utils.trigger_rule import TriggerRule
-from tests.system.providers.google import DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID
+
+from providers.tests.system.google import DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID
 
 ENV_ID = os.environ.get("SYSTEM_TESTS_ENV_ID", "default")
 PROJECT_ID = os.environ.get("SYSTEM_TESTS_GCP_PROJECT") or DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID
@@ -118,14 +119,14 @@
         delete_bucket,
     )
 
-    from tests.system.utils.watcher import watcher
+    from dev.tests_common.test_utils.watcher import watcher
 
     # This test needs watcher in order to properly mark success/failure
     # when "tearDown" task with trigger rule is part of the DAG
     list(dag.tasks) >> watcher()
 
-from tests.system.utils import get_test_run  # noqa: E402
+from dev.tests_common.test_utils.system_tests import get_test_run  # noqa: E402
 
 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest)
 test_run = get_test_run(dag)
diff --git a/tests/system/providers/google/cloud/gcs/example_sheets.py b/providers/tests/system/google/cloud/gcs/example_sheets.py
similarity index 97%
rename from tests/system/providers/google/cloud/gcs/example_sheets.py
rename to providers/tests/system/google/cloud/gcs/example_sheets.py
index 2247819494f9..7d1737981916 100644
--- a/tests/system/providers/google/cloud/gcs/example_sheets.py
+++ b/providers/tests/system/google/cloud/gcs/example_sheets.py
@@ -145,13 +145,13 @@ def delete_connection(connection_id: str) -> None:
         >> [delete_bucket, delete_connection_task]
     )
 
-    from tests.system.utils.watcher import watcher
+    from dev.tests_common.test_utils.watcher import watcher
 
     # This test needs watcher in order to properly mark success/failure
     # when "tearDown" task with trigger rule is part of the DAG
     list(dag.tasks) >> watcher()
 
-from tests.system.utils import get_test_run  # noqa: E402
+from dev.tests_common.test_utils.system_tests import get_test_run  # noqa: E402
 
 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest)
 test_run = get_test_run(dag)
diff --git a/tests/system/providers/google/cloud/gcs/example_sheets_to_gcs.py b/providers/tests/system/google/cloud/gcs/example_sheets_to_gcs.py
similarity index 97%
rename from tests/system/providers/google/cloud/gcs/example_sheets_to_gcs.py
rename to providers/tests/system/google/cloud/gcs/example_sheets_to_gcs.py
index d4890cbbb1a4..08688c332563 100644
--- a/tests/system/providers/google/cloud/gcs/example_sheets_to_gcs.py
+++ b/providers/tests/system/google/cloud/gcs/example_sheets_to_gcs.py
@@ -120,13 +120,13 @@ def delete_connection(connection_id: str) -> None:
         >> [delete_bucket, delete_connection_task]
     )
 
-    from tests.system.utils.watcher import watcher
+    from dev.tests_common.test_utils.watcher import watcher
 
     # This test needs watcher in order to properly mark success/failure
     # when "tearDown" task with trigger rule is part of the DAG
     list(dag.tasks) >> watcher()
 
-from tests.system.utils import get_test_run  # noqa: E402
+from dev.tests_common.test_utils.system_tests import get_test_run  # noqa: E402
 
 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest)
 test_run = get_test_run(dag)
diff --git a/tests/system/providers/google/cloud/gcs/example_trino_to_gcs.py b/providers/tests/system/google/cloud/gcs/example_trino_to_gcs.py
similarity index 98%
rename from tests/system/providers/google/cloud/gcs/example_trino_to_gcs.py
rename to providers/tests/system/google/cloud/gcs/example_trino_to_gcs.py
index f19283743714..af92e550481f 100644
--- a/tests/system/providers/google/cloud/gcs/example_trino_to_gcs.py
+++ b/providers/tests/system/google/cloud/gcs/example_trino_to_gcs.py
@@ -221,13 +221,13 @@ def safe_name(s: str) -> str:
         >> [delete_dataset, delete_bucket]
     )
 
-    from tests.system.utils.watcher import watcher
+    from dev.tests_common.test_utils.watcher import watcher
 
     # This test needs watcher in order to properly mark success/failure
     # when "tearDown" task with trigger rule is part of the DAG
     list(dag.tasks) >> watcher()
 
-from tests.system.utils import get_test_run  # noqa: E402
+from dev.tests_common.test_utils.system_tests import get_test_run  # noqa: E402
 
 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest)
 test_run = get_test_run(dag)
diff --git a/tests/system/providers/google/cloud/cloud_run/__init__.py b/providers/tests/system/google/cloud/gcs/resources/__init__.py
similarity index 100%
rename from tests/system/providers/google/cloud/cloud_run/__init__.py
rename to providers/tests/system/google/cloud/gcs/resources/__init__.py
diff --git a/tests/system/providers/google/cloud/gcs/resources/example_upload.txt b/providers/tests/system/google/cloud/gcs/resources/example_upload.txt
similarity index 100%
rename from tests/system/providers/google/cloud/gcs/resources/example_upload.txt
rename to providers/tests/system/google/cloud/gcs/resources/example_upload.txt
diff --git a/tests/system/providers/google/cloud/gcs/resources/tmp.tar.gz b/providers/tests/system/google/cloud/gcs/resources/tmp.tar.gz
similarity index 100%
rename from tests/system/providers/google/cloud/gcs/resources/tmp.tar.gz
rename to providers/tests/system/google/cloud/gcs/resources/tmp.tar.gz
diff --git a/tests/system/providers/google/cloud/gcs/resources/transform_script.py b/providers/tests/system/google/cloud/gcs/resources/transform_script.py
similarity index 100%
rename from tests/system/providers/google/cloud/gcs/resources/transform_script.py
rename to providers/tests/system/google/cloud/gcs/resources/transform_script.py
diff --git a/tests/system/providers/google/cloud/gcs/resources/transform_timespan.py b/providers/tests/system/google/cloud/gcs/resources/transform_timespan.py
similarity index 100%
rename from tests/system/providers/google/cloud/gcs/resources/transform_timespan.py
rename to providers/tests/system/google/cloud/gcs/resources/transform_timespan.py
diff --git a/tests/system/providers/google/cloud/gcs/resources/us-states.csv b/providers/tests/system/google/cloud/gcs/resources/us-states.csv
similarity index 100%
rename from tests/system/providers/google/cloud/gcs/resources/us-states.csv
rename to providers/tests/system/google/cloud/gcs/resources/us-states.csv
diff --git a/tests/system/providers/google/cloud/cloud_sql/__init__.py b/providers/tests/system/google/cloud/kubernetes_engine/__init__.py
similarity index 100%
rename from tests/system/providers/google/cloud/cloud_sql/__init__.py
rename to providers/tests/system/google/cloud/kubernetes_engine/__init__.py
diff --git a/tests/system/providers/google/cloud/kubernetes_engine/example_kubernetes_engine.py b/providers/tests/system/google/cloud/kubernetes_engine/example_kubernetes_engine.py
similarity index 95%
rename from tests/system/providers/google/cloud/kubernetes_engine/example_kubernetes_engine.py
rename to providers/tests/system/google/cloud/kubernetes_engine/example_kubernetes_engine.py
index 173fddad3a06..e9fe3f6836b9 100644
--- a/tests/system/providers/google/cloud/kubernetes_engine/example_kubernetes_engine.py
+++ b/providers/tests/system/google/cloud/kubernetes_engine/example_kubernetes_engine.py
@@ -32,7 +32,8 @@
 )
 from airflow.providers.standard.operators.bash import BashOperator
 from airflow.utils.trigger_rule import TriggerRule
-from tests.system.providers.google import DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID
+
+from providers.tests.system.google import DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID
 
 ENV_ID = os.environ.get("SYSTEM_TESTS_ENV_ID", "default")
 DAG_ID = "kubernetes_engine"
@@ -112,14 +113,14 @@
     create_cluster >> [pod_task, pod_task_xcom] >> delete_cluster
     pod_task_xcom >> pod_task_xcom_result
 
-    from tests.system.utils.watcher import watcher
+    from dev.tests_common.test_utils.watcher import watcher
 
     # This test needs watcher in order to properly mark success/failure
     # when "teardown" task with trigger rule is part of the DAG
     list(dag.tasks) >> watcher()
 
-from tests.system.utils import get_test_run  # noqa: E402
+from dev.tests_common.test_utils.system_tests import get_test_run  # noqa: E402
 
 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest)
 test_run = get_test_run(dag)
diff --git a/tests/system/providers/google/cloud/kubernetes_engine/example_kubernetes_engine_async.py b/providers/tests/system/google/cloud/kubernetes_engine/example_kubernetes_engine_async.py
similarity index 95%
rename from tests/system/providers/google/cloud/kubernetes_engine/example_kubernetes_engine_async.py
rename to providers/tests/system/google/cloud/kubernetes_engine/example_kubernetes_engine_async.py
index e974a628c7a5..f5cb8f570754 100644
--- a/tests/system/providers/google/cloud/kubernetes_engine/example_kubernetes_engine_async.py
+++ b/providers/tests/system/google/cloud/kubernetes_engine/example_kubernetes_engine_async.py
@@ -32,7 +32,8 @@
 )
 from airflow.providers.standard.operators.bash import BashOperator
 from airflow.utils.trigger_rule import TriggerRule
-from tests.system.providers.google import DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID
+
+from providers.tests.system.google import DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID
 
 ENV_ID = os.environ.get("SYSTEM_TESTS_ENV_ID", "default")
 DAG_ID = "kubernetes_engine_async"
@@ -115,14 +116,14 @@
     create_cluster >> [pod_task, pod_task_xcom_async] >> delete_cluster
     pod_task_xcom_async >> pod_task_xcom_result
 
-    from tests.system.utils.watcher import watcher
+    from dev.tests_common.test_utils.watcher import watcher
 
     # This test needs watcher in order to properly mark success/failure
    # when "teardown" task with trigger rule is part of the DAG
     list(dag.tasks) >> watcher()
 
-from tests.system.utils import get_test_run  # noqa: E402
+from dev.tests_common.test_utils.system_tests import get_test_run  # noqa: E402
 
 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest)
 test_run = get_test_run(dag)
diff --git a/tests/system/providers/google/cloud/kubernetes_engine/example_kubernetes_engine_job.py b/providers/tests/system/google/cloud/kubernetes_engine/example_kubernetes_engine_job.py
similarity index 97%
rename from tests/system/providers/google/cloud/kubernetes_engine/example_kubernetes_engine_job.py
rename to providers/tests/system/google/cloud/kubernetes_engine/example_kubernetes_engine_job.py
index 2c7790c10220..a4c56c17e5ba 100644
--- a/tests/system/providers/google/cloud/kubernetes_engine/example_kubernetes_engine_job.py
+++ b/providers/tests/system/google/cloud/kubernetes_engine/example_kubernetes_engine_job.py
@@ -179,14 +179,14 @@
         delete_cluster,
     )
 
-    from tests.system.utils.watcher import watcher
+    from dev.tests_common.test_utils.watcher import watcher
 
     # This test needs watcher in order to properly mark success/failure
     # when "teardown" task with trigger rule is part of the DAG
     list(dag.tasks) >> watcher()
 
-from tests.system.utils import get_test_run  # noqa: E402
+from dev.tests_common.test_utils.system_tests import get_test_run  # noqa: E402
 
 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest)
 test_run = get_test_run(dag)
diff --git a/tests/system/providers/google/cloud/kubernetes_engine/example_kubernetes_engine_kueue.py b/providers/tests/system/google/cloud/kubernetes_engine/example_kubernetes_engine_kueue.py
similarity index 97%
rename from tests/system/providers/google/cloud/kubernetes_engine/example_kubernetes_engine_kueue.py
rename to providers/tests/system/google/cloud/kubernetes_engine/example_kubernetes_engine_kueue.py
index 5858256802a3..06c23432a923 100644
--- a/tests/system/providers/google/cloud/kubernetes_engine/example_kubernetes_engine_kueue.py
+++ b/providers/tests/system/google/cloud/kubernetes_engine/example_kubernetes_engine_kueue.py
@@ -179,14 +179,14 @@
         >> delete_cluster
     )
 
-    from tests.system.utils.watcher import watcher
+    from dev.tests_common.test_utils.watcher import watcher
 
     # This test needs watcher in order to properly mark success/failure
     # when "teardown" task with trigger rule is part of the DAG
     list(dag.tasks) >> watcher()
 
-from tests.system.utils import get_test_run  # noqa: E402
+from dev.tests_common.test_utils.system_tests import get_test_run  # noqa: E402
 
 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest)
 test_run = get_test_run(dag)
diff --git a/tests/system/providers/google/cloud/kubernetes_engine/example_kubernetes_engine_resource.py b/providers/tests/system/google/cloud/kubernetes_engine/example_kubernetes_engine_resource.py
similarity index 96%
rename from tests/system/providers/google/cloud/kubernetes_engine/example_kubernetes_engine_resource.py
rename to providers/tests/system/google/cloud/kubernetes_engine/example_kubernetes_engine_resource.py
index 44479099d6a7..784ba994862b 100644
--- a/tests/system/providers/google/cloud/kubernetes_engine/example_kubernetes_engine_resource.py
+++ b/providers/tests/system/google/cloud/kubernetes_engine/example_kubernetes_engine_resource.py
@@ -101,14 +101,14 @@
     create_cluster >> create_resource_task >> delete_resource_task >> delete_cluster
 
-    from tests.system.utils.watcher import watcher
+    from dev.tests_common.test_utils.watcher import watcher
 
     # This test needs watcher in order to properly mark success/failure
     # when "teardown" task with trigger rule is part of the DAG
     list(dag.tasks) >> watcher()
 
-from tests.system.utils import get_test_run  # noqa: E402
+from dev.tests_common.test_utils.system_tests import get_test_run  # noqa: E402
 
 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest)
 test_run = get_test_run(dag)
diff --git a/tests/system/providers/google/cloud/composer/__init__.py b/providers/tests/system/google/cloud/life_sciences/__init__.py
similarity index 100%
rename from tests/system/providers/google/cloud/composer/__init__.py
rename to providers/tests/system/google/cloud/life_sciences/__init__.py
diff --git a/tests/system/providers/google/cloud/life_sciences/example_life_sciences.py b/providers/tests/system/google/cloud/life_sciences/example_life_sciences.py
similarity index 95%
rename from tests/system/providers/google/cloud/life_sciences/example_life_sciences.py
rename to providers/tests/system/google/cloud/life_sciences/example_life_sciences.py
index f129dc38ea54..170eeb39ad1a 100644
--- a/tests/system/providers/google/cloud/life_sciences/example_life_sciences.py
+++ b/providers/tests/system/google/cloud/life_sciences/example_life_sciences.py
@@ -27,7 +27,8 @@
 from airflow.providers.google.cloud.operators.life_sciences import LifeSciencesRunPipelineOperator
 from airflow.providers.google.cloud.transfers.local_to_gcs import LocalFilesystemToGCSOperator
 from airflow.utils.trigger_rule import TriggerRule
-from tests.system.providers.google import DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID
+
+from providers.tests.system.google import DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID
 
 ENV_ID = os.environ.get("SYSTEM_TESTS_ENV_ID", "default")
 PROJECT_ID = os.environ.get("SYSTEM_TESTS_GCP_PROJECT") or DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID
@@ -130,14 +131,14 @@
         delete_bucket,
     )
 
-    from tests.system.utils.watcher import watcher
+    from dev.tests_common.test_utils.watcher import watcher
 
     # This test needs watcher in order to properly mark success/failure
     # when "tearDown" task with trigger rule is part of the DAG
     list(dag.tasks) >> watcher()
 
-from tests.system.utils import get_test_run  # noqa: E402
+from dev.tests_common.test_utils.system_tests import get_test_run  # noqa: E402
 
 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest)
 test_run = get_test_run(dag)
diff --git a/tests/system/providers/google/cloud/compute/__init__.py b/providers/tests/system/google/cloud/life_sciences/resources/__init__.py
similarity index 100%
rename from tests/system/providers/google/cloud/compute/__init__.py
rename to providers/tests/system/google/cloud/life_sciences/resources/__init__.py
diff --git a/tests/system/providers/google/cloud/life_sciences/resources/file b/providers/tests/system/google/cloud/life_sciences/resources/file
similarity index 100%
rename from tests/system/providers/google/cloud/life_sciences/resources/file
rename to providers/tests/system/google/cloud/life_sciences/resources/file
diff --git a/tests/system/providers/google/cloud/data_loss_prevention/__init__.py b/providers/tests/system/google/cloud/ml_engine/__init__.py
similarity index 100%
rename from tests/system/providers/google/cloud/data_loss_prevention/__init__.py
rename to providers/tests/system/google/cloud/ml_engine/__init__.py
diff --git a/tests/system/providers/google/cloud/ml_engine/example_mlengine.py b/providers/tests/system/google/cloud/ml_engine/example_mlengine.py
similarity index 98%
rename from tests/system/providers/google/cloud/ml_engine/example_mlengine.py
rename to providers/tests/system/google/cloud/ml_engine/example_mlengine.py
index bde2c0bbaf9e..f0eaf7cb3017 100644
--- a/tests/system/providers/google/cloud/ml_engine/example_mlengine.py
+++ b/providers/tests/system/google/cloud/ml_engine/example_mlengine.py
@@ -280,13 +280,13 @@
         >> delete_bucket
     )
 
-    from tests.system.utils.watcher import watcher
+    from dev.tests_common.test_utils.watcher import watcher
 
     # This test needs watcher in order to properly mark success/failure
     # when "tearDown" task with trigger rule is part of the DAG
     list(dag.tasks) >> watcher()
 
-from tests.system.utils import get_test_run  # noqa: E402
+from dev.tests_common.test_utils.system_tests import get_test_run  # noqa: E402
 
 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest)
 test_run = get_test_run(dag)
diff --git a/tests/system/providers/google/cloud/data_loss_prevention/resources/__init__.py b/providers/tests/system/google/cloud/natural_language/__init__.py
similarity index 100%
rename from tests/system/providers/google/cloud/data_loss_prevention/resources/__init__.py
rename to providers/tests/system/google/cloud/natural_language/__init__.py
diff --git a/tests/system/providers/google/cloud/natural_language/example_natural_language.py b/providers/tests/system/google/cloud/natural_language/example_natural_language.py
similarity index 97%
rename from tests/system/providers/google/cloud/natural_language/example_natural_language.py
rename to providers/tests/system/google/cloud/natural_language/example_natural_language.py
index e04fdf4fb601..cc1aba0f165c 100644
--- a/tests/system/providers/google/cloud/natural_language/example_natural_language.py
+++ b/providers/tests/system/google/cloud/natural_language/example_natural_language.py
@@ -119,13 +119,13 @@
     analyze_sentiment >> analyze_sentiment_result
     analyze_classify_text >> analyze_classify_text_result
 
-    from tests.system.utils.watcher import watcher
+    from dev.tests_common.test_utils.watcher import watcher
 
     # This test needs watcher in order to properly mark success/failure
     # when "teardown" task with trigger rule is part of the DAG
     list(dag.tasks) >> watcher()
 
-from tests.system.utils import get_test_run  # noqa: E402
+from dev.tests_common.test_utils.system_tests import get_test_run  # noqa: E402
 
 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest)
 test_run = get_test_run(dag)
diff --git a/tests/system/providers/google/cloud/dataflow/__init__.py b/providers/tests/system/google/cloud/pubsub/__init__.py
similarity index 100%
rename from tests/system/providers/google/cloud/dataflow/__init__.py
rename to providers/tests/system/google/cloud/pubsub/__init__.py
diff --git a/tests/system/providers/google/cloud/pubsub/example_pubsub.py b/providers/tests/system/google/cloud/pubsub/example_pubsub.py
similarity index 97%
rename from tests/system/providers/google/cloud/pubsub/example_pubsub.py
rename to providers/tests/system/google/cloud/pubsub/example_pubsub.py
index 4ff3091e5fa5..93ab1cde6e0a 100644
--- a/tests/system/providers/google/cloud/pubsub/example_pubsub.py
+++ b/providers/tests/system/google/cloud/pubsub/example_pubsub.py
@@ -146,14 +146,14 @@
     # ### Everything below this line is not part of example ###
     # ### Just for system tests purpose ###
 
-    from tests.system.utils.watcher import watcher
+    from dev.tests_common.test_utils.watcher import watcher
 
     # This test needs watcher in order to properly mark success/failure
     # when "tearDown" task with trigger rule is part of the DAG
     list(dag.tasks) >> watcher()
 
-from tests.system.utils import get_test_run  # noqa: E402
+from dev.tests_common.test_utils.system_tests import get_test_run  # noqa: E402
 
 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest)
 test_run = get_test_run(dag)
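The other half of the boilerplate, `get_test_run`, only needs to hand pytest a callable that executes the DAG in-process. A rough sketch of the shape, assuming Airflow 2.5+'s `dag.test()`; the real helper in dev.tests_common.test_utils.system_tests carries extra setup:

from airflow.models.dag import DAG
from airflow.utils.state import DagRunState


def get_test_run(dag: DAG):
    def test_run():
        # dag.test() executes every task in a single local process, which is
        # what lets `pytest path/to/example_dag.py` drive the whole DAG.
        dag_run = dag.test()
        assert dag_run.state == DagRunState.SUCCESS

    return test_run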
diff --git a/tests/system/providers/google/cloud/pubsub/example_pubsub_deferrable.py b/providers/tests/system/google/cloud/pubsub/example_pubsub_deferrable.py
similarity index 96%
rename from tests/system/providers/google/cloud/pubsub/example_pubsub_deferrable.py
rename to providers/tests/system/google/cloud/pubsub/example_pubsub_deferrable.py
index 22c0d012aea9..a902ea5617f6 100644
--- a/tests/system/providers/google/cloud/pubsub/example_pubsub_deferrable.py
+++ b/providers/tests/system/google/cloud/pubsub/example_pubsub_deferrable.py
@@ -101,14 +101,14 @@
     # ### Everything below this line is not part of example ###
     # ### Just for system tests purpose ###
 
-    from tests.system.utils.watcher import watcher
+    from dev.tests_common.test_utils.watcher import watcher
 
     # This test needs watcher in order to properly mark success/failure
     # when "tearDown" task with trigger rule is part of the DAG
     list(dag.tasks) >> watcher()
 
-from tests.system.utils import get_test_run  # noqa: E402
+from dev.tests_common.test_utils.system_tests import get_test_run  # noqa: E402
 
 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest)
 test_run = get_test_run(dag)
diff --git a/tests/system/providers/google/cloud/dataflow/resources/__init__.py b/providers/tests/system/google/cloud/spanner/__init__.py
similarity index 100%
rename from tests/system/providers/google/cloud/dataflow/resources/__init__.py
rename to providers/tests/system/google/cloud/spanner/__init__.py
diff --git a/tests/system/providers/google/cloud/spanner/example_spanner.py b/providers/tests/system/google/cloud/spanner/example_spanner.py
similarity index 96%
rename from tests/system/providers/google/cloud/spanner/example_spanner.py
rename to providers/tests/system/google/cloud/spanner/example_spanner.py
index a7bef8f6ecd0..b11a5cd61f92 100644
--- a/tests/system/providers/google/cloud/spanner/example_spanner.py
+++ b/providers/tests/system/google/cloud/spanner/example_spanner.py
@@ -34,7 +34,8 @@
     SpannerUpdateDatabaseInstanceOperator,
 )
 from airflow.utils.trigger_rule import TriggerRule
-from tests.system.providers.google import DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID
+
+from providers.tests.system.google import DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID
 
 ENV_ID = os.environ.get("SYSTEM_TESTS_ENV_ID", "default")
 PROJECT_ID = os.environ.get("SYSTEM_TESTS_GCP_PROJECT") or DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID
@@ -160,14 +161,14 @@
     # ### Everything below this line is not part of example ###
     # ### Just for system tests purpose ###
 
-    from tests.system.utils.watcher import watcher
+    from dev.tests_common.test_utils.watcher import watcher
 
     # This test needs watcher in order to properly mark success/failure
     # when "tearDown" task with trigger rule is part of the DAG
     list(dag.tasks) >> watcher()
 
-from tests.system.utils import get_test_run  # noqa: E402
+from dev.tests_common.test_utils.system_tests import get_test_run  # noqa: E402
 
 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest)
 test_run = get_test_run(dag)
diff --git a/tests/system/providers/google/cloud/dataform/__init__.py b/providers/tests/system/google/cloud/speech_to_text/__init__.py
similarity index 100%
rename from tests/system/providers/google/cloud/dataform/__init__.py
rename to providers/tests/system/google/cloud/speech_to_text/__init__.py
diff --git a/tests/system/providers/google/cloud/speech_to_text/example_speech_to_text.py b/providers/tests/system/google/cloud/speech_to_text/example_speech_to_text.py
similarity index 94%
rename from tests/system/providers/google/cloud/speech_to_text/example_speech_to_text.py
rename to providers/tests/system/google/cloud/speech_to_text/example_speech_to_text.py
index d551144a5341..f2382a6053aa 100644
--- a/tests/system/providers/google/cloud/speech_to_text/example_speech_to_text.py
+++ b/providers/tests/system/google/cloud/speech_to_text/example_speech_to_text.py
@@ -27,7 +27,8 @@
 from airflow.providers.google.cloud.operators.speech_to_text import CloudSpeechToTextRecognizeSpeechOperator
 from airflow.providers.google.cloud.operators.text_to_speech import CloudTextToSpeechSynthesizeOperator
 from airflow.utils.trigger_rule import TriggerRule
-from tests.system.providers.google import DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID
+
+from providers.tests.system.google import DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID
 
 ENV_ID = os.environ.get("SYSTEM_TESTS_ENV_ID", "default")
 PROJECT_ID = os.environ.get("SYSTEM_TESTS_GCP_PROJECT") or DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID
@@ -88,14 +89,14 @@
         >> delete_bucket
     )
 
-    from tests.system.utils.watcher import watcher
+    from dev.tests_common.test_utils.watcher import watcher
 
     # This test needs watcher in order to properly mark success/failure
     # when "tearDown" task with trigger rule is part of the DAG
     list(dag.tasks) >> watcher()
 
-from tests.system.utils import get_test_run  # noqa: E402
+from dev.tests_common.test_utils.system_tests import get_test_run  # noqa: E402
 
 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest)
 test_run = get_test_run(dag)
diff --git a/tests/system/providers/google/cloud/datafusion/__init__.py b/providers/tests/system/google/cloud/sql_to_sheets/__init__.py
similarity index 100%
rename from tests/system/providers/google/cloud/datafusion/__init__.py
rename to providers/tests/system/google/cloud/sql_to_sheets/__init__.py
diff --git a/tests/system/providers/google/cloud/sql_to_sheets/example_sql_to_sheets.py b/providers/tests/system/google/cloud/sql_to_sheets/example_sql_to_sheets.py
similarity index 98%
rename from tests/system/providers/google/cloud/sql_to_sheets/example_sql_to_sheets.py
rename to providers/tests/system/google/cloud/sql_to_sheets/example_sql_to_sheets.py
index 11231c0dfd40..d34531ba52a0 100644
--- a/tests/system/providers/google/cloud/sql_to_sheets/example_sql_to_sheets.py
+++ b/providers/tests/system/google/cloud/sql_to_sheets/example_sql_to_sheets.py
@@ -315,13 +315,13 @@ def delete_connection(connection_id: str) -> None:
     ]
     delete_gce_instance >> delete_persistent_disk
 
-    from tests.system.utils.watcher import watcher
+    from dev.tests_common.test_utils.watcher import watcher
 
     # This test needs watcher in order to properly mark success/failure
     # when "tearDown" task with trigger rule is part of the DAG
     list(dag.tasks) >> watcher()
 
-from tests.system.utils import get_test_run  # noqa: E402
+from dev.tests_common.test_utils.system_tests import get_test_run  # noqa: E402
 
 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest)
 test_run = get_test_run(dag)
diff --git a/tests/system/providers/google/cloud/datapipelines/__init__.py b/providers/tests/system/google/cloud/stackdriver/__init__.py
similarity index 100%
rename from tests/system/providers/google/cloud/datapipelines/__init__.py
rename to providers/tests/system/google/cloud/stackdriver/__init__.py
diff --git a/tests/system/providers/google/cloud/stackdriver/example_stackdriver.py b/providers/tests/system/google/cloud/stackdriver/example_stackdriver.py
similarity index 97%
rename from tests/system/providers/google/cloud/stackdriver/example_stackdriver.py
rename to providers/tests/system/google/cloud/stackdriver/example_stackdriver.py
index ea8690e70dbe..202c9bcfbdcc 100644
--- a/tests/system/providers/google/cloud/stackdriver/example_stackdriver.py
+++ b/providers/tests/system/google/cloud/stackdriver/example_stackdriver.py
@@ -40,7 +40,8 @@
     StackdriverUpsertNotificationChannelOperator,
 )
 from airflow.utils.trigger_rule import TriggerRule
-from tests.system.providers.google import DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID
+
+from providers.tests.system.google import DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID
 
 ENV_ID = os.environ.get("SYSTEM_TESTS_ENV_ID", "default")
 PROJECT_ID = os.environ.get("SYSTEM_TESTS_GCP_PROJECT") or DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID
@@ -227,13 +228,13 @@
         delete_alert_policy_2,
     )
 
-    from tests.system.utils.watcher import watcher
+    from dev.tests_common.test_utils.watcher import watcher
 
     # This test needs watcher in order to properly mark success/failure
     # when "tearDown" task with trigger rule is part of the DAG
     list(dag.tasks) >> watcher()
 
-from tests.system.utils import get_test_run  # noqa: E402
+from dev.tests_common.test_utils.system_tests import get_test_run  # noqa: E402
 
 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest)
 test_run = get_test_run(dag)
diff --git a/tests/system/providers/google/cloud/dataplex/__init__.py b/providers/tests/system/google/cloud/storage_transfer/__init__.py
similarity index 100%
rename from tests/system/providers/google/cloud/dataplex/__init__.py
rename to providers/tests/system/google/cloud/storage_transfer/__init__.py
diff --git a/tests/system/providers/google/cloud/storage_transfer/example_cloud_storage_transfer_service_aws.py b/providers/tests/system/google/cloud/storage_transfer/example_cloud_storage_transfer_service_aws.py
similarity index 98%
rename from tests/system/providers/google/cloud/storage_transfer/example_cloud_storage_transfer_service_aws.py
rename to providers/tests/system/google/cloud/storage_transfer/example_cloud_storage_transfer_service_aws.py
index e42be683ea52..4d6a01095440 100644
--- a/tests/system/providers/google/cloud/storage_transfer/example_cloud_storage_transfer_service_aws.py
+++ b/providers/tests/system/google/cloud/storage_transfer/example_cloud_storage_transfer_service_aws.py
@@ -62,7 +62,8 @@
     CloudDataTransferServiceJobStatusSensor,
 )
 from airflow.utils.trigger_rule import TriggerRule
-from tests.system.providers.google import DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID
+
+from providers.tests.system.google import DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID
 
 ENV_ID = os.environ.get("SYSTEM_TESTS_ENV_ID", "default")
 GCP_PROJECT_ID = os.environ.get("SYSTEM_TESTS_GCP_PROJECT") or DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID
@@ -253,14 +254,14 @@
         ]
     )
 
-    from tests.system.utils.watcher import watcher
+    from dev.tests_common.test_utils.watcher import watcher
 
     # This test needs watcher in order to properly mark success/failure
     # when "tearDown" task with trigger rule is part of the DAG
     list(dag.tasks) >> watcher()
 
-from tests.system.utils import get_test_run  # noqa: E402
+from dev.tests_common.test_utils.system_tests import get_test_run  # noqa: E402
 
 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest)
 test_run = get_test_run(dag)
diff --git a/tests/system/providers/google/cloud/storage_transfer/example_cloud_storage_transfer_service_gcp.py b/providers/tests/system/google/cloud/storage_transfer/example_cloud_storage_transfer_service_gcp.py
similarity index 98%
rename from tests/system/providers/google/cloud/storage_transfer/example_cloud_storage_transfer_service_gcp.py
rename to providers/tests/system/google/cloud/storage_transfer/example_cloud_storage_transfer_service_gcp.py
index 2920409ce754..b2cef2f831b5 100644
--- a/tests/system/providers/google/cloud/storage_transfer/example_cloud_storage_transfer_service_gcp.py
+++ b/providers/tests/system/google/cloud/storage_transfer/example_cloud_storage_transfer_service_gcp.py
@@ -195,13 +195,13 @@
         >> [delete_transfer, delete_bucket_src, delete_bucket_dst]
     )
 
-    from tests.system.utils.watcher import watcher
+    from dev.tests_common.test_utils.watcher import watcher
 
     # This test needs watcher in order to properly mark success/failure
     # when "tearDown" task with trigger rule is part of the DAG
     list(dag.tasks) >> watcher()
 
-from tests.system.utils import get_test_run  # noqa: E402
+from dev.tests_common.test_utils.system_tests import get_test_run  # noqa: E402
 
 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest)
 test_run = get_test_run(dag)
diff --git a/tests/system/providers/google/cloud/storage_transfer/example_cloud_storage_transfer_service_gcs_to_gcs.py b/providers/tests/system/google/cloud/storage_transfer/example_cloud_storage_transfer_service_gcs_to_gcs.py
similarity index 96%
rename from tests/system/providers/google/cloud/storage_transfer/example_cloud_storage_transfer_service_gcs_to_gcs.py
rename to providers/tests/system/google/cloud/storage_transfer/example_cloud_storage_transfer_service_gcs_to_gcs.py
index 53752d207913..e670eba75542 100644
--- a/tests/system/providers/google/cloud/storage_transfer/example_cloud_storage_transfer_service_gcs_to_gcs.py
+++ b/providers/tests/system/google/cloud/storage_transfer/example_cloud_storage_transfer_service_gcs_to_gcs.py
@@ -106,13 +106,13 @@
         >> [delete_bucket_src, delete_bucket_dst]
     )
 
-    from tests.system.utils.watcher import watcher
+    from dev.tests_common.test_utils.watcher import watcher
 
     # This test needs watcher in order to properly mark success/failure
     # when "tearDown" task with trigger rule is part of the DAG
     list(dag.tasks) >> watcher()
 
-from tests.system.utils import get_test_run  # noqa: E402
+from dev.tests_common.test_utils.system_tests import get_test_run  # noqa: E402
 
 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest)
 test_run = get_test_run(dag)
diff --git a/tests/system/providers/google/cloud/dataprep/__init__.py b/providers/tests/system/google/cloud/storage_transfer/resources/__init__.py
similarity index 100%
rename from tests/system/providers/google/cloud/dataprep/__init__.py
rename to providers/tests/system/google/cloud/storage_transfer/resources/__init__.py
diff --git a/tests/system/providers/google/cloud/storage_transfer/resources/transfer_service_gcp_file b/providers/tests/system/google/cloud/storage_transfer/resources/transfer_service_gcp_file
similarity index 100%
rename from tests/system/providers/google/cloud/storage_transfer/resources/transfer_service_gcp_file
rename to providers/tests/system/google/cloud/storage_transfer/resources/transfer_service_gcp_file
diff --git a/tests/system/providers/google/cloud/storage_transfer/resources/transfer_service_gcs_to_gcs_file b/providers/tests/system/google/cloud/storage_transfer/resources/transfer_service_gcs_to_gcs_file
similarity index 100%
rename from tests/system/providers/google/cloud/storage_transfer/resources/transfer_service_gcs_to_gcs_file
rename to providers/tests/system/google/cloud/storage_transfer/resources/transfer_service_gcs_to_gcs_file
diff --git a/tests/system/providers/google/cloud/dataproc/__init__.py b/providers/tests/system/google/cloud/tasks/__init__.py
similarity index 100%
rename from tests/system/providers/google/cloud/dataproc/__init__.py
rename to providers/tests/system/google/cloud/tasks/__init__.py
diff --git a/tests/system/providers/google/cloud/tasks/example_queue.py b/providers/tests/system/google/cloud/tasks/example_queue.py
similarity index 97%
rename from tests/system/providers/google/cloud/tasks/example_queue.py
rename to providers/tests/system/google/cloud/tasks/example_queue.py
index 4c29b584f5bf..9797a29d04e2 100644
--- a/tests/system/providers/google/cloud/tasks/example_queue.py
+++ b/providers/tests/system/google/cloud/tasks/example_queue.py
@@ -161,14 +161,14 @@ def generate_random_string():
         delete_queue,
     )
 
-    from tests.system.utils.watcher import watcher
+    from dev.tests_common.test_utils.watcher import watcher
 
     # This test needs watcher in order to properly mark success/failure
     # when "tearDown" task with trigger rule is part of the DAG
     list(dag.tasks) >> watcher()
 
-from tests.system.utils import get_test_run  # noqa: E402
+from dev.tests_common.test_utils.system_tests import get_test_run  # noqa: E402
 
 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest)
 test_run = get_test_run(dag)
diff --git a/tests/system/providers/google/cloud/tasks/example_tasks.py b/providers/tests/system/google/cloud/tasks/example_tasks.py
similarity index 97%
rename from tests/system/providers/google/cloud/tasks/example_tasks.py
rename to providers/tests/system/google/cloud/tasks/example_tasks.py
index 1a85ac53187b..0eae95fd1075 100644
--- a/tests/system/providers/google/cloud/tasks/example_tasks.py
+++ b/providers/tests/system/google/cloud/tasks/example_tasks.py
@@ -152,14 +152,14 @@ def generate_random_string():
         random_string, create_queue, create_task, tasks_get, list_tasks, run_task, delete_task, delete_queue
     )
 
-    from tests.system.utils.watcher import watcher
+    from dev.tests_common.test_utils.watcher import watcher
 
     # This test needs watcher in order to properly mark success/failure
     # when "tearDown" task with trigger rule is part of the DAG
     list(dag.tasks) >> watcher()
 
-from tests.system.utils import get_test_run  # noqa: E402
+from dev.tests_common.test_utils.system_tests import get_test_run  # noqa: E402
 
 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest)
 test_run = get_test_run(dag)
diff --git a/tests/system/providers/google/cloud/dataproc_metastore/__init__.py b/providers/tests/system/google/cloud/text_to_speech/__init__.py
similarity index 100%
rename from tests/system/providers/google/cloud/dataproc_metastore/__init__.py
rename to providers/tests/system/google/cloud/text_to_speech/__init__.py
diff --git a/tests/system/providers/google/cloud/text_to_speech/example_text_to_speech.py b/providers/tests/system/google/cloud/text_to_speech/example_text_to_speech.py
similarity index 93%
rename from tests/system/providers/google/cloud/text_to_speech/example_text_to_speech.py
rename to providers/tests/system/google/cloud/text_to_speech/example_text_to_speech.py
index f5c12a44a4ab..0227049508a7 100644
--- a/tests/system/providers/google/cloud/text_to_speech/example_text_to_speech.py
+++ b/providers/tests/system/google/cloud/text_to_speech/example_text_to_speech.py
@@ -24,7 +24,8 @@
 from airflow.providers.google.cloud.operators.gcs import GCSCreateBucketOperator, GCSDeleteBucketOperator
 from airflow.providers.google.cloud.operators.text_to_speech import CloudTextToSpeechSynthesizeOperator
 from airflow.utils.trigger_rule import TriggerRule
-from tests.system.providers.google import DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID
+
+from providers.tests.system.google import DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID
 
 ENV_ID = os.environ.get("SYSTEM_TESTS_ENV_ID", "default")
 PROJECT_ID = os.environ.get("SYSTEM_TESTS_GCP_PROJECT") or DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID
@@ -77,14 +78,14 @@
         >> delete_bucket
     )
 
-    from tests.system.utils.watcher import watcher
+    from dev.tests_common.test_utils.watcher import watcher
 
     # This test needs watcher in order to properly mark success/failure
     # when
"tearDown" task with trigger rule is part of the DAG list(dag.tasks) >> watcher() -from tests.system.utils import get_test_run # noqa: E402 +from dev.tests_common.test_utils.system_tests import get_test_run # noqa: E402 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) test_run = get_test_run(dag) diff --git a/tests/system/providers/google/cloud/datastore/__init__.py b/providers/tests/system/google/cloud/transfers/__init__.py similarity index 100% rename from tests/system/providers/google/cloud/datastore/__init__.py rename to providers/tests/system/google/cloud/transfers/__init__.py diff --git a/tests/system/providers/google/cloud/transfers/example_gcs_to_sftp.py b/providers/tests/system/google/cloud/transfers/example_gcs_to_sftp.py similarity index 96% rename from tests/system/providers/google/cloud/transfers/example_gcs_to_sftp.py rename to providers/tests/system/google/cloud/transfers/example_gcs_to_sftp.py index 0caed0affa19..8d3f19416313 100644 --- a/tests/system/providers/google/cloud/transfers/example_gcs_to_sftp.py +++ b/providers/tests/system/google/cloud/transfers/example_gcs_to_sftp.py @@ -31,7 +31,8 @@ from airflow.providers.google.cloud.transfers.local_to_gcs import LocalFilesystemToGCSOperator from airflow.providers.sftp.sensors.sftp import SFTPSensor from airflow.utils.trigger_rule import TriggerRule -from tests.system.providers.google import DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID + +from providers.tests.system.google import DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID ENV_ID = os.environ.get("SYSTEM_TESTS_ENV_ID", "default") PROJECT_ID = os.environ.get("SYSTEM_TESTS_GCP_PROJECT") or DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID @@ -170,14 +171,14 @@ >> delete_bucket ) - from tests.system.utils.watcher import watcher + from dev.tests_common.test_utils.watcher import watcher # This test needs watcher in order to properly mark success/failure # when "tearDown" task with trigger rule is part of the DAG list(dag.tasks) >> watcher() -from tests.system.utils import get_test_run # noqa: E402 +from dev.tests_common.test_utils.system_tests import get_test_run # noqa: E402 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) test_run = get_test_run(dag) diff --git a/tests/system/providers/google/cloud/transfers/example_gdrive_to_local.py b/providers/tests/system/google/cloud/transfers/example_gdrive_to_local.py similarity index 96% rename from tests/system/providers/google/cloud/transfers/example_gdrive_to_local.py rename to providers/tests/system/google/cloud/transfers/example_gdrive_to_local.py index d9ac60301b77..abfcbabce085 100644 --- a/tests/system/providers/google/cloud/transfers/example_gdrive_to_local.py +++ b/providers/tests/system/google/cloud/transfers/example_gdrive_to_local.py @@ -40,7 +40,8 @@ from airflow.providers.google.suite.transfers.gcs_to_gdrive import GCSToGoogleDriveOperator from airflow.settings import Session, json from airflow.utils.trigger_rule import TriggerRule -from tests.system.providers.google import DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID + +from providers.tests.system.google import DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID ENV_ID = os.environ.get("SYSTEM_TESTS_ENV_ID", "default") PROJECT_ID = os.environ.get("SYSTEM_TESTS_GCP_PROJECT") or DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID @@ -166,13 +167,13 @@ def delete_connection(connection_id: str) -> None: >> [delete_bucket, delete_connection_task] ) - from tests.system.utils.watcher import watcher + from dev.tests_common.test_utils.watcher import watcher # This test needs 
watcher in order to properly mark success/failure # when "tearDown" task with trigger rule is part of the DAG list(dag.tasks) >> watcher() -from tests.system.utils import get_test_run # noqa: E402 +from dev.tests_common.test_utils.system_tests import get_test_run # noqa: E402 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) test_run = get_test_run(dag) diff --git a/tests/system/providers/google/cloud/transfers/example_postgres_to_gcs.py b/providers/tests/system/google/cloud/transfers/example_postgres_to_gcs.py similarity index 98% rename from tests/system/providers/google/cloud/transfers/example_postgres_to_gcs.py rename to providers/tests/system/google/cloud/transfers/example_postgres_to_gcs.py index 33a289c1ffa1..91bde027982e 100644 --- a/tests/system/providers/google/cloud/transfers/example_postgres_to_gcs.py +++ b/providers/tests/system/google/cloud/transfers/example_postgres_to_gcs.py @@ -290,13 +290,13 @@ def delete_connection(connection_id: str) -> None: postgres_to_gcs >> [delete_gcs_bucket, delete_firewall_rule, delete_gce_instance, delete_connection_task] delete_gce_instance >> delete_persistent_disk - from tests.system.utils.watcher import watcher + from dev.tests_common.test_utils.watcher import watcher # This test needs watcher in order to properly mark success/failure # when "tearDown" task with trigger rule is part of the DAG list(dag.tasks) >> watcher() -from tests.system.utils import get_test_run # noqa: E402 +from dev.tests_common.test_utils.system_tests import get_test_run # noqa: E402 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) test_run = get_test_run(dag) diff --git a/tests/system/providers/google/cloud/gcs/__init__.py b/providers/tests/system/google/cloud/transfers/resources/__init__.py similarity index 100% rename from tests/system/providers/google/cloud/gcs/__init__.py rename to providers/tests/system/google/cloud/transfers/resources/__init__.py diff --git a/tests/system/providers/google/cloud/transfers/resources/empty.txt b/providers/tests/system/google/cloud/transfers/resources/empty.txt similarity index 100% rename from tests/system/providers/google/cloud/transfers/resources/empty.txt rename to providers/tests/system/google/cloud/transfers/resources/empty.txt diff --git a/tests/system/providers/google/cloud/gcs/resources/__init__.py b/providers/tests/system/google/cloud/translate/__init__.py similarity index 100% rename from tests/system/providers/google/cloud/gcs/resources/__init__.py rename to providers/tests/system/google/cloud/translate/__init__.py diff --git a/tests/system/providers/google/cloud/translate/example_translate.py b/providers/tests/system/google/cloud/translate/example_translate.py similarity index 94% rename from tests/system/providers/google/cloud/translate/example_translate.py rename to providers/tests/system/google/cloud/translate/example_translate.py index b593060f6e5b..13a3ee061e30 100644 --- a/tests/system/providers/google/cloud/translate/example_translate.py +++ b/providers/tests/system/google/cloud/translate/example_translate.py @@ -56,14 +56,14 @@ # ### Everything below this line is not part of example ### # ### Just for system tests purpose ### - from tests.system.utils.watcher import watcher + from dev.tests_common.test_utils.watcher import watcher # This test needs watcher in order to properly mark success/failure # when "tearDown" task with trigger rule is part of the DAG list(dag.tasks) >> watcher() -from tests.system.utils import get_test_run # 
noqa: E402 +from dev.tests_common.test_utils.system_tests import get_test_run # noqa: E402 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) test_run = get_test_run(dag) diff --git a/tests/system/providers/google/cloud/kubernetes_engine/__init__.py b/providers/tests/system/google/cloud/translate_speech/__init__.py similarity index 100% rename from tests/system/providers/google/cloud/kubernetes_engine/__init__.py rename to providers/tests/system/google/cloud/translate_speech/__init__.py diff --git a/tests/system/providers/google/cloud/translate_speech/example_translate_speech.py b/providers/tests/system/google/cloud/translate_speech/example_translate_speech.py similarity index 96% rename from tests/system/providers/google/cloud/translate_speech/example_translate_speech.py rename to providers/tests/system/google/cloud/translate_speech/example_translate_speech.py index bb50adb4b62c..4fe34d0b86f5 100644 --- a/tests/system/providers/google/cloud/translate_speech/example_translate_speech.py +++ b/providers/tests/system/google/cloud/translate_speech/example_translate_speech.py @@ -110,14 +110,14 @@ >> delete_bucket ) - from tests.system.utils.watcher import watcher + from dev.tests_common.test_utils.watcher import watcher # This test needs watcher in order to properly mark success/failure # when "tearDown" task with trigger rule is part of the DAG list(dag.tasks) >> watcher() -from tests.system.utils import get_test_run # noqa: E402 +from dev.tests_common.test_utils.system_tests import get_test_run # noqa: E402 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) test_run = get_test_run(dag) diff --git a/tests/system/providers/google/cloud/life_sciences/__init__.py b/providers/tests/system/google/cloud/vertex_ai/__init__.py similarity index 100% rename from tests/system/providers/google/cloud/life_sciences/__init__.py rename to providers/tests/system/google/cloud/vertex_ai/__init__.py diff --git a/tests/system/providers/google/cloud/vertex_ai/example_vertex_ai_auto_ml_forecasting_training.py b/providers/tests/system/google/cloud/vertex_ai/example_vertex_ai_auto_ml_forecasting_training.py similarity index 97% rename from tests/system/providers/google/cloud/vertex_ai/example_vertex_ai_auto_ml_forecasting_training.py rename to providers/tests/system/google/cloud/vertex_ai/example_vertex_ai_auto_ml_forecasting_training.py index 03634b58f645..8dd0ecc9c955 100644 --- a/tests/system/providers/google/cloud/vertex_ai/example_vertex_ai_auto_ml_forecasting_training.py +++ b/providers/tests/system/google/cloud/vertex_ai/example_vertex_ai_auto_ml_forecasting_training.py @@ -150,13 +150,13 @@ # ### Everything below this line is not part of example ### # ### Just for system tests purpose ### - from tests.system.utils.watcher import watcher + from dev.tests_common.test_utils.watcher import watcher # This test needs watcher in order to properly mark success/failure # when "tearDown" task with trigger rule is part of the DAG list(dag.tasks) >> watcher() -from tests.system.utils import get_test_run # noqa: E402 +from dev.tests_common.test_utils.system_tests import get_test_run # noqa: E402 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) test_run = get_test_run(dag) diff --git a/tests/system/providers/google/cloud/vertex_ai/example_vertex_ai_auto_ml_image_training.py b/providers/tests/system/google/cloud/vertex_ai/example_vertex_ai_auto_ml_image_training.py similarity index 97% rename from 
tests/system/providers/google/cloud/vertex_ai/example_vertex_ai_auto_ml_image_training.py rename to providers/tests/system/google/cloud/vertex_ai/example_vertex_ai_auto_ml_image_training.py index c26ea94325e7..bf96da992aca 100644 --- a/tests/system/providers/google/cloud/vertex_ai/example_vertex_ai_auto_ml_image_training.py +++ b/providers/tests/system/google/cloud/vertex_ai/example_vertex_ai_auto_ml_image_training.py @@ -135,13 +135,13 @@ # ### Everything below this line is not part of example ### # ### Just for system tests purpose ### - from tests.system.utils.watcher import watcher + from dev.tests_common.test_utils.watcher import watcher # This test needs watcher in order to properly mark success/failure # when "tearDown" task with trigger rule is part of the DAG list(dag.tasks) >> watcher() -from tests.system.utils import get_test_run # noqa: E402 +from dev.tests_common.test_utils.system_tests import get_test_run # noqa: E402 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) test_run = get_test_run(dag) diff --git a/tests/system/providers/google/cloud/vertex_ai/example_vertex_ai_auto_ml_list_training.py b/providers/tests/system/google/cloud/vertex_ai/example_vertex_ai_auto_ml_list_training.py similarity index 93% rename from tests/system/providers/google/cloud/vertex_ai/example_vertex_ai_auto_ml_list_training.py rename to providers/tests/system/google/cloud/vertex_ai/example_vertex_ai_auto_ml_list_training.py index 3303f219d4e1..ca6c65e1a6dd 100644 --- a/tests/system/providers/google/cloud/vertex_ai/example_vertex_ai_auto_ml_list_training.py +++ b/providers/tests/system/google/cloud/vertex_ai/example_vertex_ai_auto_ml_list_training.py @@ -52,13 +52,13 @@ # ### Everything below this line is not part of example ### # ### Just for system tests purpose ### - from tests.system.utils.watcher import watcher + from dev.tests_common.test_utils.watcher import watcher # This test needs watcher in order to properly mark success/failure # when "tearDown" task with trigger rule is part of the DAG list(dag.tasks) >> watcher() -from tests.system.utils import get_test_run # noqa: E402 +from dev.tests_common.test_utils.system_tests import get_test_run # noqa: E402 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) test_run = get_test_run(dag) diff --git a/tests/system/providers/google/cloud/vertex_ai/example_vertex_ai_auto_ml_tabular_training.py b/providers/tests/system/google/cloud/vertex_ai/example_vertex_ai_auto_ml_tabular_training.py similarity index 97% rename from tests/system/providers/google/cloud/vertex_ai/example_vertex_ai_auto_ml_tabular_training.py rename to providers/tests/system/google/cloud/vertex_ai/example_vertex_ai_auto_ml_tabular_training.py index 91260eccdea5..fb32c7475c7f 100644 --- a/tests/system/providers/google/cloud/vertex_ai/example_vertex_ai_auto_ml_tabular_training.py +++ b/providers/tests/system/google/cloud/vertex_ai/example_vertex_ai_auto_ml_tabular_training.py @@ -140,13 +140,13 @@ # ### Everything below this line is not part of example ### # ### Just for system tests purpose ### - from tests.system.utils.watcher import watcher + from dev.tests_common.test_utils.watcher import watcher # This test needs watcher in order to properly mark success/failure # when "tearDown" task with trigger rule is part of the DAG list(dag.tasks) >> watcher() -from tests.system.utils import get_test_run # noqa: E402 +from dev.tests_common.test_utils.system_tests import get_test_run # noqa: E402 # Needed to run the 
example DAG with pytest (see: tests/system/README.md#run_via_pytest) test_run = get_test_run(dag) diff --git a/tests/system/providers/google/cloud/vertex_ai/example_vertex_ai_auto_ml_video_training.py b/providers/tests/system/google/cloud/vertex_ai/example_vertex_ai_auto_ml_video_training.py similarity index 97% rename from tests/system/providers/google/cloud/vertex_ai/example_vertex_ai_auto_ml_video_training.py rename to providers/tests/system/google/cloud/vertex_ai/example_vertex_ai_auto_ml_video_training.py index cde6bb183e77..6c3db89382f2 100644 --- a/tests/system/providers/google/cloud/vertex_ai/example_vertex_ai_auto_ml_video_training.py +++ b/providers/tests/system/google/cloud/vertex_ai/example_vertex_ai_auto_ml_video_training.py @@ -145,13 +145,13 @@ # ### Everything below this line is not part of example ### # ### Just for system tests purpose ### - from tests.system.utils.watcher import watcher + from dev.tests_common.test_utils.watcher import watcher # This test needs watcher in order to properly mark success/failure # when "tearDown" task with trigger rule is part of the DAG list(dag.tasks) >> watcher() -from tests.system.utils import get_test_run # noqa: E402 +from dev.tests_common.test_utils.system_tests import get_test_run # noqa: E402 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) test_run = get_test_run(dag) diff --git a/tests/system/providers/google/cloud/vertex_ai/example_vertex_ai_batch_prediction_job.py b/providers/tests/system/google/cloud/vertex_ai/example_vertex_ai_batch_prediction_job.py similarity index 98% rename from tests/system/providers/google/cloud/vertex_ai/example_vertex_ai_batch_prediction_job.py rename to providers/tests/system/google/cloud/vertex_ai/example_vertex_ai_batch_prediction_job.py index 38198b552687..78bb9ffa6bad 100644 --- a/tests/system/providers/google/cloud/vertex_ai/example_vertex_ai_batch_prediction_job.py +++ b/providers/tests/system/google/cloud/vertex_ai/example_vertex_ai_batch_prediction_job.py @@ -231,13 +231,13 @@ # ### Everything below this line is not part of example ### # ### Just for system tests purpose ### - from tests.system.utils.watcher import watcher + from dev.tests_common.test_utils.watcher import watcher # This test needs watcher in order to properly mark success/failure # when "tearDown" task with trigger rule is part of the DAG list(dag.tasks) >> watcher() -from tests.system.utils import get_test_run # noqa: E402 +from dev.tests_common.test_utils.system_tests import get_test_run # noqa: E402 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) test_run = get_test_run(dag) diff --git a/tests/system/providers/google/cloud/vertex_ai/example_vertex_ai_custom_container.py b/providers/tests/system/google/cloud/vertex_ai/example_vertex_ai_custom_container.py similarity index 98% rename from tests/system/providers/google/cloud/vertex_ai/example_vertex_ai_custom_container.py rename to providers/tests/system/google/cloud/vertex_ai/example_vertex_ai_custom_container.py index dc09a8be90ed..1295d20983ee 100644 --- a/tests/system/providers/google/cloud/vertex_ai/example_vertex_ai_custom_container.py +++ b/providers/tests/system/google/cloud/vertex_ai/example_vertex_ai_custom_container.py @@ -208,13 +208,13 @@ def TABULAR_DATASET(bucket_name): # ### Everything below this line is not part of example ### # ### Just for system tests purpose ### - from tests.system.utils.watcher import watcher + from dev.tests_common.test_utils.watcher import watcher # This 
test needs watcher in order to properly mark success/failure # when "tearDown" task with trigger rule is part of the DAG list(dag.tasks) >> watcher() -from tests.system.utils import get_test_run # noqa: E402 +from dev.tests_common.test_utils.system_tests import get_test_run # noqa: E402 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) test_run = get_test_run(dag) diff --git a/tests/system/providers/google/cloud/vertex_ai/example_vertex_ai_custom_job.py b/providers/tests/system/google/cloud/vertex_ai/example_vertex_ai_custom_job.py similarity index 98% rename from tests/system/providers/google/cloud/vertex_ai/example_vertex_ai_custom_job.py rename to providers/tests/system/google/cloud/vertex_ai/example_vertex_ai_custom_job.py index 8762feb85ba3..3fa6169cb275 100644 --- a/tests/system/providers/google/cloud/vertex_ai/example_vertex_ai_custom_job.py +++ b/providers/tests/system/google/cloud/vertex_ai/example_vertex_ai_custom_job.py @@ -250,13 +250,13 @@ def TABULAR_DATASET(bucket_name): # ### Everything below this line is not part of example ### # ### Just for system tests purpose ### - from tests.system.utils.watcher import watcher + from dev.tests_common.test_utils.watcher import watcher # This test needs watcher in order to properly mark success/failure # when "tearDown" task with trigger rule is part of the DAG list(dag.tasks) >> watcher() -from tests.system.utils import get_test_run # noqa: E402 +from dev.tests_common.test_utils.system_tests import get_test_run # noqa: E402 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) test_run = get_test_run(dag) diff --git a/tests/system/providers/google/cloud/vertex_ai/example_vertex_ai_custom_job_python_package.py b/providers/tests/system/google/cloud/vertex_ai/example_vertex_ai_custom_job_python_package.py similarity index 98% rename from tests/system/providers/google/cloud/vertex_ai/example_vertex_ai_custom_job_python_package.py rename to providers/tests/system/google/cloud/vertex_ai/example_vertex_ai_custom_job_python_package.py index 49a8d870bc39..fbc40b888216 100644 --- a/tests/system/providers/google/cloud/vertex_ai/example_vertex_ai_custom_job_python_package.py +++ b/providers/tests/system/google/cloud/vertex_ai/example_vertex_ai_custom_job_python_package.py @@ -211,13 +211,13 @@ def TABULAR_DATASET(bucket_name): # ### Everything below this line is not part of example ### # ### Just for system tests purpose ### - from tests.system.utils.watcher import watcher + from dev.tests_common.test_utils.watcher import watcher # This test needs watcher in order to properly mark success/failure # when "tearDown" task with trigger rule is part of the DAG list(dag.tasks) >> watcher() -from tests.system.utils import get_test_run # noqa: E402 +from dev.tests_common.test_utils.system_tests import get_test_run # noqa: E402 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) test_run = get_test_run(dag) diff --git a/tests/system/providers/google/cloud/vertex_ai/example_vertex_ai_dataset.py b/providers/tests/system/google/cloud/vertex_ai/example_vertex_ai_dataset.py similarity index 98% rename from tests/system/providers/google/cloud/vertex_ai/example_vertex_ai_dataset.py rename to providers/tests/system/google/cloud/vertex_ai/example_vertex_ai_dataset.py index 77b69081a473..3e1b98a2232d 100644 --- a/tests/system/providers/google/cloud/vertex_ai/example_vertex_ai_dataset.py +++ 
b/providers/tests/system/google/cloud/vertex_ai/example_vertex_ai_dataset.py @@ -269,13 +269,13 @@ # ### Everything below this line is not part of example ### # ### Just for system tests purpose ### - from tests.system.utils.watcher import watcher + from dev.tests_common.test_utils.watcher import watcher # This test needs watcher in order to properly mark success/failure # when "tearDown" task with trigger rule is part of the DAG list(dag.tasks) >> watcher() -from tests.system.utils import get_test_run # noqa: E402 +from dev.tests_common.test_utils.system_tests import get_test_run # noqa: E402 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) test_run = get_test_run(dag) diff --git a/tests/system/providers/google/cloud/vertex_ai/example_vertex_ai_endpoint.py b/providers/tests/system/google/cloud/vertex_ai/example_vertex_ai_endpoint.py similarity index 98% rename from tests/system/providers/google/cloud/vertex_ai/example_vertex_ai_endpoint.py rename to providers/tests/system/google/cloud/vertex_ai/example_vertex_ai_endpoint.py index 8fa802b51744..12b1181a7928 100644 --- a/tests/system/providers/google/cloud/vertex_ai/example_vertex_ai_endpoint.py +++ b/providers/tests/system/google/cloud/vertex_ai/example_vertex_ai_endpoint.py @@ -206,13 +206,13 @@ # ### Everything below this line is not part of example ### # ### Just for system tests purpose ### - from tests.system.utils.watcher import watcher + from dev.tests_common.test_utils.watcher import watcher # This test needs watcher in order to properly mark success/failure # when "tearDown" task with trigger rule is part of the DAG list(dag.tasks) >> watcher() -from tests.system.utils import get_test_run # noqa: E402 +from dev.tests_common.test_utils.system_tests import get_test_run # noqa: E402 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) test_run = get_test_run(dag) diff --git a/tests/system/providers/google/cloud/vertex_ai/example_vertex_ai_generative_model.py b/providers/tests/system/google/cloud/vertex_ai/example_vertex_ai_generative_model.py similarity index 98% rename from tests/system/providers/google/cloud/vertex_ai/example_vertex_ai_generative_model.py rename to providers/tests/system/google/cloud/vertex_ai/example_vertex_ai_generative_model.py index f9fe332b0af7..7e696184eef4 100644 --- a/tests/system/providers/google/cloud/vertex_ai/example_vertex_ai_generative_model.py +++ b/providers/tests/system/google/cloud/vertex_ai/example_vertex_ai_generative_model.py @@ -155,14 +155,14 @@ ) # [END how_to_cloud_vertex_ai_run_evaluation_operator] - from tests.system.utils.watcher import watcher + from dev.tests_common.test_utils.watcher import watcher # This test needs watcher in order to properly mark success/failure # when "tearDown" task with trigger rule is part of the DAG list(dag.tasks) >> watcher() -from tests.system.utils import get_test_run # noqa: E402 +from dev.tests_common.test_utils.system_tests import get_test_run # noqa: E402 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) test_run = get_test_run(dag) diff --git a/tests/system/providers/google/cloud/vertex_ai/example_vertex_ai_generative_model_tuning.py b/providers/tests/system/google/cloud/vertex_ai/example_vertex_ai_generative_model_tuning.py similarity index 94% rename from tests/system/providers/google/cloud/vertex_ai/example_vertex_ai_generative_model_tuning.py rename to 
providers/tests/system/google/cloud/vertex_ai/example_vertex_ai_generative_model_tuning.py index 18958cb409e6..affba0b6aede 100644 --- a/tests/system/providers/google/cloud/vertex_ai/example_vertex_ai_generative_model_tuning.py +++ b/providers/tests/system/google/cloud/vertex_ai/example_vertex_ai_generative_model_tuning.py @@ -56,13 +56,13 @@ ) # [END how_to_cloud_vertex_ai_supervised_fine_tuning_train_operator] - from tests.system.utils.watcher import watcher + from dev.tests_common.test_utils.watcher import watcher # This test needs watcher in order to properly mark success/failure # when "tearDown" task with trigger rule is part of the DAG list(dag.tasks) >> watcher() -from tests.system.utils import get_test_run # noqa: E402 +from dev.tests_common.test_utils.system_tests import get_test_run # noqa: E402 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) test_run = get_test_run(dag) diff --git a/tests/system/providers/google/cloud/vertex_ai/example_vertex_ai_hyperparameter_tuning_job.py b/providers/tests/system/google/cloud/vertex_ai/example_vertex_ai_hyperparameter_tuning_job.py similarity index 98% rename from tests/system/providers/google/cloud/vertex_ai/example_vertex_ai_hyperparameter_tuning_job.py rename to providers/tests/system/google/cloud/vertex_ai/example_vertex_ai_hyperparameter_tuning_job.py index 913fff2b4e09..55254741d9cc 100644 --- a/tests/system/providers/google/cloud/vertex_ai/example_vertex_ai_hyperparameter_tuning_job.py +++ b/providers/tests/system/google/cloud/vertex_ai/example_vertex_ai_hyperparameter_tuning_job.py @@ -181,13 +181,13 @@ # ### Everything below this line is not part of example ### # ### Just for system tests purpose ### - from tests.system.utils.watcher import watcher + from dev.tests_common.test_utils.watcher import watcher # This test needs watcher in order to properly mark success/failure # when "tearDown" task with trigger rule is part of the DAG list(dag.tasks) >> watcher() -from tests.system.utils import get_test_run # noqa: E402 +from dev.tests_common.test_utils.system_tests import get_test_run # noqa: E402 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) test_run = get_test_run(dag) diff --git a/tests/system/providers/google/cloud/vertex_ai/example_vertex_ai_list_custom_jobs.py b/providers/tests/system/google/cloud/vertex_ai/example_vertex_ai_list_custom_jobs.py similarity index 93% rename from tests/system/providers/google/cloud/vertex_ai/example_vertex_ai_list_custom_jobs.py rename to providers/tests/system/google/cloud/vertex_ai/example_vertex_ai_list_custom_jobs.py index 0a1a8d3fb247..75de719b6aa9 100644 --- a/tests/system/providers/google/cloud/vertex_ai/example_vertex_ai_list_custom_jobs.py +++ b/providers/tests/system/google/cloud/vertex_ai/example_vertex_ai_list_custom_jobs.py @@ -51,13 +51,13 @@ # ### Everything below this line is not part of example ### # ### Just for system tests purpose ### - from tests.system.utils.watcher import watcher + from dev.tests_common.test_utils.watcher import watcher # This test needs watcher in order to properly mark success/failure # when "tearDown" task with trigger rule is part of the DAG list(dag.tasks) >> watcher() -from tests.system.utils import get_test_run # noqa: E402 +from dev.tests_common.test_utils.system_tests import get_test_run # noqa: E402 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) test_run = get_test_run(dag) diff --git 
a/tests/system/providers/google/cloud/vertex_ai/example_vertex_ai_model_service.py b/providers/tests/system/google/cloud/vertex_ai/example_vertex_ai_model_service.py similarity index 98% rename from tests/system/providers/google/cloud/vertex_ai/example_vertex_ai_model_service.py rename to providers/tests/system/google/cloud/vertex_ai/example_vertex_ai_model_service.py index b06f8287798d..3ad5537a10c2 100644 --- a/tests/system/providers/google/cloud/vertex_ai/example_vertex_ai_model_service.py +++ b/providers/tests/system/google/cloud/vertex_ai/example_vertex_ai_model_service.py @@ -345,13 +345,13 @@ # ### Everything below this line is not part of example ### # ### Just for system tests purpose ### - from tests.system.utils.watcher import watcher + from dev.tests_common.test_utils.watcher import watcher # This test needs watcher in order to properly mark success/failure # when "tearDown" task with trigger rule is part of the DAG list(dag.tasks) >> watcher() -from tests.system.utils import get_test_run # noqa: E402 +from dev.tests_common.test_utils.system_tests import get_test_run # noqa: E402 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) test_run = get_test_run(dag) diff --git a/tests/system/providers/google/cloud/vertex_ai/example_vertex_ai_pipeline_job.py b/providers/tests/system/google/cloud/vertex_ai/example_vertex_ai_pipeline_job.py similarity index 97% rename from tests/system/providers/google/cloud/vertex_ai/example_vertex_ai_pipeline_job.py rename to providers/tests/system/google/cloud/vertex_ai/example_vertex_ai_pipeline_job.py index 7dd4aa84fe41..5ab29ee999f1 100644 --- a/tests/system/providers/google/cloud/vertex_ai/example_vertex_ai_pipeline_job.py +++ b/providers/tests/system/google/cloud/vertex_ai/example_vertex_ai_pipeline_job.py @@ -177,13 +177,13 @@ # ### Everything below this line is not part of example ### # ### Just for system tests purpose ### - from tests.system.utils.watcher import watcher + from dev.tests_common.test_utils.watcher import watcher # This test needs watcher in order to properly mark success/failure # when "tearDown" task with trigger rule is part of the DAG list(dag.tasks) >> watcher() -from tests.system.utils import get_test_run # noqa: E402 +from dev.tests_common.test_utils.system_tests import get_test_run # noqa: E402 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) test_run = get_test_run(dag) diff --git a/tests/system/providers/google/cloud/life_sciences/resources/__init__.py b/providers/tests/system/google/cloud/video_intelligence/__init__.py similarity index 100% rename from tests/system/providers/google/cloud/life_sciences/resources/__init__.py rename to providers/tests/system/google/cloud/video_intelligence/__init__.py diff --git a/tests/system/providers/google/cloud/video_intelligence/example_video_intelligence.py b/providers/tests/system/google/cloud/video_intelligence/example_video_intelligence.py similarity index 97% rename from tests/system/providers/google/cloud/video_intelligence/example_video_intelligence.py rename to providers/tests/system/google/cloud/video_intelligence/example_video_intelligence.py index 499db2d6427b..e630fd1471a8 100644 --- a/tests/system/providers/google/cloud/video_intelligence/example_video_intelligence.py +++ b/providers/tests/system/google/cloud/video_intelligence/example_video_intelligence.py @@ -154,14 +154,14 @@ delete_bucket, ) - from tests.system.utils.watcher import watcher + from dev.tests_common.test_utils.watcher import 
watcher # This test needs watcher in order to properly mark success/failure # when "tearDown" task with trigger rule is part of the DAG list(dag.tasks) >> watcher() -from tests.system.utils import get_test_run # noqa: E402 +from dev.tests_common.test_utils.system_tests import get_test_run # noqa: E402 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) test_run = get_test_run(dag) diff --git a/tests/system/providers/google/cloud/ml_engine/__init__.py b/providers/tests/system/google/cloud/vision/__init__.py similarity index 100% rename from tests/system/providers/google/cloud/ml_engine/__init__.py rename to providers/tests/system/google/cloud/vision/__init__.py diff --git a/tests/system/providers/google/cloud/vision/example_vision_annotate_image.py b/providers/tests/system/google/cloud/vision/example_vision_annotate_image.py similarity index 97% rename from tests/system/providers/google/cloud/vision/example_vision_annotate_image.py rename to providers/tests/system/google/cloud/vision/example_vision_annotate_image.py index 2a4d7b75f133..1e09fb7fe4f9 100644 --- a/tests/system/providers/google/cloud/vision/example_vision_annotate_image.py +++ b/providers/tests/system/google/cloud/vision/example_vision_annotate_image.py @@ -41,7 +41,7 @@ # [START howto_operator_vision_enums_import] from google.cloud.vision_v1 import Feature # isort:skip -from tests.system.providers.google import DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID +from providers.tests.system.google import DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID # [END howto_operator_vision_enums_import] @@ -191,14 +191,14 @@ delete_bucket, ) - from tests.system.utils.watcher import watcher + from dev.tests_common.test_utils.watcher import watcher # This test needs watcher in order to properly mark success/failure # when "teardown" task with trigger rule is part of the DAG list(dag.tasks) >> watcher() -from tests.system.utils import get_test_run # noqa: E402 +from dev.tests_common.test_utils.system_tests import get_test_run # noqa: E402 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) test_run = get_test_run(dag) diff --git a/tests/system/providers/google/cloud/vision/example_vision_autogenerated.py b/providers/tests/system/google/cloud/vision/example_vision_autogenerated.py similarity index 98% rename from tests/system/providers/google/cloud/vision/example_vision_autogenerated.py rename to providers/tests/system/google/cloud/vision/example_vision_autogenerated.py index 907386ceb295..11fd3cdd5402 100644 --- a/tests/system/providers/google/cloud/vision/example_vision_autogenerated.py +++ b/providers/tests/system/google/cloud/vision/example_vision_autogenerated.py @@ -59,7 +59,7 @@ # [END howto_operator_vision_reference_image_import] # [START howto_operator_vision_enums_import] from google.cloud.vision_v1 import Feature # isort:skip -from tests.system.providers.google import DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID +from providers.tests.system.google import DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID # [END howto_operator_vision_enums_import] @@ -268,14 +268,14 @@ delete_bucket, ) - from tests.system.utils.watcher import watcher + from dev.tests_common.test_utils.watcher import watcher # This test needs watcher in order to properly mark success/failure # when "teardown" task with trigger rule is part of the DAG list(dag.tasks) >> watcher() -from tests.system.utils import get_test_run # noqa: E402 +from dev.tests_common.test_utils.system_tests import get_test_run # noqa: E402 # Needed to run the example DAG 
with pytest (see: tests/system/README.md#run_via_pytest) test_run = get_test_run(dag) diff --git a/tests/system/providers/google/cloud/vision/example_vision_explicit.py b/providers/tests/system/google/cloud/vision/example_vision_explicit.py similarity index 98% rename from tests/system/providers/google/cloud/vision/example_vision_explicit.py rename to providers/tests/system/google/cloud/vision/example_vision_explicit.py index 663be9125968..0c71be95bda7 100644 --- a/tests/system/providers/google/cloud/vision/example_vision_explicit.py +++ b/providers/tests/system/google/cloud/vision/example_vision_explicit.py @@ -54,7 +54,7 @@ # [END howto_operator_vision_product_import_2] # [START howto_operator_vision_reference_image_import_2] from google.cloud.vision_v1.types import ReferenceImage # isort:skip -from tests.system.providers.google import DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID +from providers.tests.system.google import DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID # [END howto_operator_vision_reference_image_import_2] @@ -279,14 +279,14 @@ delete_bucket, ) - from tests.system.utils.watcher import watcher + from dev.tests_common.test_utils.watcher import watcher # This test needs watcher in order to properly mark success/failure # when "teardown" task with trigger rule is part of the DAG list(dag.tasks) >> watcher() -from tests.system.utils import get_test_run # noqa: E402 +from dev.tests_common.test_utils.system_tests import get_test_run # noqa: E402 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) test_run = get_test_run(dag) diff --git a/tests/system/providers/google/cloud/natural_language/__init__.py b/providers/tests/system/google/cloud/workflows/__init__.py similarity index 100% rename from tests/system/providers/google/cloud/natural_language/__init__.py rename to providers/tests/system/google/cloud/workflows/__init__.py diff --git a/tests/system/providers/google/cloud/workflows/example_workflows.py b/providers/tests/system/google/cloud/workflows/example_workflows.py similarity index 98% rename from tests/system/providers/google/cloud/workflows/example_workflows.py rename to providers/tests/system/google/cloud/workflows/example_workflows.py index b010a146b9bd..6d13484acbb4 100644 --- a/tests/system/providers/google/cloud/workflows/example_workflows.py +++ b/providers/tests/system/google/cloud/workflows/example_workflows.py @@ -227,14 +227,14 @@ # ### Everything below this line is not part of example ### # ### Just for system tests purpose ### - from tests.system.utils.watcher import watcher + from dev.tests_common.test_utils.watcher import watcher # This test needs watcher in order to properly mark success/failure # when "tearDown" task with trigger rule is part of the DAG list(dag.tasks) >> watcher() -from tests.system.utils import get_test_run # noqa: E402 +from dev.tests_common.test_utils.system_tests import get_test_run # noqa: E402 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) test_run = get_test_run(dag) diff --git a/tests/system/providers/google/conftest.py b/providers/tests/system/google/conftest.py similarity index 100% rename from tests/system/providers/google/conftest.py rename to providers/tests/system/google/conftest.py diff --git a/tests/system/providers/google/cloud/pubsub/__init__.py b/providers/tests/system/google/datacatalog/__init__.py similarity index 100% rename from tests/system/providers/google/cloud/pubsub/__init__.py rename to providers/tests/system/google/datacatalog/__init__.py diff --git 
a/tests/system/providers/google/datacatalog/example_datacatalog_entries.py b/providers/tests/system/google/datacatalog/example_datacatalog_entries.py similarity index 97% rename from tests/system/providers/google/datacatalog/example_datacatalog_entries.py rename to providers/tests/system/google/datacatalog/example_datacatalog_entries.py index 47edfb96368f..0f84361d4fcc 100644 --- a/tests/system/providers/google/datacatalog/example_datacatalog_entries.py +++ b/providers/tests/system/google/datacatalog/example_datacatalog_entries.py @@ -37,7 +37,8 @@ from airflow.providers.google.cloud.operators.gcs import GCSCreateBucketOperator, GCSDeleteBucketOperator from airflow.providers.standard.operators.bash import BashOperator from airflow.utils.trigger_rule import TriggerRule -from tests.system.providers.google import DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID + +from providers.tests.system.google import DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID ENV_ID = os.environ.get("SYSTEM_TESTS_ENV_ID", "default") PROJECT_ID = os.environ.get("SYSTEM_TESTS_GCP_PROJECT") or DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID @@ -199,14 +200,14 @@ # ### Everything below this line is not part of example ### # ### Just for system tests purpose ### - from tests.system.utils.watcher import watcher + from dev.tests_common.test_utils.watcher import watcher # This test needs watcher in order to properly mark success/failure # when "tearDown" task with trigger rule is part of the DAG list(dag.tasks) >> watcher() -from tests.system.utils import get_test_run # noqa: E402 +from dev.tests_common.test_utils.system_tests import get_test_run # noqa: E402 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) test_run = get_test_run(dag) diff --git a/tests/system/providers/google/datacatalog/example_datacatalog_search_catalog.py b/providers/tests/system/google/datacatalog/example_datacatalog_search_catalog.py similarity index 97% rename from tests/system/providers/google/datacatalog/example_datacatalog_search_catalog.py rename to providers/tests/system/google/datacatalog/example_datacatalog_search_catalog.py index 781d047c5346..a77a023f3233 100644 --- a/tests/system/providers/google/datacatalog/example_datacatalog_search_catalog.py +++ b/providers/tests/system/google/datacatalog/example_datacatalog_search_catalog.py @@ -39,7 +39,8 @@ from airflow.providers.google.cloud.operators.gcs import GCSCreateBucketOperator, GCSDeleteBucketOperator from airflow.providers.standard.operators.bash import BashOperator from airflow.utils.trigger_rule import TriggerRule -from tests.system.providers.google import DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID + +from providers.tests.system.google import DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID ENV_ID = os.environ.get("SYSTEM_TESTS_ENV_ID", "default") PROJECT_ID = os.environ.get("SYSTEM_TESTS_GCP_PROJECT") or DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID @@ -222,14 +223,14 @@ # ### Everything below this line is not part of example ### # ### Just for system tests purpose ### - from tests.system.utils.watcher import watcher + from dev.tests_common.test_utils.watcher import watcher # This test needs watcher in order to properly mark success/failure # when "tearDown" task with trigger rule is part of the DAG list(dag.tasks) >> watcher() -from tests.system.utils import get_test_run # noqa: E402 +from dev.tests_common.test_utils.system_tests import get_test_run # noqa: E402 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) test_run = get_test_run(dag) diff --git 
a/tests/system/providers/google/datacatalog/example_datacatalog_tag_templates.py b/providers/tests/system/google/datacatalog/example_datacatalog_tag_templates.py similarity index 97% rename from tests/system/providers/google/datacatalog/example_datacatalog_tag_templates.py rename to providers/tests/system/google/datacatalog/example_datacatalog_tag_templates.py index b8dd9170c3c0..af49d3de08fa 100644 --- a/tests/system/providers/google/datacatalog/example_datacatalog_tag_templates.py +++ b/providers/tests/system/google/datacatalog/example_datacatalog_tag_templates.py @@ -36,7 +36,8 @@ ) from airflow.providers.standard.operators.bash import BashOperator from airflow.utils.trigger_rule import TriggerRule -from tests.system.providers.google import DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID + +from providers.tests.system.google import DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID ENV_ID = os.environ.get("SYSTEM_TESTS_ENV_ID", "default") PROJECT_ID = os.environ.get("SYSTEM_TESTS_GCP_PROJECT") or DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID @@ -182,14 +183,14 @@ # ### Everything below this line is not part of example ### # ### Just for system tests purpose ### - from tests.system.utils.watcher import watcher + from dev.tests_common.test_utils.watcher import watcher # This test needs watcher in order to properly mark success/failure # when "tearDown" task with trigger rule is part of the DAG list(dag.tasks) >> watcher() -from tests.system.utils import get_test_run # noqa: E402 +from dev.tests_common.test_utils.system_tests import get_test_run # noqa: E402 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) test_run = get_test_run(dag) diff --git a/tests/system/providers/google/datacatalog/example_datacatalog_tags.py b/providers/tests/system/google/datacatalog/example_datacatalog_tags.py similarity index 97% rename from tests/system/providers/google/datacatalog/example_datacatalog_tags.py rename to providers/tests/system/google/datacatalog/example_datacatalog_tags.py index 17397fcea280..fdbdaa451056 100644 --- a/tests/system/providers/google/datacatalog/example_datacatalog_tags.py +++ b/providers/tests/system/google/datacatalog/example_datacatalog_tags.py @@ -40,7 +40,8 @@ from airflow.providers.google.cloud.operators.gcs import GCSCreateBucketOperator, GCSDeleteBucketOperator from airflow.providers.standard.operators.bash import BashOperator from airflow.utils.trigger_rule import TriggerRule -from tests.system.providers.google import DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID + +from providers.tests.system.google import DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID ENV_ID = os.environ.get("SYSTEM_TESTS_ENV_ID", "default") PROJECT_ID = os.environ.get("SYSTEM_TESTS_GCP_PROJECT") or DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID @@ -232,14 +233,14 @@ # ### Everything below this line is not part of example ### # ### Just for system tests purpose ### - from tests.system.utils.watcher import watcher + from dev.tests_common.test_utils.watcher import watcher # This test needs watcher in order to properly mark success/failure # when "tearDown" task with trigger rule is part of the DAG list(dag.tasks) >> watcher() -from tests.system.utils import get_test_run # noqa: E402 +from dev.tests_common.test_utils.system_tests import get_test_run # noqa: E402 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) test_run = get_test_run(dag) diff --git a/tests/system/providers/google/cloud/spanner/__init__.py b/providers/tests/system/google/firebase/__init__.py similarity index 100% rename from 
tests/system/providers/google/cloud/spanner/__init__.py rename to providers/tests/system/google/firebase/__init__.py diff --git a/tests/providers/telegram/operators/__init__.py b/providers/tests/system/google/leveldb/__init__.py similarity index 100% rename from tests/providers/telegram/operators/__init__.py rename to providers/tests/system/google/leveldb/__init__.py diff --git a/tests/system/providers/google/leveldb/example_leveldb.py b/providers/tests/system/google/leveldb/example_leveldb.py similarity index 94% rename from tests/system/providers/google/leveldb/example_leveldb.py rename to providers/tests/system/google/leveldb/example_leveldb.py index 8474de830275..8b11d9e87beb 100644 --- a/tests/system/providers/google/leveldb/example_leveldb.py +++ b/providers/tests/system/google/leveldb/example_leveldb.py @@ -60,14 +60,14 @@ # [END howto_operator_leveldb_put_key] get_key_leveldb_task >> put_key_leveldb_task - from tests.system.utils.watcher import watcher + from dev.tests_common.test_utils.watcher import watcher # This test needs watcher in order to properly mark success/failure # when "tearDown" task with trigger rule is part of the DAG list(dag.tasks) >> watcher() -from tests.system.utils import get_test_run # noqa: E402 +from dev.tests_common.test_utils.system_tests import get_test_run # noqa: E402 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) test_run = get_test_run(dag) diff --git a/tests/providers/teradata/hooks/__init__.py b/providers/tests/system/google/marketing_platform/__init__.py similarity index 100% rename from tests/providers/teradata/hooks/__init__.py rename to providers/tests/system/google/marketing_platform/__init__.py diff --git a/tests/system/providers/google/marketing_platform/example_analytics_admin.py b/providers/tests/system/google/marketing_platform/example_analytics_admin.py similarity index 98% rename from tests/system/providers/google/marketing_platform/example_analytics_admin.py rename to providers/tests/system/google/marketing_platform/example_analytics_admin.py index be1ad9f257f9..16ce6f8190bd 100644 --- a/tests/system/providers/google/marketing_platform/example_analytics_admin.py +++ b/providers/tests/system/google/marketing_platform/example_analytics_admin.py @@ -219,13 +219,13 @@ def delete_connection(connection_id: str) -> None: # TEST TEARDOWN >> delete_connection_task ) - from tests.system.utils.watcher import watcher + from dev.tests_common.test_utils.watcher import watcher # This test needs watcher in order to properly mark success/failure # when "tearDown" task with trigger rule is part of the DAG list(dag.tasks) >> watcher() -from tests.system.utils import get_test_run # noqa: E402 +from dev.tests_common.test_utils.system_tests import get_test_run # noqa: E402 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) test_run = get_test_run(dag) diff --git a/tests/system/providers/google/marketing_platform/example_campaign_manager.py b/providers/tests/system/google/marketing_platform/example_campaign_manager.py similarity index 98% rename from tests/system/providers/google/marketing_platform/example_campaign_manager.py rename to providers/tests/system/google/marketing_platform/example_campaign_manager.py index 932769eb52ad..0c3b26e5e98b 100644 --- a/tests/system/providers/google/marketing_platform/example_campaign_manager.py +++ b/providers/tests/system/google/marketing_platform/example_campaign_manager.py @@ -55,7 +55,8 @@ ) from airflow.settings import Session from 
airflow.utils.trigger_rule import TriggerRule -from tests.system.providers.google import DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID + +from providers.tests.system.google import DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID ENV_ID = os.environ.get("SYSTEM_TESTS_ENV_ID", "default") PROJECT_ID = os.environ.get("SYSTEM_TESTS_GCP_PROJECT") or DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID @@ -321,13 +322,13 @@ def delete_connection(connection_id: str) -> None: >> delete_connection(connection_id=CONNECTION_ID) ) - from tests.system.utils.watcher import watcher + from dev.tests_common.test_utils.watcher import watcher # This test needs watcher in order to properly mark success/failure # when "tearDown" task with trigger rule is part of the DAG list(dag.tasks) >> watcher() -from tests.system.utils import get_test_run # noqa: E402 +from dev.tests_common.test_utils.system_tests import get_test_run # noqa: E402 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) test_run = get_test_run(dag) diff --git a/tests/system/providers/google/marketing_platform/example_search_ads.py b/providers/tests/system/google/marketing_platform/example_search_ads.py similarity index 97% rename from tests/system/providers/google/marketing_platform/example_search_ads.py rename to providers/tests/system/google/marketing_platform/example_search_ads.py index b8e71de90adc..5d86a3ad1684 100644 --- a/tests/system/providers/google/marketing_platform/example_search_ads.py +++ b/providers/tests/system/google/marketing_platform/example_search_ads.py @@ -102,7 +102,7 @@ (query_report >> get_field >> search_fields >> get_custom_column >> list_custom_columns) -from tests.system.utils import get_test_run # noqa: E402 +from dev.tests_common.test_utils.system_tests import get_test_run # noqa: E402 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) test_run = get_test_run(dag) diff --git a/tests/system/providers/google/cloud/speech_to_text/__init__.py b/providers/tests/system/google/suite/__init__.py similarity index 100% rename from tests/system/providers/google/cloud/speech_to_text/__init__.py rename to providers/tests/system/google/suite/__init__.py diff --git a/tests/system/providers/google/suite/example_local_to_drive.py b/providers/tests/system/google/suite/example_local_to_drive.py similarity index 97% rename from tests/system/providers/google/suite/example_local_to_drive.py rename to providers/tests/system/google/suite/example_local_to_drive.py index 2e79a932d4fb..ac8ebfc5aabb 100644 --- a/tests/system/providers/google/suite/example_local_to_drive.py +++ b/providers/tests/system/google/suite/example_local_to_drive.py @@ -141,13 +141,13 @@ def delete_connection(connection_id: str) -> None: >> delete_connection_task ) - from tests.system.utils.watcher import watcher + from dev.tests_common.test_utils.watcher import watcher # This test needs watcher in order to properly mark success/failure # when "tearDown" task with trigger rule is part of the DAG list(dag.tasks) >> watcher() -from tests.system.utils import get_test_run # noqa: E402 +from dev.tests_common.test_utils.system_tests import get_test_run # noqa: E402 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) test_run = get_test_run(dag) diff --git a/tests/system/providers/google/cloud/sql_to_sheets/__init__.py b/providers/tests/system/google/suite/resources/__init__.py similarity index 100% rename from tests/system/providers/google/cloud/sql_to_sheets/__init__.py rename to 
providers/tests/system/google/suite/resources/__init__.py diff --git a/tests/system/providers/google/suite/resources/test1 b/providers/tests/system/google/suite/resources/test1 similarity index 100% rename from tests/system/providers/google/suite/resources/test1 rename to providers/tests/system/google/suite/resources/test1 diff --git a/tests/system/providers/google/suite/resources/test2 b/providers/tests/system/google/suite/resources/test2 similarity index 100% rename from tests/system/providers/google/suite/resources/test2 rename to providers/tests/system/google/suite/resources/test2 diff --git a/tests/system/providers/google/cloud/stackdriver/__init__.py b/providers/tests/system/google/workplace/__init__.py similarity index 100% rename from tests/system/providers/google/cloud/stackdriver/__init__.py rename to providers/tests/system/google/workplace/__init__.py diff --git a/tests/system/providers/google/cloud/storage_transfer/__init__.py b/providers/tests/system/http/__init__.py similarity index 100% rename from tests/system/providers/google/cloud/storage_transfer/__init__.py rename to providers/tests/system/http/__init__.py diff --git a/tests/system/providers/http/example_http.py b/providers/tests/system/http/example_http.py similarity index 98% rename from tests/system/providers/http/example_http.py rename to providers/tests/system/http/example_http.py index bf5d08f086c1..98423943607c 100644 --- a/tests/system/providers/http/example_http.py +++ b/providers/tests/system/http/example_http.py @@ -157,7 +157,7 @@ def get_next_page_cursor(response) -> dict | None: task_get_op_response_filter >> task_put_op >> task_del_op >> task_post_op_formenc task_post_op_formenc >> task_get_paginated -from tests.system.utils import get_test_run # noqa: E402 +from dev.tests_common.test_utils.system_tests import get_test_run # noqa: E402 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) test_run = get_test_run(dag) diff --git a/tests/system/providers/google/cloud/storage_transfer/resources/__init__.py b/providers/tests/system/influxdb/__init__.py similarity index 100% rename from tests/system/providers/google/cloud/storage_transfer/resources/__init__.py rename to providers/tests/system/influxdb/__init__.py diff --git a/tests/system/providers/influxdb/example_influxdb.py b/providers/tests/system/influxdb/example_influxdb.py similarity index 94% rename from tests/system/providers/influxdb/example_influxdb.py rename to providers/tests/system/influxdb/example_influxdb.py index 9a71271c02d9..8e4d486742d5 100644 --- a/tests/system/providers/influxdb/example_influxdb.py +++ b/providers/tests/system/influxdb/example_influxdb.py @@ -61,13 +61,13 @@ def test_influxdb_hook(): ) as dag: test_influxdb_hook() - from tests.system.utils.watcher import watcher + from dev.tests_common.test_utils.watcher import watcher # This test needs watcher in order to properly mark success/failure # when "tearDown" task with trigger rule is part of the DAG list(dag.tasks) >> watcher() -from tests.system.utils import get_test_run # noqa: E402 +from dev.tests_common.test_utils.system_tests import get_test_run # noqa: E402 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) test_run = get_test_run(dag) diff --git a/tests/system/providers/influxdb/example_influxdb_query.py b/providers/tests/system/influxdb/example_influxdb_query.py similarity index 95% rename from tests/system/providers/influxdb/example_influxdb_query.py rename to 
index 6a0c14781aab..95940e8c8f36 100644
--- a/tests/system/providers/influxdb/example_influxdb_query.py
+++ b/providers/tests/system/influxdb/example_influxdb_query.py
@@ -43,7 +43,7 @@
 # [END howto_operator_influxdb]
 
-from tests.system.utils import get_test_run  # noqa: E402
+from dev.tests_common.test_utils.system_tests import get_test_run  # noqa: E402
 
 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest)
 test_run = get_test_run(dag)
diff --git a/tests/system/providers/google/cloud/tasks/__init__.py b/providers/tests/system/jdbc/__init__.py
similarity index 100%
rename from tests/system/providers/google/cloud/tasks/__init__.py
rename to providers/tests/system/jdbc/__init__.py
diff --git a/tests/system/providers/jdbc/example_jdbc_queries.py b/providers/tests/system/jdbc/example_jdbc_queries.py
similarity index 94%
rename from tests/system/providers/jdbc/example_jdbc_queries.py
rename to providers/tests/system/jdbc/example_jdbc_queries.py
index 0da4b8d4479a..ce9234a6fb0b 100644
--- a/tests/system/providers/jdbc/example_jdbc_queries.py
+++ b/providers/tests/system/jdbc/example_jdbc_queries.py
@@ -59,13 +59,13 @@
     delete_data >> insert_data >> run_this_last
 
-    from tests.system.utils.watcher import watcher
+    from dev.tests_common.test_utils.watcher import watcher
 
     # This test needs watcher in order to properly mark success/failure
     # when "tearDown" task with trigger rule is part of the DAG
     list(dag.tasks) >> watcher()
 
-from tests.system.utils import get_test_run  # noqa: E402
+from dev.tests_common.test_utils.system_tests import get_test_run  # noqa: E402
 
 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest)
 test_run = get_test_run(dag)
diff --git a/tests/system/providers/google/cloud/text_to_speech/__init__.py b/providers/tests/system/jenkins/__init__.py
similarity index 100%
rename from tests/system/providers/google/cloud/text_to_speech/__init__.py
rename to providers/tests/system/jenkins/__init__.py
diff --git a/tests/system/providers/jenkins/example_jenkins_job_trigger.py b/providers/tests/system/jenkins/example_jenkins_job_trigger.py
similarity index 97%
rename from tests/system/providers/jenkins/example_jenkins_job_trigger.py
rename to providers/tests/system/jenkins/example_jenkins_job_trigger.py
index e7fa2e073be9..6b7fab62a0d3 100644
--- a/tests/system/providers/jenkins/example_jenkins_job_trigger.py
+++ b/providers/tests/system/jenkins/example_jenkins_job_trigger.py
@@ -72,7 +72,7 @@ def grab_artifact_from_jenkins(url):
     # job_trigger >> grab_artifact_from_jenkins()
 
-from tests.system.utils import get_test_run  # noqa: E402
+from dev.tests_common.test_utils.system_tests import get_test_run  # noqa: E402
 
 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest)
 test_run = get_test_run(dag)
diff --git a/tests/system/providers/google/cloud/transfers/__init__.py b/providers/tests/system/microsoft/__init__.py
similarity index 100%
rename from tests/system/providers/google/cloud/transfers/__init__.py
rename to providers/tests/system/microsoft/__init__.py
diff --git a/tests/providers/teradata/operators/__init__.py b/providers/tests/system/microsoft/azure/__init__.py
similarity index 100%
rename from tests/providers/teradata/operators/__init__.py
rename to providers/tests/system/microsoft/azure/__init__.py
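Every example above (and below) ends with the same two-line pytest hook. The import has to come after the DAG is defined, which is why it sits at the bottom of the module and carries # noqa: E402 to silence the import-position lint; a couple of files further down (the opsgenie notifier and qdrant examples) omit the marker. A sketch of the hook at its new location, with an illustrative pytest invocation:

# Tail of any relocated system-test module (sketch):
from dev.tests_common.test_utils.system_tests import get_test_run  # noqa: E402

# get_test_run wraps the DAG in a callable that pytest collects as a single test,
# so one of these files can be run directly, e.g.:
#   pytest providers/tests/system/jdbc/example_jdbc_queries.py
test_run = get_test_run(dag)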
diff --git a/tests/system/providers/microsoft/azure/example_adf_run_pipeline.py b/providers/tests/system/microsoft/azure/example_adf_run_pipeline.py
similarity index 96%
rename from tests/system/providers/microsoft/azure/example_adf_run_pipeline.py
rename to providers/tests/system/microsoft/azure/example_adf_run_pipeline.py
index d883f5128737..a6eae4d71486 100644
--- a/tests/system/providers/microsoft/azure/example_adf_run_pipeline.py
+++ b/providers/tests/system/microsoft/azure/example_adf_run_pipeline.py
@@ -108,13 +108,13 @@
     # Task dependency created via `XComArgs`:
     # run_pipeline2 >> pipeline_run_sensor
 
-    from tests.system.utils.watcher import watcher
+    from dev.tests_common.test_utils.watcher import watcher
 
     # This test needs watcher in order to properly mark success/failure
     # when "tearDown" task with trigger rule is part of the DAG
     list(dag.tasks) >> watcher()
 
-from tests.system.utils import get_test_run  # noqa: E402
+from dev.tests_common.test_utils.system_tests import get_test_run  # noqa: E402
 
 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest)
 test_run = get_test_run(dag)
diff --git a/tests/system/providers/microsoft/azure/example_adls_create.py b/providers/tests/system/microsoft/azure/example_adls_create.py
similarity index 93%
rename from tests/system/providers/microsoft/azure/example_adls_create.py
rename to providers/tests/system/microsoft/azure/example_adls_create.py
index 726e9eba76ae..3f76525b6b87 100644
--- a/tests/system/providers/microsoft/azure/example_adls_create.py
+++ b/providers/tests/system/microsoft/azure/example_adls_create.py
@@ -46,13 +46,13 @@
     upload_data >> delete_file
 
-    from tests.system.utils.watcher import watcher
+    from dev.tests_common.test_utils.watcher import watcher
 
     # This test needs watcher in order to properly mark success/failure
     # when "tearDown" task with trigger rule is part of the DAG
     list(dag.tasks) >> watcher()
 
-from tests.system.utils import get_test_run  # noqa: E402
+from dev.tests_common.test_utils.system_tests import get_test_run  # noqa: E402
 
 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest)
 test_run = get_test_run(dag)
diff --git a/tests/system/providers/microsoft/azure/example_adls_delete.py b/providers/tests/system/microsoft/azure/example_adls_delete.py
similarity index 93%
rename from tests/system/providers/microsoft/azure/example_adls_delete.py
rename to providers/tests/system/microsoft/azure/example_adls_delete.py
index 2b1977e938dc..34abc6a9b2da 100644
--- a/tests/system/providers/microsoft/azure/example_adls_delete.py
+++ b/providers/tests/system/microsoft/azure/example_adls_delete.py
@@ -46,13 +46,13 @@
     upload_file >> remove_file
 
-    from tests.system.utils.watcher import watcher
+    from dev.tests_common.test_utils.watcher import watcher
 
     # This test needs watcher in order to properly mark success/failure
     # when "tearDown" task with trigger rule is part of the DAG
     list(dag.tasks) >> watcher()
 
-from tests.system.utils import get_test_run  # noqa: E402
+from dev.tests_common.test_utils.system_tests import get_test_run  # noqa: E402
 
 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest)
 test_run = get_test_run(dag)
diff --git a/tests/system/providers/microsoft/azure/example_adls_list.py b/providers/tests/system/microsoft/azure/example_adls_list.py
similarity index 92%
rename from tests/system/providers/microsoft/azure/example_adls_list.py
rename to providers/tests/system/microsoft/azure/example_adls_list.py
index a6bd2d7bd665..594b161aaa9d 100644
--- a/tests/system/providers/microsoft/azure/example_adls_list.py
+++ b/providers/tests/system/microsoft/azure/example_adls_list.py
@@ -42,13 +42,13 @@
     )
     # [END howto_operator_adls_list]
 
-    from tests.system.utils.watcher import watcher
+    from dev.tests_common.test_utils.watcher import watcher
 
     # This test needs watcher in order to properly mark success/failure
     # when "tearDown" task with trigger rule is part of the DAG
     list(dag.tasks) >> watcher()
 
-from tests.system.utils import get_test_run  # noqa: E402
+from dev.tests_common.test_utils.system_tests import get_test_run  # noqa: E402
 
 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest)
 test_run = get_test_run(dag)
diff --git a/tests/system/providers/microsoft/azure/example_azure_batch_operator.py b/providers/tests/system/microsoft/azure/example_azure_batch_operator.py
similarity index 96%
rename from tests/system/providers/microsoft/azure/example_azure_batch_operator.py
rename to providers/tests/system/microsoft/azure/example_azure_batch_operator.py
index 85977f2a0a6e..944c737f96a0 100644
--- a/tests/system/providers/microsoft/azure/example_azure_batch_operator.py
+++ b/providers/tests/system/microsoft/azure/example_azure_batch_operator.py
@@ -57,7 +57,7 @@
     )
     # [END howto_azure_batch_operator]
 
-from tests.system.utils import get_test_run  # noqa: E402
+from dev.tests_common.test_utils.system_tests import get_test_run  # noqa: E402
 
 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest)
 test_run = get_test_run(dag)
diff --git a/tests/system/providers/microsoft/azure/example_azure_container_instances.py b/providers/tests/system/microsoft/azure/example_azure_container_instances.py
similarity index 97%
rename from tests/system/providers/microsoft/azure/example_azure_container_instances.py
rename to providers/tests/system/microsoft/azure/example_azure_container_instances.py
index 0db79a71837b..ee9b025193e3 100644
--- a/tests/system/providers/microsoft/azure/example_azure_container_instances.py
+++ b/providers/tests/system/microsoft/azure/example_azure_container_instances.py
@@ -90,7 +90,7 @@
         cpu=1.0,
         task_id="start_container_with_azure_container_volume",
     )
 
-from tests.system.utils import get_test_run  # noqa: E402
+from dev.tests_common.test_utils.system_tests import get_test_run  # noqa: E402
 
 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest)
 test_run = get_test_run(dag)
diff --git a/tests/system/providers/microsoft/azure/example_azure_cosmosdb.py b/providers/tests/system/microsoft/azure/example_azure_cosmosdb.py
similarity index 94%
rename from tests/system/providers/microsoft/azure/example_azure_cosmosdb.py
rename to providers/tests/system/microsoft/azure/example_azure_cosmosdb.py
index d48d636f28db..1d4bb60b0516 100644
--- a/tests/system/providers/microsoft/azure/example_azure_cosmosdb.py
+++ b/providers/tests/system/microsoft/azure/example_azure_cosmosdb.py
@@ -64,13 +64,13 @@
     t1 >> t2
 
-    from tests.system.utils.watcher import watcher
+    from dev.tests_common.test_utils.watcher import watcher
 
     # This test needs watcher in order to properly mark success/failure
     # when "tearDown" task with trigger rule is part of the DAG
     list(dag.tasks) >> watcher()
 
-from tests.system.utils import get_test_run  # noqa: E402
+from dev.tests_common.test_utils.system_tests import get_test_run  # noqa: E402
 
 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest)
 test_run = get_test_run(dag)
diff --git a/tests/system/providers/microsoft/azure/example_azure_service_bus.py b/providers/tests/system/microsoft/azure/example_azure_service_bus.py
similarity index 98%
rename from tests/system/providers/microsoft/azure/example_azure_service_bus.py
rename to providers/tests/system/microsoft/azure/example_azure_service_bus.py
index e0b8558ffec1..bfa77db40dba 100644
--- a/tests/system/providers/microsoft/azure/example_azure_service_bus.py
+++ b/providers/tests/system/microsoft/azure/example_azure_service_bus.py
@@ -172,13 +172,13 @@
         delete_service_bus_queue,
     )
 
-    from tests.system.utils.watcher import watcher
+    from dev.tests_common.test_utils.watcher import watcher
 
     # This test needs watcher in order to properly mark success/failure
     # when "tearDown" task with trigger rule is part of the DAG
     list(dag.tasks) >> watcher()
 
-from tests.system.utils import get_test_run  # noqa: E402
+from dev.tests_common.test_utils.system_tests import get_test_run  # noqa: E402
 
 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest)
 test_run = get_test_run(dag)
diff --git a/tests/system/providers/microsoft/azure/example_azure_synapse.py b/providers/tests/system/microsoft/azure/example_azure_synapse.py
similarity index 97%
rename from tests/system/providers/microsoft/azure/example_azure_synapse.py
rename to providers/tests/system/microsoft/azure/example_azure_synapse.py
index 7465b71677de..c7f7800e5783 100644
--- a/tests/system/providers/microsoft/azure/example_azure_synapse.py
+++ b/providers/tests/system/microsoft/azure/example_azure_synapse.py
@@ -69,7 +69,7 @@
     )
     # [END howto_operator_azure_synapse]
 
-from tests.system.utils import get_test_run  # noqa: E402
+from dev.tests_common.test_utils.system_tests import get_test_run  # noqa: E402
 
 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest)
 test_run = get_test_run(dag)
diff --git a/tests/system/providers/microsoft/azure/example_fileshare.py b/providers/tests/system/microsoft/azure/example_fileshare.py
similarity index 93%
rename from tests/system/providers/microsoft/azure/example_fileshare.py
rename to providers/tests/system/microsoft/azure/example_fileshare.py
index f306a0188efa..bfa9819dbef6 100644
--- a/tests/system/providers/microsoft/azure/example_fileshare.py
+++ b/providers/tests/system/microsoft/azure/example_fileshare.py
@@ -55,13 +55,13 @@ def delete_fileshare():
 ) as dag:
     create_fileshare() >> delete_fileshare()
 
-    from tests.system.utils.watcher import watcher
+    from dev.tests_common.test_utils.watcher import watcher
 
     # This test needs watcher in order to properly mark success/failure
     # when "tearDown" task with trigger rule is part of the DAG
     list(dag.tasks) >> watcher()
 
-from tests.system.utils import get_test_run  # noqa: E402
+from dev.tests_common.test_utils.system_tests import get_test_run  # noqa: E402
 
 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest)
 test_run = get_test_run(dag)
diff --git a/tests/system/providers/microsoft/azure/example_local_to_adls.py b/providers/tests/system/microsoft/azure/example_local_to_adls.py
similarity index 93%
rename from tests/system/providers/microsoft/azure/example_local_to_adls.py
rename to providers/tests/system/microsoft/azure/example_local_to_adls.py
index f5a75e7ce414..d540aaf34d99 100644
--- a/tests/system/providers/microsoft/azure/example_local_to_adls.py
+++ b/providers/tests/system/microsoft/azure/example_local_to_adls.py
@@ -47,13 +47,13 @@
     upload_file >> delete_file
 
-    from tests.system.utils.watcher import watcher
+    from dev.tests_common.test_utils.watcher import watcher
 
     # This test needs watcher in order to properly mark success/failure
     # when "tearDown" task with trigger rule is part of the DAG
     list(dag.tasks) >> watcher()
 
-from tests.system.utils import get_test_run  # noqa: E402
+from dev.tests_common.test_utils.system_tests import get_test_run  # noqa: E402
 
 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest)
 test_run = get_test_run(dag)
diff --git a/tests/system/providers/microsoft/azure/example_local_to_wasb.py b/providers/tests/system/microsoft/azure/example_local_to_wasb.py
similarity index 94%
rename from tests/system/providers/microsoft/azure/example_local_to_wasb.py
rename to providers/tests/system/microsoft/azure/example_local_to_wasb.py
index b03c11e6b367..7e368ca83992 100644
--- a/tests/system/providers/microsoft/azure/example_local_to_wasb.py
+++ b/providers/tests/system/microsoft/azure/example_local_to_wasb.py
@@ -49,13 +49,13 @@
     upload >> delete
 
-    from tests.system.utils.watcher import watcher
+    from dev.tests_common.test_utils.watcher import watcher
 
     # This test needs watcher in order to properly mark success/failure
     # when "tearDown" task with trigger rule is part of the DAG
     list(dag.tasks) >> watcher()
 
-from tests.system.utils import get_test_run  # noqa: E402
+from dev.tests_common.test_utils.system_tests import get_test_run  # noqa: E402
 
 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest)
 test_run = get_test_run(dag)
diff --git a/tests/system/providers/microsoft/azure/example_msfabric.py b/providers/tests/system/microsoft/azure/example_msfabric.py
similarity index 94%
rename from tests/system/providers/microsoft/azure/example_msfabric.py
rename to providers/tests/system/microsoft/azure/example_msfabric.py
index 5f8b0657c401..b1113025286d 100644
--- a/tests/system/providers/microsoft/azure/example_msfabric.py
+++ b/providers/tests/system/microsoft/azure/example_msfabric.py
@@ -51,13 +51,13 @@
     )
     # [END howto_operator_ms_fabric_create_item_schedule]
 
-    from tests.system.utils.watcher import watcher
+    from dev.tests_common.test_utils.watcher import watcher
 
     # This test needs watcher in order to properly mark success/failure
     # when "tearDown" task with trigger rule is part of the DAG
     list(dag.tasks) >> watcher()
 
-from tests.system.utils import get_test_run  # noqa: E402
+from dev.tests_common.test_utils.system_tests import get_test_run  # noqa: E402
 
 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest)
 test_run = get_test_run(dag)
diff --git a/tests/system/providers/microsoft/azure/example_msgraph.py b/providers/tests/system/microsoft/azure/example_msgraph.py
similarity index 93%
rename from tests/system/providers/microsoft/azure/example_msgraph.py
rename to providers/tests/system/microsoft/azure/example_msgraph.py
index 5ff7ba6f8883..33ee00468523 100644
--- a/tests/system/providers/microsoft/azure/example_msgraph.py
+++ b/providers/tests/system/microsoft/azure/example_msgraph.py
@@ -49,13 +49,13 @@
     site_task >> site_pages_task
 
-    from tests.system.utils.watcher import watcher
+    from dev.tests_common.test_utils.watcher import watcher
 
     # This test needs watcher in order to properly mark success/failure
     # when "tearDown" task with trigger rule is part of the DAG
     list(dag.tasks) >> watcher()
 
-from tests.system.utils import get_test_run  # noqa: E402
+from dev.tests_common.test_utils.system_tests import get_test_run  # noqa: E402
 
 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest)
 test_run = get_test_run(dag)
diff --git a/tests/system/providers/microsoft/azure/example_powerbi.py b/providers/tests/system/microsoft/azure/example_powerbi.py
similarity index 96%
rename from tests/system/providers/microsoft/azure/example_powerbi.py
rename to providers/tests/system/microsoft/azure/example_powerbi.py
index 0a1bfde54a7a..b5a982a1159a 100644
--- a/tests/system/providers/microsoft/azure/example_powerbi.py
+++ b/providers/tests/system/microsoft/azure/example_powerbi.py
@@ -97,13 +97,13 @@
     workspaces_task >> workspaces_info_task >> check_workspace_status_task
     refresh_dataset_task >> refresh_dataset_history_task
 
-    from tests.system.utils.watcher import watcher
+    from dev.tests_common.test_utils.watcher import watcher
 
     # This test needs watcher in order to properly mark success/failure
     # when "tearDown" task with trigger rule is part of the DAG
     list(dag.tasks) >> watcher()
 
-from tests.system.utils import get_test_run  # noqa: E402
+from dev.tests_common.test_utils.system_tests import get_test_run  # noqa: E402
 
 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest)
 test_run = get_test_run(dag)
diff --git a/tests/system/providers/microsoft/azure/example_powerbi_dataset_refresh.py b/providers/tests/system/microsoft/azure/example_powerbi_dataset_refresh.py
similarity index 95%
rename from tests/system/providers/microsoft/azure/example_powerbi_dataset_refresh.py
rename to providers/tests/system/microsoft/azure/example_powerbi_dataset_refresh.py
index 52f1f001e998..c02cec3e57f5 100644
--- a/tests/system/providers/microsoft/azure/example_powerbi_dataset_refresh.py
+++ b/providers/tests/system/microsoft/azure/example_powerbi_dataset_refresh.py
@@ -76,13 +76,13 @@ def create_connection(conn_id_name: str):
         refresh_powerbi_dataset,
     )
 
-    from tests.system.utils.watcher import watcher
+    from dev.tests_common.test_utils.watcher import watcher
 
     # This test needs watcher in order to properly mark success/failure
     # when "tearDown" task with trigger rule is part of the DAG
     list(dag.tasks) >> watcher()
 
-from tests.system.utils import get_test_run  # noqa: E402
+from dev.tests_common.test_utils.system_tests import get_test_run  # noqa: E402
 
 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest)
 test_run = get_test_run(dag)
diff --git a/tests/system/providers/microsoft/azure/example_s3_to_wasb.py b/providers/tests/system/microsoft/azure/example_s3_to_wasb.py
similarity index 94%
rename from tests/system/providers/microsoft/azure/example_s3_to_wasb.py
rename to providers/tests/system/microsoft/azure/example_s3_to_wasb.py
index 48fd428b9cde..88ae64324d17 100644
--- a/tests/system/providers/microsoft/azure/example_s3_to_wasb.py
+++ b/providers/tests/system/microsoft/azure/example_s3_to_wasb.py
@@ -29,7 +29,8 @@
 )
 from airflow.providers.microsoft.azure.transfers.s3_to_wasb import S3ToAzureBlobStorageOperator
 from airflow.utils.trigger_rule import TriggerRule
-from tests.system.providers.amazon.aws.utils import SystemTestContextBuilder
+
+from providers.tests.system.amazon.aws.utils import SystemTestContextBuilder
 
 sys_test_context_task = SystemTestContextBuilder().build()
@@ -102,13 +103,13 @@
         remove_s3_bucket,
     )
 
-    from tests.system.utils.watcher import watcher
+    from dev.tests_common.test_utils.watcher import watcher
 
     # This test needs watcher in order to properly mark success/failure when "tearDown" task with trigger
     # rule is part of the DAG
     list(dag.tasks) >> watcher()
 
-from tests.system.utils import get_test_run  # noqa: E402
+from dev.tests_common.test_utils.system_tests import get_test_run  # noqa: E402
 
 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest)
 test_run = get_test_run(dag)
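The first s3_to_wasb hunk also moves SystemTestContextBuilder into the new package. A sketch of how the AWS system tests typically consume the builder; the ENV_ID_KEY constant and the surrounding DAG wiring are assumptions about the rest of the file, which this hunk does not show:

import datetime

from airflow import DAG

from providers.tests.system.amazon.aws.utils import ENV_ID_KEY, SystemTestContextBuilder

# build() returns a task that resolves the test context (environment id plus any
# requested variables) at runtime and publishes it via XCom.
sys_test_context_task = SystemTestContextBuilder().build()

with DAG(dag_id="example_context_sketch", start_date=datetime.datetime(2024, 1, 1), schedule="@once", catchup=False) as dag:
    test_context = sys_test_context_task
    env_id = test_context[ENV_ID_KEY]  # assumed key; used to namespace test resources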
diff --git a/tests/system/providers/microsoft/azure/example_sftp_to_wasb.py b/providers/tests/system/microsoft/azure/example_sftp_to_wasb.py
similarity index 95%
rename from tests/system/providers/microsoft/azure/example_sftp_to_wasb.py
rename to providers/tests/system/microsoft/azure/example_sftp_to_wasb.py
index d80539cb4a23..42e5968304f0 100644
--- a/tests/system/providers/microsoft/azure/example_sftp_to_wasb.py
+++ b/providers/tests/system/microsoft/azure/example_sftp_to_wasb.py
@@ -75,13 +75,13 @@ def delete_sftp_file():
     transfer_files_to_sftp_step >> transfer_files_to_azure >> delete_blob_file_step >> delete_sftp_file()
 
-    from tests.system.utils.watcher import watcher
+    from dev.tests_common.test_utils.watcher import watcher
 
     # This test needs watcher in order to properly mark success/failure
     # when "tearDown" task with trigger rule is part of the DAG
     list(dag.tasks) >> watcher()
 
-from tests.system.utils import get_test_run  # noqa: E402
+from dev.tests_common.test_utils.system_tests import get_test_run  # noqa: E402
 
 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest)
 test_run = get_test_run(dag)
diff --git a/tests/system/providers/microsoft/azure/example_synapse_run_pipeline.py b/providers/tests/system/microsoft/azure/example_synapse_run_pipeline.py
similarity index 93%
rename from tests/system/providers/microsoft/azure/example_synapse_run_pipeline.py
rename to providers/tests/system/microsoft/azure/example_synapse_run_pipeline.py
index 6b69bd8972b4..de87d6b4c248 100644
--- a/tests/system/providers/microsoft/azure/example_synapse_run_pipeline.py
+++ b/providers/tests/system/microsoft/azure/example_synapse_run_pipeline.py
@@ -43,13 +43,13 @@
     # [END howto_operator_azure_synapse_run_pipeline]
     begin >> run_pipeline1
 
-    from tests.system.utils.watcher import watcher
+    from dev.tests_common.test_utils.watcher import watcher
 
     # This test needs watcher in order to properly mark success/failure
     # when "tearDown" task with trigger rule is part of the DAG
     list(dag.tasks) >> watcher()
 
-from tests.system.utils import get_test_run  # noqa: E402
+from dev.tests_common.test_utils.system_tests import get_test_run  # noqa: E402
 
 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest)
 test_run = get_test_run(dag)
diff --git a/tests/system/providers/microsoft/azure/example_wasb_sensors.py b/providers/tests/system/microsoft/azure/example_wasb_sensors.py
similarity index 96%
rename from tests/system/providers/microsoft/azure/example_wasb_sensors.py
rename to providers/tests/system/microsoft/azure/example_wasb_sensors.py
index 806a863cbfbc..56c1dce34d68 100644
--- a/tests/system/providers/microsoft/azure/example_wasb_sensors.py
+++ b/providers/tests/system/microsoft/azure/example_wasb_sensors.py
@@ -62,7 +62,7 @@
 # [END wasb_prefix_sensor]
 
-from tests.system.utils import get_test_run  # noqa: E402
+from dev.tests_common.test_utils.system_tests import get_test_run  # noqa: E402
 
 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest)
 test_run = get_test_run(dag)
diff --git a/tests/system/providers/google/cloud/transfers/resources/__init__.py b/providers/tests/system/microsoft/mssql/__init__.py
similarity index 100%
rename from tests/system/providers/google/cloud/transfers/resources/__init__.py
rename to providers/tests/system/microsoft/mssql/__init__.py
diff --git a/tests/system/providers/microsoft/mssql/create_table.sql b/providers/tests/system/microsoft/mssql/create_table.sql
similarity index 100%
rename from tests/system/providers/microsoft/mssql/create_table.sql
rename to providers/tests/system/microsoft/mssql/create_table.sql
diff --git a/tests/system/providers/microsoft/mssql/example_mssql.py b/providers/tests/system/microsoft/mssql/example_mssql.py
similarity index 97%
rename from tests/system/providers/microsoft/mssql/example_mssql.py
rename to providers/tests/system/microsoft/mssql/example_mssql.py
index 957a8a26c298..12e2815c59d0 100644
--- a/tests/system/providers/microsoft/mssql/example_mssql.py
+++ b/providers/tests/system/microsoft/mssql/example_mssql.py
@@ -146,12 +146,12 @@ def insert_mssql_hook():
     )
     # [END mssql_operator_howto_guide]
 
-    from tests.system.utils.watcher import watcher
+    from dev.tests_common.test_utils.watcher import watcher
 
     # This test needs watcher in order to properly mark success/failure
     # when "tearDown" task with trigger rule is part of the DAG
     list(dag.tasks) >> watcher()
 
-from tests.system.utils import get_test_run  # noqa: E402
+from dev.tests_common.test_utils.system_tests import get_test_run  # noqa: E402
 
 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest)
 test_run = get_test_run(dag)
diff --git a/tests/system/providers/google/cloud/translate/__init__.py b/providers/tests/system/microsoft/winrm/__init__.py
similarity index 100%
rename from tests/system/providers/google/cloud/translate/__init__.py
rename to providers/tests/system/microsoft/winrm/__init__.py
diff --git a/tests/system/providers/microsoft/winrm/example_winrm.py b/providers/tests/system/microsoft/winrm/example_winrm.py
similarity index 95%
rename from tests/system/providers/microsoft/winrm/example_winrm.py
rename to providers/tests/system/microsoft/winrm/example_winrm.py
index 60a0149dc235..1de1cb8c0fd6 100644
--- a/tests/system/providers/microsoft/winrm/example_winrm.py
+++ b/providers/tests/system/microsoft/winrm/example_winrm.py
@@ -64,13 +64,13 @@
     [t1, t2, t3] >> run_this_last
 
-    from tests.system.utils.watcher import watcher
+    from dev.tests_common.test_utils.watcher import watcher
 
     # This test needs watcher in order to properly mark success/failure
     # when "tearDown" task with trigger rule is part of the DAG
     list(dag.tasks) >> watcher()
 
-from tests.system.utils import get_test_run  # noqa: E402
+from dev.tests_common.test_utils.system_tests import get_test_run  # noqa: E402
 
 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest)
 test_run = get_test_run(dag)
diff --git a/tests/system/providers/google/cloud/translate_speech/__init__.py b/providers/tests/system/mysql/__init__.py
similarity index 100%
rename from tests/system/providers/google/cloud/translate_speech/__init__.py
rename to providers/tests/system/mysql/__init__.py
diff --git a/tests/system/providers/mysql/example_mysql.py b/providers/tests/system/mysql/example_mysql.py
similarity index 95%
rename from tests/system/providers/mysql/example_mysql.py
rename to providers/tests/system/mysql/example_mysql.py
index a890b7846ec9..6f0f884197f3 100644
--- a/tests/system/providers/mysql/example_mysql.py
+++ b/providers/tests/system/mysql/example_mysql.py
@@ -59,7 +59,7 @@
 drop_table_mysql_task >> mysql_task
 
-from tests.system.utils import get_test_run  # noqa: E402
+from dev.tests_common.test_utils.system_tests import get_test_run  # noqa: E402
 
 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest)
 test_run = get_test_run(dag)
diff --git a/tests/system/providers/google/cloud/vertex_ai/__init__.py b/providers/tests/system/neo4j/__init__.py
similarity index 100%
rename from tests/system/providers/google/cloud/vertex_ai/__init__.py
rename to providers/tests/system/neo4j/__init__.py
diff --git a/tests/system/providers/neo4j/example_neo4j.py b/providers/tests/system/neo4j/example_neo4j.py
similarity index 95%
rename from tests/system/providers/neo4j/example_neo4j.py
rename to providers/tests/system/neo4j/example_neo4j.py
index 0aea16f736db..80db3fbb2ab1 100644
--- a/tests/system/providers/neo4j/example_neo4j.py
+++ b/providers/tests/system/neo4j/example_neo4j.py
@@ -48,7 +48,7 @@
 # [END run_query_neo4j_operator]
 
-from tests.system.utils import get_test_run  # noqa: E402
+from dev.tests_common.test_utils.system_tests import get_test_run  # noqa: E402
 
 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest)
 test_run = get_test_run(dag)
diff --git a/tests/system/providers/google/cloud/video_intelligence/__init__.py b/providers/tests/system/openai/__init__.py
similarity index 100%
rename from tests/system/providers/google/cloud/video_intelligence/__init__.py
rename to providers/tests/system/openai/__init__.py
diff --git a/tests/system/providers/openai/example_openai.py b/providers/tests/system/openai/example_openai.py
similarity index 97%
rename from tests/system/providers/openai/example_openai.py
rename to providers/tests/system/openai/example_openai.py
index ec8dd316b3ed..d342207b5d09 100644
--- a/tests/system/providers/openai/example_openai.py
+++ b/providers/tests/system/openai/example_openai.py
@@ -104,7 +104,7 @@ def task_to_store_input_text_in_xcom():
 example_openai_dag()
 
-from tests.system.utils import get_test_run  # noqa: E402
+from dev.tests_common.test_utils.system_tests import get_test_run  # noqa: E402
 
 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest)
 test_run = get_test_run(dag)
diff --git a/tests/system/providers/openai/example_trigger_batch_operator.py b/providers/tests/system/openai/example_trigger_batch_operator.py
similarity index 97%
rename from tests/system/providers/openai/example_trigger_batch_operator.py
rename to providers/tests/system/openai/example_trigger_batch_operator.py
index 6f01f648ccc7..3dae1df5d9d0 100644
--- a/tests/system/providers/openai/example_trigger_batch_operator.py
+++ b/providers/tests/system/openai/example_trigger_batch_operator.py
@@ -111,7 +111,7 @@ def cleanup_batch_output_file(batch_id, **context):
 openai_batch_chat_completions()
 
-from tests.system.utils import get_test_run  # noqa: E402
+from dev.tests_common.test_utils.system_tests import get_test_run  # noqa: E402
 
 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest)
 test_run = get_test_run(dag)
diff --git a/tests/system/providers/google/cloud/vision/__init__.py b/providers/tests/system/opensearch/__init__.py
similarity index 100%
rename from tests/system/providers/google/cloud/vision/__init__.py
rename to providers/tests/system/opensearch/__init__.py
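The openai examples are TaskFlow-style modules: the file defines a @dag-decorated function, and the bare call seen as hunk context (example_openai_dag()) is what actually instantiates and registers the DAG. A compressed sketch of that shape; only task_to_store_input_text_in_xcom and the function name come from the hunks above, the rest is illustrative:

from airflow.decorators import dag, task


@dag(schedule=None, catchup=False)
def example_openai_dag():
    @task
    def task_to_store_input_text_in_xcom() -> str:
        return "example input text"

    task_to_store_input_text_in_xcom()


# The call creates the DAG object that get_test_run(dag) later wraps.
dag = example_openai_dag()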
diff --git a/tests/system/providers/opensearch/example_opensearch.py b/providers/tests/system/opensearch/example_opensearch.py
similarity index 96%
rename from tests/system/providers/opensearch/example_opensearch.py
rename to providers/tests/system/opensearch/example_opensearch.py
index b82af8ebddf1..e339fbe78848 100644
--- a/tests/system/providers/opensearch/example_opensearch.py
+++ b/providers/tests/system/opensearch/example_opensearch.py
@@ -118,14 +118,14 @@ def load_connections():
     chain(create_index, add_document_by_class, add_document_by_args, search_high_level, search_low_level)
 
-    from tests.system.utils.watcher import watcher
+    from dev.tests_common.test_utils.watcher import watcher
 
     # This test needs watcher in order to properly mark success/failure
     # when "tearDown" task with trigger rule is part of the DAG
     list(dag.tasks) >> watcher()
 
-from tests.system.utils import get_test_run  # noqa: E402
+from dev.tests_common.test_utils.system_tests import get_test_run  # noqa: E402
 
 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest)
 test_run = get_test_run(dag)
diff --git a/tests/system/providers/google/cloud/workflows/__init__.py b/providers/tests/system/opsgenie/__init__.py
similarity index 100%
rename from tests/system/providers/google/cloud/workflows/__init__.py
rename to providers/tests/system/opsgenie/__init__.py
diff --git a/tests/system/providers/opsgenie/example_opsgenie_alert.py b/providers/tests/system/opsgenie/example_opsgenie_alert.py
similarity index 96%
rename from tests/system/providers/opsgenie/example_opsgenie_alert.py
rename to providers/tests/system/opsgenie/example_opsgenie_alert.py
index 30fdabc0d5e9..cf11f824eec8 100644
--- a/tests/system/providers/opsgenie/example_opsgenie_alert.py
+++ b/providers/tests/system/opsgenie/example_opsgenie_alert.py
@@ -51,7 +51,7 @@
     )
     # [END howto_opsgenie_delete_alert_operator]
 
-from tests.system.utils import get_test_run  # noqa: E402
+from dev.tests_common.test_utils.system_tests import get_test_run  # noqa: E402
 
 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest)
 test_run = get_test_run(dag)
diff --git a/tests/system/providers/opsgenie/example_opsgenie_notifier.py b/providers/tests/system/opsgenie/example_opsgenie_notifier.py
similarity index 96%
rename from tests/system/providers/opsgenie/example_opsgenie_notifier.py
rename to providers/tests/system/opsgenie/example_opsgenie_notifier.py
index 10edf8debdae..97f5945aa977 100644
--- a/tests/system/providers/opsgenie/example_opsgenie_notifier.py
+++ b/providers/tests/system/opsgenie/example_opsgenie_notifier.py
@@ -37,7 +37,7 @@
     )
     # [END howto_notifier_opsgenie]
 
-from tests.system.utils import get_test_run
+from dev.tests_common.test_utils.system_tests import get_test_run
 
 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest)
 test_run = get_test_run(dag)
diff --git a/tests/providers/teradata/transfers/__init__.py b/providers/tests/system/papermill/__init__.py
similarity index 100%
rename from tests/providers/teradata/transfers/__init__.py
rename to providers/tests/system/papermill/__init__.py
diff --git a/tests/system/providers/papermill/conftest.py b/providers/tests/system/papermill/conftest.py
similarity index 100%
rename from tests/system/providers/papermill/conftest.py
rename to providers/tests/system/papermill/conftest.py
diff --git a/tests/system/providers/papermill/example_papermill.py b/providers/tests/system/papermill/example_papermill.py
similarity index 96%
rename from tests/system/providers/papermill/example_papermill.py
rename to providers/tests/system/papermill/example_papermill.py
index 199fd6f64d28..529647fe4e71 100644
--- a/tests/system/providers/papermill/example_papermill.py
+++ b/providers/tests/system/papermill/example_papermill.py
@@ -52,7 +52,7 @@
     )
     # [END howto_operator_papermill]
 
-from tests.system.utils import get_test_run  # noqa: E402
+from dev.tests_common.test_utils.system_tests import get_test_run  # noqa: E402
 
 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest)
 test_run = get_test_run(dag)
diff --git a/tests/system/providers/papermill/example_papermill_remote_verify.py b/providers/tests/system/papermill/example_papermill_remote_verify.py
similarity index 97%
rename from tests/system/providers/papermill/example_papermill_remote_verify.py
rename to providers/tests/system/papermill/example_papermill_remote_verify.py
index b4cf6249be24..f325928b8cd0 100644
--- a/tests/system/providers/papermill/example_papermill_remote_verify.py
+++ b/providers/tests/system/papermill/example_papermill_remote_verify.py
@@ -75,7 +75,7 @@ def check_notebook(output_notebook, execution_date):
     )
     # [END howto_verify_operator_papermill_remote_kernel]
 
-from tests.system.utils import get_test_run  # noqa: E402
+from dev.tests_common.test_utils.system_tests import get_test_run  # noqa: E402
 
 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest)
 test_run = get_test_run(dag)
diff --git a/tests/system/providers/papermill/example_papermill_verify.py b/providers/tests/system/papermill/example_papermill_verify.py
similarity index 97%
rename from tests/system/providers/papermill/example_papermill_verify.py
rename to providers/tests/system/papermill/example_papermill_verify.py
index af24dbdf8ff7..cfae2cc6ed3e 100644
--- a/tests/system/providers/papermill/example_papermill_verify.py
+++ b/providers/tests/system/papermill/example_papermill_verify.py
@@ -73,7 +73,7 @@ def check_notebook(inlets, execution_date):
     run_this >> check_notebook(inlets=AUTO, execution_date="{{ execution_date }}")
     # [END howto_verify_operator_papermill]
 
-from tests.system.utils import get_test_run  # noqa: E402
+from dev.tests_common.test_utils.system_tests import get_test_run  # noqa: E402
 
 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest)
 test_run = get_test_run(dag)
diff --git a/tests/system/providers/papermill/input_notebook.ipynb b/providers/tests/system/papermill/input_notebook.ipynb
similarity index 100%
rename from tests/system/providers/papermill/input_notebook.ipynb
rename to providers/tests/system/papermill/input_notebook.ipynb
diff --git a/tests/system/providers/google/datacatalog/__init__.py b/providers/tests/system/pgvector/__init__.py
similarity index 100%
rename from tests/system/providers/google/datacatalog/__init__.py
rename to providers/tests/system/pgvector/__init__.py
diff --git a/tests/system/providers/pgvector/example_pgvector.py b/providers/tests/system/pgvector/example_pgvector.py
similarity index 97%
rename from tests/system/providers/pgvector/example_pgvector.py
rename to providers/tests/system/pgvector/example_pgvector.py
index 8d1d0f0b3703..5c16b9da307c 100644
--- a/tests/system/providers/pgvector/example_pgvector.py
+++ b/providers/tests/system/pgvector/example_pgvector.py
@@ -76,7 +76,7 @@ def cleanup_postgres_objects():
 example_pgvector_dag()
 
-from tests.system.utils import get_test_run  # noqa: E402
+from dev.tests_common.test_utils.system_tests import get_test_run  # noqa: E402
 
 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest)
 test_run = get_test_run(dag)
diff --git a/tests/system/providers/pgvector/example_pgvector_openai.py b/providers/tests/system/pgvector/example_pgvector_openai.py
similarity index 97%
rename from tests/system/providers/pgvector/example_pgvector_openai.py
rename to providers/tests/system/pgvector/example_pgvector_openai.py
index 9b7999436f34..9c8f841e8f93 100644
--- a/tests/system/providers/pgvector/example_pgvector_openai.py
+++ b/providers/tests/system/pgvector/example_pgvector_openai.py
@@ -90,7 +90,7 @@ def cleanup_postgres_objects():
 example_pgvector_dag()
 
-from tests.system.utils import get_test_run  # noqa: E402
+from dev.tests_common.test_utils.system_tests import get_test_run  # noqa: E402
 
 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest)
 test_run = get_test_run(dag)
diff --git a/tests/system/providers/google/firebase/__init__.py b/providers/tests/system/pinecone/__init__.py
similarity index 100%
rename from tests/system/providers/google/firebase/__init__.py
rename to providers/tests/system/pinecone/__init__.py
diff --git a/tests/system/providers/pinecone/example_create_pod_index.py b/providers/tests/system/pinecone/example_create_pod_index.py
similarity index 96%
rename from tests/system/providers/pinecone/example_create_pod_index.py
rename to providers/tests/system/pinecone/example_create_pod_index.py
index a2d7f16c6934..e0342e2d6200 100644
--- a/tests/system/providers/pinecone/example_create_pod_index.py
+++ b/providers/tests/system/pinecone/example_create_pod_index.py
@@ -56,7 +56,7 @@ def delete_index():
     create_index >> delete_index()
 
-from tests.system.utils import get_test_run  # noqa: E402
+from dev.tests_common.test_utils.system_tests import get_test_run  # noqa: E402
 
 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest)
 test_run = get_test_run(dag)
diff --git a/tests/system/providers/pinecone/example_create_serverless_index.py b/providers/tests/system/pinecone/example_create_serverless_index.py
similarity index 96%
rename from tests/system/providers/pinecone/example_create_serverless_index.py
rename to providers/tests/system/pinecone/example_create_serverless_index.py
index cf1e2c5cee4b..28078b1b608c 100644
--- a/tests/system/providers/pinecone/example_create_serverless_index.py
+++ b/providers/tests/system/pinecone/example_create_serverless_index.py
@@ -55,7 +55,7 @@ def delete_index():
     create_index >> delete_index()
 
-from tests.system.utils import get_test_run  # noqa: E402
+from dev.tests_common.test_utils.system_tests import get_test_run  # noqa: E402
 
 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest)
 test_run = get_test_run(dag)
diff --git a/tests/system/providers/pinecone/example_dag_pinecone.py b/providers/tests/system/pinecone/example_dag_pinecone.py
similarity index 95%
rename from tests/system/providers/pinecone/example_dag_pinecone.py
rename to providers/tests/system/pinecone/example_dag_pinecone.py
index 8bf59febeca9..744f6518dcea 100644
--- a/tests/system/providers/pinecone/example_dag_pinecone.py
+++ b/providers/tests/system/pinecone/example_dag_pinecone.py
@@ -46,7 +46,7 @@
 # [END howto_operator_pinecone_ingest]
 
-from tests.system.utils import get_test_run  # noqa: E402
+from dev.tests_common.test_utils.system_tests import get_test_run  # noqa: E402
 
 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest)
 test_run = get_test_run(dag)
diff --git a/tests/system/providers/pinecone/example_pinecone_cohere.py b/providers/tests/system/pinecone/example_pinecone_cohere.py
similarity index 97%
rename from tests/system/providers/pinecone/example_pinecone_cohere.py
rename to providers/tests/system/pinecone/example_pinecone_cohere.py
index 80e6766484d6..80e9b1efad8e 100644
--- a/tests/system/providers/pinecone/example_pinecone_cohere.py
+++ b/providers/tests/system/pinecone/example_pinecone_cohere.py
@@ -76,7 +76,7 @@ def delete_index():
     create_index() >> embed_task >> transformed_output >> perform_ingestion >> delete_index()
 
-from tests.system.utils import get_test_run  # noqa: E402
+from dev.tests_common.test_utils.system_tests import get_test_run  # noqa: E402
 
 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest)
 test_run = get_test_run(dag)
diff --git a/tests/system/providers/pinecone/example_pinecone_openai.py b/providers/tests/system/pinecone/example_pinecone_openai.py
similarity index 98%
rename from tests/system/providers/pinecone/example_pinecone_openai.py
rename to providers/tests/system/pinecone/example_pinecone_openai.py
index d338e25542ce..dad83f48085a 100644
--- a/tests/system/providers/pinecone/example_pinecone_openai.py
+++ b/providers/tests/system/pinecone/example_pinecone_openai.py
@@ -107,7 +107,7 @@ def delete_index():
     create_index >> embed_task >> perform_ingestion >> delete_index()
 
-from tests.system.utils import get_test_run  # noqa: E402
+from dev.tests_common.test_utils.system_tests import get_test_run  # noqa: E402
 
 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest)
 test_run = get_test_run(dag)
diff --git a/tests/system/providers/google/suite/__init__.py b/providers/tests/system/postgres/__init__.py
similarity index 100%
rename from tests/system/providers/google/suite/__init__.py
rename to providers/tests/system/postgres/__init__.py
diff --git a/tests/system/providers/postgres/example_postgres.py b/providers/tests/system/postgres/example_postgres.py
similarity index 96%
rename from tests/system/providers/postgres/example_postgres.py
rename to providers/tests/system/postgres/example_postgres.py
index aab5c1951799..5ffaf6ea0098 100644
--- a/tests/system/providers/postgres/example_postgres.py
+++ b/providers/tests/system/postgres/example_postgres.py
@@ -80,13 +80,13 @@
     create_pet_table >> populate_pet_table >> get_all_pets >> get_birth_date
     # [END postgres_sql_execute_query_operator_howto_guide]
 
-    from tests.system.utils.watcher import watcher
+    from dev.tests_common.test_utils.watcher import watcher
 
     # This test needs watcher in order to properly mark success/failure
     # when "tearDown" task with trigger rule is part of the DAG
     list(dag.tasks) >> watcher()
 
-from tests.system.utils import get_test_run  # noqa: E402
+from dev.tests_common.test_utils.system_tests import get_test_run  # noqa: E402
 
 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest)
 test_run = get_test_run(dag)
diff --git a/tests/system/providers/google/suite/resources/__init__.py b/providers/tests/system/presto/__init__.py
similarity index 100%
rename from tests/system/providers/google/suite/resources/__init__.py
rename to providers/tests/system/presto/__init__.py
diff --git a/tests/system/providers/presto/example_gcs_to_presto.py b/providers/tests/system/presto/example_gcs_to_presto.py
similarity index 95%
rename from tests/system/providers/presto/example_gcs_to_presto.py
rename to providers/tests/system/presto/example_gcs_to_presto.py
index 19b4a3a4fba5..ceccf098471d 100644
--- a/tests/system/providers/presto/example_gcs_to_presto.py
+++ b/providers/tests/system/presto/example_gcs_to_presto.py
@@ -50,7 +50,7 @@
 # [END gcs_csv_to_presto_table]
 
-from tests.system.utils import get_test_run  # noqa: E402
+from dev.tests_common.test_utils.system_tests import get_test_run  # noqa: E402
 
 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest)
 test_run = get_test_run(dag)
diff --git a/tests/system/providers/google/workplace/__init__.py b/providers/tests/system/qdrant/__init__.py
similarity index 100%
rename from tests/system/providers/google/workplace/__init__.py
rename to providers/tests/system/qdrant/__init__.py
diff --git a/tests/system/providers/qdrant/example_dag_qdrant.py b/providers/tests/system/qdrant/example_dag_qdrant.py
similarity index 96%
rename from tests/system/providers/qdrant/example_dag_qdrant.py
rename to providers/tests/system/qdrant/example_dag_qdrant.py
index 8f55d2d72c55..85c92d9c9931 100644
--- a/tests/system/providers/qdrant/example_dag_qdrant.py
+++ b/providers/tests/system/qdrant/example_dag_qdrant.py
@@ -43,7 +43,7 @@
 # [END howto_operator_qdrant_ingest]
 
-from tests.system.utils import get_test_run
+from dev.tests_common.test_utils.system_tests import get_test_run
 
 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest)
 test_run = get_test_run(dag)
diff --git a/tests/system/providers/http/__init__.py b/providers/tests/system/redis/__init__.py
similarity index 100%
rename from tests/system/providers/http/__init__.py
rename to providers/tests/system/redis/__init__.py
diff --git a/tests/system/providers/redis/example_redis_publish.py b/providers/tests/system/redis/example_redis_publish.py
similarity index 95%
rename from tests/system/providers/redis/example_redis_publish.py
rename to providers/tests/system/redis/example_redis_publish.py
index 9d50593c0400..335256f68e57 100644
--- a/tests/system/providers/redis/example_redis_publish.py
+++ b/providers/tests/system/redis/example_redis_publish.py
@@ -73,13 +73,13 @@
     publish_task >> key_sensor_task
 
-    from tests.system.utils.watcher import watcher
+    from dev.tests_common.test_utils.watcher import watcher
 
     # This test needs watcher in order to properly mark success/failure
     # when "tearDown" task with trigger rule is part of the DAG
     list(dag.tasks) >> watcher()
 
-from tests.system.utils import get_test_run  # noqa: E402
+from dev.tests_common.test_utils.system_tests import get_test_run  # noqa: E402
 
 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest)
 test_run = get_test_run(dag)
diff --git a/tests/system/providers/influxdb/__init__.py b/providers/tests/system/salesforce/__init__.py
similarity index 100%
rename from tests/system/providers/influxdb/__init__.py
rename to providers/tests/system/salesforce/__init__.py
diff --git a/tests/system/providers/salesforce/example_bulk.py b/providers/tests/system/salesforce/example_bulk.py
similarity index 97%
rename from tests/system/providers/salesforce/example_bulk.py
rename to providers/tests/system/salesforce/example_bulk.py
index 06764aaf54b4..7d43de648805 100644
--- a/tests/system/providers/salesforce/example_bulk.py
+++ b/providers/tests/system/salesforce/example_bulk.py
@@ -90,7 +90,7 @@
 # [END howto_salesforce_bulk_delete_operation]
 
-from tests.system.utils import get_test_run  # noqa: E402
+from dev.tests_common.test_utils.system_tests import get_test_run  # noqa: E402
 
 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest)
 test_run = get_test_run(dag)
diff --git a/tests/system/providers/salesforce/example_salesforce_apex_rest.py b/providers/tests/system/salesforce/example_salesforce_apex_rest.py
similarity index 95%
rename from tests/system/providers/salesforce/example_salesforce_apex_rest.py
rename to providers/tests/system/salesforce/example_salesforce_apex_rest.py
index 9bfdb9483962..2961ffa4e478 100644
--- a/tests/system/providers/salesforce/example_salesforce_apex_rest.py
+++ b/providers/tests/system/salesforce/example_salesforce_apex_rest.py
@@ -41,7 +41,7 @@
 # [END howto_salesforce_apex_rest_operator]
 
-from tests.system.utils import get_test_run  # noqa: E402
+from dev.tests_common.test_utils.system_tests import get_test_run  # noqa: E402
 
 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest)
 test_run = get_test_run(dag)
diff --git a/tests/system/providers/jdbc/__init__.py b/providers/tests/system/samba/__init__.py
similarity index 100%
rename from tests/system/providers/jdbc/__init__.py
rename to providers/tests/system/samba/__init__.py
diff --git a/tests/system/providers/samba/example_gcs_to_samba.py b/providers/tests/system/samba/example_gcs_to_samba.py
similarity index 96%
rename from tests/system/providers/samba/example_gcs_to_samba.py
rename to providers/tests/system/samba/example_gcs_to_samba.py
index c03a6f129cea..d7717fcbfc72 100644
--- a/tests/system/providers/samba/example_gcs_to_samba.py
+++ b/providers/tests/system/samba/example_gcs_to_samba.py
@@ -30,7 +30,8 @@
 from airflow.providers.google.cloud.transfers.local_to_gcs import LocalFilesystemToGCSOperator
 from airflow.providers.samba.transfers.gcs_to_samba import GCSToSambaOperator
 from airflow.utils.trigger_rule import TriggerRule
-from tests.system.providers.google import DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID
+
+from providers.tests.system.google import DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID
 
 ENV_ID = os.environ.get("SYSTEM_TESTS_ENV_ID")
 PROJECT_ID = os.environ.get("SYSTEM_TESTS_GCP_PROJECT") or DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID
@@ -137,14 +138,14 @@
         >> delete_bucket
     )
 
-    from tests.system.utils.watcher import watcher
+    from dev.tests_common.test_utils.watcher import watcher
 
     # This test needs watcher in order to properly mark success/failure
     # when "tearDown" task with trigger rule is part of the DAG
     list(dag.tasks) >> watcher()
 
-from tests.system.utils import get_test_run  # noqa: E402
+from dev.tests_common.test_utils.system_tests import get_test_run  # noqa: E402
 
 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest)
 test_run = get_test_run(dag)
diff --git a/tests/system/providers/jenkins/__init__.py b/providers/tests/system/sftp/__init__.py
similarity index 100%
rename from tests/system/providers/jenkins/__init__.py
rename to providers/tests/system/sftp/__init__.py
diff --git a/tests/system/providers/sftp/example_sftp_sensor.py b/providers/tests/system/sftp/example_sftp_sensor.py
similarity index 96%
rename from tests/system/providers/sftp/example_sftp_sensor.py
rename to providers/tests/system/sftp/example_sftp_sensor.py
index df705b4b871f..a3bde8a03af0 100644
--- a/tests/system/providers/sftp/example_sftp_sensor.py
+++ b/providers/tests/system/sftp/example_sftp_sensor.py
@@ -89,14 +89,14 @@ def sftp_sensor_decorator():
         >> remove_file_task_end
     )
 
-    from tests.system.utils.watcher import watcher
+    from dev.tests_common.test_utils.watcher import watcher
 
     # This test needs watcher in order to properly mark success/failure
     # when "tearDown" task with trigger rule is part of the DAG
     list(dag.tasks) >> watcher()
 
-from tests.system.utils import get_test_run  # noqa: E402
+from dev.tests_common.test_utils.system_tests import get_test_run  # noqa: E402
 
 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest)
 test_run = get_test_run(dag)
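Both GCP-flavoured transfers in this section (the hunk at the top of the section and gcs_to_samba above) share the same project-resolution idiom, now imported from the relocated google package. Spelled out, with behaviour exactly as the hunks show:

import os

from providers.tests.system.google import DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID

# An explicitly exported SYSTEM_TESTS_GCP_PROJECT wins; otherwise the suite-wide
# default project id is used, so the example remains runnable out of the box.
ENV_ID = os.environ.get("SYSTEM_TESTS_ENV_ID")
PROJECT_ID = os.environ.get("SYSTEM_TESTS_GCP_PROJECT") or DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID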
diff --git a/tests/system/providers/microsoft/mssql/__init__.py b/providers/tests/system/singularity/__init__.py
similarity index 100%
rename from tests/system/providers/microsoft/mssql/__init__.py
rename to providers/tests/system/singularity/__init__.py
diff --git a/tests/system/providers/singularity/example_singularity.py b/providers/tests/system/singularity/example_singularity.py
similarity index 95%
rename from tests/system/providers/singularity/example_singularity.py
rename to providers/tests/system/singularity/example_singularity.py
index 4b60c080dcd2..e80a0f40eb95 100644
--- a/tests/system/providers/singularity/example_singularity.py
+++ b/providers/tests/system/singularity/example_singularity.py
@@ -50,7 +50,7 @@
     t3 >> t4
 
-from tests.system.utils import get_test_run  # noqa: E402
+from dev.tests_common.test_utils.system_tests import get_test_run  # noqa: E402
 
 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest)
 test_run = get_test_run(dag)
diff --git a/tests/system/providers/microsoft/winrm/__init__.py b/providers/tests/system/slack/__init__.py
similarity index 100%
rename from tests/system/providers/microsoft/winrm/__init__.py
rename to providers/tests/system/slack/__init__.py
diff --git a/tests/system/providers/slack/example_slack.py b/providers/tests/system/slack/example_slack.py
similarity index 97%
rename from tests/system/providers/slack/example_slack.py
rename to providers/tests/system/slack/example_slack.py
index c20cf8afa815..f122dffca278 100644
--- a/tests/system/providers/slack/example_slack.py
+++ b/providers/tests/system/slack/example_slack.py
@@ -94,7 +94,7 @@
     )
 
-from tests.system.utils import get_test_run  # noqa: E402
+from dev.tests_common.test_utils.system_tests import get_test_run  # noqa: E402
 
 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest)
 test_run = get_test_run(dag)
diff --git a/tests/system/providers/slack/example_slack_webhook.py b/providers/tests/system/slack/example_slack_webhook.py
similarity index 97%
rename from tests/system/providers/slack/example_slack_webhook.py
rename to providers/tests/system/slack/example_slack_webhook.py
index 98905d1b9ee6..7e45dc92d921 100644
--- a/tests/system/providers/slack/example_slack_webhook.py
+++ b/providers/tests/system/slack/example_slack_webhook.py
@@ -69,7 +69,7 @@
     slack_webhook_operator_text >> slack_webhook_operator_blocks
 
-from tests.system.utils import get_test_run  # noqa: E402
+from dev.tests_common.test_utils.system_tests import get_test_run  # noqa: E402
 
 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest)
 test_run = get_test_run(dag)
diff --git a/tests/system/providers/slack/example_sql_to_slack.py b/providers/tests/system/slack/example_sql_to_slack.py
similarity index 96%
rename from tests/system/providers/slack/example_sql_to_slack.py
rename to providers/tests/system/slack/example_sql_to_slack.py
index 2e672df9aa68..457bcecaea67 100644
--- a/tests/system/providers/slack/example_sql_to_slack.py
+++ b/providers/tests/system/slack/example_sql_to_slack.py
@@ -52,7 +52,7 @@
 # [END howto_operator_sql_to_slack_api_file]
 
-from tests.system.utils import get_test_run  # noqa: E402
+from dev.tests_common.test_utils.system_tests import get_test_run  # noqa: E402
 
 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest)
 test_run = get_test_run(dag)
diff --git a/tests/system/providers/slack/example_sql_to_slack_webhook.py b/providers/tests/system/slack/example_sql_to_slack_webhook.py
similarity index 95%
rename from tests/system/providers/slack/example_sql_to_slack_webhook.py
rename to providers/tests/system/slack/example_sql_to_slack_webhook.py
index 97bb8ae46ed2..44fad156657b 100644
--- a/tests/system/providers/slack/example_sql_to_slack_webhook.py
+++ b/providers/tests/system/slack/example_sql_to_slack_webhook.py
@@ -51,7 +51,7 @@
 # [END howto_operator_sql_to_slack_webhook]
 
-from tests.system.utils import get_test_run  # noqa: E402
+from dev.tests_common.test_utils.system_tests import get_test_run  # noqa: E402
 
 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest)
 test_run = get_test_run(dag)
diff --git a/tests/system/providers/mysql/__init__.py b/providers/tests/system/snowflake/__init__.py
similarity index 100%
rename from tests/system/providers/mysql/__init__.py
rename to providers/tests/system/snowflake/__init__.py
diff --git a/tests/system/providers/snowflake/example_copy_into_snowflake.py b/providers/tests/system/snowflake/example_copy_into_snowflake.py
similarity index 96%
rename from tests/system/providers/snowflake/example_copy_into_snowflake.py
rename to providers/tests/system/snowflake/example_copy_into_snowflake.py
index e25112b95f3e..32eb9699d74b 100644
--- a/tests/system/providers/snowflake/example_copy_into_snowflake.py
+++ b/providers/tests/system/snowflake/example_copy_into_snowflake.py
@@ -60,7 +60,7 @@
 # [END howto_operator_s3_copy_into_snowflake]
 
-from tests.system.utils import get_test_run  # noqa: E402
+from dev.tests_common.test_utils.system_tests import get_test_run  # noqa: E402
 
 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest)
 test_run = get_test_run(dag)
diff --git a/tests/system/providers/snowflake/example_snowflake.py b/providers/tests/system/snowflake/example_snowflake.py
similarity index 97%
rename from tests/system/providers/snowflake/example_snowflake.py
rename to providers/tests/system/snowflake/example_snowflake.py
index 1a6f12080d74..e851b13c0820 100644
--- a/tests/system/providers/snowflake/example_snowflake.py
+++ b/providers/tests/system/snowflake/example_snowflake.py
@@ -96,7 +96,7 @@
     )
 
-from tests.system.utils import get_test_run  # noqa: E402
+from dev.tests_common.test_utils.system_tests import get_test_run  # noqa: E402
 
 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest)
 test_run = get_test_run(dag)
diff --git a/tests/system/providers/snowflake/example_snowflake_snowflake_op_template_file.sql b/providers/tests/system/snowflake/example_snowflake_snowflake_op_template_file.sql
similarity index 100%
rename from tests/system/providers/snowflake/example_snowflake_snowflake_op_template_file.sql
rename to providers/tests/system/snowflake/example_snowflake_snowflake_op_template_file.sql
diff --git a/tests/system/providers/snowflake/example_snowpark_decorator.py b/providers/tests/system/snowflake/example_snowpark_decorator.py
similarity index 97%
rename from tests/system/providers/snowflake/example_snowpark_decorator.py
rename to providers/tests/system/snowflake/example_snowpark_decorator.py
index 1a303b1fdfa5..3f74b97d0f23 100644
--- a/tests/system/providers/snowflake/example_snowpark_decorator.py
+++ b/providers/tests/system/snowflake/example_snowpark_decorator.py
@@ -79,7 +79,8 @@ def check_num_rows(table_name: str):
     check_num_rows(table_name)
     # [END howto_decorator_snowpark]
 
-from tests.system.utils import get_test_run  # noqa: E402
import get_test_run # noqa: E402 + +from dev.tests_common.test_utils.system_tests import get_test_run # noqa: E402 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) test_run = get_test_run(dag) diff --git a/tests/system/providers/snowflake/example_snowpark_operator.py b/providers/tests/system/snowflake/example_snowpark_operator.py similarity index 97% rename from tests/system/providers/snowflake/example_snowpark_operator.py rename to providers/tests/system/snowflake/example_snowpark_operator.py index 090a0f53a4bd..0543596ec9b1 100644 --- a/tests/system/providers/snowflake/example_snowpark_operator.py +++ b/providers/tests/system/snowflake/example_snowpark_operator.py @@ -88,7 +88,8 @@ def check_num_rows(table_name: str): setup_data_operator >> check_num_rows_operator # [END howto_operator_snowpark] -from tests.system.utils import get_test_run # noqa: E402 + +from dev.tests_common.test_utils.system_tests import get_test_run # noqa: E402 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) test_run = get_test_run(dag) diff --git a/tests/system/providers/neo4j/__init__.py b/providers/tests/system/sqlite/__init__.py similarity index 100% rename from tests/system/providers/neo4j/__init__.py rename to providers/tests/system/sqlite/__init__.py diff --git a/tests/system/providers/sqlite/create_table.sql b/providers/tests/system/sqlite/create_table.sql similarity index 100% rename from tests/system/providers/sqlite/create_table.sql rename to providers/tests/system/sqlite/create_table.sql diff --git a/tests/system/providers/sqlite/example_sqlite.py b/providers/tests/system/sqlite/example_sqlite.py similarity index 96% rename from tests/system/providers/sqlite/example_sqlite.py rename to providers/tests/system/sqlite/example_sqlite.py index 317da9af3780..1ea0270a0df2 100644 --- a/tests/system/providers/sqlite/example_sqlite.py +++ b/providers/tests/system/sqlite/example_sqlite.py @@ -91,13 +91,13 @@ def replace_sqlite_hook(): >> replace_sqlite_hook() ) - from tests.system.utils.watcher import watcher + from dev.tests_common.test_utils.watcher import watcher # This test needs watcher in order to properly mark success/failure # when "tearDown" task with trigger rule is part of the DAG list(dag.tasks) >> watcher() -from tests.system.utils import get_test_run # noqa: E402 +from dev.tests_common.test_utils.system_tests import get_test_run # noqa: E402 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) test_run = get_test_run(dag) diff --git a/tests/system/providers/openai/__init__.py b/providers/tests/system/tableau/__init__.py similarity index 100% rename from tests/system/providers/openai/__init__.py rename to providers/tests/system/tableau/__init__.py diff --git a/tests/system/providers/tableau/example_tableau.py b/providers/tests/system/tableau/example_tableau.py similarity index 97% rename from tests/system/providers/tableau/example_tableau.py rename to providers/tests/system/tableau/example_tableau.py index c5f6995b62fe..1472511a66c9 100644 --- a/tests/system/providers/tableau/example_tableau.py +++ b/providers/tests/system/tableau/example_tableau.py @@ -71,7 +71,7 @@ # task_refresh_workbook_non_blocking >> task_check_job_status -from tests.system.utils import get_test_run # noqa: E402 +from dev.tests_common.test_utils.system_tests import get_test_run # noqa: E402 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) test_run = get_test_run(dag) diff 
--git a/tests/system/providers/opensearch/__init__.py b/providers/tests/system/telegram/__init__.py similarity index 100% rename from tests/system/providers/opensearch/__init__.py rename to providers/tests/system/telegram/__init__.py diff --git a/tests/system/providers/telegram/example_telegram.py b/providers/tests/system/telegram/example_telegram.py similarity index 95% rename from tests/system/providers/telegram/example_telegram.py rename to providers/tests/system/telegram/example_telegram.py index 18d734f3c69e..a7e9018cf85f 100644 --- a/tests/system/providers/telegram/example_telegram.py +++ b/providers/tests/system/telegram/example_telegram.py @@ -45,7 +45,7 @@ # [END howto_operator_telegram] -from tests.system.utils import get_test_run # noqa: E402 +from dev.tests_common.test_utils.system_tests import get_test_run # noqa: E402 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) test_run = get_test_run(dag) diff --git a/tests/system/providers/opsgenie/__init__.py b/providers/tests/system/teradata/__init__.py similarity index 100% rename from tests/system/providers/opsgenie/__init__.py rename to providers/tests/system/teradata/__init__.py diff --git a/tests/system/providers/teradata/create_ssl_table.sql b/providers/tests/system/teradata/create_ssl_table.sql similarity index 100% rename from tests/system/providers/teradata/create_ssl_table.sql rename to providers/tests/system/teradata/create_ssl_table.sql diff --git a/tests/system/providers/teradata/create_table.sql b/providers/tests/system/teradata/create_table.sql similarity index 100% rename from tests/system/providers/teradata/create_table.sql rename to providers/tests/system/teradata/create_table.sql diff --git a/tests/system/providers/teradata/example_azure_blob_to_teradata_transfer.py b/providers/tests/system/teradata/example_azure_blob_to_teradata_transfer.py similarity index 98% rename from tests/system/providers/teradata/example_azure_blob_to_teradata_transfer.py rename to providers/tests/system/teradata/example_azure_blob_to_teradata_transfer.py index bcb1dd2fe6c5..26fd27554d5d 100644 --- a/tests/system/providers/teradata/example_azure_blob_to_teradata_transfer.py +++ b/providers/tests/system/teradata/example_azure_blob_to_teradata_transfer.py @@ -205,13 +205,13 @@ ) # [END azure_blob_to_teradata_transfer_operator_howto_guide] - from tests.system.utils.watcher import watcher + from dev.tests_common.test_utils.watcher import watcher # This test needs watcher in order to properly mark success/failure # when "tearDown" task with trigger rule is part of the DAG list(dag.tasks) >> watcher() -from tests.system.utils import get_test_run # noqa: E402 +from dev.tests_common.test_utils.system_tests import get_test_run # noqa: E402 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) test_run = get_test_run(dag) diff --git a/tests/system/providers/teradata/example_s3_to_teradata_transfer.py b/providers/tests/system/teradata/example_s3_to_teradata_transfer.py similarity index 98% rename from tests/system/providers/teradata/example_s3_to_teradata_transfer.py rename to providers/tests/system/teradata/example_s3_to_teradata_transfer.py index ae8b827c1efa..ab361990b21e 100644 --- a/tests/system/providers/teradata/example_s3_to_teradata_transfer.py +++ b/providers/tests/system/teradata/example_s3_to_teradata_transfer.py @@ -206,13 +206,13 @@ ) # [END s3_to_teradata_transfer_operator_howto_guide] - from tests.system.utils.watcher import watcher + from 
dev.tests_common.test_utils.watcher import watcher # This test needs watcher in order to properly mark success/failure # when "tearDown" task with trigger rule is part of the DAG list(dag.tasks) >> watcher() -from tests.system.utils import get_test_run # noqa: E402 +from dev.tests_common.test_utils.system_tests import get_test_run # noqa: E402 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) test_run = get_test_run(dag) diff --git a/tests/system/providers/teradata/example_ssl_teradata.py b/providers/tests/system/teradata/example_ssl_teradata.py similarity index 97% rename from tests/system/providers/teradata/example_ssl_teradata.py rename to providers/tests/system/teradata/example_ssl_teradata.py index 1673bd791a79..3836715a8e63 100644 --- a/tests/system/providers/teradata/example_ssl_teradata.py +++ b/providers/tests/system/teradata/example_ssl_teradata.py @@ -121,13 +121,13 @@ # [END teradata_operator_howto_guide] - from tests.system.utils.watcher import watcher + from dev.tests_common.test_utils.watcher import watcher # This test needs watcher in order to properly mark success/failure # when "tearDown" task with trigger rule is part of the DAG list(dag.tasks) >> watcher() -from tests.system.utils import get_test_run # noqa: E402 +from dev.tests_common.test_utils.system_tests import get_test_run # noqa: E402 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) test_run = get_test_run(dag) diff --git a/tests/system/providers/teradata/example_teradata.py b/providers/tests/system/teradata/example_teradata.py similarity index 97% rename from tests/system/providers/teradata/example_teradata.py rename to providers/tests/system/teradata/example_teradata.py index 1fd587cdf8f7..ae41effce7fe 100644 --- a/tests/system/providers/teradata/example_teradata.py +++ b/providers/tests/system/teradata/example_teradata.py @@ -164,13 +164,13 @@ # [END teradata_operator_howto_guide] - from tests.system.utils.watcher import watcher + from dev.tests_common.test_utils.watcher import watcher # This test needs watcher in order to properly mark success/failure # when "tearDown" task with trigger rule is part of the DAG list(dag.tasks) >> watcher() -from tests.system.utils import get_test_run # noqa: E402 +from dev.tests_common.test_utils.system_tests import get_test_run # noqa: E402 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) test_run = get_test_run(dag) diff --git a/tests/system/providers/teradata/example_teradata_call_sp.py b/providers/tests/system/teradata/example_teradata_call_sp.py similarity index 97% rename from tests/system/providers/teradata/example_teradata_call_sp.py rename to providers/tests/system/teradata/example_teradata_call_sp.py index 98ce85fffdfd..f48583d83ec5 100644 --- a/tests/system/providers/teradata/example_teradata_call_sp.py +++ b/providers/tests/system/teradata/example_teradata_call_sp.py @@ -162,13 +162,13 @@ # [END howto_teradata_operator_for_sp] - from tests.system.utils.watcher import watcher + from dev.tests_common.test_utils.watcher import watcher # This test needs watcher in order to properly mark success/failure # when "tearDown" task with trigger rule is part of the DAG list(dag.tasks) >> watcher() -from tests.system.utils import get_test_run # noqa: E402 +from dev.tests_common.test_utils.system_tests import get_test_run # noqa: E402 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) test_run = get_test_run(dag) diff --git 
a/tests/system/providers/teradata/example_teradata_compute_cluster.py b/providers/tests/system/teradata/example_teradata_compute_cluster.py similarity index 97% rename from tests/system/providers/teradata/example_teradata_compute_cluster.py rename to providers/tests/system/teradata/example_teradata_compute_cluster.py index 3fefe9858770..210cde7816bc 100644 --- a/tests/system/providers/teradata/example_teradata_compute_cluster.py +++ b/providers/tests/system/teradata/example_teradata_compute_cluster.py @@ -146,13 +146,13 @@ # [END teradata_vantage_lake_compute_cluster_howto_guide] - from tests.system.utils.watcher import watcher + from dev.tests_common.test_utils.watcher import watcher # This test needs watcher in order to properly mark success/failure # when "tearDown" task with trigger rule is part of the DAG list(dag.tasks) >> watcher() -from tests.system.utils import get_test_run # noqa: E402 +from dev.tests_common.test_utils.system_tests import get_test_run # noqa: E402 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) test_run = get_test_run(dag) diff --git a/tests/system/providers/teradata/example_teradata_to_teradata_transfer.py b/providers/tests/system/teradata/example_teradata_to_teradata_transfer.py similarity index 97% rename from tests/system/providers/teradata/example_teradata_to_teradata_transfer.py rename to providers/tests/system/teradata/example_teradata_to_teradata_transfer.py index ac2517a33f51..7f315202606f 100644 --- a/tests/system/providers/teradata/example_teradata_to_teradata_transfer.py +++ b/providers/tests/system/teradata/example_teradata_to_teradata_transfer.py @@ -148,13 +148,13 @@ ) # [END teradata_to_teradata_transfer_operator_howto_guide] - from tests.system.utils.watcher import watcher + from dev.tests_common.test_utils.watcher import watcher # This test needs watcher in order to properly mark success/failure # when "tearDown" task with trigger rule is part of the DAG list(dag.tasks) >> watcher() -from tests.system.utils import get_test_run # noqa: E402 +from dev.tests_common.test_utils.system_tests import get_test_run # noqa: E402 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) test_run = get_test_run(dag) diff --git a/tests/system/providers/pgvector/__init__.py b/providers/tests/system/trino/__init__.py similarity index 100% rename from tests/system/providers/pgvector/__init__.py rename to providers/tests/system/trino/__init__.py diff --git a/tests/system/providers/trino/example_gcs_to_trino.py b/providers/tests/system/trino/example_gcs_to_trino.py similarity index 95% rename from tests/system/providers/trino/example_gcs_to_trino.py rename to providers/tests/system/trino/example_gcs_to_trino.py index e970e4d14671..e0249101eb1a 100644 --- a/tests/system/providers/trino/example_gcs_to_trino.py +++ b/providers/tests/system/trino/example_gcs_to_trino.py @@ -50,7 +50,7 @@ # [END gcs_csv_to_trino_table] -from tests.system.utils import get_test_run # noqa: E402 +from dev.tests_common.test_utils.system_tests import get_test_run # noqa: E402 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) test_run = get_test_run(dag) diff --git a/tests/system/providers/trino/example_trino.py b/providers/tests/system/trino/example_trino.py similarity index 97% rename from tests/system/providers/trino/example_trino.py rename to providers/tests/system/trino/example_trino.py index 91e1c9dfce6a..d708d9b7b8fb 100644 --- a/tests/system/providers/trino/example_trino.py 
+++ b/providers/tests/system/trino/example_trino.py @@ -91,7 +91,7 @@ # [END howto_operator_trino] -from tests.system.utils import get_test_run # noqa: E402 +from dev.tests_common.test_utils.system_tests import get_test_run # noqa: E402 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) test_run = get_test_run(dag) diff --git a/tests/system/providers/pinecone/__init__.py b/providers/tests/system/weaviate/__init__.py similarity index 100% rename from tests/system/providers/pinecone/__init__.py rename to providers/tests/system/weaviate/__init__.py diff --git a/tests/system/providers/weaviate/example_weaviate_cohere.py b/providers/tests/system/weaviate/example_weaviate_cohere.py similarity index 98% rename from tests/system/providers/weaviate/example_weaviate_cohere.py rename to providers/tests/system/weaviate/example_weaviate_cohere.py index d8662e464bd4..1db5e2cccb72 100644 --- a/tests/system/providers/weaviate/example_weaviate_cohere.py +++ b/providers/tests/system/weaviate/example_weaviate_cohere.py @@ -115,7 +115,7 @@ def delete_weaviate_collections(): example_weaviate_cohere() -from tests.system.utils import get_test_run # noqa: E402 +from dev.tests_common.test_utils.system_tests import get_test_run # noqa: E402 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) test_run = get_test_run(dag) diff --git a/tests/system/providers/weaviate/example_weaviate_dynamic_mapping_dag.py b/providers/tests/system/weaviate/example_weaviate_dynamic_mapping_dag.py similarity index 97% rename from tests/system/providers/weaviate/example_weaviate_dynamic_mapping_dag.py rename to providers/tests/system/weaviate/example_weaviate_dynamic_mapping_dag.py index 5f998cc52e86..38c0168a9f61 100644 --- a/tests/system/providers/weaviate/example_weaviate_dynamic_mapping_dag.py +++ b/providers/tests/system/weaviate/example_weaviate_dynamic_mapping_dag.py @@ -92,7 +92,7 @@ def delete_weaviate_collection(collection_name): example_weaviate_dynamic_mapping_dag() -from tests.system.utils import get_test_run # noqa: E402 +from dev.tests_common.test_utils.system_tests import get_test_run # noqa: E402 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) test_run = get_test_run(dag) diff --git a/tests/system/providers/weaviate/example_weaviate_openai.py b/providers/tests/system/weaviate/example_weaviate_openai.py similarity index 98% rename from tests/system/providers/weaviate/example_weaviate_openai.py rename to providers/tests/system/weaviate/example_weaviate_openai.py index adf20fd929f1..edad9058f873 100644 --- a/tests/system/providers/weaviate/example_weaviate_openai.py +++ b/providers/tests/system/weaviate/example_weaviate_openai.py @@ -123,7 +123,7 @@ def delete_weaviate_collection(): example_weaviate_openai() -from tests.system.utils import get_test_run # noqa: E402 +from dev.tests_common.test_utils.system_tests import get_test_run # noqa: E402 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) test_run = get_test_run(dag) diff --git a/tests/system/providers/weaviate/example_weaviate_operator.py b/providers/tests/system/weaviate/example_weaviate_operator.py similarity index 99% rename from tests/system/providers/weaviate/example_weaviate_operator.py rename to providers/tests/system/weaviate/example_weaviate_operator.py index 081f2ef3903d..309864ad6492 100644 --- a/tests/system/providers/weaviate/example_weaviate_operator.py +++ 
b/providers/tests/system/weaviate/example_weaviate_operator.py @@ -297,7 +297,7 @@ def delete_weaviate_docs_collection_without_vector(): example_weaviate_using_operator() -from tests.system.utils import get_test_run # noqa: E402 +from dev.tests_common.test_utils.system_tests import get_test_run # noqa: E402 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) test_run = get_test_run(dag) diff --git a/tests/system/providers/weaviate/example_weaviate_using_hook.py b/providers/tests/system/weaviate/example_weaviate_using_hook.py similarity index 98% rename from tests/system/providers/weaviate/example_weaviate_using_hook.py rename to providers/tests/system/weaviate/example_weaviate_using_hook.py index 8ad7356465a4..601a222f94d4 100644 --- a/tests/system/providers/weaviate/example_weaviate_using_hook.py +++ b/providers/tests/system/weaviate/example_weaviate_using_hook.py @@ -140,7 +140,7 @@ def delete_weaviate_collection_without_vector(): example_weaviate_dag_using_hook() -from tests.system.utils import get_test_run # noqa: E402 +from dev.tests_common.test_utils.system_tests import get_test_run # noqa: E402 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) test_run = get_test_run(dag) diff --git a/tests/system/providers/weaviate/example_weaviate_vectorizer_dag.py b/providers/tests/system/weaviate/example_weaviate_vectorizer_dag.py similarity index 97% rename from tests/system/providers/weaviate/example_weaviate_vectorizer_dag.py rename to providers/tests/system/weaviate/example_weaviate_vectorizer_dag.py index 546148ad70ed..f0306177bffd 100644 --- a/tests/system/providers/weaviate/example_weaviate_vectorizer_dag.py +++ b/providers/tests/system/weaviate/example_weaviate_vectorizer_dag.py @@ -99,7 +99,7 @@ def delete_weaviate_collection(): example_weaviate_vectorizer_dag() -from tests.system.utils import get_test_run # noqa: E402 +from dev.tests_common.test_utils.system_tests import get_test_run # noqa: E402 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) test_run = get_test_run(dag) diff --git a/tests/system/providers/weaviate/example_weaviate_without_vectorizer_dag.py b/providers/tests/system/weaviate/example_weaviate_without_vectorizer_dag.py similarity index 98% rename from tests/system/providers/weaviate/example_weaviate_without_vectorizer_dag.py rename to providers/tests/system/weaviate/example_weaviate_without_vectorizer_dag.py index c4062508e26d..e2cf9e560e78 100644 --- a/tests/system/providers/weaviate/example_weaviate_without_vectorizer_dag.py +++ b/providers/tests/system/weaviate/example_weaviate_without_vectorizer_dag.py @@ -111,7 +111,7 @@ def delete_weaviate_collection(): example_weaviate_without_vectorizer_dag() -from tests.system.utils import get_test_run # noqa: E402 +from dev.tests_common.test_utils.system_tests import get_test_run # noqa: E402 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) test_run = get_test_run(dag) diff --git a/tests/system/providers/weaviate/jeopardy_data_with_vectors.json b/providers/tests/system/weaviate/jeopardy_data_with_vectors.json similarity index 100% rename from tests/system/providers/weaviate/jeopardy_data_with_vectors.json rename to providers/tests/system/weaviate/jeopardy_data_with_vectors.json diff --git a/tests/system/providers/weaviate/jeopardy_data_without_vectors.json b/providers/tests/system/weaviate/jeopardy_data_without_vectors.json similarity index 100% rename from 
tests/system/providers/weaviate/jeopardy_data_without_vectors.json rename to providers/tests/system/weaviate/jeopardy_data_without_vectors.json diff --git a/tests/system/providers/weaviate/jeopardy_doc_data_without_vectors.json b/providers/tests/system/weaviate/jeopardy_doc_data_without_vectors.json similarity index 100% rename from tests/system/providers/weaviate/jeopardy_doc_data_without_vectors.json rename to providers/tests/system/weaviate/jeopardy_doc_data_without_vectors.json diff --git a/tests/system/providers/postgres/__init__.py b/providers/tests/system/yandex/__init__.py similarity index 100% rename from tests/system/providers/postgres/__init__.py rename to providers/tests/system/yandex/__init__.py diff --git a/tests/system/providers/yandex/example_yandexcloud.py b/providers/tests/system/yandex/example_yandexcloud.py similarity index 97% rename from tests/system/providers/yandex/example_yandexcloud.py rename to providers/tests/system/yandex/example_yandexcloud.py index ddebc46a3b50..ab72adc14c05 100644 --- a/tests/system/providers/yandex/example_yandexcloud.py +++ b/providers/tests/system/yandex/example_yandexcloud.py @@ -32,7 +32,8 @@ from airflow import DAG from airflow.decorators import task from airflow.providers.yandex.hooks.yandex import YandexCloudBaseHook -from tests.system.utils import get_test_env_id + +from dev.tests_common.test_utils.system_tests import get_test_env_id ENV_ID = get_test_env_id() DAG_ID = "example_yandexcloud_hook" @@ -196,14 +197,14 @@ def delete_cluster( spark_job >> delete_task - from tests.system.utils.watcher import watcher + from dev.tests_common.test_utils.watcher import watcher # This test needs watcher in order to properly mark success/failure # when "teardown" task with trigger rule is part of the DAG list(dag.tasks) >> watcher() -from tests.system.utils import get_test_run # noqa: E402 +from dev.tests_common.test_utils.system_tests import get_test_run # noqa: E402 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) test_run = get_test_run(dag) diff --git a/tests/system/providers/yandex/example_yandexcloud_dataproc.py b/providers/tests/system/yandex/example_yandexcloud_dataproc.py similarity index 96% rename from tests/system/providers/yandex/example_yandexcloud_dataproc.py rename to providers/tests/system/yandex/example_yandexcloud_dataproc.py index 7ff4aa541dd5..d97e3d592ef8 100644 --- a/tests/system/providers/yandex/example_yandexcloud_dataproc.py +++ b/providers/tests/system/yandex/example_yandexcloud_dataproc.py @@ -31,7 +31,8 @@ # Name of the datacenter where Dataproc cluster will be created from airflow.utils.trigger_rule import TriggerRule -from tests.system.utils import get_test_env_id + +from dev.tests_common.test_utils.system_tests import get_test_env_id # should be filled with appropriate ids @@ -162,13 +163,13 @@ create_cluster >> create_mapreduce_job >> create_hive_query >> create_hive_query_from_file create_hive_query_from_file >> create_spark_job >> create_pyspark_job >> delete_cluster - from tests.system.utils.watcher import watcher + from dev.tests_common.test_utils.watcher import watcher # This test needs watcher in order to properly mark success/failure # when "teardown" task with trigger rule is part of the DAG list(dag.tasks) >> watcher() -from tests.system.utils import get_test_run # noqa: E402 +from dev.tests_common.test_utils.system_tests import get_test_run # noqa: E402 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) test_run = 
get_test_run(dag) diff --git a/tests/system/providers/yandex/example_yandexcloud_dataproc_lightweight.py b/providers/tests/system/yandex/example_yandexcloud_dataproc_lightweight.py similarity index 92% rename from tests/system/providers/yandex/example_yandexcloud_dataproc_lightweight.py rename to providers/tests/system/yandex/example_yandexcloud_dataproc_lightweight.py index 475bc789ec2b..38401a1cdcd3 100644 --- a/tests/system/providers/yandex/example_yandexcloud_dataproc_lightweight.py +++ b/providers/tests/system/yandex/example_yandexcloud_dataproc_lightweight.py @@ -27,7 +27,8 @@ # Name of the datacenter where Dataproc cluster will be created from airflow.utils.trigger_rule import TriggerRule -from tests.system.utils import get_test_env_id + +from dev.tests_common.test_utils.system_tests import get_test_env_id # should be filled with appropriate ids @@ -70,13 +71,13 @@ ) create_spark_job >> delete_cluster - from tests.system.utils.watcher import watcher + from dev.tests_common.test_utils.watcher import watcher # This test needs watcher in order to properly mark success/failure # when "teardown" task with trigger rule is part of the DAG list(dag.tasks) >> watcher() -from tests.system.utils import get_test_run # noqa: E402 +from dev.tests_common.test_utils.system_tests import get_test_run # noqa: E402 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) test_run = get_test_run(dag) diff --git a/tests/system/providers/yandex/example_yandexcloud_yq.py b/providers/tests/system/yandex/example_yandexcloud_yq.py similarity index 88% rename from tests/system/providers/yandex/example_yandexcloud_yq.py rename to providers/tests/system/yandex/example_yandexcloud_yq.py index 0ebef685e24b..02478825ed5e 100644 --- a/tests/system/providers/yandex/example_yandexcloud_yq.py +++ b/providers/tests/system/yandex/example_yandexcloud_yq.py @@ -21,7 +21,8 @@ from airflow.models.dag import DAG from airflow.operators.empty import EmptyOperator from airflow.providers.yandex.operators.yq import YQExecuteQueryOperator -from tests.system.utils import get_test_env_id + +from dev.tests_common.test_utils.system_tests import get_test_env_id ENV_ID = get_test_env_id() DAG_ID = "example_yandexcloud_yq" @@ -39,13 +40,13 @@ yq_operator = YQExecuteQueryOperator(task_id="sample_query", sql="select 33 as d, 44 as t") yq_operator >> run_this_last - from tests.system.utils.watcher import watcher + from dev.tests_common.test_utils.watcher import watcher # This test needs watcher in order to properly mark success/failure # when "teardown" task with trigger rule is part of the DAG list(dag.tasks) >> watcher() -from tests.system.utils import get_test_run # noqa: E402 +from dev.tests_common.test_utils.system_tests import get_test_run # noqa: E402 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) test_run = get_test_run(dag) diff --git a/tests/system/providers/presto/__init__.py b/providers/tests/system/ydb/__init__.py similarity index 100% rename from tests/system/providers/presto/__init__.py rename to providers/tests/system/ydb/__init__.py diff --git a/tests/system/providers/ydb/example_ydb.py b/providers/tests/system/ydb/example_ydb.py similarity index 97% rename from tests/system/providers/ydb/example_ydb.py rename to providers/tests/system/ydb/example_ydb.py index 39156328f241..d7293cb0d1b5 100644 --- a/tests/system/providers/ydb/example_ydb.py +++ b/providers/tests/system/ydb/example_ydb.py @@ -119,13 +119,13 @@ def populate_pet_table_via_bulk_upsert(): ) 
# [END ydb_operator_howto_guide] - from tests.system.utils.watcher import watcher + from dev.tests_common.test_utils.watcher import watcher # This test needs watcher in order to properly mark success/failure # when "tearDown" task with trigger rule is part of the DAG list(dag.tasks) >> watcher() -from tests.system.utils import get_test_run # noqa: E402 +from dev.tests_common.test_utils.system_tests import get_test_run # noqa: E402 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) test_run = get_test_run(dag) diff --git a/tests/system/providers/qdrant/__init__.py b/providers/tests/system/zendesk/__init__.py similarity index 100% rename from tests/system/providers/qdrant/__init__.py rename to providers/tests/system/zendesk/__init__.py diff --git a/tests/system/providers/zendesk/example_zendesk_custom_get.py b/providers/tests/system/zendesk/example_zendesk_custom_get.py similarity index 95% rename from tests/system/providers/zendesk/example_zendesk_custom_get.py rename to providers/tests/system/zendesk/example_zendesk_custom_get.py index 701a109fd115..c38d10a26abd 100644 --- a/tests/system/providers/zendesk/example_zendesk_custom_get.py +++ b/providers/tests/system/zendesk/example_zendesk_custom_get.py @@ -45,7 +45,7 @@ def fetch_organizations() -> list[dict]: fetch_organizations() -from tests.system.utils import get_test_run # noqa: E402 +from dev.tests_common.test_utils.system_tests import get_test_run # noqa: E402 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) test_run = get_test_run(dag) diff --git a/tests/system/providers/redis/__init__.py b/providers/tests/tableau/__init__.py similarity index 100% rename from tests/system/providers/redis/__init__.py rename to providers/tests/tableau/__init__.py diff --git a/tests/providers/teradata/triggers/__init__.py b/providers/tests/tableau/hooks/__init__.py similarity index 100% rename from tests/providers/teradata/triggers/__init__.py rename to providers/tests/tableau/hooks/__init__.py diff --git a/tests/providers/tableau/hooks/test_tableau.py b/providers/tests/tableau/hooks/test_tableau.py similarity index 95% rename from tests/providers/tableau/hooks/test_tableau.py rename to providers/tests/tableau/hooks/test_tableau.py index 8d10ce2fc368..23d5e0188f60 100644 --- a/tests/providers/tableau/hooks/test_tableau.py +++ b/providers/tests/tableau/hooks/test_tableau.py @@ -115,10 +115,13 @@ def test_get_conn_auth_via_token_and_site_in_init(self, mock_server, mock_tablea """ Test get conn auth via token """ - with pytest.warns( - AirflowProviderDeprecationWarning, - match="Authentication via personal access token is deprecated..*", - ), TableauHook(site_id="test", tableau_conn_id="tableau_test_token") as tableau_hook: + with ( + pytest.warns( + AirflowProviderDeprecationWarning, + match="Authentication via personal access token is deprecated..*", + ), + TableauHook(site_id="test", tableau_conn_id="tableau_test_token") as tableau_hook, + ): mock_server.assert_called_once_with(tableau_hook.conn.host) mock_tableau_auth.assert_called_once_with( token_name=tableau_hook.conn.extra_dejson["token_name"], @@ -158,10 +161,13 @@ def test_get_conn_ssl_default(self, mock_server, mock_tableau_auth): """ Test get conn with default SSL parameters """ - with pytest.warns( - AirflowProviderDeprecationWarning, - match="Authentication via personal access token is deprecated..*", - ), TableauHook(tableau_conn_id="tableau_test_ssl_connection_default") as tableau_hook: + with ( + pytest.warns( + 
AirflowProviderDeprecationWarning, + match="Authentication via personal access token is deprecated..*", + ), + TableauHook(tableau_conn_id="tableau_test_ssl_connection_default") as tableau_hook, + ): mock_server.assert_called_once_with(tableau_hook.conn.host) mock_server.return_value.add_http_options.assert_called_once_with( options_dict={"verify": True, "cert": None} diff --git a/tests/system/providers/salesforce/__init__.py b/providers/tests/tableau/operators/__init__.py similarity index 100% rename from tests/system/providers/salesforce/__init__.py rename to providers/tests/tableau/operators/__init__.py diff --git a/tests/providers/tableau/operators/test_tableau.py b/providers/tests/tableau/operators/test_tableau.py similarity index 100% rename from tests/providers/tableau/operators/test_tableau.py rename to providers/tests/tableau/operators/test_tableau.py diff --git a/tests/system/providers/samba/__init__.py b/providers/tests/tableau/sensors/__init__.py similarity index 100% rename from tests/system/providers/samba/__init__.py rename to providers/tests/tableau/sensors/__init__.py diff --git a/tests/providers/tableau/sensors/test_tableau.py b/providers/tests/tableau/sensors/test_tableau.py similarity index 100% rename from tests/providers/tableau/sensors/test_tableau.py rename to providers/tests/tableau/sensors/test_tableau.py diff --git a/tests/providers/teradata/utils/__init__.py b/providers/tests/telegram/__init__.py similarity index 100% rename from tests/providers/teradata/utils/__init__.py rename to providers/tests/telegram/__init__.py diff --git a/tests/providers/trino/__init__.py b/providers/tests/telegram/hooks/__init__.py similarity index 100% rename from tests/providers/trino/__init__.py rename to providers/tests/telegram/hooks/__init__.py diff --git a/tests/providers/telegram/hooks/test_telegram.py b/providers/tests/telegram/hooks/test_telegram.py similarity index 100% rename from tests/providers/telegram/hooks/test_telegram.py rename to providers/tests/telegram/hooks/test_telegram.py diff --git a/tests/providers/trino/hooks/__init__.py b/providers/tests/telegram/operators/__init__.py similarity index 100% rename from tests/providers/trino/hooks/__init__.py rename to providers/tests/telegram/operators/__init__.py diff --git a/tests/providers/telegram/operators/test_telegram.py b/providers/tests/telegram/operators/test_telegram.py similarity index 100% rename from tests/providers/telegram/operators/test_telegram.py rename to providers/tests/telegram/operators/test_telegram.py diff --git a/tests/system/providers/sftp/__init__.py b/providers/tests/teradata/__init__.py similarity index 100% rename from tests/system/providers/sftp/__init__.py rename to providers/tests/teradata/__init__.py diff --git a/tests/providers/trino/transfers/__init__.py b/providers/tests/teradata/hooks/__init__.py similarity index 100% rename from tests/providers/trino/transfers/__init__.py rename to providers/tests/teradata/hooks/__init__.py diff --git a/tests/providers/teradata/hooks/test_teradata.py b/providers/tests/teradata/hooks/test_teradata.py similarity index 98% rename from tests/providers/teradata/hooks/test_teradata.py rename to providers/tests/teradata/hooks/test_teradata.py index 75367e4d406e..21a19308b2da 100644 --- a/tests/providers/teradata/hooks/test_teradata.py +++ b/providers/tests/teradata/hooks/test_teradata.py @@ -275,9 +275,12 @@ def test_bulk_insert_rows_without_fields(self): def test_bulk_insert_rows_no_rows(self): rows = [] - with pytest.raises(ValueError), pytest.warns( - 
AirflowProviderDeprecationWarning, - match="bulk_insert_rows is deprecated. Please use the insert_rows method instead.", + with ( + pytest.raises(ValueError), + pytest.warns( + AirflowProviderDeprecationWarning, + match="bulk_insert_rows is deprecated. Please use the insert_rows method instead.", + ), ): self.test_db_hook.bulk_insert_rows("table", rows) diff --git a/tests/providers/vertica/__init__.py b/providers/tests/teradata/operators/__init__.py similarity index 100% rename from tests/providers/vertica/__init__.py rename to providers/tests/teradata/operators/__init__.py diff --git a/tests/providers/teradata/operators/test_teradata.py b/providers/tests/teradata/operators/test_teradata.py similarity index 100% rename from tests/providers/teradata/operators/test_teradata.py rename to providers/tests/teradata/operators/test_teradata.py diff --git a/tests/providers/teradata/operators/test_teradata_compute_cluster.py b/providers/tests/teradata/operators/test_teradata_compute_cluster.py similarity index 100% rename from tests/providers/teradata/operators/test_teradata_compute_cluster.py rename to providers/tests/teradata/operators/test_teradata_compute_cluster.py diff --git a/tests/providers/vertica/hooks/__init__.py b/providers/tests/teradata/transfers/__init__.py similarity index 100% rename from tests/providers/vertica/hooks/__init__.py rename to providers/tests/teradata/transfers/__init__.py diff --git a/tests/providers/teradata/transfers/test_azure_blob_to_teradata.py b/providers/tests/teradata/transfers/test_azure_blob_to_teradata.py similarity index 100% rename from tests/providers/teradata/transfers/test_azure_blob_to_teradata.py rename to providers/tests/teradata/transfers/test_azure_blob_to_teradata.py diff --git a/tests/providers/teradata/transfers/test_s3_to_teradata.py b/providers/tests/teradata/transfers/test_s3_to_teradata.py similarity index 100% rename from tests/providers/teradata/transfers/test_s3_to_teradata.py rename to providers/tests/teradata/transfers/test_s3_to_teradata.py diff --git a/tests/providers/teradata/transfers/test_teradata_to_teradata.py b/providers/tests/teradata/transfers/test_teradata_to_teradata.py similarity index 100% rename from tests/providers/teradata/transfers/test_teradata_to_teradata.py rename to providers/tests/teradata/transfers/test_teradata_to_teradata.py diff --git a/tests/providers/vertica/operators/__init__.py b/providers/tests/teradata/triggers/__init__.py similarity index 100% rename from tests/providers/vertica/operators/__init__.py rename to providers/tests/teradata/triggers/__init__.py diff --git a/tests/providers/teradata/triggers/test_teradata_compute_cluster.py b/providers/tests/teradata/triggers/test_teradata_compute_cluster.py similarity index 100% rename from tests/providers/teradata/triggers/test_teradata_compute_cluster.py rename to providers/tests/teradata/triggers/test_teradata_compute_cluster.py diff --git a/tests/system/providers/__init__.py b/providers/tests/teradata/utils/__init__.py similarity index 100% rename from tests/system/providers/__init__.py rename to providers/tests/teradata/utils/__init__.py diff --git a/tests/providers/teradata/utils/test_constants.py b/providers/tests/teradata/utils/test_constants.py similarity index 100% rename from tests/providers/teradata/utils/test_constants.py rename to providers/tests/teradata/utils/test_constants.py diff --git a/tests/system/providers/amazon/__init__.py b/providers/tests/trino/__init__.py similarity index 100% rename from tests/system/providers/amazon/__init__.py 
rename to providers/tests/trino/__init__.py diff --git a/tests/system/providers/singularity/__init__.py b/providers/tests/trino/assets/__init__.py similarity index 100% rename from tests/system/providers/singularity/__init__.py rename to providers/tests/trino/assets/__init__.py diff --git a/tests/providers/trino/assets/test_trino.py b/providers/tests/trino/assets/test_trino.py similarity index 100% rename from tests/providers/trino/assets/test_trino.py rename to providers/tests/trino/assets/test_trino.py diff --git a/tests/system/providers/amazon/aws/__init__.py b/providers/tests/trino/hooks/__init__.py similarity index 100% rename from tests/system/providers/amazon/aws/__init__.py rename to providers/tests/trino/hooks/__init__.py diff --git a/tests/providers/trino/hooks/test_trino.py b/providers/tests/trino/hooks/test_trino.py similarity index 100% rename from tests/providers/trino/hooks/test_trino.py rename to providers/tests/trino/hooks/test_trino.py diff --git a/tests/system/providers/slack/__init__.py b/providers/tests/trino/operators/__init__.py similarity index 100% rename from tests/system/providers/slack/__init__.py rename to providers/tests/trino/operators/__init__.py diff --git a/tests/providers/trino/operators/test_trino.py b/providers/tests/trino/operators/test_trino.py similarity index 100% rename from tests/providers/trino/operators/test_trino.py rename to providers/tests/trino/operators/test_trino.py diff --git a/tests/system/providers/apache/__init__.py b/providers/tests/trino/transfers/__init__.py similarity index 100% rename from tests/system/providers/apache/__init__.py rename to providers/tests/trino/transfers/__init__.py diff --git a/tests/providers/trino/transfers/test_gcs_to_trino.py b/providers/tests/trino/transfers/test_gcs_to_trino.py similarity index 100% rename from tests/providers/trino/transfers/test_gcs_to_trino.py rename to providers/tests/trino/transfers/test_gcs_to_trino.py diff --git a/tests/system/providers/apache/beam/__init__.py b/providers/tests/vertica/__init__.py similarity index 100% rename from tests/system/providers/apache/beam/__init__.py rename to providers/tests/vertica/__init__.py diff --git a/tests/system/providers/apache/hive/__init__.py b/providers/tests/vertica/hooks/__init__.py similarity index 100% rename from tests/system/providers/apache/hive/__init__.py rename to providers/tests/vertica/hooks/__init__.py diff --git a/tests/providers/vertica/hooks/test_vertica.py b/providers/tests/vertica/hooks/test_vertica.py similarity index 100% rename from tests/providers/vertica/hooks/test_vertica.py rename to providers/tests/vertica/hooks/test_vertica.py diff --git a/tests/system/providers/apache/iceberg/__init__.py b/providers/tests/vertica/operators/__init__.py similarity index 100% rename from tests/system/providers/apache/iceberg/__init__.py rename to providers/tests/vertica/operators/__init__.py diff --git a/tests/providers/vertica/operators/test_vertica.py b/providers/tests/vertica/operators/test_vertica.py similarity index 100% rename from tests/providers/vertica/operators/test_vertica.py rename to providers/tests/vertica/operators/test_vertica.py diff --git a/tests/system/providers/snowflake/__init__.py b/providers/tests/weaviate/__init__.py similarity index 100% rename from tests/system/providers/snowflake/__init__.py rename to providers/tests/weaviate/__init__.py diff --git a/tests/system/providers/sqlite/__init__.py b/providers/tests/weaviate/hooks/__init__.py similarity index 100% rename from 
tests/system/providers/sqlite/__init__.py rename to providers/tests/weaviate/hooks/__init__.py diff --git a/tests/providers/weaviate/hooks/test_weaviate.py b/providers/tests/weaviate/hooks/test_weaviate.py similarity index 100% rename from tests/providers/weaviate/hooks/test_weaviate.py rename to providers/tests/weaviate/hooks/test_weaviate.py diff --git a/tests/system/providers/tableau/__init__.py b/providers/tests/weaviate/operators/__init__.py similarity index 100% rename from tests/system/providers/tableau/__init__.py rename to providers/tests/weaviate/operators/__init__.py diff --git a/tests/providers/weaviate/operators/test_weaviate.py b/providers/tests/weaviate/operators/test_weaviate.py similarity index 100% rename from tests/providers/weaviate/operators/test_weaviate.py rename to providers/tests/weaviate/operators/test_weaviate.py diff --git a/tests/system/providers/telegram/__init__.py b/providers/tests/yandex/__init__.py similarity index 100% rename from tests/system/providers/telegram/__init__.py rename to providers/tests/yandex/__init__.py diff --git a/tests/system/providers/teradata/__init__.py b/providers/tests/yandex/hooks/__init__.py similarity index 100% rename from tests/system/providers/teradata/__init__.py rename to providers/tests/yandex/hooks/__init__.py diff --git a/tests/providers/yandex/hooks/test_dataproc.py b/providers/tests/yandex/hooks/test_dataproc.py similarity index 100% rename from tests/providers/yandex/hooks/test_dataproc.py rename to providers/tests/yandex/hooks/test_dataproc.py diff --git a/tests/providers/yandex/hooks/test_yandex.py b/providers/tests/yandex/hooks/test_yandex.py similarity index 91% rename from tests/providers/yandex/hooks/test_yandex.py rename to providers/tests/yandex/hooks/test_yandex.py index e3eb36e2cfc8..fbf77115053b 100644 --- a/tests/providers/yandex/hooks/test_yandex.py +++ b/providers/tests/yandex/hooks/test_yandex.py @@ -27,7 +27,8 @@ from airflow.exceptions import AirflowProviderDeprecationWarning from airflow.providers.yandex.hooks.yandex import YandexCloudBaseHook -from tests.test_utils.config import conf_vars + +from dev.tests_common.test_utils.config import conf_vars class TestYandexHook: @@ -62,9 +63,12 @@ def test_provider_user_agent(self, mock_get_credentials, mock_get_connection): sdk_prefix = "MyAirflow" hook = YandexCloudBaseHook() - with conf_vars({("yandex", "sdk_user_agent_prefix"): sdk_prefix}), pytest.warns( - AirflowProviderDeprecationWarning, - match="Using `provider_user_agent` in `YandexCloudBaseHook` is deprecated. Please use it in `utils.user_agent` instead.", + with ( + conf_vars({("yandex", "sdk_user_agent_prefix"): sdk_prefix}), + pytest.warns( + AirflowProviderDeprecationWarning, + match="Using `provider_user_agent` in `YandexCloudBaseHook` is deprecated. Please use it in `utils.user_agent` instead.", + ), ): assert hook.provider_user_agent().startswith(sdk_prefix) @@ -90,9 +94,12 @@ def test_sdk_user_agent(self, mock_get_credentials, mock_get_connection): ) @mock.patch("airflow.providers.yandex.utils.credentials.get_credentials", new=MagicMock()) def test_backcompat_prefix_works(self, uri): - with mock.patch.dict(os.environ, {"AIRFLOW_CONN_MY_CONN": uri}), pytest.warns( - AirflowProviderDeprecationWarning, - match="Using `connection_id` is deprecated. Please use `yandex_conn_id` parameter.", + with ( + mock.patch.dict(os.environ, {"AIRFLOW_CONN_MY_CONN": uri}), + pytest.warns( + AirflowProviderDeprecationWarning, + match="Using `connection_id` is deprecated. 
Please use `yandex_conn_id` parameter.", + ), ): hook = YandexCloudBaseHook("my_conn") assert hook.default_folder_id == "abc" diff --git a/tests/providers/yandex/hooks/test_yq.py b/providers/tests/yandex/hooks/test_yq.py similarity index 100% rename from tests/providers/yandex/hooks/test_yq.py rename to providers/tests/yandex/hooks/test_yq.py diff --git a/tests/system/providers/trino/__init__.py b/providers/tests/yandex/links/__init__.py similarity index 100% rename from tests/system/providers/trino/__init__.py rename to providers/tests/yandex/links/__init__.py diff --git a/tests/providers/yandex/links/test_yq.py b/providers/tests/yandex/links/test_yq.py similarity index 94% rename from tests/providers/yandex/links/test_yq.py rename to providers/tests/yandex/links/test_yq.py index d46862f1c737..2ed720e7e141 100644 --- a/tests/providers/yandex/links/test_yq.py +++ b/providers/tests/yandex/links/test_yq.py @@ -23,8 +23,9 @@ from airflow.models.taskinstance import TaskInstance from airflow.models.xcom import XCom from airflow.providers.yandex.links.yq import YQLink -from tests.test_utils.compat import AIRFLOW_V_3_0_PLUS -from tests.test_utils.mock_operators import MockOperator + +from dev.tests_common.test_utils.compat import AIRFLOW_V_3_0_PLUS +from dev.tests_common.test_utils.mock_operators import MockOperator yandexcloud = pytest.importorskip("yandexcloud") diff --git a/tests/system/providers/weaviate/__init__.py b/providers/tests/yandex/operators/__init__.py similarity index 100% rename from tests/system/providers/weaviate/__init__.py rename to providers/tests/yandex/operators/__init__.py diff --git a/tests/providers/yandex/operators/test_dataproc.py b/providers/tests/yandex/operators/test_dataproc.py similarity index 100% rename from tests/providers/yandex/operators/test_dataproc.py rename to providers/tests/yandex/operators/test_dataproc.py diff --git a/tests/providers/yandex/operators/test_yq.py b/providers/tests/yandex/operators/test_yq.py similarity index 98% rename from tests/providers/yandex/operators/test_yq.py rename to providers/tests/yandex/operators/test_yq.py index 034f0505517b..7e586e929eaf 100644 --- a/tests/providers/yandex/operators/test_yq.py +++ b/providers/tests/yandex/operators/test_yq.py @@ -22,7 +22,7 @@ import pytest -from tests.test_utils.compat import AIRFLOW_V_3_0_PLUS +from dev.tests_common.test_utils.compat import AIRFLOW_V_3_0_PLUS yandexcloud = pytest.importorskip("yandexcloud") diff --git a/tests/system/providers/yandex/__init__.py b/providers/tests/yandex/secrets/__init__.py similarity index 100% rename from tests/system/providers/yandex/__init__.py rename to providers/tests/yandex/secrets/__init__.py diff --git a/tests/providers/yandex/secrets/test_lockbox.py b/providers/tests/yandex/secrets/test_lockbox.py similarity index 100% rename from tests/providers/yandex/secrets/test_lockbox.py rename to providers/tests/yandex/secrets/test_lockbox.py diff --git a/tests/system/providers/ydb/__init__.py b/providers/tests/yandex/utils/__init__.py similarity index 100% rename from tests/system/providers/ydb/__init__.py rename to providers/tests/yandex/utils/__init__.py diff --git a/tests/providers/yandex/utils/test_credentials.py b/providers/tests/yandex/utils/test_credentials.py similarity index 100% rename from tests/providers/yandex/utils/test_credentials.py rename to providers/tests/yandex/utils/test_credentials.py diff --git a/tests/providers/yandex/utils/test_defaults.py b/providers/tests/yandex/utils/test_defaults.py similarity index 100% rename from 
tests/providers/yandex/utils/test_defaults.py rename to providers/tests/yandex/utils/test_defaults.py diff --git a/tests/providers/yandex/utils/test_fields.py b/providers/tests/yandex/utils/test_fields.py similarity index 100% rename from tests/providers/yandex/utils/test_fields.py rename to providers/tests/yandex/utils/test_fields.py diff --git a/tests/providers/yandex/utils/test_user_agent.py b/providers/tests/yandex/utils/test_user_agent.py similarity index 100% rename from tests/providers/yandex/utils/test_user_agent.py rename to providers/tests/yandex/utils/test_user_agent.py diff --git a/tests/system/providers/zendesk/__init__.py b/providers/tests/ydb/__init__.py similarity index 100% rename from tests/system/providers/zendesk/__init__.py rename to providers/tests/ydb/__init__.py diff --git a/tests/test_utils/operators/__init__.py b/providers/tests/ydb/hooks/__init__.py similarity index 100% rename from tests/test_utils/operators/__init__.py rename to providers/tests/ydb/hooks/__init__.py diff --git a/tests/providers/ydb/hooks/test_ydb.py b/providers/tests/ydb/hooks/test_ydb.py similarity index 100% rename from tests/providers/ydb/hooks/test_ydb.py rename to providers/tests/ydb/hooks/test_ydb.py diff --git a/tests/test_utils/perf/__init__.py b/providers/tests/ydb/operators/__init__.py similarity index 100% rename from tests/test_utils/perf/__init__.py rename to providers/tests/ydb/operators/__init__.py diff --git a/tests/providers/ydb/operators/test_ydb.py b/providers/tests/ydb/operators/test_ydb.py similarity index 100% rename from tests/providers/ydb/operators/test_ydb.py rename to providers/tests/ydb/operators/test_ydb.py diff --git a/tests/providers/ydb/utils/test_defaults.py b/providers/tests/ydb/utils/__init__.py similarity index 100% rename from tests/providers/ydb/utils/test_defaults.py rename to providers/tests/ydb/utils/__init__.py diff --git a/tests/providers/ydb/utils/test_credentials.py b/providers/tests/ydb/utils/test_credentials.py similarity index 100% rename from tests/providers/ydb/utils/test_credentials.py rename to providers/tests/ydb/utils/test_credentials.py diff --git a/tests/system/providers/google/leveldb/__init__.py b/providers/tests/ydb/utils/test_defaults.py similarity index 99% rename from tests/system/providers/google/leveldb/__init__.py rename to providers/tests/ydb/utils/test_defaults.py index 217e5db96078..13a83393a912 100644 --- a/tests/system/providers/google/leveldb/__init__.py +++ b/providers/tests/ydb/utils/test_defaults.py @@ -1,4 +1,3 @@ -# # Licensed to the Apache Software Foundation (ASF) under one # or more contributor license agreements. See the NOTICE file # distributed with this work for additional information diff --git a/tests/system/providers/microsoft/__init__.py b/providers/tests/zendesk/__init__.py similarity index 99% rename from tests/system/providers/microsoft/__init__.py rename to providers/tests/zendesk/__init__.py index 217e5db96078..13a83393a912 100644 --- a/tests/system/providers/microsoft/__init__.py +++ b/providers/tests/zendesk/__init__.py @@ -1,4 +1,3 @@ -# # Licensed to the Apache Software Foundation (ASF) under one # or more contributor license agreements. 
See the NOTICE file # distributed with this work for additional information diff --git a/tests/system/providers/dbt/__init__.py b/providers/tests/zendesk/hooks/__init__.py similarity index 99% rename from tests/system/providers/dbt/__init__.py rename to providers/tests/zendesk/hooks/__init__.py index 217e5db96078..13a83393a912 100644 --- a/tests/system/providers/dbt/__init__.py +++ b/providers/tests/zendesk/hooks/__init__.py @@ -1,4 +1,3 @@ -# # Licensed to the Apache Software Foundation (ASF) under one # or more contributor license agreements. See the NOTICE file # distributed with this work for additional information diff --git a/tests/providers/zendesk/hooks/test_zendesk.py b/providers/tests/zendesk/hooks/test_zendesk.py similarity index 100% rename from tests/providers/zendesk/hooks/test_zendesk.py rename to providers/tests/zendesk/hooks/test_zendesk.py diff --git a/pyproject.toml b/pyproject.toml index bc5842fc51c6..038393b139a0 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -205,7 +205,6 @@ include = [ "/airflow/git_version" ] exclude = [ - "/airflow/providers/", "/airflow/www/node_modules/" ] artifacts = [ @@ -219,9 +218,6 @@ artifacts = [ include = [ "/airflow", ] -exclude = [ - "/airflow/providers/", -] artifacts = [ "/airflow/www/static/dist/", "/airflow/git_version" @@ -240,9 +236,6 @@ line-length = 110 extend-exclude = [ ".eggs", "*/_vendor/*", - # The files generated by stubgen aren't 100% valid syntax it turns out, and we don't ship them, so we can - # ignore them in ruff - "airflow/providers/common/sql/*/*.pyi", "tests/dags/test_imports.py", ] @@ -337,7 +330,21 @@ docstring-code-format = true required-imports = ["from __future__ import annotations"] combine-as-imports = true -[tool.ruff.lint.per-file-ignores] + +section-order = [ + "future", + "standard-library", + "third-party", + "first-party", + "local-folder", + "testing" +] + +# Make sure we put the "dev" imports at the end, not as a third-party module +[tool.ruff.lint.isort.sections] +testing = ["dev", "providers.tests"] + +[tool.ruff.lint.extend-per-file-ignores] "airflow/__init__.py" = ["F401", "TCH004"] "airflow/models/__init__.py" = ["F401", "TCH004"] "airflow/models/sqla_models.py" = ["F401"] @@ -370,6 +377,7 @@ combine-as-imports = true # TRY002: Use `raise Exception` "dev/perf/*" = ["TID253"] "dev/check_files.py" = ["S101"] +"dev/tests_common/*" = ["S101", "TRY002"] "dev/breeze/tests/*" = ["TID253", "S101", "TRY002"] "tests/*" = ["D", "TID253", "S101", "TRY002"] "docker_tests/*" = ["D", "TID253", "S101", "TRY002"] @@ -382,47 +390,12 @@ combine-as-imports = true "airflow/api/auth/backend/kerberos_auth.py" = ["E402"] "airflow/security/kerberos.py" = ["E402"] "airflow/security/utils.py" = ["E402"] -"tests/providers/amazon/aws/auth_manager/security_manager/test_aws_security_manager_override.py" = ["E402"] -"tests/providers/common/io/xcom/test_backend.py" = ["E402"] -"tests/providers/elasticsearch/log/elasticmock/__init__.py" = ["E402"] -"tests/providers/elasticsearch/log/elasticmock/utilities/__init__.py" = ["E402"] -"tests/providers/google/cloud/hooks/vertex_ai/test_batch_prediction_job.py" = ["E402"] -"tests/providers/google/cloud/hooks/vertex_ai/test_hyperparameter_tuning_job.py" = ["E402"] -"tests/providers/google/cloud/hooks/vertex_ai/test_auto_ml.py" = ["E402"] -"tests/providers/google/cloud/hooks/vertex_ai/test_custom_job.py" = ["E402"] -"tests/providers/google/cloud/hooks/vertex_ai/test_dataset.py" = ["E402"] -"tests/providers/google/cloud/hooks/vertex_ai/test_endpoint_service.py" = ["E402"] 
-"tests/providers/google/cloud/hooks/vertex_ai/test_generative_model.py" = ["E402"] -"tests/providers/google/cloud/hooks/vertex_ai/test_model_service.py" = ["E402"] -"tests/providers/google/cloud/hooks/vertex_ai/test_pipeline_job.py" = ["E402"] -"tests/providers/google/cloud/hooks/vertex_ai/test_prediction_service.py" = ["E402"] -"tests/providers/google/cloud/links/test_translate.py" = ["E402"] -"tests/providers/google/cloud/operators/test_automl.py"= ["E402"] -"tests/providers/google/cloud/operators/test_vertex_ai.py" = ["E402"] -"tests/providers/google/cloud/operators/vertex_ai/test_generative_model.py" = ["E402"] -"tests/providers/google/cloud/triggers/test_vertex_ai.py" = ["E402"] -"tests/providers/openai/hooks/test_openai.py" = ["E402"] -"tests/providers/openai/operators/test_openai.py" = ["E402"] -"tests/providers/openai/triggers/test_openai.py" = ["E402"] -"tests/providers/opensearch/conftest.py" = ["E402"] -"tests/providers/opensearch/hooks/test_opensearch.py" = ["E402"] -"tests/providers/opensearch/log/test_os_json_formatter.py" = ["E402"] -"tests/providers/opensearch/log/test_os_response.py" = ["E402"] -"tests/providers/opensearch/log/test_os_task_handler.py" = ["E402"] -"tests/providers/opensearch/operators/test_opensearch.py" = ["E402"] -"tests/providers/qdrant/hooks/test_qdrant.py" = ["E402"] -"tests/providers/qdrant/operators/test_qdrant.py" = ["E402"] -"tests/providers/snowflake/operators/test_snowflake_sql.py" = ["E402"] -"tests/providers/yandex/*/*.py" = ["E402"] # All the modules which do not follow B028 yet: https://docs.astral.sh/ruff/rules/no-explicit-stacklevel/ "helm_tests/airflow_aux/test_basic_helm_chart.py" = ["B028"] -# https://github.com/apache/airflow/issues/39252 -"airflow/providers/amazon/aws/hooks/eks.py" = ["W605"] - # Test compat imports banned imports to allow testing against older airflow versions -"tests/test_utils/compat.py" = ["TID251", "F401"] +"dev/tests_common/test_utils/compat.py" = ["TID251", "F401"] [tool.ruff.lint.flake8-tidy-imports] # Disallow all relative imports. @@ -512,11 +485,10 @@ norecursedirs = [ "airflow", "tests/_internals", "tests/dags_with_system_exit", - "tests/test_utils", "tests/dags_corrupted", "tests/dags", - "tests/system/providers/google/cloud/dataproc/resources", - "tests/system/providers/google/cloud/gcs/resources", + "providers/tests/system/google/cloud/dataproc/resources", + "providers/tests/system/google/cloud/gcs/resources", ] log_level = "INFO" filterwarnings = [ @@ -576,9 +548,9 @@ omit = [ "airflow/contrib/**", "airflow/example_dags/**", "airflow/migrations/**", - "airflow/providers/**/example_dags/**", "airflow/www/node_modules/**", - "airflow/providers/google/ads/_vendor/**", + "providers/src/airflow/providers/**/example_dags/**", + "providers/src/airflow/providers/google/ads/_vendor/**", ] [tool.coverage.report] @@ -609,6 +581,16 @@ show_error_codes = true disable_error_code = [ "annotation-unchecked", ] +# Since there are no __init__.py files in +# providers/src/apache/airflow/providers we need to tell MyPy where the "base" +# is, otherwise when it sees +# providers/src/apache/airflow/providers/redis/__init__.py, it thinks this is +# the toplevel `redis` folder. 
+explicit_package_bases = true +mypy_path = [ + "$MYPY_CONFIG_FILE_DIR", + "$MYPY_CONFIG_FILE_DIR/providers/src", +] [[tool.mypy.overrides]] module="airflow.config_templates.default_webserver_config" @@ -640,3 +622,16 @@ module=[ "apache_beam.*" ] ignore_errors = true + +[tool.uv] +dev-dependencies = [ + "local-providers", +] + +[tool.uv.sources] +# These names must match the names as defined in the pyproject.toml of the workspace items, +# *not* the workspace folder paths +local-providers = { workspace = true } + +[tool.uv.workspace] +members = ["providers"] diff --git a/scripts/ci/docker-compose/local.yml b/scripts/ci/docker-compose/local.yml index 2a55d8733c32..66a4736c5fd8 100644 --- a/scripts/ci/docker-compose/local.yml +++ b/scripts/ci/docker-compose/local.yml @@ -85,6 +85,9 @@ services: - type: bind source: ../../../logs target: /root/airflow/logs + - type: bind + source: ../../../providers + target: /opt/airflow/providers - type: bind source: ../../../pyproject.toml target: /opt/airflow/pyproject.toml diff --git a/scripts/ci/kubernetes/k8s_requirements.txt b/scripts/ci/kubernetes/k8s_requirements.txt index 50e9399f5f22..1e587051f584 100644 --- a/scripts/ci/kubernetes/k8s_requirements.txt +++ b/scripts/ci/kubernetes/k8s_requirements.txt @@ -1 +1,3 @@ --e .[devel-devscripts,devel-tests,cncf.kubernetes] --constraint "https://raw.githubusercontent.com/apache/airflow/constraints-main/constraints-3.9.txt" +--constraint https://raw.githubusercontent.com/apache/airflow/constraints-main/constraints-3.9.txt +-e .[devel-devscripts,devel-tests,cncf.kubernetes] +-e ./providers diff --git a/scripts/ci/pre_commit/check_providers_subpackages_all_have_init.py b/scripts/ci/pre_commit/check_providers_subpackages_all_have_init.py index da17f794eaeb..af3afe0abc07 100755 --- a/scripts/ci/pre_commit/check_providers_subpackages_all_have_init.py +++ b/scripts/ci/pre_commit/check_providers_subpackages_all_have_init.py @@ -19,25 +19,27 @@ import os import sys -from glob import glob from pathlib import Path -ROOT_DIR = os.path.abspath(os.path.join(os.path.dirname(__file__), os.pardir, os.pardir, os.pardir)) -ACCEPTED_NON_INIT_DIRS = ["adr", "doc", "templates"] +ROOT_DIR = Path(__file__).parents[3].resolve() +ACCEPTED_NON_INIT_DIRS = ["adr", "doc", "templates", "__pycache__"] -def check_dir_init_file(provider_files: list[str]) -> None: +def check_dir_init_file(folders: list[Path]) -> None: missing_init_dirs: list[Path] = [] - for candidate_path in provider_files: - if candidate_path.endswith("/__pycache__"): - continue - path = Path(candidate_path) - if path.is_dir() and not (path / "__init__.py").exists(): - if path.name not in ACCEPTED_NON_INIT_DIRS: - missing_init_dirs.append(path) + folders = list(folders) + for path in folders: + for root, dirs, files in os.walk(path): + # Edit it in place, so we don't recurse to folders we don't care about + dirs[:] = [d for d in dirs if d not in ACCEPTED_NON_INIT_DIRS] + + if "__init__.py" in files: + continue + + missing_init_dirs.append(Path(root)) if missing_init_dirs: - with open(os.path.join(ROOT_DIR, "scripts/ci/license-templates/LICENSE.txt")) as license: + with ROOT_DIR.joinpath("scripts/ci/license-templates/LICENSE.txt").open() as license: license_txt = license.readlines() prefixed_licensed_txt = [f"# {line}" if line != "\n" else "#\n" for line in license_txt] @@ -51,7 +53,11 @@ def check_dir_init_file(provider_files: list[str]) -> None: if __name__ == "__main__": - all_provider_subpackage_dirs = sorted(glob(f"{ROOT_DIR}/airflow/providers/**/*", 
recursive=True)) - check_dir_init_file(all_provider_subpackage_dirs) - all_test_provider_subpackage_dirs = sorted(glob(f"{ROOT_DIR}/tests/providers/**/*", recursive=True)) - check_dir_init_file(all_test_provider_subpackage_dirs) + providers_root = Path(f"{ROOT_DIR}/providers") + providers_ns = providers_root.joinpath("src", "airflow", "providers") + providers_tests = providers_root.joinpath("tests") + + providers_pkgs = sorted(map(lambda f: f.parent, providers_ns.rglob("provider.yaml"))) + check_dir_init_file(providers_pkgs) + + check_dir_init_file([providers_tests]) diff --git a/scripts/ci/pre_commit/check_system_tests.py b/scripts/ci/pre_commit/check_system_tests.py index 4c82272ad787..c7c305460cb3 100755 --- a/scripts/ci/pre_commit/check_system_tests.py +++ b/scripts/ci/pre_commit/check_system_tests.py @@ -37,13 +37,13 @@ WATCHER_APPEND_INSTRUCTION = "list(dag.tasks) >> watcher()" PYTEST_FUNCTION = """ -from tests.system.utils import get_test_run # noqa: E402 +from dev.tests_common.test_utils.system_tests import get_test_run # noqa: E402 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) test_run = get_test_run(dag) """ PYTEST_FUNCTION_PATTERN = re.compile( - r"from tests\.system\.utils import get_test_run(?: # noqa: E402)?\s+" + r"from dev\.tests_common\.test_utils\.system_tests import get_test_run(?: # noqa: E402)?\s+" r"(?:# .+\))?\s+" r"test_run = get_test_run\(dag\)" ) @@ -51,11 +51,11 @@ def _check_file(file: Path): content = file.read_text() - if "from tests.system.utils.watcher import watcher" in content: + if "from dev.tests_common.test_utils.watcher import watcher" in content: index = content.find(WATCHER_APPEND_INSTRUCTION) if index == -1: errors.append( - f"[red]The example {file} imports tests.system.utils.watcher " + f"[red]The example {file} imports dev.tests_common.test_utils.watcher " f"but does not use it properly![/]\n\n" "[yellow]Make sure you have:[/]\n\n" f" {WATCHER_APPEND_INSTRUCTION}\n\n" diff --git a/scripts/ci/pre_commit/check_system_tests_hidden_in_index.py b/scripts/ci/pre_commit/check_system_tests_hidden_in_index.py index fde6f38f45a9..1c1fdb02c179 100755 --- a/scripts/ci/pre_commit/check_system_tests_hidden_in_index.py +++ b/scripts/ci/pre_commit/check_system_tests_hidden_in_index.py @@ -54,10 +54,10 @@ def check_system_test_entry_hidden(provider_index: Path): :maxdepth: 1 :caption: System tests - System Tests <_api/tests/system/providers/{provider_path}/index> + System Tests <_api/tests/system/{provider_path}/index> """ index_text = provider_index.read_text() - system_tests_path = AIRFLOW_SOURCES_ROOT / "tests" / "system" / "providers" / provider_path + system_tests_path = AIRFLOW_SOURCES_ROOT / "providers" / "tests" / "system" / provider_path index_text_manual = index_text.split( ".. THE REMAINDER OF THE FILE IS AUTOMATICALLY GENERATED. IT WILL BE OVERWRITTEN AT RELEASE TIME!"
)[0] diff --git a/scripts/ci/pre_commit/mypy_folder.py b/scripts/ci/pre_commit/mypy_folder.py index 2e50a3334f29..b2d7c7658027 100755 --- a/scripts/ci/pre_commit/mypy_folder.py +++ b/scripts/ci/pre_commit/mypy_folder.py @@ -31,7 +31,7 @@ initialize_breeze_precommit(__name__, __file__) -ALLOWED_FOLDERS = ["airflow", "airflow/providers", "dev", "docs"] +ALLOWED_FOLDERS = ["airflow", "providers/src/airflow/providers", "dev", "docs"] if len(sys.argv) < 2: console.print(f"[yellow]You need to specify the folder to test as parameter: {ALLOWED_FOLDERS}\n") @@ -43,12 +43,10 @@ sys.exit(1) arguments = [mypy_folder] -if mypy_folder == "airflow/providers": +if mypy_folder == "providers/src/airflow/providers": arguments.extend( [ - "tests/providers", - "tests/system/providers", - "tests/integration/providers", + "providers/tests", "--namespace-packages", ] ) @@ -57,14 +55,6 @@ arguments.extend( [ "tests", - "--exclude", - "airflow/providers", - "--exclude", - "tests/providers", - "--exclude", - "tests/system/providers", - "--exclude", - "tests/integration/providers", ] ) diff --git a/scripts/ci/pre_commit/update_common_sql_api_stubs.py b/scripts/ci/pre_commit/update_common_sql_api_stubs.py index 954302804e6f..371c758146a2 100755 --- a/scripts/ci/pre_commit/update_common_sql_api_stubs.py +++ b/scripts/ci/pre_commit/update_common_sql_api_stubs.py @@ -39,10 +39,12 @@ from common_precommit_black_utils import black_format from common_precommit_utils import AIRFLOW_SOURCES_ROOT_PATH -PROVIDERS_ROOT = (AIRFLOW_SOURCES_ROOT_PATH / "airflow" / "providers").resolve(strict=True) +PROVIDERS_ROOT = (AIRFLOW_SOURCES_ROOT_PATH / "providers" / "src" / "airflow" / "providers").resolve( + strict=True +) COMMON_SQL_ROOT = (PROVIDERS_ROOT / "common" / "sql").resolve(strict=True) OUT_DIR = AIRFLOW_SOURCES_ROOT_PATH / "out" -OUT_DIR_PROVIDERS = OUT_DIR / "airflow" / "providers" +OUT_DIR_PROVIDERS = OUT_DIR / PROVIDERS_ROOT.relative_to(AIRFLOW_SOURCES_ROOT_PATH) COMMON_SQL_PACKAGE_PREFIX = "airflow.providers.common.sql." 
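+# (Illustration, an assumption about the intent of the change above: +# OUT_DIR_PROVIDERS now evaluates to out/providers/src/airflow/providers, +# mirroring the moved source tree, so regenerated stubs and the checked-in +# .pyi files can be addressed by the same relative paths.)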
@@ -317,7 +319,7 @@ def compare_stub_files(generated_stub_path: Path, force_override: bool) -> tuple shutil.rmtree(OUT_DIR, ignore_errors=True) subprocess.run( - ["stubgen", *[os.fspath(path) for path in COMMON_SQL_ROOT.rglob("*.py")]], + ["stubgen", f"--out={OUT_DIR}", COMMON_SQL_ROOT], cwd=AIRFLOW_SOURCES_ROOT_PATH, ) total_removals, total_additions = 0, 0 diff --git a/scripts/ci/pre_commit/update_example_dags_paths.py b/scripts/ci/pre_commit/update_example_dags_paths.py index 8b2c461ec8c1..17d2a2ccea45 100755 --- a/scripts/ci/pre_commit/update_example_dags_paths.py +++ b/scripts/ci/pre_commit/update_example_dags_paths.py @@ -34,7 +34,7 @@ console = Console(color_system="standard", width=200) AIRFLOW_SOURCES_ROOT = Path(__file__).parents[3].resolve() - +PROVIDERS_SRC = AIRFLOW_SOURCES_ROOT / "providers" / "src" / "airflow" / "providers" EXAMPLE_DAGS_URL_MATCHER = re.compile( r"^(.*)(https://github.com/apache/airflow/tree/(.*)/airflow/providers/(.*)/example_dags)(/?>.*)$" ) @@ -45,10 +45,7 @@ def get_provider_and_version(url_path: str) -> tuple[str, str]: candidate_folders = url_path.split("/") while candidate_folders: try: - with open( - (AIRFLOW_SOURCES_ROOT / "airflow" / "providers").joinpath(*candidate_folders) - / "provider.yaml" - ) as f: + with PROVIDERS_SRC.joinpath(*candidate_folders, "provider.yaml").open() as f: provider_info = yaml.safe_load(f) version = provider_info["versions"][0] provider = "-".join(candidate_folders) @@ -68,13 +65,11 @@ def replace_match(file: Path, line: str) -> str | None: if match: url_path_to_dir = match.group(4) folders = url_path_to_dir.split("/") - example_dags_folder = (AIRFLOW_SOURCES_ROOT / "airflow" / "providers").joinpath( *folders ) / "example_dags" + example_dags_folder = PROVIDERS_SRC.joinpath(*folders, "example_dags") provider, version = get_provider_and_version(url_path_to_dir) proper_system_tests_url = ( f"https://github.com/apache/airflow/tree/providers-{provider}/{version}" - f"/tests/system/providers/{url_path_to_dir}" + f"/providers/tests/system/{url_path_to_dir}" ) if not example_dags_folder.exists(): if proper_system_tests_url in file.read_text(): diff --git a/scripts/ci/pre_commit/update_providers_dependencies.py b/scripts/ci/pre_commit/update_providers_dependencies.py index 903a69238455..8a0e797d0ab6 100755 --- a/scripts/ci/pre_commit/update_providers_dependencies.py +++ b/scripts/ci/pre_commit/update_providers_dependencies.py @@ -34,9 +34,10 @@ AIRFLOW_SOURCES_ROOT = Path(__file__).parents[3].resolve() -AIRFLOW_PROVIDERS_DIR = AIRFLOW_SOURCES_ROOT / "airflow" / "providers" -AIRFLOW_TESTS_PROVIDERS_DIR = AIRFLOW_SOURCES_ROOT / "tests" / "providers" -AIRFLOW_SYSTEM_TESTS_PROVIDERS_DIR = AIRFLOW_SOURCES_ROOT / "tests" / "system" / "providers" +AIRFLOW_PROVIDERS_DIR = AIRFLOW_SOURCES_ROOT / "providers" +AIRFLOW_PROVIDERS_SRC_DIR = AIRFLOW_PROVIDERS_DIR / "src" / "airflow" / "providers" +AIRFLOW_TESTS_PROVIDERS_DIR = AIRFLOW_PROVIDERS_DIR / "tests" +AIRFLOW_SYSTEM_TESTS_PROVIDERS_DIR = AIRFLOW_TESTS_PROVIDERS_DIR / "system" DEPENDENCIES_JSON_FILE_PATH = AIRFLOW_SOURCES_ROOT / "generated" / "provider_dependencies.json" @@ -95,16 +96,18 @@ def visit_ImportFrom(self, node: ImportFrom): def find_all_providers_and_provider_files(): - for root, _, filenames in os.walk(AIRFLOW_PROVIDERS_DIR): + for root, _, filenames in os.walk(AIRFLOW_PROVIDERS_SRC_DIR): for filename in filenames: if filename == "provider.yaml": provider_file = Path(root, filename) - provider_name =
str(provider_file.parent.relative_to(AIRFLOW_PROVIDERS_DIR)).replace( + provider_name = str(provider_file.parent.relative_to(AIRFLOW_PROVIDERS_SRC_DIR)).replace( os.sep, "." ) provider_info = yaml.safe_load(provider_file.read_text()) if provider_info["state"] == "suspended": - suspended_paths.append(provider_file.parent.relative_to(AIRFLOW_PROVIDERS_DIR).as_posix()) + suspended_paths.append( + provider_file.parent.relative_to(AIRFLOW_PROVIDERS_SRC_DIR).as_posix() + ) ALL_PROVIDERS[provider_name] = provider_info path = Path(root, filename) if path.is_file() and path.name.endswith(".py"): @@ -145,7 +148,7 @@ def get_imports_from_file(file_path: Path) -> list[str]: def get_provider_id_from_file_name(file_path: Path) -> str | None: # is_relative_to is only available in Python 3.9 - we should simplify this check when we are Python 3.9+ try: - relative_path = file_path.relative_to(AIRFLOW_PROVIDERS_DIR) + relative_path = file_path.relative_to(AIRFLOW_PROVIDERS_SRC_DIR) except ValueError: try: relative_path = file_path.relative_to(AIRFLOW_SYSTEM_TESTS_PROVIDERS_DIR) @@ -173,7 +176,14 @@ def check_if_different_provider_used(file_path: Path) -> None: imported_provider = get_provider_id_from_import(import_name, file_path) if imported_provider is not None and imported_provider not in ALL_PROVIDERS: warnings.append(f"The provider {imported_provider} from {file_path} cannot be found.") - elif imported_provider and file_provider != imported_provider: + continue + + if imported_provider == "standard": + # The standard provider -- e.g. BashOperator -- is used in a lot of example dags, but we don't + # want to mark this as a provider cross-dependency + if file_path.name == "celery_executor_utils.py" or "/example_dags/" in file_path.as_posix(): + continue + if imported_provider and file_provider != imported_provider: ALL_DEPENDENCIES[file_provider]["cross-providers-deps"].append(imported_provider) diff --git a/scripts/ci/pre_commit/version_heads_map.py b/scripts/ci/pre_commit/version_heads_map.py index 10a6dee2eaf2..6796819444d8 100755 --- a/scripts/ci/pre_commit/version_heads_map.py +++ b/scripts/ci/pre_commit/version_heads_map.py @@ -29,8 +29,9 @@ DB_FILE = PROJECT_SOURCE_ROOT_DIR / "airflow" / "utils" / "db.py" MIGRATION_PATH = PROJECT_SOURCE_ROOT_DIR / "airflow" / "migrations" / "versions" -FAB_DB_FILE = PROJECT_SOURCE_ROOT_DIR / "airflow" / "providers" / "fab" / "auth_manager" / "models" / "db.py" -FAB_MIGRATION_PATH = PROJECT_SOURCE_ROOT_DIR / "airflow" / "providers" / "fab" / "migrations" / "versions" +PROVIDERS_SRC = PROJECT_SOURCE_ROOT_DIR / "providers" / "src" +FAB_DB_FILE = PROVIDERS_SRC / "airflow" / "providers" / "fab" / "auth_manager" / "models" / "db.py" +FAB_MIGRATION_PATH = PROVIDERS_SRC / "airflow" / "providers" / "fab" / "migrations" / "versions" sys.path.insert(0, str(Path(__file__).parent.resolve())) # make sure common_precommit_utils is importable diff --git a/scripts/docker/install_airflow.sh b/scripts/docker/install_airflow.sh index 5db10ad96769..324bca7ccfe0 100644 --- a/scripts/docker/install_airflow.sh +++ b/scripts/docker/install_airflow.sh @@ -48,8 +48,13 @@ function install_airflow() { # Determine the installation_command_flags based on AIRFLOW_INSTALLATION_METHOD method local installation_command_flags if [[ ${AIRFLOW_INSTALLATION_METHOD} == "."
]]; then + # We need _a_ file in there, otherwise the editable install doesn't include anything in the .pth file + mkdir -p ./providers/src/airflow/providers/ + touch ./providers/src/airflow/providers/__init__.py + trap 'rm -f ./providers/src/airflow/providers/__init__.py 2>/dev/null' EXIT + # When installing from sources - we always use `--editable` mode - installation_command_flags="--editable .[${AIRFLOW_EXTRAS}]${AIRFLOW_VERSION_SPECIFICATION}" + installation_command_flags="--editable .[${AIRFLOW_EXTRAS}]${AIRFLOW_VERSION_SPECIFICATION} --editable ./providers" elif [[ ${AIRFLOW_INSTALLATION_METHOD} == "apache-airflow" ]]; then installation_command_flags="apache-airflow[${AIRFLOW_EXTRAS}]${AIRFLOW_VERSION_SPECIFICATION}" elif [[ ${AIRFLOW_INSTALLATION_METHOD} == apache-airflow\ @\ * ]]; then diff --git a/scripts/in_container/install_devel_deps.py b/scripts/in_container/install_devel_deps.py index 7a2410f385df..84f28bb2cc48 100755 --- a/scripts/in_container/install_devel_deps.py +++ b/scripts/in_container/install_devel_deps.py @@ -27,7 +27,8 @@ def get_devel_test_deps() -> list[str]: - devel_deps: list[str] = [] + # Pre-install the tests_common pytest plugin/utils, in case sources aren't mounted + devel_deps: list[str] = ["./dev/tests_common"] hatch_build_content = (AIRFLOW_SOURCES_DIR / "hatch_build.py").read_text().splitlines() store = False for line in hatch_build_content: diff --git a/scripts/in_container/run_mypy.sh b/scripts/in_container/run_mypy.sh index 0245825a7264..976963e444a0 100755 --- a/scripts/in_container/run_mypy.sh +++ b/scripts/in_container/run_mypy.sh @@ -30,8 +30,8 @@ then for folder in ${SUSPENDED_PROVIDERS_FOLDERS=} do ADDITIONAL_MYPY_OPTIONS+=( - "--exclude" "airflow/providers/${folder}/*" - "--exclude" "tests/providers/${folder}/*" + "--exclude" "providers/src/airflow/providers/${folder}/*" + "--exclude" "providers/tests/${folder}/*" ) done fi diff --git a/scripts/in_container/run_provider_yaml_files_check.py b/scripts/in_container/run_provider_yaml_files_check.py index 95978a04ddd0..ab5ebcac697b 100755 --- a/scripts/in_container/run_provider_yaml_files_check.py +++ b/scripts/in_container/run_provider_yaml_files_check.py @@ -74,6 +74,7 @@ ) ROOT_DIR = pathlib.Path(__file__).resolve().parents[2] +PROVIDERS_SRC = ROOT_DIR / "providers" / "src" DOCS_DIR = ROOT_DIR.joinpath("docs") PROVIDER_DATA_SCHEMA_PATH = ROOT_DIR.joinpath("airflow", "provider.yaml.schema.json") PROVIDER_ISSUE_TEMPLATE_PATH = ROOT_DIR.joinpath( @@ -97,11 +98,13 @@ suspended_integrations: set[str] = set() -def _filepath_to_module(filepath: pathlib.Path) -> str: - p = filepath.resolve().relative_to(ROOT_DIR).as_posix() - if p.endswith(".py"): - p = p[:-3] - return p.replace("/", ".") +def _filepath_to_module(filepath: pathlib.Path | str) -> str: + if isinstance(filepath, str): + filepath = pathlib.Path(filepath) + if filepath.name == "provider.yaml": + filepath = filepath.parent + p = filepath.resolve().relative_to(PROVIDERS_SRC).with_suffix("") + return p.as_posix().replace("/", ".") def _load_schema() -> dict[str, Any]: @@ -270,8 +273,8 @@ def check_if_objects_exist_and_belong_to_package( def parse_module_data(provider_data, resource_type, yaml_file_path): - package_dir = ROOT_DIR.joinpath(yaml_file_path).parent - provider_package = pathlib.Path(yaml_file_path).parent.as_posix().replace("/", ".") + provider_dir = pathlib.Path(yaml_file_path).parent + package_dir = ROOT_DIR.joinpath(provider_dir) py_files = itertools.chain( package_dir.glob(f"**/{resource_type}/*.py"),
package_dir.glob(f"{resource_type}/*.py"), @@ -280,7 +283,7 @@ def parse_module_data(provider_data, resource_type, yaml_file_path): ) expected_modules = {_filepath_to_module(f) for f in py_files if f.name != "__init__.py"} resource_data = provider_data.get(resource_type, []) - return expected_modules, provider_package, resource_data + return expected_modules, _filepath_to_module(provider_dir), resource_data def run_check(title: str): @@ -398,12 +401,9 @@ def check_completeness_of_list_of_transfers(yaml_files: dict[str, dict]) -> tupl current_modules, provider_package, yaml_file_path, resource_type, ObjectType.MODULE ) try: - package_name = os.fspath(ROOT_DIR.joinpath(yaml_file_path).parent.relative_to(ROOT_DIR)).replace( - "/", "." - ) assert_sets_equal( set(expected_modules), - f"Found list of transfer modules in provider package: {package_name}", + f"Found list of transfer modules in provider package: {provider_package}", set(current_modules), f"Currently configured list of transfer modules in {yaml_file_path}", ) @@ -423,7 +423,7 @@ def check_hook_class_name_entries_in_connection_types(yaml_files: dict[str, dict num_errors = 0 num_connection_types = 0 for yaml_file_path, provider_data in yaml_files.items(): - provider_package = pathlib.Path(yaml_file_path).parent.as_posix().replace("/", ".") + provider_package = _filepath_to_module(yaml_file_path) connection_types = provider_data.get(resource_type) if connection_types: num_connection_types += len(connection_types) @@ -440,7 +440,7 @@ def check_plugin_classes(yaml_files: dict[str, dict]) -> tuple[int, int]: num_errors = 0 num_plugins = 0 for yaml_file_path, provider_data in yaml_files.items(): - provider_package = pathlib.Path(yaml_file_path).parent.as_posix().replace("/", ".") + provider_package = _filepath_to_module(yaml_file_path) plugins = provider_data.get(resource_type) if plugins: num_plugins += len(plugins) @@ -460,7 +460,7 @@ def check_extra_link_classes(yaml_files: dict[str, dict]) -> tuple[int, int]: num_errors = 0 num_extra_links = 0 for yaml_file_path, provider_data in yaml_files.items(): - provider_package = pathlib.Path(yaml_file_path).parent.as_posix().replace("/", ".") + provider_package = _filepath_to_module(yaml_file_path) extra_links = provider_data.get(resource_type) if extra_links: num_extra_links += len(extra_links) @@ -476,7 +476,7 @@ def check_notification_classes(yaml_files: dict[str, dict]) -> tuple[int, int]: num_errors = 0 num_notifications = 0 for yaml_file_path, provider_data in yaml_files.items(): - provider_package = pathlib.Path(yaml_file_path).parent.as_posix().replace("/", ".") + provider_package = _filepath_to_module(yaml_file_path) notifications = provider_data.get(resource_type) if notifications: num_notifications += len(notifications) @@ -692,7 +692,9 @@ def check_providers_have_all_documentation_files(yaml_files: dict[str, dict]): ProvidersManager().initialize_providers_configuration() architecture = Architecture.get_current() console.print(f"Verifying packages on {architecture} architecture. 
Platform: {platform.machine()}.") - provider_files_pattern = pathlib.Path(ROOT_DIR, "airflow", "providers").rglob("provider.yaml") + provider_files_pattern = pathlib.Path(ROOT_DIR, "providers", "src", "airflow", "providers").rglob( + "provider.yaml" + ) all_provider_files = sorted(str(path) for path in provider_files_pattern) if len(sys.argv) > 1: paths = [os.fspath(ROOT_DIR / f) for f in sorted(sys.argv[1:])] diff --git a/tests/always/test_connection.py b/tests/always/test_connection.py index 5e41ddda798c..0f6b27c62d4e 100644 --- a/tests/always/test_connection.py +++ b/tests/always/test_connection.py @@ -33,7 +33,8 @@ from airflow.models import Connection, crypto from airflow.providers.sqlite.hooks.sqlite import SqliteHook from airflow.providers_manager import HookInfo -from tests.test_utils.config import conf_vars + +from dev.tests_common.test_utils.config import conf_vars ConnectionParts = namedtuple("ConnectionParts", ["conn_type", "login", "password", "host", "port", "schema"]) diff --git a/tests/always/test_example_dags.py b/tests/always/test_example_dags.py index 9d10ce5cad19..9def5f42206f 100644 --- a/tests/always/test_example_dags.py +++ b/tests/always/test_example_dags.py @@ -29,12 +29,13 @@ from airflow.models import DagBag from airflow.utils import yaml -from tests.test_utils.asserts import assert_queries_count + +from dev.tests_common.test_utils.asserts import assert_queries_count AIRFLOW_SOURCES_ROOT = Path(__file__).resolve().parents[2] AIRFLOW_PROVIDERS_ROOT = AIRFLOW_SOURCES_ROOT / "airflow" / "providers" CURRENT_PYTHON_VERSION = f"{sys.version_info.major}.{sys.version_info.minor}" -PROVIDERS_PREFIXES = ("airflow/providers/", "tests/system/providers/") +PROVIDERS_PREFIXES = ("providers/src/airflow/providers/", "providers/tests/system/") OPTIONAL_PROVIDERS_DEPENDENCIES: dict[str, dict[str, str | None]] = { # Some examples or system tests may depend on additional packages # that are not included in certain CI checks. @@ -50,17 +51,17 @@ # Generally, these should be resolved as soon as a parameter or operator is deprecated. # If the deprecation is postponed, the item should be added to this tuple, # and a corresponding Issue should be created on GitHub. 
- "tests/system/providers/google/cloud/bigquery/example_bigquery_operations.py", - "tests/system/providers/google/cloud/dataflow/example_dataflow_sql.py", - "tests/system/providers/google/cloud/dataproc/example_dataproc_gke.py", - "tests/system/providers/google/cloud/datapipelines/example_datapipeline.py", - "tests/system/providers/google/cloud/gcs/example_gcs_sensor.py", - "tests/system/providers/google/cloud/kubernetes_engine/example_kubernetes_engine.py", - "tests/system/providers/google/cloud/kubernetes_engine/example_kubernetes_engine_async.py", - "tests/system/providers/google/cloud/kubernetes_engine/example_kubernetes_engine_job.py", - "tests/system/providers/google/cloud/kubernetes_engine/example_kubernetes_engine_kueue.py", - "tests/system/providers/google/cloud/kubernetes_engine/example_kubernetes_engine_resource.py", - "tests/system/providers/google/cloud/life_sciences/example_life_sciences.py", + "providers/tests/system/google/cloud/bigquery/example_bigquery_operations.py", + "providers/tests/system/google/cloud/dataflow/example_dataflow_sql.py", + "providers/tests/system/google/cloud/dataproc/example_dataproc_gke.py", + "providers/tests/system/google/cloud/datapipelines/example_datapipeline.py", + "providers/tests/system/google/cloud/gcs/example_gcs_sensor.py", + "providers/tests/system/google/cloud/kubernetes_engine/example_kubernetes_engine.py", + "providers/tests/system/google/cloud/kubernetes_engine/example_kubernetes_engine_async.py", + "providers/tests/system/google/cloud/kubernetes_engine/example_kubernetes_engine_job.py", + "providers/tests/system/google/cloud/kubernetes_engine/example_kubernetes_engine_kueue.py", + "providers/tests/system/google/cloud/kubernetes_engine/example_kubernetes_engine_resource.py", + "providers/tests/system/google/cloud/life_sciences/example_life_sciences.py", # Deprecated Operators/Hooks, which replaced by common.sql Operators/Hooks ) @@ -89,7 +90,7 @@ def get_suspended_providers_folders() -> list[str]: suspended_providers.append( provider_path.parent.relative_to(AIRFLOW_SOURCES_ROOT) .as_posix() - .replace("airflow/providers/", "") + .replace("providers/src/airflow/providers/", "") ) return suspended_providers @@ -107,7 +108,7 @@ def get_python_excluded_providers_folders() -> list[str]: excluded_providers.append( provider_path.parent.relative_to(AIRFLOW_SOURCES_ROOT) .as_posix() - .replace("airflow/providers/", "") + .replace("providers/src/airflow/providers/", "") ) return excluded_providers diff --git a/tests/always/test_project_structure.py b/tests/always/test_project_structure.py index b27729a68a26..9db0b22df84f 100644 --- a/tests/always/test_project_structure.py +++ b/tests/always/test_project_structure.py @@ -21,169 +21,159 @@ import itertools import mmap import os +import pathlib import pytest -ROOT_FOLDER = os.path.realpath( - os.path.join(os.path.dirname(os.path.realpath(__file__)), os.pardir, os.pardir) -) +ROOT_FOLDER = pathlib.Path(__file__).parents[2] +PROVIDERS_SRC = ROOT_FOLDER.joinpath("providers", "src") +PROVIDERS_TESTS = ROOT_FOLDER.joinpath("providers", "tests") class TestProjectStructure: def test_reference_to_providers_from_core(self): - for filename in glob.glob(f"{ROOT_FOLDER}/example_dags/**/*.py", recursive=True): + for filename in ROOT_FOLDER.glob("example_dags/**/*.py"): self.assert_file_not_contains(filename, "providers") def test_deprecated_packages(self): - path_pattern = f"{ROOT_FOLDER}/airflow/contrib/**/*.py" - - for filename in glob.glob(path_pattern, recursive=True): - if 
filename.endswith("/__init__.py"): + for filename in ROOT_FOLDER.glob("airflow/contrib/**/*.py"): + if filename.name == "__init__.py": self.assert_file_contains(filename, "This package is deprecated.") else: self.assert_file_contains(filename, "This module is deprecated.") - def assert_file_not_contains(self, filename: str, pattern: str): + def assert_file_not_contains(self, filename: pathlib.Path, pattern: str): with open(filename, "rb", 0) as file, mmap.mmap(file.fileno(), 0, access=mmap.ACCESS_READ) as content: if content.find(bytes(pattern, "utf-8")) != -1: pytest.fail(f"File {filename} contains illegal pattern - {pattern}") - def assert_file_contains(self, filename: str, pattern: str): + def assert_file_contains(self, filename: pathlib.Path, pattern: str): with open(filename, "rb", 0) as file, mmap.mmap(file.fileno(), 0, access=mmap.ACCESS_READ) as content: if content.find(bytes(pattern, "utf-8")) == -1: pytest.fail(f"File {filename} does not contain pattern - {pattern}") def test_providers_modules_should_have_tests(self): """ - Assert every module in /airflow/providers has a corresponding test_ file in tests/airflow/providers. + Assert every module in /providers/src/airflow/providers has a corresponding test_ file in providers/tests. """ # The test below had a bug for quite a while and we missed a lot of modules that should have tests # We should make sure that this list goes to 0 OVERLOOKED_TESTS = [ - "tests/providers/amazon/aws/executors/batch/test_boto_schema.py", - "tests/providers/amazon/aws/executors/batch/test_batch_executor_config.py", - "tests/providers/amazon/aws/executors/batch/test_utils.py", - "tests/providers/amazon/aws/executors/ecs/test_boto_schema.py", - "tests/providers/amazon/aws/executors/ecs/test_ecs_executor_config.py", - "tests/providers/amazon/aws/executors/ecs/test_utils.py", - "tests/providers/amazon/aws/executors/utils/test_base_config_keys.py", - "tests/providers/amazon/aws/operators/test_emr.py", - "tests/providers/amazon/aws/operators/test_sagemaker.py", - "tests/providers/amazon/aws/sensors/test_emr.py", - "tests/providers/amazon/aws/sensors/test_sagemaker.py", - "tests/providers/amazon/aws/test_exceptions.py", - "tests/providers/amazon/aws/triggers/test_step_function.py", - "tests/providers/amazon/aws/utils/test_rds.py", - "tests/providers/amazon/aws/utils/test_sagemaker.py", - "tests/providers/amazon/aws/utils/test_asset_compat_lineage_collector.py", - "tests/providers/amazon/aws/waiters/test_base_waiter.py", - "tests/providers/apache/cassandra/hooks/test_cassandra.py", - "tests/providers/apache/drill/operators/test_drill.py", - "tests/providers/apache/druid/operators/test_druid_check.py", - "tests/providers/apache/hdfs/hooks/test_hdfs.py", - "tests/providers/apache/hdfs/log/test_hdfs_task_handler.py", - "tests/providers/apache/hdfs/sensors/test_hdfs.py", - "tests/providers/apache/hive/plugins/test_hive.py", - "tests/providers/celery/executors/test_celery_executor_utils.py", - "tests/providers/celery/executors/test_default_celery.py", - "tests/providers/cloudant/test_cloudant_fake.py", - "tests/providers/cncf/kubernetes/backcompat/test_backwards_compat_converters.py", - "tests/providers/cncf/kubernetes/executors/test_kubernetes_executor_types.py", - "tests/providers/cncf/kubernetes/executors/test_kubernetes_executor_utils.py", - "tests/providers/cncf/kubernetes/operators/test_kubernetes_pod.py", - "tests/providers/cncf/kubernetes/test_k8s_model.py", - "tests/providers/cncf/kubernetes/test_kube_client.py", - "tests/providers/cncf/kubernetes/test_kube_config.py",
"tests/providers/cncf/kubernetes/test_pod_generator_deprecated.py", - "tests/providers/cncf/kubernetes/test_pod_launcher_deprecated.py", - "tests/providers/cncf/kubernetes/test_python_kubernetes_script.py", - "tests/providers/cncf/kubernetes/test_secret.py", - "tests/providers/cncf/kubernetes/triggers/test_kubernetes_pod.py", - "tests/providers/cncf/kubernetes/utils/test_delete_from.py", - "tests/providers/cncf/kubernetes/utils/test_k8s_hashlib_wrapper.py", - "tests/providers/cncf/kubernetes/utils/test_xcom_sidecar.py", - "tests/providers/google/cloud/fs/test_gcs.py", - "tests/providers/google/cloud/links/test_automl.py", - "tests/providers/google/cloud/links/test_base.py", - "tests/providers/google/cloud/links/test_bigquery.py", - "tests/providers/google/cloud/links/test_bigquery_dts.py", - "tests/providers/google/cloud/links/test_bigtable.py", - "tests/providers/google/cloud/links/test_cloud_build.py", - "tests/providers/google/cloud/links/test_cloud_functions.py", - "tests/providers/google/cloud/links/test_cloud_memorystore.py", - "tests/providers/google/cloud/links/test_cloud_sql.py", - "tests/providers/google/cloud/links/test_cloud_storage_transfer.py", - "tests/providers/google/cloud/links/test_cloud_tasks.py", - "tests/providers/google/cloud/links/test_compute.py", - "tests/providers/google/cloud/links/test_data_loss_prevention.py", - "tests/providers/google/cloud/links/test_datacatalog.py", - "tests/providers/google/cloud/links/test_dataflow.py", - "tests/providers/google/cloud/links/test_dataform.py", - "tests/providers/google/cloud/links/test_datafusion.py", - "tests/providers/google/cloud/links/test_dataplex.py", - "tests/providers/google/cloud/links/test_dataprep.py", - "tests/providers/google/cloud/links/test_dataproc.py", - "tests/providers/google/cloud/links/test_datastore.py", - "tests/providers/google/cloud/links/test_kubernetes_engine.py", - "tests/providers/google/cloud/links/test_life_sciences.py", - "tests/providers/google/cloud/links/test_mlengine.py", - "tests/providers/google/cloud/links/test_pubsub.py", - "tests/providers/google/cloud/links/test_spanner.py", - "tests/providers/google/cloud/links/test_stackdriver.py", - "tests/providers/google/cloud/links/test_vertex_ai.py", - "tests/providers/google/cloud/links/test_workflows.py", - "tests/providers/google/cloud/operators/vertex_ai/test_auto_ml.py", - "tests/providers/google/cloud/operators/vertex_ai/test_batch_prediction_job.py", - "tests/providers/google/cloud/operators/vertex_ai/test_custom_job.py", - "tests/providers/google/cloud/operators/vertex_ai/test_dataset.py", - "tests/providers/google/cloud/operators/vertex_ai/test_endpoint_service.py", - "tests/providers/google/cloud/operators/vertex_ai/test_hyperparameter_tuning_job.py", - "tests/providers/google/cloud/operators/vertex_ai/test_model_service.py", - "tests/providers/google/cloud/operators/vertex_ai/test_pipeline_job.py", - "tests/providers/google/cloud/sensors/test_dataform.py", - "tests/providers/google/cloud/transfers/test_bigquery_to_sql.py", - "tests/providers/google/cloud/transfers/test_presto_to_gcs.py", - "tests/providers/google/cloud/utils/test_bigquery.py", - "tests/providers/google/cloud/utils/test_bigquery_get_data.py", - "tests/providers/google/cloud/utils/test_dataform.py", - "tests/providers/google/common/links/test_storage.py", - "tests/providers/google/common/test_consts.py", - "tests/providers/google/test_go_module_utils.py", - "tests/providers/microsoft/azure/operators/test_adls.py", - 
"tests/providers/microsoft/azure/transfers/test_azure_blob_to_gcs.py", - "tests/providers/openlineage/utils/test_asset_compat_lineage_collector.py", - "tests/providers/slack/notifications/test_slack_notifier.py", - "tests/providers/snowflake/triggers/test_snowflake_trigger.py", - "tests/providers/yandex/hooks/test_yandexcloud_dataproc.py", - "tests/providers/yandex/operators/test_yandexcloud_dataproc.py", - "tests/providers/fab/migrations/test_env.py", + "providers/tests/amazon/aws/executors/batch/test_boto_schema.py", + "providers/tests/amazon/aws/executors/batch/test_batch_executor_config.py", + "providers/tests/amazon/aws/executors/batch/test_utils.py", + "providers/tests/amazon/aws/executors/ecs/test_boto_schema.py", + "providers/tests/amazon/aws/executors/ecs/test_ecs_executor_config.py", + "providers/tests/amazon/aws/executors/ecs/test_utils.py", + "providers/tests/amazon/aws/executors/utils/test_base_config_keys.py", + "providers/tests/amazon/aws/operators/test_emr.py", + "providers/tests/amazon/aws/operators/test_sagemaker.py", + "providers/tests/amazon/aws/sensors/test_emr.py", + "providers/tests/amazon/aws/sensors/test_sagemaker.py", + "providers/tests/amazon/aws/test_exceptions.py", + "providers/tests/amazon/aws/triggers/test_step_function.py", + "providers/tests/amazon/aws/utils/test_rds.py", + "providers/tests/amazon/aws/utils/test_sagemaker.py", + "providers/tests/amazon/aws/waiters/test_base_waiter.py", + "providers/tests/apache/cassandra/hooks/test_cassandra.py", + "providers/tests/apache/drill/operators/test_drill.py", + "providers/tests/apache/druid/operators/test_druid_check.py", + "providers/tests/apache/hdfs/hooks/test_hdfs.py", + "providers/tests/apache/hdfs/log/test_hdfs_task_handler.py", + "providers/tests/apache/hdfs/sensors/test_hdfs.py", + "providers/tests/apache/hive/plugins/test_hive.py", + "providers/tests/celery/executors/test_celery_executor_utils.py", + "providers/tests/celery/executors/test_default_celery.py", + "providers/tests/cloudant/test_cloudant_fake.py", + "providers/tests/cncf/kubernetes/backcompat/test_backwards_compat_converters.py", + "providers/tests/cncf/kubernetes/executors/test_kubernetes_executor_types.py", + "providers/tests/cncf/kubernetes/executors/test_kubernetes_executor_utils.py", + "providers/tests/cncf/kubernetes/operators/test_kubernetes_pod.py", + "providers/tests/cncf/kubernetes/test_k8s_model.py", + "providers/tests/cncf/kubernetes/test_kube_client.py", + "providers/tests/cncf/kubernetes/test_kube_config.py", + "providers/tests/cncf/kubernetes/test_pod_generator_deprecated.py", + "providers/tests/cncf/kubernetes/test_pod_launcher_deprecated.py", + "providers/tests/cncf/kubernetes/test_python_kubernetes_script.py", + "providers/tests/cncf/kubernetes/test_secret.py", + "providers/tests/cncf/kubernetes/triggers/test_kubernetes_pod.py", + "providers/tests/cncf/kubernetes/utils/test_delete_from.py", + "providers/tests/cncf/kubernetes/utils/test_k8s_hashlib_wrapper.py", + "providers/tests/cncf/kubernetes/utils/test_xcom_sidecar.py", + "providers/tests/google/cloud/fs/test_gcs.py", + "providers/tests/google/cloud/links/test_automl.py", + "providers/tests/google/cloud/links/test_base.py", + "providers/tests/google/cloud/links/test_bigquery.py", + "providers/tests/google/cloud/links/test_bigquery_dts.py", + "providers/tests/google/cloud/links/test_bigtable.py", + "providers/tests/google/cloud/links/test_cloud_build.py", + "providers/tests/google/cloud/links/test_cloud_functions.py", + 
"providers/tests/google/cloud/links/test_cloud_memorystore.py", + "providers/tests/google/cloud/links/test_cloud_sql.py", + "providers/tests/google/cloud/links/test_cloud_storage_transfer.py", + "providers/tests/google/cloud/links/test_cloud_tasks.py", + "providers/tests/google/cloud/links/test_compute.py", + "providers/tests/google/cloud/links/test_data_loss_prevention.py", + "providers/tests/google/cloud/links/test_datacatalog.py", + "providers/tests/google/cloud/links/test_dataflow.py", + "providers/tests/google/cloud/links/test_dataform.py", + "providers/tests/google/cloud/links/test_datafusion.py", + "providers/tests/google/cloud/links/test_dataplex.py", + "providers/tests/google/cloud/links/test_dataprep.py", + "providers/tests/google/cloud/links/test_dataproc.py", + "providers/tests/google/cloud/links/test_datastore.py", + "providers/tests/google/cloud/links/test_kubernetes_engine.py", + "providers/tests/google/cloud/links/test_life_sciences.py", + "providers/tests/google/cloud/links/test_mlengine.py", + "providers/tests/google/cloud/links/test_pubsub.py", + "providers/tests/google/cloud/links/test_spanner.py", + "providers/tests/google/cloud/links/test_stackdriver.py", + "providers/tests/google/cloud/links/test_vertex_ai.py", + "providers/tests/google/cloud/links/test_workflows.py", + "providers/tests/google/cloud/operators/vertex_ai/test_auto_ml.py", + "providers/tests/google/cloud/operators/vertex_ai/test_batch_prediction_job.py", + "providers/tests/google/cloud/operators/vertex_ai/test_custom_job.py", + "providers/tests/google/cloud/operators/vertex_ai/test_endpoint_service.py", + "providers/tests/google/cloud/operators/vertex_ai/test_hyperparameter_tuning_job.py", + "providers/tests/google/cloud/operators/vertex_ai/test_model_service.py", + "providers/tests/google/cloud/operators/vertex_ai/test_pipeline_job.py", + "providers/tests/google/cloud/sensors/test_dataform.py", + "providers/tests/google/cloud/transfers/test_bigquery_to_sql.py", + "providers/tests/google/cloud/transfers/test_presto_to_gcs.py", + "providers/tests/google/cloud/utils/test_bigquery.py", + "providers/tests/google/cloud/utils/test_bigquery_get_data.py", + "providers/tests/google/cloud/utils/test_dataform.py", + "providers/tests/google/common/links/test_storage.py", + "providers/tests/google/common/test_consts.py", + "providers/tests/google/test_go_module_utils.py", + "providers/tests/microsoft/azure/operators/test_adls.py", + "providers/tests/microsoft/azure/transfers/test_azure_blob_to_gcs.py", + "providers/tests/slack/notifications/test_slack_notifier.py", + "providers/tests/snowflake/triggers/test_snowflake_trigger.py", + "providers/tests/yandex/hooks/test_yandexcloud_dataproc.py", + "providers/tests/yandex/operators/test_yandexcloud_dataproc.py", + "providers/tests/fab/migrations/test_env.py", ] # TODO: Should we extend this test to cover other directories? 
- modules_files = list(glob.glob(f"{ROOT_FOLDER}/airflow/providers/**/*.py", recursive=True)) + modules_files = PROVIDERS_SRC.joinpath("airflow", "providers").glob("**/*.py") + # Exclude __init__.py + modules_files = filter(lambda f: f.name != "__init__.py", modules_files) # Make path relative - modules_files = list(os.path.relpath(f, ROOT_FOLDER) for f in modules_files) + modules_files = map(lambda f: f.relative_to(PROVIDERS_SRC / "airflow" / "providers"), modules_files) # Exclude example_dags - modules_files = list(f for f in modules_files if "/example_dags/" not in f) + modules_files = (f for f in modules_files if "example_dags" not in f.parts) # Exclude _vendor - modules_files = list(f for f in modules_files if "/_vendor/" not in f) - # Exclude __init__.py - modules_files = list(f for f in modules_files if not f.endswith("__init__.py")) + modules_files = (f for f in modules_files if "_vendor" not in f.parts) # Exclude versions file - modules_files = list(f for f in modules_files if "/versions/" not in f) - # Change airflow/ to tests/ - expected_test_files = list( f'tests/{f.partition("/")[2]}' for f in modules_files if not f.endswith("__init__.py") ) + modules_files = (f for f in modules_files if "versions" not in f.parts) + # Change providers/src/airflow/providers/ to providers/tests/ + test_folder = pathlib.Path("providers/tests") # Add test_ prefix to filename - expected_test_files = list( f'{f.rpartition("/")[0]}/test_{f.rpartition("/")[2]}' for f in expected_test_files if not f.endswith("__init__.py") ) + expected_test_files = (test_folder.joinpath(f.with_name("test_" + f.name)) for f in modules_files) - current_test_files = glob.glob(f"{ROOT_FOLDER}/tests/providers/**/*.py", recursive=True) + current_test_files = PROVIDERS_TESTS.glob("**/*.py") # Make path relative current_test_files = (os.path.relpath(f, ROOT_FOLDER) for f in current_test_files) # Exclude __init__.py @@ -219,9 +209,9 @@ def get_imports_from_file(filepath: str): return import_names -def filepath_to_module(filepath: str): - filepath = os.path.relpath(os.path.abspath(filepath), ROOT_FOLDER) - return filepath.replace("/", ".")[: -(len(".py"))] +def filepath_to_module(path: pathlib.Path): + path = path.relative_to(PROVIDERS_SRC) + return path.as_posix().replace("/", ".")[: -(len(".py"))] def print_sorted(container: set, indent: str = " ") -> None: @@ -235,24 +225,22 @@ class ProjectStructureTest: CLASS_SUFFIXES = ["Operator", "Sensor"] def class_paths(self): - """Override this method if your classes are located under different paths""" for resource_type in self.CLASS_DIRS: - python_files = glob.glob( f"{ROOT_FOLDER}/airflow/providers/{self.PROVIDER}/**/{resource_type}/**.py", recursive=True + python_files = PROVIDERS_SRC.glob( f"airflow/providers/{self.PROVIDER}/**/{resource_type}/**/*.py", ) # Make path relative - resource_files = (os.path.relpath(f, ROOT_FOLDER) for f in python_files) - resource_files = (f for f in resource_files if not f.endswith("__init__.py")) + resource_files = filter(lambda f: f.name != "__init__.py", python_files) yield from resource_files def list_of_classes(self): classes = {} for operator_file in self.class_paths(): - operators_paths = self.get_classes_from_file(f"{ROOT_FOLDER}/{operator_file}") + operators_paths = self.get_classes_from_file(operator_file) classes.update(operators_paths) return classes - def get_classes_from_file(self, filepath: str): + def get_classes_from_file(self, filepath: pathlib.Path): with open(filepath) as py_file: content = py_file.read() doc_node =
ast.parse(content, filepath) @@ -282,11 +270,12 @@ def example_paths(self): """Override this method if your example dags are located elsewhere""" # old_design: yield from glob.glob( - f"{ROOT_FOLDER}/airflow/providers/{self.PROVIDER}/**/example_dags/example_*.py", recursive=True + f"{ROOT_FOLDER}/providers/src/airflow/providers/{self.PROVIDER}/**/example_dags/example_*.py", + recursive=True, ) # new_design: yield from glob.glob( - f"{ROOT_FOLDER}/tests/system/providers/{self.PROVIDER}/**/example_*.py", recursive=True + f"{ROOT_FOLDER}/providers/tests/system/{self.PROVIDER}/**/example_*.py", recursive=True ) def test_missing_examples(self): diff --git a/tests/always/test_secrets.py b/tests/always/test_secrets.py index 9e1381faf1fe..a8b2296de1cc 100644 --- a/tests/always/test_secrets.py +++ b/tests/always/test_secrets.py @@ -24,8 +24,9 @@ from airflow.configuration import ensure_secrets_loaded, initialize_secrets_backends from airflow.models import Connection, Variable from airflow.secrets.cache import SecretCache -from tests.test_utils.config import conf_vars -from tests.test_utils.db import clear_db_variables + +from dev.tests_common.test_utils.config import conf_vars +from dev.tests_common.test_utils.db import clear_db_variables class TestConnectionsFromSecrets: diff --git a/tests/always/test_secrets_backends.py b/tests/always/test_secrets_backends.py index e862ed3263a6..c5093d2e562a 100644 --- a/tests/always/test_secrets_backends.py +++ b/tests/always/test_secrets_backends.py @@ -28,7 +28,8 @@ from airflow.secrets.environment_variables import EnvironmentVariablesBackend from airflow.secrets.metastore import MetastoreBackend from airflow.utils.session import create_session -from tests.test_utils.db import clear_db_connections, clear_db_variables + +from dev.tests_common.test_utils.db import clear_db_connections, clear_db_variables pytestmark = pytest.mark.db_test diff --git a/tests/always/test_secrets_local_filesystem.py b/tests/always/test_secrets_local_filesystem.py index 44b93dedb822..40472ec64d5f 100644 --- a/tests/always/test_secrets_local_filesystem.py +++ b/tests/always/test_secrets_local_filesystem.py @@ -29,7 +29,8 @@ from airflow.models import Variable from airflow.secrets import local_filesystem from airflow.secrets.local_filesystem import LocalFilesystemBackend -from tests.test_utils.config import conf_vars + +from dev.tests_common.test_utils.config import conf_vars @contextmanager diff --git a/tests/api_connexion/conftest.py b/tests/api_connexion/conftest.py index 6a23b2cf11d9..8ab76fbb7629 100644 --- a/tests/api_connexion/conftest.py +++ b/tests/api_connexion/conftest.py @@ -19,8 +19,9 @@ import pytest from airflow.www import app -from tests.test_utils.config import conf_vars -from tests.test_utils.decorators import dont_initialize_flask_app_submodules + +from dev.tests_common.test_utils.config import conf_vars +from dev.tests_common.test_utils.decorators import dont_initialize_flask_app_submodules @pytest.fixture(scope="session") @@ -38,7 +39,7 @@ def minimal_app_for_api(): def factory(): with conf_vars( { - ("api", "auth_backends"): "tests.test_utils.remote_user_api_auth_backend", + ("api", "auth_backends"): "dev.tests_common.test_utils.remote_user_api_auth_backend", ( "core", "auth_manager", diff --git a/tests/api_connexion/endpoints/test_asset_endpoint.py b/tests/api_connexion/endpoints/test_asset_endpoint.py index 40ad59067f59..39b0129c3317 100644 --- a/tests/api_connexion/endpoints/test_asset_endpoint.py +++ b/tests/api_connexion/endpoints/test_asset_endpoint.py @@ 
-36,11 +36,12 @@ from airflow.utils import timezone from airflow.utils.session import provide_session from airflow.utils.types import DagRunType -from tests.test_utils.api_connexion_utils import assert_401, create_user, delete_user -from tests.test_utils.asserts import assert_queries_count -from tests.test_utils.config import conf_vars -from tests.test_utils.db import clear_db_assets, clear_db_runs -from tests.test_utils.www import _check_last_log + +from dev.tests_common.test_utils.api_connexion_utils import assert_401, create_user, delete_user +from dev.tests_common.test_utils.asserts import assert_queries_count +from dev.tests_common.test_utils.config import conf_vars +from dev.tests_common.test_utils.db import clear_db_assets, clear_db_runs +from dev.tests_common.test_utils.www import _check_last_log pytestmark = [pytest.mark.db_test, pytest.mark.skip_if_database_isolation_mode] diff --git a/tests/api_connexion/endpoints/test_backfill_endpoint.py b/tests/api_connexion/endpoints/test_backfill_endpoint.py index dd086339b73a..67ec6316e2ff 100644 --- a/tests/api_connexion/endpoints/test_backfill_endpoint.py +++ b/tests/api_connexion/endpoints/test_backfill_endpoint.py @@ -30,8 +30,14 @@ from airflow.operators.empty import EmptyOperator from airflow.utils import timezone from airflow.utils.session import provide_session -from tests.test_utils.api_connexion_utils import create_user, delete_user -from tests.test_utils.db import clear_db_backfills, clear_db_dags, clear_db_runs, clear_db_serialized_dags + +from dev.tests_common.test_utils.api_connexion_utils import create_user, delete_user +from dev.tests_common.test_utils.db import ( + clear_db_backfills, + clear_db_dags, + clear_db_runs, + clear_db_serialized_dags, +) pytestmark = [pytest.mark.db_test, pytest.mark.need_serialized_dag] diff --git a/tests/api_connexion/endpoints/test_config_endpoint.py b/tests/api_connexion/endpoints/test_config_endpoint.py index bd88c491c952..c46a6571c23b 100644 --- a/tests/api_connexion/endpoints/test_config_endpoint.py +++ b/tests/api_connexion/endpoints/test_config_endpoint.py @@ -21,8 +21,8 @@ import pytest -from tests.test_utils.api_connexion_utils import assert_401, create_user, delete_user -from tests.test_utils.config import conf_vars +from dev.tests_common.test_utils.api_connexion_utils import assert_401, create_user, delete_user +from dev.tests_common.test_utils.config import conf_vars pytestmark = [pytest.mark.db_test, pytest.mark.skip_if_database_isolation_mode] diff --git a/tests/api_connexion/endpoints/test_connection_endpoint.py b/tests/api_connexion/endpoints/test_connection_endpoint.py index a140046656e3..fb2d53074cd8 100644 --- a/tests/api_connexion/endpoints/test_connection_endpoint.py +++ b/tests/api_connexion/endpoints/test_connection_endpoint.py @@ -25,10 +25,11 @@ from airflow.models import Connection from airflow.secrets.environment_variables import CONN_ENV_PREFIX from airflow.utils.session import provide_session -from tests.test_utils.api_connexion_utils import assert_401, create_user, delete_user -from tests.test_utils.config import conf_vars -from tests.test_utils.db import clear_db_connections -from tests.test_utils.www import _check_last_log + +from dev.tests_common.test_utils.api_connexion_utils import assert_401, create_user, delete_user +from dev.tests_common.test_utils.config import conf_vars +from dev.tests_common.test_utils.db import clear_db_connections +from dev.tests_common.test_utils.www import _check_last_log pytestmark = [pytest.mark.db_test, 
pytest.mark.skip_if_database_isolation_mode] diff --git a/tests/api_connexion/endpoints/test_dag_endpoint.py b/tests/api_connexion/endpoints/test_dag_endpoint.py index 907009d26df8..86e0a3b4b3ec 100644 --- a/tests/api_connexion/endpoints/test_dag_endpoint.py +++ b/tests/api_connexion/endpoints/test_dag_endpoint.py @@ -30,10 +30,11 @@ from airflow.operators.empty import EmptyOperator from airflow.utils.session import provide_session from airflow.utils.state import TaskInstanceState -from tests.test_utils.api_connexion_utils import assert_401, create_user, delete_user -from tests.test_utils.config import conf_vars -from tests.test_utils.db import clear_db_dags, clear_db_runs, clear_db_serialized_dags -from tests.test_utils.www import _check_last_log + +from dev.tests_common.test_utils.api_connexion_utils import assert_401, create_user, delete_user +from dev.tests_common.test_utils.config import conf_vars +from dev.tests_common.test_utils.db import clear_db_dags, clear_db_runs, clear_db_serialized_dags +from dev.tests_common.test_utils.www import _check_last_log pytestmark = [pytest.mark.db_test, pytest.mark.skip_if_database_isolation_mode] diff --git a/tests/api_connexion/endpoints/test_dag_parsing.py b/tests/api_connexion/endpoints/test_dag_parsing.py index ae42a565dd05..0fca8ee63c97 100644 --- a/tests/api_connexion/endpoints/test_dag_parsing.py +++ b/tests/api_connexion/endpoints/test_dag_parsing.py @@ -24,8 +24,9 @@ from airflow.models import DagBag from airflow.models.dagbag import DagPriorityParsingRequest -from tests.test_utils.api_connexion_utils import create_user, delete_user -from tests.test_utils.db import clear_db_dag_parsing_requests + +from dev.tests_common.test_utils.api_connexion_utils import create_user, delete_user +from dev.tests_common.test_utils.db import clear_db_dag_parsing_requests pytestmark = [pytest.mark.db_test, pytest.mark.skip_if_database_isolation_mode] diff --git a/tests/api_connexion/endpoints/test_dag_run_endpoint.py b/tests/api_connexion/endpoints/test_dag_run_endpoint.py index 6262d4e83479..b881f5ea9863 100644 --- a/tests/api_connexion/endpoints/test_dag_run_endpoint.py +++ b/tests/api_connexion/endpoints/test_dag_run_endpoint.py @@ -34,11 +34,12 @@ from airflow.utils.session import create_session, provide_session from airflow.utils.state import DagRunState, State from airflow.utils.types import DagRunType -from tests.test_utils.api_connexion_utils import assert_401, create_user, delete_user -from tests.test_utils.compat import AIRFLOW_V_3_0_PLUS -from tests.test_utils.config import conf_vars -from tests.test_utils.db import clear_db_dags, clear_db_runs, clear_db_serialized_dags -from tests.test_utils.www import _check_last_log + +from dev.tests_common.test_utils.api_connexion_utils import assert_401, create_user, delete_user +from dev.tests_common.test_utils.compat import AIRFLOW_V_3_0_PLUS +from dev.tests_common.test_utils.config import conf_vars +from dev.tests_common.test_utils.db import clear_db_dags, clear_db_runs, clear_db_serialized_dags +from dev.tests_common.test_utils.www import _check_last_log if AIRFLOW_V_3_0_PLUS: from airflow.utils.types import DagRunTriggeredByType diff --git a/tests/api_connexion/endpoints/test_dag_source_endpoint.py b/tests/api_connexion/endpoints/test_dag_source_endpoint.py index f4df56ba629a..80f32dc715a3 100644 --- a/tests/api_connexion/endpoints/test_dag_source_endpoint.py +++ b/tests/api_connexion/endpoints/test_dag_source_endpoint.py @@ -23,8 +23,9 @@ import pytest from airflow.models import DagBag -from 
tests.test_utils.api_connexion_utils import assert_401, create_user, delete_user -from tests.test_utils.db import clear_db_dag_code, clear_db_dags, clear_db_serialized_dags + +from dev.tests_common.test_utils.api_connexion_utils import assert_401, create_user, delete_user +from dev.tests_common.test_utils.db import clear_db_dag_code, clear_db_dags, clear_db_serialized_dags pytestmark = [pytest.mark.db_test, pytest.mark.skip_if_database_isolation_mode] diff --git a/tests/api_connexion/endpoints/test_dag_stats_endpoint.py b/tests/api_connexion/endpoints/test_dag_stats_endpoint.py index 9ab5b4976593..a447e2a6a4b2 100644 --- a/tests/api_connexion/endpoints/test_dag_stats_endpoint.py +++ b/tests/api_connexion/endpoints/test_dag_stats_endpoint.py @@ -26,8 +26,9 @@ from airflow.utils.session import create_session from airflow.utils.state import DagRunState from airflow.utils.types import DagRunType -from tests.test_utils.api_connexion_utils import create_user, delete_user -from tests.test_utils.db import clear_db_dags, clear_db_runs, clear_db_serialized_dags + +from dev.tests_common.test_utils.api_connexion_utils import create_user, delete_user +from dev.tests_common.test_utils.db import clear_db_dags, clear_db_runs, clear_db_serialized_dags pytestmark = [pytest.mark.db_test, pytest.mark.skip_if_database_isolation_mode] diff --git a/tests/api_connexion/endpoints/test_dag_warning_endpoint.py b/tests/api_connexion/endpoints/test_dag_warning_endpoint.py index f156d8921c0e..b9c00991413b 100644 --- a/tests/api_connexion/endpoints/test_dag_warning_endpoint.py +++ b/tests/api_connexion/endpoints/test_dag_warning_endpoint.py @@ -23,8 +23,9 @@ from airflow.models.dag import DagModel from airflow.models.dagwarning import DagWarning from airflow.utils.session import create_session -from tests.test_utils.api_connexion_utils import assert_401, create_user, delete_user -from tests.test_utils.db import clear_db_dag_warnings, clear_db_dags + +from dev.tests_common.test_utils.api_connexion_utils import assert_401, create_user, delete_user +from dev.tests_common.test_utils.db import clear_db_dag_warnings, clear_db_dags pytestmark = [pytest.mark.db_test, pytest.mark.skip_if_database_isolation_mode] diff --git a/tests/api_connexion/endpoints/test_event_log_endpoint.py b/tests/api_connexion/endpoints/test_event_log_endpoint.py index e5ca3d301765..42a610890ede 100644 --- a/tests/api_connexion/endpoints/test_event_log_endpoint.py +++ b/tests/api_connexion/endpoints/test_event_log_endpoint.py @@ -21,9 +21,10 @@ from airflow.api_connexion.exceptions import EXCEPTIONS_LINK_MAP from airflow.models import Log from airflow.utils import timezone -from tests.test_utils.api_connexion_utils import assert_401, create_user, delete_user -from tests.test_utils.config import conf_vars -from tests.test_utils.db import clear_db_logs + +from dev.tests_common.test_utils.api_connexion_utils import assert_401, create_user, delete_user +from dev.tests_common.test_utils.config import conf_vars +from dev.tests_common.test_utils.db import clear_db_logs pytestmark = [pytest.mark.db_test, pytest.mark.skip_if_database_isolation_mode] diff --git a/tests/api_connexion/endpoints/test_extra_link_endpoint.py b/tests/api_connexion/endpoints/test_extra_link_endpoint.py index 2c3eacdc91dc..373571a42ee4 100644 --- a/tests/api_connexion/endpoints/test_extra_link_endpoint.py +++ b/tests/api_connexion/endpoints/test_extra_link_endpoint.py @@ -30,11 +30,12 @@ from airflow.utils import timezone from airflow.utils.state import DagRunState from 
airflow.utils.types import DagRunType -from tests.test_utils.api_connexion_utils import create_user, delete_user -from tests.test_utils.compat import AIRFLOW_V_3_0_PLUS, BaseOperatorLink -from tests.test_utils.db import clear_db_runs, clear_db_xcom -from tests.test_utils.mock_operators import CustomOperator -from tests.test_utils.mock_plugins import mock_plugin_manager + +from dev.tests_common.test_utils.api_connexion_utils import create_user, delete_user +from dev.tests_common.test_utils.compat import AIRFLOW_V_3_0_PLUS, BaseOperatorLink +from dev.tests_common.test_utils.db import clear_db_runs, clear_db_xcom +from dev.tests_common.test_utils.mock_operators import CustomOperator +from dev.tests_common.test_utils.mock_plugins import mock_plugin_manager if AIRFLOW_V_3_0_PLUS: from airflow.utils.types import DagRunTriggeredByType diff --git a/tests/api_connexion/endpoints/test_import_error_endpoint.py b/tests/api_connexion/endpoints/test_import_error_endpoint.py index af2b83ebb1ee..f4f7d03b323f 100644 --- a/tests/api_connexion/endpoints/test_import_error_endpoint.py +++ b/tests/api_connexion/endpoints/test_import_error_endpoint.py @@ -23,10 +23,11 @@ from airflow.api_connexion.exceptions import EXCEPTIONS_LINK_MAP from airflow.utils import timezone from airflow.utils.session import provide_session -from tests.test_utils.api_connexion_utils import assert_401, create_user, delete_user -from tests.test_utils.compat import ParseImportError -from tests.test_utils.config import conf_vars -from tests.test_utils.db import clear_db_dags, clear_db_import_errors + +from dev.tests_common.test_utils.api_connexion_utils import assert_401, create_user, delete_user +from dev.tests_common.test_utils.compat import ParseImportError +from dev.tests_common.test_utils.config import conf_vars +from dev.tests_common.test_utils.db import clear_db_dags, clear_db_import_errors pytestmark = [pytest.mark.db_test, pytest.mark.skip_if_database_isolation_mode] diff --git a/tests/api_connexion/endpoints/test_log_endpoint.py b/tests/api_connexion/endpoints/test_log_endpoint.py index 2b112e322184..21d4e8feb83a 100644 --- a/tests/api_connexion/endpoints/test_log_endpoint.py +++ b/tests/api_connexion/endpoints/test_log_endpoint.py @@ -32,8 +32,9 @@ from airflow.operators.empty import EmptyOperator from airflow.utils import timezone from airflow.utils.types import DagRunType -from tests.test_utils.api_connexion_utils import assert_401, create_user, delete_user -from tests.test_utils.db import clear_db_runs + +from dev.tests_common.test_utils.api_connexion_utils import assert_401, create_user, delete_user +from dev.tests_common.test_utils.db import clear_db_runs pytestmark = [pytest.mark.db_test, pytest.mark.skip_if_database_isolation_mode] diff --git a/tests/api_connexion/endpoints/test_mapped_task_instance_endpoint.py b/tests/api_connexion/endpoints/test_mapped_task_instance_endpoint.py index fc53b8952f4a..e41bbcc6c2e8 100644 --- a/tests/api_connexion/endpoints/test_mapped_task_instance_endpoint.py +++ b/tests/api_connexion/endpoints/test_mapped_task_instance_endpoint.py @@ -32,9 +32,10 @@ from airflow.utils.session import provide_session from airflow.utils.state import State, TaskInstanceState from airflow.utils.timezone import datetime -from tests.test_utils.api_connexion_utils import assert_401, create_user, delete_user -from tests.test_utils.db import clear_db_runs, clear_db_sla_miss, clear_rendered_ti_fields -from tests.test_utils.mock_operators import MockOperator + +from dev.tests_common.test_utils.api_connexion_utils 
import assert_401, create_user, delete_user +from dev.tests_common.test_utils.db import clear_db_runs, clear_db_sla_miss, clear_rendered_ti_fields +from dev.tests_common.test_utils.mock_operators import MockOperator pytestmark = [pytest.mark.db_test, pytest.mark.skip_if_database_isolation_mode] diff --git a/tests/api_connexion/endpoints/test_plugin_endpoint.py b/tests/api_connexion/endpoints/test_plugin_endpoint.py index 6670c684cb45..2831af06bf3d 100644 --- a/tests/api_connexion/endpoints/test_plugin_endpoint.py +++ b/tests/api_connexion/endpoints/test_plugin_endpoint.py @@ -28,10 +28,11 @@ from airflow.ti_deps.deps.base_ti_dep import BaseTIDep from airflow.timetables.base import Timetable from airflow.utils.module_loading import qualname -from tests.test_utils.api_connexion_utils import assert_401, create_user, delete_user -from tests.test_utils.compat import BaseOperatorLink -from tests.test_utils.config import conf_vars -from tests.test_utils.mock_plugins import mock_plugin_manager + +from dev.tests_common.test_utils.api_connexion_utils import assert_401, create_user, delete_user +from dev.tests_common.test_utils.compat import BaseOperatorLink +from dev.tests_common.test_utils.config import conf_vars +from dev.tests_common.test_utils.mock_plugins import mock_plugin_manager pytestmark = [pytest.mark.db_test, pytest.mark.skip_if_database_isolation_mode] diff --git a/tests/api_connexion/endpoints/test_pool_endpoint.py b/tests/api_connexion/endpoints/test_pool_endpoint.py index 2cc095d077aa..455c417bd258 100644 --- a/tests/api_connexion/endpoints/test_pool_endpoint.py +++ b/tests/api_connexion/endpoints/test_pool_endpoint.py @@ -21,10 +21,11 @@ from airflow.api_connexion.exceptions import EXCEPTIONS_LINK_MAP from airflow.models.pool import Pool from airflow.utils.session import provide_session -from tests.test_utils.api_connexion_utils import assert_401, create_user, delete_user -from tests.test_utils.config import conf_vars -from tests.test_utils.db import clear_db_pools -from tests.test_utils.www import _check_last_log + +from dev.tests_common.test_utils.api_connexion_utils import assert_401, create_user, delete_user +from dev.tests_common.test_utils.config import conf_vars +from dev.tests_common.test_utils.db import clear_db_pools +from dev.tests_common.test_utils.www import _check_last_log pytestmark = [pytest.mark.db_test, pytest.mark.skip_if_database_isolation_mode] diff --git a/tests/api_connexion/endpoints/test_provider_endpoint.py b/tests/api_connexion/endpoints/test_provider_endpoint.py index b4cf8f10a92a..92e14887ec03 100644 --- a/tests/api_connexion/endpoints/test_provider_endpoint.py +++ b/tests/api_connexion/endpoints/test_provider_endpoint.py @@ -21,7 +21,8 @@ import pytest from airflow.providers_manager import ProviderInfo -from tests.test_utils.api_connexion_utils import create_user, delete_user + +from dev.tests_common.test_utils.api_connexion_utils import create_user, delete_user pytestmark = [pytest.mark.db_test, pytest.mark.skip_if_database_isolation_mode] diff --git a/tests/api_connexion/endpoints/test_task_endpoint.py b/tests/api_connexion/endpoints/test_task_endpoint.py index b2e068bd507f..60237946a82c 100644 --- a/tests/api_connexion/endpoints/test_task_endpoint.py +++ b/tests/api_connexion/endpoints/test_task_endpoint.py @@ -27,8 +27,9 @@ from airflow.models.expandinput import EXPAND_INPUT_EMPTY from airflow.models.serialized_dag import SerializedDagModel from airflow.operators.empty import EmptyOperator -from tests.test_utils.api_connexion_utils import 
assert_401, create_user, delete_user -from tests.test_utils.db import clear_db_dags, clear_db_runs, clear_db_serialized_dags + +from dev.tests_common.test_utils.api_connexion_utils import assert_401, create_user, delete_user +from dev.tests_common.test_utils.db import clear_db_dags, clear_db_runs, clear_db_serialized_dags pytestmark = [pytest.mark.db_test, pytest.mark.skip_if_database_isolation_mode] diff --git a/tests/api_connexion/endpoints/test_task_instance_endpoint.py b/tests/api_connexion/endpoints/test_task_instance_endpoint.py index b5b3163e988d..809c2fab9aa2 100644 --- a/tests/api_connexion/endpoints/test_task_instance_endpoint.py +++ b/tests/api_connexion/endpoints/test_task_instance_endpoint.py @@ -35,9 +35,10 @@ from airflow.utils.state import State from airflow.utils.timezone import datetime from airflow.utils.types import DagRunType -from tests.test_utils.api_connexion_utils import assert_401, create_user, delete_user -from tests.test_utils.db import clear_db_runs, clear_db_sla_miss, clear_rendered_ti_fields -from tests.test_utils.www import _check_last_log + +from dev.tests_common.test_utils.api_connexion_utils import assert_401, create_user, delete_user +from dev.tests_common.test_utils.db import clear_db_runs, clear_db_sla_miss, clear_rendered_ti_fields +from dev.tests_common.test_utils.www import _check_last_log pytestmark = [pytest.mark.db_test, pytest.mark.skip_if_database_isolation_mode] diff --git a/tests/api_connexion/endpoints/test_variable_endpoint.py b/tests/api_connexion/endpoints/test_variable_endpoint.py index aa5f7c99674f..e4302636184d 100644 --- a/tests/api_connexion/endpoints/test_variable_endpoint.py +++ b/tests/api_connexion/endpoints/test_variable_endpoint.py @@ -22,10 +22,11 @@ from airflow.api_connexion.exceptions import EXCEPTIONS_LINK_MAP from airflow.models import Variable -from tests.test_utils.api_connexion_utils import assert_401, create_user, delete_user -from tests.test_utils.config import conf_vars -from tests.test_utils.db import clear_db_variables -from tests.test_utils.www import _check_last_log + +from dev.tests_common.test_utils.api_connexion_utils import assert_401, create_user, delete_user +from dev.tests_common.test_utils.config import conf_vars +from dev.tests_common.test_utils.db import clear_db_variables +from dev.tests_common.test_utils.www import _check_last_log pytestmark = [pytest.mark.db_test, pytest.mark.skip_if_database_isolation_mode] diff --git a/tests/api_connexion/endpoints/test_xcom_endpoint.py b/tests/api_connexion/endpoints/test_xcom_endpoint.py index 809e537f9f88..0293e672c47d 100644 --- a/tests/api_connexion/endpoints/test_xcom_endpoint.py +++ b/tests/api_connexion/endpoints/test_xcom_endpoint.py @@ -30,9 +30,10 @@ from airflow.utils.session import create_session from airflow.utils.timezone import utcnow from airflow.utils.types import DagRunType -from tests.test_utils.api_connexion_utils import assert_401, create_user, delete_user -from tests.test_utils.config import conf_vars -from tests.test_utils.db import clear_db_dags, clear_db_runs, clear_db_xcom + +from dev.tests_common.test_utils.api_connexion_utils import assert_401, create_user, delete_user +from dev.tests_common.test_utils.config import conf_vars +from dev.tests_common.test_utils.db import clear_db_dags, clear_db_runs, clear_db_xcom pytestmark = [pytest.mark.db_test, pytest.mark.skip_if_database_isolation_mode] diff --git a/tests/api_connexion/schemas/test_connection_schema.py b/tests/api_connexion/schemas/test_connection_schema.py index 
cf614eb3b114..d7a0de4e89f0 100644 --- a/tests/api_connexion/schemas/test_connection_schema.py +++ b/tests/api_connexion/schemas/test_connection_schema.py @@ -30,7 +30,8 @@ ) from airflow.models import Connection from airflow.utils.session import create_session, provide_session -from tests.test_utils.db import clear_db_connections + +from dev.tests_common.test_utils.db import clear_db_connections pytestmark = [pytest.mark.db_test, pytest.mark.skip_if_database_isolation_mode] diff --git a/tests/api_connexion/schemas/test_dag_run_schema.py b/tests/api_connexion/schemas/test_dag_run_schema.py index 01ed33fd5988..b874ade4442f 100644 --- a/tests/api_connexion/schemas/test_dag_run_schema.py +++ b/tests/api_connexion/schemas/test_dag_run_schema.py @@ -29,8 +29,9 @@ from airflow.utils import timezone from airflow.utils.session import provide_session from airflow.utils.types import DagRunType -from tests.test_utils.compat import AIRFLOW_V_3_0_PLUS -from tests.test_utils.db import clear_db_runs + +from dev.tests_common.test_utils.compat import AIRFLOW_V_3_0_PLUS +from dev.tests_common.test_utils.db import clear_db_runs if AIRFLOW_V_3_0_PLUS: from airflow.utils.types import DagRunTriggeredByType diff --git a/tests/api_connexion/schemas/test_dataset_schema.py b/tests/api_connexion/schemas/test_dataset_schema.py index 4a6fdf6e6d51..769de6f1b10e 100644 --- a/tests/api_connexion/schemas/test_dataset_schema.py +++ b/tests/api_connexion/schemas/test_dataset_schema.py @@ -30,7 +30,8 @@ from airflow.assets import Asset from airflow.models.asset import AssetAliasModel, AssetEvent, AssetModel from airflow.operators.empty import EmptyOperator -from tests.test_utils.db import clear_db_assets, clear_db_dags + +from dev.tests_common.test_utils.db import clear_db_assets, clear_db_dags pytestmark = [pytest.mark.db_test, pytest.mark.skip_if_database_isolation_mode] diff --git a/tests/api_connexion/schemas/test_error_schema.py b/tests/api_connexion/schemas/test_error_schema.py index fb5e9ea219d2..5c358f2e2c37 100644 --- a/tests/api_connexion/schemas/test_error_schema.py +++ b/tests/api_connexion/schemas/test_error_schema.py @@ -25,8 +25,9 @@ ) from airflow.utils import timezone from airflow.utils.session import provide_session -from tests.test_utils.compat import ParseImportError -from tests.test_utils.db import clear_db_import_errors + +from dev.tests_common.test_utils.compat import ParseImportError +from dev.tests_common.test_utils.db import clear_db_import_errors pytestmark = [pytest.mark.db_test, pytest.mark.skip_if_database_isolation_mode] diff --git a/tests/api_connexion/schemas/test_plugin_schema.py b/tests/api_connexion/schemas/test_plugin_schema.py index 199db02c7268..ee079ce66434 100644 --- a/tests/api_connexion/schemas/test_plugin_schema.py +++ b/tests/api_connexion/schemas/test_plugin_schema.py @@ -28,7 +28,8 @@ ) from airflow.hooks.base import BaseHook from airflow.plugins_manager import AirflowPlugin -from tests.test_utils.compat import BaseOperatorLink + +from dev.tests_common.test_utils.compat import BaseOperatorLink class PluginHook(BaseHook): ... 
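Every hunk in this file-by-file run applies the same mechanical rewrite: helpers that used to be imported from `tests.test_utils.*` now come from the relocated `dev.tests_common.test_utils.*` package, with a blank line separating them from the `airflow.*` imports. A minimal, hypothetical sketch of that rewrite as a script follows — this is not the tool the change was produced with, just the pattern made explicit:

import re
from pathlib import Path

# Old and new homes of the shared test helpers, as seen in the hunks above and below.
OLD_IMPORT = re.compile(r"\bfrom tests\.test_utils\b")
NEW_IMPORT = "from dev.tests_common.test_utils"

def migrate(path: Path) -> None:
    # Rewrite one test module in place; leave it untouched if nothing matches.
    source = path.read_text()
    rewritten = OLD_IMPORT.sub(NEW_IMPORT, source)
    if rewritten != source:
        path.write_text(rewritten)

if __name__ == "__main__":
    for module in Path("tests").rglob("*.py"):
        migrate(module)

The same substitution also covers strings that merely embed the module path (for example the expected plugin reprs in test_plugins_command.py further down), which is why quoted values change alongside import statements.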
diff --git a/tests/api_connexion/schemas/test_pool_schemas.py b/tests/api_connexion/schemas/test_pool_schemas.py index 110103073aaa..23788426c501 100644 --- a/tests/api_connexion/schemas/test_pool_schemas.py +++ b/tests/api_connexion/schemas/test_pool_schemas.py @@ -21,7 +21,8 @@ from airflow.api_connexion.schemas.pool_schema import PoolCollection, pool_collection_schema, pool_schema from airflow.models.pool import Pool from airflow.utils.session import provide_session -from tests.test_utils.db import clear_db_pools + +from dev.tests_common.test_utils.db import clear_db_pools pytestmark = [pytest.mark.db_test, pytest.mark.skip_if_database_isolation_mode] diff --git a/tests/api_connexion/schemas/test_xcom_schema.py b/tests/api_connexion/schemas/test_xcom_schema.py index e1f97dad0520..06c36c31b776 100644 --- a/tests/api_connexion/schemas/test_xcom_schema.py +++ b/tests/api_connexion/schemas/test_xcom_schema.py @@ -30,7 +30,8 @@ from airflow.models import DagRun, XCom from airflow.utils.dates import parse_execution_date from airflow.utils.session import create_session -from tests.test_utils.config import conf_vars + +from dev.tests_common.test_utils.config import conf_vars pytestmark = [pytest.mark.db_test, pytest.mark.skip_if_database_isolation_mode] diff --git a/tests/api_connexion/test_auth.py b/tests/api_connexion/test_auth.py index 54e5632ad84d..e568d618ad66 100644 --- a/tests/api_connexion/test_auth.py +++ b/tests/api_connexion/test_auth.py @@ -22,9 +22,10 @@ from airflow.auth.managers.simple.simple_auth_manager import SimpleAuthManager from airflow.auth.managers.simple.user import SimpleAuthManagerUser -from tests.test_utils.api_connexion_utils import assert_401 -from tests.test_utils.config import conf_vars -from tests.test_utils.db import clear_db_pools + +from dev.tests_common.test_utils.api_connexion_utils import assert_401 +from dev.tests_common.test_utils.config import conf_vars +from dev.tests_common.test_utils.db import clear_db_pools pytestmark = [pytest.mark.db_test, pytest.mark.skip_if_database_isolation_mode] diff --git a/tests/api_connexion/test_parameters.py b/tests/api_connexion/test_parameters.py index d5fdbb79b9cc..57c9c05415f6 100644 --- a/tests/api_connexion/test_parameters.py +++ b/tests/api_connexion/test_parameters.py @@ -29,7 +29,8 @@ validate_istimezone, ) from airflow.utils import timezone -from tests.test_utils.config import conf_vars + +from dev.tests_common.test_utils.config import conf_vars class TestValidateIsTimezone: diff --git a/tests/api_connexion/test_security.py b/tests/api_connexion/test_security.py index c6a112b1a1bb..2051cf613510 100644 --- a/tests/api_connexion/test_security.py +++ b/tests/api_connexion/test_security.py @@ -18,7 +18,7 @@ import pytest -from tests.test_utils.api_connexion_utils import create_user, delete_user +from dev.tests_common.test_utils.api_connexion_utils import create_user, delete_user pytestmark = [pytest.mark.db_test, pytest.mark.skip_if_database_isolation_mode] diff --git a/tests/api_fastapi/views/public/test_connections.py b/tests/api_fastapi/views/public/test_connections.py index 0b76fdbe4f1b..a5cb301a4313 100644 --- a/tests/api_fastapi/views/public/test_connections.py +++ b/tests/api_fastapi/views/public/test_connections.py @@ -20,7 +20,8 @@ from airflow.models import Connection from airflow.utils.session import provide_session -from tests.test_utils.db import clear_db_connections + +from dev.tests_common.test_utils.db import clear_db_connections pytestmark = pytest.mark.db_test diff --git 
a/tests/api_fastapi/views/public/test_dags.py b/tests/api_fastapi/views/public/test_dags.py index 58b3daf35c64..7ac93a2f2e07 100644 --- a/tests/api_fastapi/views/public/test_dags.py +++ b/tests/api_fastapi/views/public/test_dags.py @@ -27,7 +27,8 @@ from airflow.utils.session import provide_session from airflow.utils.state import DagRunState from airflow.utils.types import DagRunType -from tests.test_utils.db import clear_db_dags, clear_db_runs, clear_db_serialized_dags + +from dev.tests_common.test_utils.db import clear_db_dags, clear_db_runs, clear_db_serialized_dags pytestmark = pytest.mark.db_test diff --git a/tests/api_fastapi/views/public/test_variables.py b/tests/api_fastapi/views/public/test_variables.py index 0ee2cebd7734..7957d0bf2224 100644 --- a/tests/api_fastapi/views/public/test_variables.py +++ b/tests/api_fastapi/views/public/test_variables.py @@ -20,7 +20,8 @@ from airflow.models.variable import Variable from airflow.utils.session import provide_session -from tests.test_utils.db import clear_db_variables + +from dev.tests_common.test_utils.db import clear_db_variables pytestmark = pytest.mark.db_test diff --git a/tests/api_fastapi/views/ui/test_assets.py b/tests/api_fastapi/views/ui/test_assets.py index 99cd9bc1e996..d0e93c709b07 100644 --- a/tests/api_fastapi/views/ui/test_assets.py +++ b/tests/api_fastapi/views/ui/test_assets.py @@ -20,7 +20,8 @@ from airflow.assets import Asset from airflow.operators.empty import EmptyOperator -from tests.conftest import initial_db_init + +from dev.tests_common.test_utils.db import initial_db_init pytestmark = pytest.mark.db_test diff --git a/tests/api_fastapi/views/ui/test_dashboard.py b/tests/api_fastapi/views/ui/test_dashboard.py index 970b79ad3568..bee683535704 100644 --- a/tests/api_fastapi/views/ui/test_dashboard.py +++ b/tests/api_fastapi/views/ui/test_dashboard.py @@ -26,7 +26,8 @@ from airflow.operators.empty import EmptyOperator from airflow.utils.state import DagRunState, TaskInstanceState from airflow.utils.types import DagRunType -from tests.test_utils.db import clear_db_runs + +from dev.tests_common.test_utils.db import clear_db_runs pytestmark = pytest.mark.db_test diff --git a/tests/api_internal/endpoints/test_rpc_api_endpoint.py b/tests/api_internal/endpoints/test_rpc_api_endpoint.py index f12e0ae087bb..364b0f44eaf2 100644 --- a/tests/api_internal/endpoints/test_rpc_api_endpoint.py +++ b/tests/api_internal/endpoints/test_rpc_api_endpoint.py @@ -34,8 +34,9 @@ from airflow.utils.jwt_signer import JWTSigner from airflow.utils.state import State from airflow.www import app -from tests.test_utils.config import conf_vars -from tests.test_utils.decorators import dont_initialize_flask_app_submodules + +from dev.tests_common.test_utils.config import conf_vars +from dev.tests_common.test_utils.decorators import dont_initialize_flask_app_submodules # Note: Sounds a bit strange to disable internal API tests in isolation mode but... 
# As long as the test is modelled to run its own internal API endpoints, it is conflicting diff --git a/tests/api_internal/test_internal_api_call.py b/tests/api_internal/test_internal_api_call.py index d779b504ea47..02ae2d9f5512 100644 --- a/tests/api_internal/test_internal_api_call.py +++ b/tests/api_internal/test_internal_api_call.py @@ -34,7 +34,8 @@ from airflow.serialization.serialized_objects import BaseSerialization from airflow.settings import _ENABLE_AIP_44 from airflow.utils.state import State -from tests.test_utils.config import conf_vars + +from dev.tests_common.test_utils.config import conf_vars if TYPE_CHECKING: from airflow.serialization.pydantic.taskinstance import TaskInstancePydantic diff --git a/tests/assets/test_manager.py b/tests/assets/test_manager.py index 950949fe474e..cd4563d9a9c4 100644 --- a/tests/assets/test_manager.py +++ b/tests/assets/test_manager.py @@ -48,7 +48,7 @@ @pytest.fixture def clear_assets(): - from tests.test_utils.db import clear_db_assets + from dev.tests_common.test_utils.db import clear_db_assets clear_db_assets() yield diff --git a/tests/assets/tests_asset.py b/tests/assets/tests_asset.py index afbb46827f3a..10d91e7a00fb 100644 --- a/tests/assets/tests_asset.py +++ b/tests/assets/tests_asset.py @@ -43,7 +43,7 @@ @pytest.fixture def clear_assets(): - from tests.test_utils.db import clear_db_assets + from dev.tests_common.test_utils.db import clear_db_assets clear_db_assets() yield diff --git a/tests/auth/managers/simple/views/test_auth.py b/tests/auth/managers/simple/views/test_auth.py index 197ed0e615ca..a4e2a12fdcf1 100644 --- a/tests/auth/managers/simple/views/test_auth.py +++ b/tests/auth/managers/simple/views/test_auth.py @@ -23,7 +23,8 @@ from airflow.auth.managers.simple.simple_auth_manager import SimpleAuthManager from airflow.www import app as application -from tests.test_utils.config import conf_vars + +from dev.tests_common.test_utils.config import conf_vars @pytest.fixture diff --git a/tests/cli/commands/test_celery_command.py b/tests/cli/commands/test_celery_command.py index 476e6279fbf1..c29d835d8ee0 100644 --- a/tests/cli/commands/test_celery_command.py +++ b/tests/cli/commands/test_celery_command.py @@ -30,7 +30,8 @@ from airflow.cli.commands import celery_command from airflow.configuration import conf from airflow.executors import executor_loader -from tests.test_utils.config import conf_vars + +from dev.tests_common.test_utils.config import conf_vars pytestmark = [pytest.mark.db_test, pytest.mark.skip_if_database_isolation_mode] diff --git a/tests/cli/commands/test_config_command.py b/tests/cli/commands/test_config_command.py index 030303c28ec4..90d38fa27ad0 100644 --- a/tests/cli/commands/test_config_command.py +++ b/tests/cli/commands/test_config_command.py @@ -22,7 +22,8 @@ from airflow.cli import cli_parser from airflow.cli.commands import config_command -from tests.test_utils.config import conf_vars + +from dev.tests_common.test_utils.config import conf_vars STATSD_CONFIG_BEGIN_WITH = "# `StatsD `" diff --git a/tests/cli/commands/test_connection_command.py b/tests/cli/commands/test_connection_command.py index fbfd3d2945e4..06578eb1492c 100644 --- a/tests/cli/commands/test_connection_command.py +++ b/tests/cli/commands/test_connection_command.py @@ -33,7 +33,8 @@ from airflow.models import Connection from airflow.utils.db import merge_conn from airflow.utils.session import create_session -from tests.test_utils.db import clear_db_connections + +from dev.tests_common.test_utils.db import clear_db_connections pytestmark = 
[pytest.mark.db_test, pytest.mark.skip_if_database_isolation_mode] diff --git a/tests/cli/commands/test_dag_command.py b/tests/cli/commands/test_dag_command.py index 338ae44a2b95..00364794f1d5 100644 --- a/tests/cli/commands/test_dag_command.py +++ b/tests/cli/commands/test_dag_command.py @@ -47,8 +47,9 @@ from airflow.utils.state import DagRunState from airflow.utils.types import DagRunType from tests.models import TEST_DAGS_FOLDER -from tests.test_utils.config import conf_vars -from tests.test_utils.db import clear_db_dags, clear_db_runs + +from dev.tests_common.test_utils.config import conf_vars +from dev.tests_common.test_utils.db import clear_db_dags, clear_db_runs DEFAULT_DATE = timezone.make_aware(datetime(2015, 1, 1), timezone=timezone.utc) if pendulum.__version__.startswith("3"): diff --git a/tests/cli/commands/test_dag_processor_command.py b/tests/cli/commands/test_dag_processor_command.py index 2b84f506e78f..643710ce1e41 100644 --- a/tests/cli/commands/test_dag_processor_command.py +++ b/tests/cli/commands/test_dag_processor_command.py @@ -24,7 +24,8 @@ from airflow.cli import cli_parser from airflow.cli.commands import dag_processor_command from airflow.configuration import conf -from tests.test_utils.config import conf_vars + +from dev.tests_common.test_utils.config import conf_vars pytestmark = [pytest.mark.db_test, pytest.mark.skip_if_database_isolation_mode] diff --git a/tests/cli/commands/test_info_command.py b/tests/cli/commands/test_info_command.py index 44287bb0ec16..c6bea2d8a5bb 100644 --- a/tests/cli/commands/test_info_command.py +++ b/tests/cli/commands/test_info_command.py @@ -32,7 +32,8 @@ from airflow.config_templates import airflow_local_settings from airflow.logging_config import configure_logging from airflow.version import version as airflow_version -from tests.test_utils.config import conf_vars + +from dev.tests_common.test_utils.config import conf_vars def capture_show_output(instance): diff --git a/tests/cli/commands/test_internal_api_command.py b/tests/cli/commands/test_internal_api_command.py index 194f7874839c..11123c7c211a 100644 --- a/tests/cli/commands/test_internal_api_command.py +++ b/tests/cli/commands/test_internal_api_command.py @@ -32,7 +32,8 @@ from airflow.cli.commands.internal_api_command import GunicornMonitor from airflow.settings import _ENABLE_AIP_44 from tests.cli.commands._common_cli_classes import _CommonCLIGunicornTestClass -from tests.test_utils.config import conf_vars + +from dev.tests_common.test_utils.config import conf_vars console = Console(width=400, color_system="standard") diff --git a/tests/cli/commands/test_jobs_command.py b/tests/cli/commands/test_jobs_command.py index 28c739a2f260..61540a68c51c 100644 --- a/tests/cli/commands/test_jobs_command.py +++ b/tests/cli/commands/test_jobs_command.py @@ -27,7 +27,8 @@ from airflow.jobs.scheduler_job_runner import SchedulerJobRunner from airflow.utils.session import create_session from airflow.utils.state import JobState, State -from tests.test_utils.db import clear_db_jobs + +from dev.tests_common.test_utils.db import clear_db_jobs @pytest.mark.skip_if_database_isolation_mode diff --git a/tests/cli/commands/test_kerberos_command.py b/tests/cli/commands/test_kerberos_command.py index 35e90fb54923..4a5e73ece788 100644 --- a/tests/cli/commands/test_kerberos_command.py +++ b/tests/cli/commands/test_kerberos_command.py @@ -23,7 +23,8 @@ from airflow.cli import cli_parser from airflow.cli.commands import kerberos_command from airflow.security.kerberos import KerberosMode -from 
tests.test_utils.config import conf_vars + +from dev.tests_common.test_utils.config import conf_vars pytestmark = [pytest.mark.db_test, pytest.mark.skip_if_database_isolation_mode] diff --git a/tests/cli/commands/test_kubernetes_command.py b/tests/cli/commands/test_kubernetes_command.py index d489aa3ea93f..1e53bdc77d8d 100644 --- a/tests/cli/commands/test_kubernetes_command.py +++ b/tests/cli/commands/test_kubernetes_command.py @@ -28,7 +28,8 @@ from airflow.cli import cli_parser from airflow.cli.commands import kubernetes_command from airflow.executors import executor_loader -from tests.test_utils.config import conf_vars + +from dev.tests_common.test_utils.config import conf_vars pytestmark = [pytest.mark.db_test, pytest.mark.skip_if_database_isolation_mode] diff --git a/tests/cli/commands/test_plugins_command.py b/tests/cli/commands/test_plugins_command.py index 288b4453bb15..703b6879d7a3 100644 --- a/tests/cli/commands/test_plugins_command.py +++ b/tests/cli/commands/test_plugins_command.py @@ -29,7 +29,8 @@ from airflow.listeners.listener import get_listener_manager from airflow.plugins_manager import AirflowPlugin from tests.plugins.test_plugin import AirflowTestPlugin as ComplexAirflowPlugin -from tests.test_utils.mock_plugins import mock_plugin_manager + +from dev.tests_common.test_utils.mock_plugins import mock_plugin_manager pytestmark = [pytest.mark.db_test, pytest.mark.skip_if_database_isolation_mode] @@ -88,15 +89,15 @@ def test_should_display_one_plugin(self): } ], "global_operator_extra_links": [ - "<tests.test_utils.mock_operators.AirflowLink object>", - "<tests.test_utils.mock_operators.GithubLink object>", + "<dev.tests_common.test_utils.mock_operators.AirflowLink object>", + "<dev.tests_common.test_utils.mock_operators.GithubLink object>", ], "timetables": ["tests.plugins.test_plugin.CustomCronDataIntervalTimetable"], "operator_extra_links": [ - "<tests.test_utils.mock_operators.GoogleLink object>", - "<tests.test_utils.mock_operators.AirflowLink2 object>", - "<tests.test_utils.mock_operators.CustomOpLink object>", - "<tests.test_utils.mock_operators.CustomBaseIndexOpLink object>", + "<dev.tests_common.test_utils.mock_operators.GoogleLink object>", + "<dev.tests_common.test_utils.mock_operators.AirflowLink2 object>", + "<dev.tests_common.test_utils.mock_operators.CustomOpLink object>", + "<dev.tests_common.test_utils.mock_operators.CustomBaseIndexOpLink object>", ], "hooks": ["tests.plugins.test_plugin.PluginHook"], "listeners": [ diff --git a/tests/cli/commands/test_rotate_fernet_key_command.py b/tests/cli/commands/test_rotate_fernet_key_command.py index d45753d75f70..d638c2010e18 100644 --- a/tests/cli/commands/test_rotate_fernet_key_command.py +++ b/tests/cli/commands/test_rotate_fernet_key_command.py @@ -26,8 +26,9 @@ from airflow.hooks.base import BaseHook from airflow.models import Connection, Variable from airflow.utils.session import provide_session -from tests.test_utils.config import conf_vars -from tests.test_utils.db import clear_db_connections, clear_db_variables + +from dev.tests_common.test_utils.config import conf_vars +from dev.tests_common.test_utils.db import clear_db_connections, clear_db_variables pytestmark = [pytest.mark.db_test, pytest.mark.skip_if_database_isolation_mode] diff --git a/tests/cli/commands/test_scheduler_command.py b/tests/cli/commands/test_scheduler_command.py index 2e28196cb5c3..8fce5d6e289b 100644 --- a/tests/cli/commands/test_scheduler_command.py +++ b/tests/cli/commands/test_scheduler_command.py @@ -29,7 +29,8 @@ from airflow.executors import executor_loader from airflow.utils.scheduler_health import HealthServer, serve_health_check from airflow.utils.serve_logs import serve_logs -from tests.test_utils.config import conf_vars + +from dev.tests_common.test_utils.config import conf_vars pytestmark = [pytest.mark.db_test, pytest.mark.skip_if_database_isolation_mode] diff --git a/tests/cli/commands/test_task_command.py b/tests/cli/commands/test_task_command.py index 3397005b80eb..fb512792eb32 100644 --- a/tests/cli/commands/test_task_command.py +++ b/tests/cli/commands/test_task_command.py @@ -51,9 +51,10 @@ from airflow.utils.session import create_session from airflow.utils.state import State, TaskInstanceState from
airflow.utils.types import DagRunType -from tests.test_utils.compat import AIRFLOW_V_3_0_PLUS -from tests.test_utils.config import conf_vars -from tests.test_utils.db import clear_db_pools, clear_db_runs + +from dev.tests_common.test_utils.compat import AIRFLOW_V_3_0_PLUS +from dev.tests_common.test_utils.config import conf_vars +from dev.tests_common.test_utils.db import clear_db_pools, clear_db_runs if AIRFLOW_V_3_0_PLUS: from airflow.utils.types import DagRunTriggeredByType diff --git a/tests/cli/commands/test_variable_command.py b/tests/cli/commands/test_variable_command.py index f95e7b164827..d35184c73c80 100644 --- a/tests/cli/commands/test_variable_command.py +++ b/tests/cli/commands/test_variable_command.py @@ -29,7 +29,8 @@ from airflow.cli.commands import variable_command from airflow.models import Variable from airflow.utils.session import create_session -from tests.test_utils.db import clear_db_variables + +from dev.tests_common.test_utils.db import clear_db_variables pytestmark = [pytest.mark.db_test, pytest.mark.skip_if_database_isolation_mode] diff --git a/tests/cli/commands/test_webserver_command.py b/tests/cli/commands/test_webserver_command.py index 5531f674689b..b72f4e459603 100644 --- a/tests/cli/commands/test_webserver_command.py +++ b/tests/cli/commands/test_webserver_command.py @@ -31,7 +31,8 @@ from airflow.cli.commands import webserver_command from airflow.cli.commands.webserver_command import GunicornMonitor from tests.cli.commands._common_cli_classes import _CommonCLIGunicornTestClass -from tests.test_utils.config import conf_vars + +from dev.tests_common.test_utils.config import conf_vars console = Console(width=400, color_system="standard") diff --git a/tests/cli/conftest.py b/tests/cli/conftest.py index 9f0a63af0697..61e27b0e74de 100644 --- a/tests/cli/conftest.py +++ b/tests/cli/conftest.py @@ -21,12 +21,12 @@ import pytest -from airflow.cli import cli_parser from airflow.executors import local_executor from airflow.models.dagbag import DagBag from airflow.providers.celery.executors import celery_executor, celery_kubernetes_executor from airflow.providers.cncf.kubernetes.executors import kubernetes_executor, local_kubernetes_executor -from tests.test_utils.config import conf_vars + +from dev.tests_common.test_utils.config import conf_vars # Create custom executors here because conftest is imported first custom_executor_module = type(sys)("custom_executor") @@ -61,4 +61,6 @@ def dagbag(): @pytest.fixture(scope="session") def parser(): + from airflow.cli import cli_parser + return cli_parser.get_parser() diff --git a/tests/cli/test_cli_parser.py b/tests/cli/test_cli_parser.py index 2244b6dbd586..46ac36803fac 100644 --- a/tests/cli/test_cli_parser.py +++ b/tests/cli/test_cli_parser.py @@ -43,7 +43,8 @@ from airflow.executors.local_executor import LocalExecutor from airflow.providers.amazon.aws.executors.ecs.ecs_executor import AwsEcsExecutor from airflow.providers.celery.executors.celery_executor import CeleryExecutor -from tests.test_utils.config import conf_vars + +from dev.tests_common.test_utils.config import conf_vars pytestmark = pytest.mark.db_test diff --git a/tests/conftest.py b/tests/conftest.py index 60d009416fe8..f956865e7a1c 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -16,126 +16,33 @@ # under the License. 
from __future__ import annotations -import json import os -import platform -import re -import subprocess import sys -from contextlib import ExitStack, suppress -from datetime import datetime, timedelta, timezone -from pathlib import Path from typing import TYPE_CHECKING import pytest -import time_machine -from itsdangerous import URLSafeSerializer - -if TYPE_CHECKING: - from tests._internals.capture_warnings import CaptureWarningsPlugin # noqa: F401 - from tests._internals.forbidden_warnings import ForbiddenWarningsPlugin # noqa: F401 # We should set these before loading _any_ of the rest of airflow so that the # unit test mode config is set as early as possible. assert "airflow" not in sys.modules, "No airflow module can be imported before these lines" -keep_env_variables = "--keep-env-variables" in sys.argv - -if not keep_env_variables: - # Clear all Environment Variables that might have side effect, - # For example, defined in /files/airflow-breeze-config/variables.env - _AIRFLOW_CONFIG_PATTERN = re.compile(r"^AIRFLOW__(.+)__(.+)$") - _KEEP_CONFIGS_SETTINGS: dict[str, dict[str, set[str]]] = { - # Keep always these configurations - "always": { - "database": {"sql_alchemy_conn"}, - "core": {"sql_alchemy_conn"}, - "celery": {"result_backend", "broker_url"}, - }, - # Keep per enabled integrations - "celery": {"celery": {"*"}, "celery_broker_transport_options": {"*"}}, - "kerberos": {"kerberos": {"*"}}, - } - if os.environ.get("RUN_TESTS_WITH_DATABASE_ISOLATION", "false").lower() == "true": - _KEEP_CONFIGS_SETTINGS["always"].update( - { - "core": { - "internal_api_url", - "fernet_key", - "database_access_isolation", - "internal_api_secret_key", - "internal_api_clock_grace", - }, - } - ) - _ENABLED_INTEGRATIONS = {e.split("_", 1)[-1].lower() for e in os.environ if e.startswith("INTEGRATION_")} - _KEEP_CONFIGS: dict[str, set[str]] = {} - for keep_settings_key in ("always", *_ENABLED_INTEGRATIONS): - if keep_settings := _KEEP_CONFIGS_SETTINGS.get(keep_settings_key): - for section, options in keep_settings.items(): - if section not in _KEEP_CONFIGS: - _KEEP_CONFIGS[section] = options - else: - _KEEP_CONFIGS[section].update(options) - for env_key in os.environ.copy(): - if m := _AIRFLOW_CONFIG_PATTERN.match(env_key): - section, option = m.group(1).lower(), m.group(2).lower() - if not (ko := _KEEP_CONFIGS.get(section)) or not ("*" in ko or option in ko): - del os.environ[env_key] - -SUPPORTED_DB_BACKENDS = ("sqlite", "postgres", "mysql") - -# A bit of a Hack - but we need to check args before they are parsed by pytest in order to -# configure the DB before Airflow gets initialized (which happens at airflow import time). 
-# Using env variables also handles the case, when python-xdist is used - python-xdist spawns separate -# processes and does not pass all args to them (it's done via env variables) so we are doing the -# same here and detect whether `--skip-db-tests` or `--run-db-tests-only` is passed to pytest -# and set env variables so the processes spawned by python-xdist can read the status from there -skip_db_tests = "--skip-db-tests" in sys.argv or os.environ.get("_AIRFLOW_SKIP_DB_TESTS") == "true" -run_db_tests_only = ( - "--run-db-tests-only" in sys.argv or os.environ.get("_AIRFLOW_RUN_DB_TESTS_ONLY") == "true" -) - -if skip_db_tests: - if run_db_tests_only: - raise Exception("You cannot specify both --skip-db-tests and --run-db-tests-only together") - # Make sure sqlalchemy will not be usable for pure unit tests even if initialized - os.environ["AIRFLOW__CORE__SQL_ALCHEMY_CONN"] = "bad_schema:///" - os.environ["AIRFLOW__DATABASE__SQL_ALCHEMY_CONN"] = "bad_schema:///" - os.environ["_IN_UNIT_TESTS"] = "true" - # Set it here to pass the flag to python-xdist spawned processes - os.environ["_AIRFLOW_SKIP_DB_TESTS"] = "true" - -if run_db_tests_only: - # Set it here to pass the flag to python-xdist spawned processes - os.environ["_AIRFLOW_RUN_DB_TESTS_ONLY"] = "true" - -AIRFLOW_TESTS_DIR = Path(os.path.dirname(os.path.realpath(__file__))).resolve() -AIRFLOW_SOURCES_ROOT_DIR = AIRFLOW_TESTS_DIR.parent.parent - -os.environ["AIRFLOW__CORE__PLUGINS_FOLDER"] = os.fspath(AIRFLOW_TESTS_DIR / "plugins") -os.environ["AIRFLOW__CORE__DAGS_FOLDER"] = os.fspath(AIRFLOW_TESTS_DIR / "dags") -os.environ["AIRFLOW__CORE__UNIT_TEST_MODE"] = "True" -os.environ["AWS_DEFAULT_REGION"] = os.environ.get("AWS_DEFAULT_REGION") or "us-east-1" -os.environ["CREDENTIALS_DIR"] = os.environ.get("CREDENTIALS_DIR") or "/files/airflow-breeze-config/keys" -os.environ["AIRFLOW_ENABLE_AIP_44"] = os.environ.get("AIRFLOW_ENABLE_AIP_44") or "true" - -if platform.system() == "Darwin": - # mocks from unittest.mock work correctly in subprocesses only if they are created by "fork" method - # but macOS uses "spawn" by default - os.environ["AIRFLOW__CORE__MP_START_METHOD"] = "fork" +pytest_plugins = "dev.tests_common.pytest_plugin" # Ignore files that are really test dags to be ignored by pytest collect_ignore = [ "tests/dags/subdir1/test_ignore_this.py", "tests/dags/test_invalid_dup_task.py", "tests/dags_corrupted/test_impersonation_custom.py", - "tests/test_utils/perf/dags/elastic_dag.py", + "dev/tests_common/test_utils/perf/dags/elastic_dag.py", ] -# https://docs.pytest.org/en/stable/reference/reference.html#stash -capture_warnings_key = pytest.StashKey["CaptureWarningsPlugin"]() -forbidden_warnings_key = pytest.StashKey["ForbiddenWarningsPlugin"]() + +@pytest.hookimpl(tryfirst=True) +def pytest_configure(config: pytest.Config) -> None: + dep_path = [config.rootpath.joinpath("tests", "deprecations_ignore.yml")] + config.inicfg["airflow_deprecations_ignore"] = ( + config.inicfg.get("airflow_deprecations_ignore", []) + dep_path # type: ignore[assignment,operator] + ) @pytest.fixture @@ -151,1308 +58,6 @@ def reset_environment(): os.environ[key] = init_env[key] -@pytest.fixture -def secret_key() -> str: - """Return secret key configured.""" - from airflow.configuration import conf - - the_key = conf.get("webserver", "SECRET_KEY") - if the_key is None: - raise RuntimeError( - "The secret key SHOULD be configured as `[webserver] secret_key` in the " - "configuration/environment at this stage! 
" - ) - return the_key - - -@pytest.fixture -def url_safe_serializer(secret_key) -> URLSafeSerializer: - return URLSafeSerializer(secret_key) - - -@pytest.fixture -def reset_db(): - """Resets Airflow db.""" - - from airflow.utils import db - - db.resetdb() - - -ALLOWED_TRACE_SQL_COLUMNS = ["num", "time", "trace", "sql", "parameters", "count"] - - -@pytest.fixture(autouse=True) -def trace_sql(request): - from tests.test_utils.perf.perf_kit.sqlalchemy import ( # isort: skip - count_queries, - trace_queries, - ) - - """Displays queries from the tests to console.""" - trace_sql_option = request.config.option.trace_sql - if not trace_sql_option: - yield - return - - terminal_reporter = request.config.pluginmanager.getplugin("terminalreporter") - # if no terminal reporter plugin is present, nothing we can do here; - # this can happen when this function executes in a worker node - # when using pytest-xdist, for example - if terminal_reporter is None: - yield - return - - columns = [col.strip() for col in trace_sql_option.split(",")] - - def pytest_print(text): - return terminal_reporter.write_line(text) - - with ExitStack() as exit_stack: - if columns == ["num"]: - # It is very unlikely that the user wants to display only numbers, but probably - # the user just wants to count the queries. - exit_stack.enter_context(count_queries(print_fn=pytest_print)) - elif any(c in columns for c in ["time", "trace", "sql", "parameters"]): - exit_stack.enter_context( - trace_queries( - display_num="num" in columns, - display_time="time" in columns, - display_trace="trace" in columns, - display_sql="sql" in columns, - display_parameters="parameters" in columns, - print_fn=pytest_print, - ) - ) - - yield - - -@pytest.fixture(autouse=True, scope="session") -def set_db_isolation_mode(): - if os.environ.get("RUN_TESTS_WITH_DATABASE_ISOLATION", "false").lower() == "true": - from airflow.api_internal.internal_api_call import InternalApiConfig - - InternalApiConfig.set_use_internal_api("tests", allow_tests_to_use_db=True) - - -def skip_if_database_isolation_mode(item): - if os.environ.get("RUN_TESTS_WITH_DATABASE_ISOLATION", "false").lower() == "true": - for _ in item.iter_markers(name="skip_if_database_isolation_mode"): - pytest.skip("This test is skipped because it is not allowed in database isolation mode.") - - -def pytest_addoption(parser: pytest.Parser): - """Add options parser for custom plugins.""" - group = parser.getgroup("airflow") - group.addoption( - "--with-db-init", - action="store_true", - dest="db_init", - help="Forces database initialization before tests", - ) - group.addoption( - "--integration", - action="append", - dest="integration", - metavar="INTEGRATIONS", - help="only run tests matching integration specified: " - "[cassandra,kerberos,mongo,celery,statsd,trino]. 
", - ) - group.addoption( - "--keep-env-variables", - action="store_true", - dest="keep_env_variables", - help="do not clear environment variables that might have side effect while running tests", - ) - group.addoption( - "--skip-db-tests", - action="store_true", - dest="skip_db_tests", - help="skip tests that require database", - ) - group.addoption( - "--run-db-tests-only", - action="store_true", - dest="run_db_tests_only", - help="only run tests requiring database", - ) - group.addoption( - "--backend", - action="store", - dest="backend", - metavar="BACKEND", - help="only run tests matching the backend: [sqlite,postgres,mysql].", - ) - group.addoption( - "--system", - action="append", - dest="system", - metavar="SYSTEMS", - help="only run tests matching the system specified [google.cloud, google.marketing_platform]", - ) - group.addoption( - "--include-long-running", - action="store_true", - dest="include_long_running", - help="Includes long running tests (marked with long_running marker). They are skipped by default.", - ) - group.addoption( - "--include-quarantined", - action="store_true", - dest="include_quarantined", - help="Includes quarantined tests (marked with quarantined marker). They are skipped by default.", - ) - group.addoption( - "--exclude-virtualenv-operator", - action="store_true", - dest="exclude_virtualenv_operator", - help="Excludes virtualenv operators tests (marked with virtualenv_test marker).", - ) - group.addoption( - "--exclude-external-python-operator", - action="store_true", - dest="exclude_external_python_operator", - help="Excludes external python operator tests (marked with external_python_test marker).", - ) - allowed_trace_sql_columns_list = ",".join(ALLOWED_TRACE_SQL_COLUMNS) - group.addoption( - "--trace-sql", - action="store", - dest="trace_sql", - help=( - "Trace SQL statements. As an argument, you must specify the columns to be " - f"displayed as a comma-separated list. Supported values: [f{allowed_trace_sql_columns_list}]" - ), - metavar="COLUMNS", - ) - group.addoption( - "--no-db-cleanup", - action="store_false", - dest="db_cleanup", - help="Disable DB clear before each test module.", - ) - group.addoption( - "--disable-forbidden-warnings", - action="store_true", - dest="disable_forbidden_warnings", - help="Disable raising an error if forbidden warnings detected.", - ) - group.addoption( - "--disable-capture-warnings", - action="store_true", - dest="disable_capture_warnings", - help="Disable internal capture warnings.", - ) - group.addoption( - "--warning-output-path", - action="store", - dest="warning_output_path", - metavar="PATH", - help=( - "Path for resulting captured warnings. Absolute or relative to the `tests` directory. " - "If not provided or environment variable `CAPTURE_WARNINGS_OUTPUT` not set " - "then 'warnings.txt' will be used." 
- ), - ) - parser.addini( - name="forbidden_warnings", - type="linelist", - help="List of internal Airflow warnings which are prohibited during tests execution.", - ) - - -def initial_db_init(): - from flask import Flask - - from airflow.configuration import conf - from airflow.utils import db - from airflow.www.extensions.init_appbuilder import init_appbuilder - from airflow.www.extensions.init_auth_manager import get_auth_manager - from tests.test_utils.compat import AIRFLOW_V_2_8_PLUS - - db.resetdb() - db.bootstrap_dagbag() - # minimal app to add roles - flask_app = Flask(__name__) - flask_app.config["SQLALCHEMY_DATABASE_URI"] = conf.get("database", "SQL_ALCHEMY_CONN") - init_appbuilder(flask_app) - if AIRFLOW_V_2_8_PLUS: - get_auth_manager().init() - - -@pytest.fixture(autouse=True, scope="session") -def initialize_airflow_tests(request): - """Helper that setups Airflow testing environment.""" - print(" AIRFLOW ".center(60, "=")) - - # Setup test environment for breeze - home = os.path.expanduser("~") - airflow_home = os.environ.get("AIRFLOW_HOME") or os.path.join(home, "airflow") - - print(f"Home of the user: {home}\nAirflow home {airflow_home}") - - # Initialize Airflow db if required - lock_file = os.path.join(airflow_home, ".airflow_db_initialised") - if not skip_db_tests: - if request.config.option.db_init: - print("Initializing the DB - forced with --with-db-init switch.") - initial_db_init() - elif not os.path.exists(lock_file): - print( - "Initializing the DB - first time after entering the container.\n" - "You can force re-initialization the database by adding --with-db-init switch to run-tests." - ) - initial_db_init() - # Create pid file - with open(lock_file, "w+"): - pass - else: - print( - "Skipping initializing of the DB as it was initialized already.\n" - "You can re-initialize the database by adding --with-db-init flag when running tests." - ) - integration_kerberos = os.environ.get("INTEGRATION_KERBEROS") - if integration_kerberos == "true": - # Initialize kerberos - kerberos = os.environ.get("KRB5_KTNAME") - if kerberos: - subprocess.check_call(["kinit", "-kt", kerberos, "bob@EXAMPLE.COM"]) - else: - print("Kerberos enabled! Please setup KRB5_KTNAME environment variable") - sys.exit(1) - - -def pytest_configure(config: pytest.Config) -> None: - if (backend := config.getoption("backend", default=None)) and backend not in SUPPORTED_DB_BACKENDS: - msg = ( - f"Provided DB backend {backend!r} not supported, " - f"expected one of: {', '.join(map(repr, SUPPORTED_DB_BACKENDS))}" - ) - pytest.exit(msg, returncode=6) - - config.addinivalue_line("markers", "integration(name): mark test to run with named integration") - config.addinivalue_line("markers", "backend(name): mark test to run with named backend") - config.addinivalue_line("markers", "system(name): mark test to run with named system") - config.addinivalue_line("markers", "platform(name): mark test to run with specific platform/environment") - config.addinivalue_line("markers", "long_running: mark test that run for a long time (many minutes)") - config.addinivalue_line( - "markers", "quarantined: mark test that are in quarantine (i.e. 
flaky, need to be isolated and fixed)" - ) - config.addinivalue_line( - "markers", "credential_file(name): mark tests that require credential file in CREDENTIALS_DIR" - ) - config.addinivalue_line( - "markers", "need_serialized_dag: mark tests that require dags in serialized form to be present" - ) - config.addinivalue_line( - "markers", - "db_test: mark tests that require database to be present", - ) - config.addinivalue_line( - "markers", - "non_db_test_override: you can mark individual tests with this marker to override the db_test marker", - ) - config.addinivalue_line( - "markers", - "virtualenv_operator: virtualenv operator tests are 'long', we should run them separately", - ) - config.addinivalue_line( - "markers", - "external_python_operator: external python operator tests are 'long', we should run them separately", - ) - config.addinivalue_line("markers", "enable_redact: do not mock redact secret masker") - config.addinivalue_line("markers", "skip_if_database_isolation_mode: skip if DB isolation is enabled") - - os.environ["_AIRFLOW__SKIP_DATABASE_EXECUTOR_COMPATIBILITY_CHECK"] = "1" - - # Setup internal warnings plugins - if "ignore" in sys.warnoptions: - config.option.disable_forbidden_warnings = True - config.option.disable_capture_warnings = True - if not config.pluginmanager.get_plugin("warnings"): - # Internal forbidden warnings plugin depends on builtin pytest warnings plugin - config.option.disable_forbidden_warnings = True - - forbidden_warnings: list[str] | None = config.getini("forbidden_warnings") - if not config.option.disable_forbidden_warnings and forbidden_warnings: - from tests._internals.forbidden_warnings import ForbiddenWarningsPlugin - - forbidden_warnings_plugin = ForbiddenWarningsPlugin( - config=config, - forbidden_warnings=tuple(map(str.strip, forbidden_warnings)), - ) - config.pluginmanager.register(forbidden_warnings_plugin) - config.stash[forbidden_warnings_key] = forbidden_warnings_plugin - - if not config.option.disable_capture_warnings: - from tests._internals.capture_warnings import CaptureWarningsPlugin - - capture_warnings_plugin = CaptureWarningsPlugin( - config=config, output_path=config.getoption("warning_output_path", default=None) - ) - config.pluginmanager.register(capture_warnings_plugin) - config.stash[capture_warnings_key] = capture_warnings_plugin - - -def pytest_unconfigure(config: pytest.Config) -> None: - os.environ.pop("_AIRFLOW__SKIP_DATABASE_EXECUTOR_COMPATIBILITY_CHECK", None) - if forbidden_warnings_plugin := config.stash.get(forbidden_warnings_key, None): - del config.stash[forbidden_warnings_key] - config.pluginmanager.unregister(forbidden_warnings_plugin) - if capture_warnings_plugin := config.stash.get(capture_warnings_key, None): - del config.stash[capture_warnings_key] - config.pluginmanager.unregister(capture_warnings_plugin) - - -def skip_if_not_marked_with_integration(selected_integrations, item): - for marker in item.iter_markers(name="integration"): - integration_name = marker.args[0] - if integration_name in selected_integrations or "all" in selected_integrations: - return - pytest.skip( - f"The test is skipped because it does not have the right integration marker. " - f"Only tests marked with pytest.mark.integration(INTEGRATION) are run with INTEGRATION " - f"being one of {selected_integrations}. 
{item}" - ) - - -def skip_if_not_marked_with_backend(selected_backend, item): - for marker in item.iter_markers(name="backend"): - backend_names = marker.args - if selected_backend in backend_names: - return - pytest.skip( - f"The test is skipped because it does not have the right backend marker. " - f"Only tests marked with pytest.mark.backend('{selected_backend}') are run: {item}" - ) - - -def skip_if_platform_doesnt_match(marker): - allowed_platforms = ("linux", "breeze") - if not (args := marker.args): - pytest.fail(f"No platform specified, expected one of: {', '.join(map(repr, allowed_platforms))}") - elif not all(a in allowed_platforms for a in args): - pytest.fail( - f"Allowed platforms {', '.join(map(repr, allowed_platforms))}; " - f"but got: {', '.join(map(repr, args))}" - ) - if "linux" in args: - if not sys.platform.startswith("linux"): - pytest.skip("Test expected to run on Linux platform.") - if "breeze" in args: - if not os.path.isfile("/.dockerenv") or os.environ.get("BREEZE", "").lower() != "true": - raise pytest.skip( - "Test expected to run into Airflow Breeze container. " - "Maybe because it is to dangerous to run it outside." - ) - - -def skip_if_not_marked_with_system(selected_systems, item): - for marker in item.iter_markers(name="system"): - systems_name = marker.args[0] - if systems_name in selected_systems or "all" in selected_systems: - return - pytest.skip( - f"The test is skipped because it does not have the right system marker. " - f"Only tests marked with pytest.mark.system(SYSTEM) are run with SYSTEM " - f"being one of {selected_systems}. {item}" - ) - - -def skip_system_test(item): - for marker in item.iter_markers(name="system"): - pytest.skip( - f"The test is skipped because it has system marker. System tests are only run when " - f"--system flag with the right system ({marker.args[0]}) is passed to pytest. {item}" - ) - - -def skip_long_running_test(item): - for _ in item.iter_markers(name="long_running"): - pytest.skip( - f"The test is skipped because it has long_running marker. " - f"And --include-long-running flag is not passed to pytest. {item}" - ) - - -def skip_quarantined_test(item): - for _ in item.iter_markers(name="quarantined"): - pytest.skip( - f"The test is skipped because it has quarantined marker. " - f"And --include-quarantined flag is not passed to pytest. {item}" - ) - - -def skip_virtualenv_operator_test(item): - for _ in item.iter_markers(name="virtualenv_operator"): - pytest.skip( - f"The test is skipped because it has virtualenv_operator marker. " - f"And --exclude-virtualenv-operator flag is not passed to pytest. {item}" - ) - - -def skip_external_python_operator_test(item): - for _ in item.iter_markers(name="external_python_operator"): - pytest.skip( - f"The test is skipped because it has external_python_operator marker. " - f"And --exclude-external-python-operator flag is not passed to pytest. {item}" - ) - - -def skip_db_test(item): - if next(item.iter_markers(name="db_test"), None): - if next(item.iter_markers(name="non_db_test_override"), None): - # non_db_test can override the db_test set for example on module or class level - return - else: - pytest.skip( - f"The test is skipped as it is DB test " - f"and --skip-db-tests is flag is passed to pytest. {item}" - ) - if next(item.iter_markers(name="backend"), None): - # also automatically skip tests marked with `backend` marker as they are implicitly - # db tests - pytest.skip( - f"The test is skipped as it is DB test " - f"and --skip-db-tests is flag is passed to pytest. 
{item}" - ) - - -def only_run_db_test(item): - if next(item.iter_markers(name="db_test"), None) and not next( - item.iter_markers(name="non_db_test_override"), None - ): - # non_db_test at individual level can override the db_test set for example on module or class level - return - else: - if next(item.iter_markers(name="backend"), None): - # Also do not skip the tests marked with `backend` marker - as it is implicitly a db test - return - pytest.skip( - f"The test is skipped as it is not a DB tests " - f"and --run-db-tests-only flag is passed to pytest. {item}" - ) - - -def skip_if_integration_disabled(marker, item): - integration_name = marker.args[0] - environment_variable_name = "INTEGRATION_" + integration_name.upper() - environment_variable_value = os.environ.get(environment_variable_name) - if not environment_variable_value or environment_variable_value != "true": - pytest.skip( - f"The test requires {integration_name} integration started and " - f"{environment_variable_name} environment variable to be set to true (it is '{environment_variable_value}')." - f" It can be set by specifying '--integration {integration_name}' at breeze startup" - f": {item}" - ) - - -def skip_if_wrong_backend(marker: pytest.Mark, item: pytest.Item) -> None: - if not (backend_names := marker.args): - reason = ( - "`pytest.mark.backend` expect to get at least one of the following backends: " - f"{', '.join(map(repr, SUPPORTED_DB_BACKENDS))}." - ) - pytest.fail(reason) - elif unsupported_backends := list(filter(lambda b: b not in SUPPORTED_DB_BACKENDS, backend_names)): - reason = ( - "Airflow Tests supports only the following backends in `pytest.mark.backend` marker: " - f"{', '.join(map(repr, SUPPORTED_DB_BACKENDS))}, " - f"but got {', '.join(map(repr, unsupported_backends))}." - ) - pytest.fail(reason) - - env_name = "BACKEND" - if not (backend := os.environ.get(env_name)) or backend not in backend_names: - reason = ( - f"The test {item.nodeid!r} requires one of {', '.join(map(repr, backend_names))} backend started " - f"and {env_name!r} environment variable to be set (currently it set to {backend!r}). " - f"It can be set by specifying backend at breeze startup." 
- ) - pytest.skip(reason) - - -def skip_if_credential_file_missing(item): - for marker in item.iter_markers(name="credential_file"): - credential_file = marker.args[0] - credential_path = os.path.join(os.environ.get("CREDENTIALS_DIR"), credential_file) - if not os.path.exists(credential_path): - pytest.skip(f"The test requires credential file {credential_path}: {item}") - - -def pytest_runtest_setup(item): - selected_integrations_list = item.config.option.integration - selected_systems_list = item.config.option.system - - include_long_running = item.config.option.include_long_running - include_quarantined = item.config.option.include_quarantined - exclude_virtualenv_operator = item.config.option.exclude_virtualenv_operator - exclude_external_python_operator = item.config.option.exclude_external_python_operator - - for marker in item.iter_markers(name="integration"): - skip_if_integration_disabled(marker, item) - if selected_integrations_list: - skip_if_not_marked_with_integration(selected_integrations_list, item) - if selected_systems_list: - skip_if_not_marked_with_system(selected_systems_list, item) - else: - skip_system_test(item) - for marker in item.iter_markers(name="platform"): - skip_if_platform_doesnt_match(marker) - for marker in item.iter_markers(name="backend"): - skip_if_wrong_backend(marker, item) - skip_if_database_isolation_mode(item) - selected_backend = item.config.option.backend - if selected_backend: - skip_if_not_marked_with_backend(selected_backend, item) - if not include_long_running: - skip_long_running_test(item) - if not include_quarantined: - skip_quarantined_test(item) - if exclude_virtualenv_operator: - skip_virtualenv_operator_test(item) - if exclude_external_python_operator: - skip_external_python_operator_test(item) - if skip_db_tests: - skip_db_test(item) - if run_db_tests_only: - only_run_db_test(item) - skip_if_credential_file_missing(item) - - -@pytest.fixture -def frozen_sleep(monkeypatch): - """Use time-machine to "stub" sleep. - - This means the ``sleep()`` takes no time, but ``datetime.now()`` appears to move forwards. - - If your module under test does ``import time`` and then ``time.sleep``: - - .. code-block:: python - - def test_something(frozen_sleep): - my_mod.fn_under_test() - - If your module under test does ``from time import sleep`` then you will - have to mock that sleep function directly: - - .. code-block:: python - - def test_something(frozen_sleep, monkeypatch): - monkeypatch.setattr("my_mod.sleep", frozen_sleep) - my_mod.fn_under_test() - """ - traveller = None - - def fake_sleep(seconds): - nonlocal traveller - utcnow = datetime.now(tz=timezone.utc) - if traveller is not None: - traveller.stop() - traveller = time_machine.travel(utcnow + timedelta(seconds=seconds)) - traveller.start() - - monkeypatch.setattr("time.sleep", fake_sleep) - yield fake_sleep - - if traveller is not None: - traveller.stop() - - -@pytest.fixture(scope="session") -def app(): - from tests.test_utils.config import conf_vars - - with conf_vars({("fab", "auth_rate_limited"): "False"}): - from airflow.www import app - - yield app.create_app(testing=True) - - -@pytest.fixture -def dag_maker(request): - """Fixture to help create DAG, DagModel, and SerializedDAG automatically. 
-
-    You have to use the dag_maker as a context manager, and it takes
-    the same arguments as DAG::
-
-        with dag_maker(dag_id="mydag") as dag:
-            task1 = EmptyOperator(task_id="mytask")
-            task2 = EmptyOperator(task_id="mytask2")
-
-    If the DagModel you want to use needs different parameters than the one
-    automatically created by the dag_maker, you have to update the DagModel as below::
-
-        dag_maker.dag_model.is_active = False
-        session.merge(dag_maker.dag_model)
-        session.commit()
-
-    For any test where you use the dag_maker, make sure to create a DagRun::
-
-        dag_maker.create_dagrun()
-
-    The dag_maker.create_dagrun takes the same arguments as dag.create_dagrun.
-
-    If you want to operate on serialized DAGs, then either pass
-    ``serialized=True`` to the ``dag_maker()`` call, or you can mark your
-    test/class/file with ``@pytest.mark.need_serialized_dag(True)``. In both of
-    these cases the ``dag`` returned by the context manager will be a
-    lazily-evaluated proxy object to the SerializedDAG.
-    """
-    import lazy_object_proxy
-
-    # IMPORTANT: Delay _all_ imports from `airflow.*` to _inside a method_.
-    # This fixture is "called" early on in the pytest collection process, and
-    # if we import airflow.* here the wrong (non-test) config will be loaded
-    # and "baked" in to various constants
-
-    want_serialized = False
-
-    # Allow changing default serialized behaviour with `@pytest.mark.need_serialized_dag` or
-    # `@pytest.mark.need_serialized_dag(False)`
-    serialized_marker = request.node.get_closest_marker("need_serialized_dag")
-    if serialized_marker:
-        (want_serialized,) = serialized_marker.args or (True,)
-
-    from airflow.utils.log.logging_mixin import LoggingMixin
-
-    class DagFactory(LoggingMixin):
-        _own_session = False
-
-        def __init__(self):
-            from airflow.models import DagBag
-
-            # Keep all the serialized dags we've created in this test
-            self.dagbag = DagBag(os.devnull, include_examples=False, read_dags_from_db=False)
-
-        def __enter__(self):
-            self.dag.__enter__()
-            if self.want_serialized:
-                return lazy_object_proxy.Proxy(self._serialized_dag)
-            return self.dag
-
-        def _serialized_dag(self):
-            return self.serialized_model.dag
-
-        def get_serialized_data(self):
-            try:
-                data = self.serialized_model.data
-            except AttributeError:
-                raise RuntimeError("DAG serialization not requested")
-            if isinstance(data, str):
-                return json.loads(data)
-            return data
-
-        def _bag_dag_compat(self, dag):
-            # This is a compatibility shim for the old bag_dag method in Airflow <3.0
-            # TODO: Remove this when we drop support for Airflow <3.0 in Providers
-            if hasattr(dag, "parent_dag"):
-                return self.dagbag.bag_dag(dag, root_dag=dag)
-            return self.dagbag.bag_dag(dag)
-
-        def __exit__(self, type, value, traceback):
-            from airflow.models import DagModel
-            from airflow.models.serialized_dag import SerializedDagModel
-
-            dag = self.dag
-            dag.__exit__(type, value, traceback)
-            if type is not None:
-                return
-
-            dag.clear(session=self.session)
-            dag.sync_to_db(processor_subdir=self.processor_subdir, session=self.session)
-            self.dag_model = self.session.get(DagModel, dag.dag_id)
-
-            if self.want_serialized:
-                self.serialized_model = SerializedDagModel(
-                    dag, processor_subdir=self.dag_model.processor_subdir
-                )
-                self.session.merge(self.serialized_model)
-                serialized_dag = self._serialized_dag()
-                self._bag_dag_compat(serialized_dag)
-                self.session.flush()
-            else:
-                self._bag_dag_compat(self.dag)
-
-        def create_dagrun(self, **kwargs):
-            from airflow.utils import timezone
-            from airflow.utils.state import State
-            from
airflow.utils.types import DagRunType - from tests.test_utils.compat import AIRFLOW_V_3_0_PLUS - - if AIRFLOW_V_3_0_PLUS: - from airflow.utils.types import DagRunTriggeredByType - - dag = self.dag - kwargs = { - "state": State.RUNNING, - "start_date": self.start_date, - "session": self.session, - **kwargs, - } - # Need to provide run_id if the user does not either provide one - # explicitly, or pass run_type for inference in dag.create_dagrun(). - if "run_id" not in kwargs and "run_type" not in kwargs: - kwargs["run_id"] = "test" - - if "run_type" not in kwargs: - kwargs["run_type"] = DagRunType.from_run_id(kwargs["run_id"]) - if kwargs.get("execution_date") is None: - if kwargs["run_type"] == DagRunType.MANUAL: - kwargs["execution_date"] = self.start_date - else: - kwargs["execution_date"] = dag.next_dagrun_info(None).logical_date - if "data_interval" not in kwargs: - logical_date = timezone.coerce_datetime(kwargs["execution_date"]) - if kwargs["run_type"] == DagRunType.MANUAL: - data_interval = dag.timetable.infer_manual_data_interval(run_after=logical_date) - else: - data_interval = dag.infer_automated_data_interval(logical_date) - kwargs["data_interval"] = data_interval - if AIRFLOW_V_3_0_PLUS and "triggered_by" not in kwargs: - kwargs["triggered_by"] = DagRunTriggeredByType.TEST - - self.dag_run = dag.create_dagrun(**kwargs) - for ti in self.dag_run.task_instances: - ti.refresh_from_task(dag.get_task(ti.task_id)) - if self.want_serialized: - self.session.commit() - return self.dag_run - - def create_dagrun_after(self, dagrun, **kwargs): - next_info = self.dag.next_dagrun_info(self.dag.get_run_data_interval(dagrun)) - if next_info is None: - raise ValueError(f"cannot create run after {dagrun}") - return self.create_dagrun( - execution_date=next_info.logical_date, - data_interval=next_info.data_interval, - **kwargs, - ) - - def __call__( - self, - dag_id="test_dag", - schedule=timedelta(days=1), - serialized=want_serialized, - fileloc=None, - processor_subdir=None, - session=None, - **kwargs, - ): - from airflow import settings - from airflow.models.dag import DAG - from airflow.utils import timezone - - if session is None: - self._own_session = True - session = settings.Session() - - self.kwargs = kwargs - self.session = session - self.start_date = self.kwargs.get("start_date", None) - default_args = kwargs.get("default_args", None) - if default_args and not self.start_date: - if "start_date" in default_args: - self.start_date = default_args.get("start_date") - if not self.start_date: - if hasattr(request.module, "DEFAULT_DATE"): - self.start_date = getattr(request.module, "DEFAULT_DATE") - else: - DEFAULT_DATE = timezone.datetime(2016, 1, 1) - self.start_date = DEFAULT_DATE - self.kwargs["start_date"] = self.start_date - # Set schedule argument to explicitly set value, or a default if no - # other scheduling arguments are set. 
-        self.dag = DAG(dag_id, schedule=schedule, **self.kwargs)
-        self.dag.fileloc = fileloc or request.module.__file__
-        self.want_serialized = serialized
-        self.processor_subdir = processor_subdir
-
-        return self
-
-    def cleanup(self):
-        from airflow.models import DagModel, DagRun, TaskInstance, XCom
-        from airflow.models.serialized_dag import SerializedDagModel
-        from airflow.models.taskmap import TaskMap
-        from airflow.utils.retries import run_with_db_retries
-        from tests.test_utils.compat import AssetEvent
-
-        for attempt in run_with_db_retries(logger=self.log):
-            with attempt:
-                dag_ids = list(self.dagbag.dag_ids)
-                if not dag_ids:
-                    return
-                # To isolate problems here from problems elsewhere on the session object
-                self.session.rollback()
-
-                self.session.query(SerializedDagModel).filter(
-                    SerializedDagModel.dag_id.in_(dag_ids)
-                ).delete(synchronize_session=False)
-                self.session.query(DagRun).filter(DagRun.dag_id.in_(dag_ids)).delete(
-                    synchronize_session=False,
-                )
-                self.session.query(TaskInstance).filter(TaskInstance.dag_id.in_(dag_ids)).delete(
-                    synchronize_session=False,
-                )
-                self.session.query(XCom).filter(XCom.dag_id.in_(dag_ids)).delete(
-                    synchronize_session=False,
-                )
-                self.session.query(DagModel).filter(DagModel.dag_id.in_(dag_ids)).delete(
-                    synchronize_session=False,
-                )
-                self.session.query(TaskMap).filter(TaskMap.dag_id.in_(dag_ids)).delete(
-                    synchronize_session=False,
-                )
-                self.session.query(AssetEvent).filter(AssetEvent.source_dag_id.in_(dag_ids)).delete(
-                    synchronize_session=False,
-                )
-                self.session.commit()
-                if self._own_session:
-                    self.session.expunge_all()
-
-    factory = DagFactory()
-
-    try:
-        yield factory
-    finally:
-        factory.cleanup()
-        with suppress(AttributeError):
-            del factory.session
-
-
-@pytest.fixture
-def create_dummy_dag(dag_maker):
-    """Create a `DAG` with a single `EmptyOperator` task.
-
-    DagRun and DagModel are also created.
-
-    Apart from the already existing arguments, any other argument in kwargs
-    is passed to the DAG and not to the EmptyOperator task.
-
-    If you have an argument that you want to pass to the EmptyOperator that
-    is not here, please use `default_args` so that the DAG will pass it to the
-    Task::
-
-        dag, task = create_dummy_dag(default_args={"start_date": timezone.datetime(2016, 1, 1)})
-
-    You will not be able to alter the created DagRun or DagModel; use the `dag_maker` fixture instead.
- """ - from airflow.operators.empty import EmptyOperator - from airflow.utils.types import DagRunType - - def create_dag( - dag_id="dag", - task_id="op1", - task_display_name=None, - max_active_tis_per_dag=16, - max_active_tis_per_dagrun=None, - pool="default_pool", - executor_config=None, - trigger_rule="all_done", - on_success_callback=None, - on_execute_callback=None, - on_failure_callback=None, - on_retry_callback=None, - email=None, - with_dagrun_type=DagRunType.SCHEDULED, - **kwargs, - ): - op_kwargs = {} - from tests.test_utils.compat import AIRFLOW_V_2_9_PLUS - - if AIRFLOW_V_2_9_PLUS: - op_kwargs["task_display_name"] = task_display_name - with dag_maker(dag_id, **kwargs) as dag: - op = EmptyOperator( - task_id=task_id, - max_active_tis_per_dag=max_active_tis_per_dag, - max_active_tis_per_dagrun=max_active_tis_per_dagrun, - executor_config=executor_config or {}, - on_success_callback=on_success_callback, - on_execute_callback=on_execute_callback, - on_failure_callback=on_failure_callback, - on_retry_callback=on_retry_callback, - email=email, - pool=pool, - trigger_rule=trigger_rule, - **op_kwargs, - ) - if with_dagrun_type is not None: - dag_maker.create_dagrun(run_type=with_dagrun_type) - return dag, op - - return create_dag - - -if TYPE_CHECKING: - from airflow.models.taskinstance import TaskInstance - - -@pytest.fixture -def create_task_instance(dag_maker, create_dummy_dag): - """Create a TaskInstance, and associated DB rows (DagRun, DagModel, etc). - - Uses ``create_dummy_dag`` to create the dag structure. - """ - from airflow.operators.empty import EmptyOperator - - def maker( - execution_date=None, - dagrun_state=None, - state=None, - run_id=None, - run_type=None, - data_interval=None, - external_executor_id=None, - dag_id="dag", - task_id="op1", - task_display_name=None, - max_active_tis_per_dag=16, - max_active_tis_per_dagrun=None, - pool="default_pool", - executor_config=None, - trigger_rule="all_done", - on_success_callback=None, - on_execute_callback=None, - on_failure_callback=None, - on_retry_callback=None, - email=None, - map_index=-1, - **kwargs, - ) -> TaskInstance: - from tests.test_utils.compat import AIRFLOW_V_3_0_PLUS - - if AIRFLOW_V_3_0_PLUS: - from airflow.utils.types import DagRunTriggeredByType - - if execution_date is None: - from airflow.utils import timezone - - execution_date = timezone.utcnow() - with dag_maker(dag_id, **kwargs): - op_kwargs = {} - from tests.test_utils.compat import AIRFLOW_V_2_9_PLUS - - if AIRFLOW_V_2_9_PLUS: - op_kwargs["task_display_name"] = task_display_name - task = EmptyOperator( - task_id=task_id, - max_active_tis_per_dag=max_active_tis_per_dag, - max_active_tis_per_dagrun=max_active_tis_per_dagrun, - executor_config=executor_config or {}, - on_success_callback=on_success_callback, - on_execute_callback=on_execute_callback, - on_failure_callback=on_failure_callback, - on_retry_callback=on_retry_callback, - email=email, - pool=pool, - trigger_rule=trigger_rule, - **op_kwargs, - ) - - dagrun_kwargs = { - "execution_date": execution_date, - "state": dagrun_state, - } - dagrun_kwargs.update({"triggered_by": DagRunTriggeredByType.TEST} if AIRFLOW_V_3_0_PLUS else {}) - if run_id is not None: - dagrun_kwargs["run_id"] = run_id - if run_type is not None: - dagrun_kwargs["run_type"] = run_type - if data_interval is not None: - dagrun_kwargs["data_interval"] = data_interval - dagrun = dag_maker.create_dagrun(**dagrun_kwargs) - (ti,) = dagrun.task_instances - ti.task = task - ti.state = state - ti.external_executor_id = 
external_executor_id - ti.map_index = map_index - - dag_maker.session.flush() - return ti - - return maker - - -@pytest.fixture -def create_serialized_task_instance_of_operator(dag_maker): - def _create_task_instance( - operator_class, - *, - dag_id, - execution_date=None, - session=None, - **operator_kwargs, - ) -> TaskInstance: - with dag_maker(dag_id=dag_id, serialized=True, session=session): - operator_class(**operator_kwargs) - if execution_date is None: - dagrun_kwargs = {} - else: - dagrun_kwargs = {"execution_date": execution_date} - (ti,) = dag_maker.create_dagrun(**dagrun_kwargs).task_instances - return ti - - return _create_task_instance - - -@pytest.fixture -def create_task_instance_of_operator(dag_maker): - def _create_task_instance( - operator_class, - *, - dag_id, - execution_date=None, - session=None, - **operator_kwargs, - ) -> TaskInstance: - with dag_maker(dag_id=dag_id, session=session, serialized=True): - operator_class(**operator_kwargs) - if execution_date is None: - dagrun_kwargs = {} - else: - dagrun_kwargs = {"execution_date": execution_date} - (ti,) = dag_maker.create_dagrun(**dagrun_kwargs).task_instances - return ti - - return _create_task_instance - - -@pytest.fixture -def create_task_of_operator(dag_maker): - def _create_task_of_operator(operator_class, *, dag_id, session=None, **operator_kwargs): - with dag_maker(dag_id=dag_id, session=session): - task = operator_class(**operator_kwargs) - return task - - return _create_task_of_operator - - -@pytest.fixture -def session(): - from airflow.utils.session import create_session - - with create_session() as session: - yield session - session.rollback() - - -@pytest.fixture -def get_test_dag(): - def _get(dag_id): - from airflow.models.dagbag import DagBag - from airflow.models.serialized_dag import SerializedDagModel - - dag_file = os.path.join(os.path.dirname(os.path.realpath(__file__)), "dags", f"{dag_id}.py") - dagbag = DagBag(dag_folder=dag_file, include_examples=False) - - dag = dagbag.get_dag(dag_id) - dag.sync_to_db() - SerializedDagModel.write_dag(dag) - - return dag - - return _get - - -@pytest.fixture -def create_log_template(request): - from airflow import settings - from airflow.models.tasklog import LogTemplate - - session = settings.Session() - - def _create_log_template(filename_template, elasticsearch_id=""): - log_template = LogTemplate(filename=filename_template, elasticsearch_id=elasticsearch_id) - session.add(log_template) - session.commit() - - def _delete_log_template(): - from airflow.models import DagRun, TaskInstance - - session.query(TaskInstance).delete() - session.query(DagRun).delete() - session.delete(log_template) - session.commit() - - request.addfinalizer(_delete_log_template) - - return _create_log_template - - -@pytest.fixture -def reset_logging_config(): - import logging.config - - from airflow import settings - from airflow.utils.module_loading import import_string - - logging_config = import_string(settings.LOGGING_CLASS_PATH) - logging.config.dictConfig(logging_config) - - -@pytest.fixture(scope="session", autouse=True) -def suppress_info_logs_for_dag_and_fab(): - import logging - - from tests.test_utils.compat import AIRFLOW_V_2_9_PLUS - - dag_logger = logging.getLogger("airflow.models.dag") - dag_logger.setLevel(logging.WARNING) - - if AIRFLOW_V_2_9_PLUS: - fab_logger = logging.getLogger("airflow.providers.fab.auth_manager.security_manager.override") - fab_logger.setLevel(logging.WARNING) - else: - fab_logger = logging.getLogger("airflow.www.fab_security") - 
fab_logger.setLevel(logging.WARNING)
-
-
-@pytest.fixture(scope="module", autouse=True)
-def _clear_db(request):
-    """Clear DB before each test module run."""
-    from tests.test_utils.db import clear_all
-
-    if not request.config.option.db_cleanup:
-        return
-    if skip_db_tests:
-        return
-    from airflow.configuration import conf
-
-    sql_alchemy_conn = conf.get("database", "sql_alchemy_conn")
-    if sql_alchemy_conn.startswith("sqlite"):
-        sql_alchemy_file = sql_alchemy_conn.replace("sqlite:///", "")
-        if not os.path.exists(sql_alchemy_file):
-            print(f"The sqlite file `{sql_alchemy_file}` does not exist. Attempting to initialize it.")
-            initial_db_init()
-
-    dist_option = getattr(request.config.option, "dist", "no")
-    if dist_option != "no" or hasattr(request.config, "workerinput"):
-        # Skip if pytest-xdist detected (controller or worker)
-        return
-    try:
-        clear_all()
-    except Exception as ex:
-        exc_name_parts = [type(ex).__name__]
-        exc_module = type(ex).__module__
-        if exc_module != "builtins":
-            exc_name_parts.insert(0, exc_module)
-        extra_msg = "" if request.config.option.db_init else ", try to run with flag --with-db-init"
-        pytest.exit(f"Unable to clear test DB{extra_msg}, got error {'.'.join(exc_name_parts)}: {ex}")
-
-
-@pytest.fixture(autouse=True)
-def clear_lru_cache():
-    from airflow.executors.executor_loader import ExecutorLoader
-    from airflow.utils.entry_points import _get_grouped_entry_points
-
-    ExecutorLoader.validate_database_executor_compatibility.cache_clear()
-    try:
-        _get_grouped_entry_points.cache_clear()
-        try:
-            yield
-        finally:
-            _get_grouped_entry_points.cache_clear()
-    finally:
-        ExecutorLoader.validate_database_executor_compatibility.cache_clear()
-
-
-@pytest.fixture(autouse=True)
-def refuse_to_run_test_from_wrongly_named_files(request: pytest.FixtureRequest):
-    filepath = request.node.path
-    is_system_test: bool = "tests/system/" in os.fspath(filepath)
-    test_name = request.node.name
-    if request.node.cls:
-        test_name = f"{request.node.cls.__name__}.{test_name}"
-    if is_system_test and not filepath.name.startswith(("example_", "test_")):
-        pytest.fail(
-            f"All test method files in tests/system must start with 'example_' or 'test_'. "
-            f"Seems that {os.fspath(filepath)!r} contains {test_name!r} that looks like a test case. "
-            f"Please rename the file to follow the example_* or test_* pattern if you want to run the tests "
-            f"in it."
-        )
-    elif not is_system_test and not filepath.name.startswith("test_"):
-        pytest.fail(
-            f"All test method files in tests/ must start with 'test_'. Seems that {os.fspath(filepath)!r} "
-            f"contains {test_name!r} that looks like a test case. Please rename the file to "
-            f"follow the test_* pattern if you want to run the tests in it."
-        )
-
-
-@pytest.fixture(autouse=True)
-def initialize_providers_manager():
-    from airflow.providers_manager import ProvidersManager
-
-    ProvidersManager().initialize_providers_configuration()
-
-
-@pytest.fixture(autouse=True)
-def close_all_sqlalchemy_sessions():
-    from sqlalchemy.orm import close_all_sessions
-
-    with suppress(Exception):
-        close_all_sessions()
-    yield
-    with suppress(Exception):
-        close_all_sessions()
-
-
-@pytest.fixture
-def cleanup_providers_manager():
-    from airflow.providers_manager import ProvidersManager
-
-    ProvidersManager()._cleanup()
-    ProvidersManager().initialize_providers_configuration()
-    try:
-        yield
-    finally:
-        ProvidersManager()._cleanup()
-
-
-@pytest.fixture(autouse=True)
-def _disable_redact(request: pytest.FixtureRequest, mocker):
-    """Disable redacted text in tests, except specific."""
-    from airflow import settings
-
-    if next(request.node.iter_markers("enable_redact"), None):
-        with pytest.MonkeyPatch.context() as mp_ctx:
-            mp_ctx.setattr(settings, "MASK_SECRETS_IN_LOGS", True)
-            yield
-        return
-
-    mocked_redact = mocker.patch("airflow.utils.log.secrets_masker.SecretsMasker.redact")
-    mocked_redact.side_effect = lambda item, name=None, max_depth=None: item
-    with pytest.MonkeyPatch.context() as mp_ctx:
-        mp_ctx.setattr(settings, "MASK_SECRETS_IN_LOGS", False)
-        yield
-    return
-
-
-@pytest.fixture
-def airflow_root_path() -> Path:
-    import airflow
-
-    return Path(airflow.__path__[0]).parent
-
-
-@pytest.fixture
-def hook_lineage_collector():
-    from airflow.lineage import hook
-
-    hook._hook_lineage_collector = None
-    hook._hook_lineage_collector = hook.HookLineageCollector()
-    yield hook.get_hook_lineage_collector()
-    hook._hook_lineage_collector = None
-
-
-# This constant is set to True if tests are run with Airflow installed from packages rather than from
-# the Airflow sources. While most tests in CI are run using Airflow sources, there are
-# also compatibility tests that only use the `tests` package and run against installed packages of Airflow
-# for supported Airflow versions.
-RUNNING_TESTS_AGAINST_AIRFLOW_PACKAGES = not (Path(__file__).parents[1] / "airflow" / "__init__.py").exists()
-
 
 if TYPE_CHECKING:
     # Static checkers do not know about pytest fixtures' types and return values,
     # in case they are distributed through third-party packages.
@@ -1489,15 +94,3 @@ def requests_mock() -> RequestsMockFixture: ...
 
 # time-machine
 @pytest.fixture  # type: ignore[no-redef]
 def time_machine() -> TimeMachineFixture: ...
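# A minimal usage sketch (hypothetical test name, not part of this diff): with the
# autouse `_disable_redact` fixture above, secret masking is stubbed out by default;
# a test that actually asserts on masking behaviour opts back in with the
# `enable_redact` marker.
import pytest

@pytest.mark.enable_redact
def test_secret_masking_is_active():
    # With the marker present, the fixture sets settings.MASK_SECRETS_IN_LOGS to True
    # and leaves SecretsMasker.redact un-mocked, so masking can be asserted on.
    ...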
-
-
-@pytest.fixture
-def clean_dags_and_dagruns():
-    """Fixture that cleans the database before and after every test."""
-    from tests.test_utils.db import clear_db_dags, clear_db_runs
-
-    clear_db_runs()
-    clear_db_dags()
-    yield  # Test runs here
-    clear_db_dags()
-    clear_db_runs()
diff --git a/tests/core/test_configuration.py b/tests/core/test_configuration.py
index 1590fb7ee231..0690a5f0182d 100644
--- a/tests/core/test_configuration.py
+++ b/tests/core/test_configuration.py
@@ -42,8 +42,6 @@
     write_default_airflow_configuration_if_needed,
 )
 from airflow.providers_manager import ProvidersManager
-from tests.test_utils.config import conf_vars
-from tests.test_utils.reset_warning_registry import reset_warning_registry
 from tests.utils.test_config import (
     remove_all_configurations,
     set_deprecated_options,
@@ -51,6 +49,9 @@
     use_config,
 )
 
+from dev.tests_common.test_utils.config import conf_vars
+from dev.tests_common.test_utils.reset_warning_registry import reset_warning_registry
+
 HOME_DIR = os.path.expanduser("~")
 
 # The conf has been updated with sql_alchemy_con and deactivate_stale_dags_interval to test the
diff --git a/tests/core/test_core.py b/tests/core/test_core.py
index d44235f95524..13b85a9ef170 100644
--- a/tests/core/test_core.py
+++ b/tests/core/test_core.py
@@ -32,7 +32,8 @@
 from airflow.providers.standard.operators.bash import BashOperator
 from airflow.utils.timezone import datetime
 from airflow.utils.types import DagRunType
-from tests.test_utils.db import clear_db_dags, clear_db_runs, clear_db_task_fail
+
+from dev.tests_common.test_utils.db import clear_db_dags, clear_db_runs, clear_db_task_fail
 
 pytestmark = [pytest.mark.db_test, pytest.mark.skip_if_database_isolation_mode]
diff --git a/tests/core/test_example_dags_system.py b/tests/core/test_example_dags_system.py
index 48a936f794f9..c60b7325b125 100644
--- a/tests/core/test_example_dags_system.py
+++ b/tests/core/test_example_dags_system.py
@@ -19,7 +19,7 @@
 
 import pytest
 
-from tests.test_utils.system_tests_class import SystemTest
+from dev.tests_common.test_utils.system_tests_class import SystemTest
 
 
 @pytest.mark.system("core")
diff --git a/tests/core/test_impersonation_tests.py b/tests/core/test_impersonation_tests.py
index 721d180ce7d3..cf9359c86337 100644
--- a/tests/core/test_impersonation_tests.py
+++ b/tests/core/test_impersonation_tests.py
@@ -35,7 +35,8 @@
 from airflow.utils.state import State
 from airflow.utils.timezone import datetime
 from airflow.utils.types import DagRunType
-from tests.test_utils import db
+
+from dev.tests_common.test_utils import db
 
 # The entire module is under the quarantined mark; it might have unpredictable side effects on other tests
 # and should be moved into an isolated environment in the future.
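# A minimal usage sketch (hypothetical test name, not part of this diff): the hunks
# above and below all make the same mechanical change - test utilities move from the
# `tests` package to the shared `dev.tests_common` distribution while their call
# sites stay the same.
from dev.tests_common.test_utils.config import conf_vars

def test_with_overridden_config():
    # conf_vars temporarily overrides Airflow config keys for the duration of the
    # `with` block; the ("section", "key") tuple form matches its use in this diff.
    with conf_vars({("fab", "auth_rate_limited"): "False"}):
        ...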
diff --git a/tests/core/test_logging_config.py b/tests/core/test_logging_config.py index c972532fe2ce..a316130ecdab 100644 --- a/tests/core/test_logging_config.py +++ b/tests/core/test_logging_config.py @@ -29,7 +29,8 @@ import pytest from airflow.configuration import conf -from tests.test_utils.config import conf_vars + +from dev.tests_common.test_utils.config import conf_vars SETTINGS_FILE_VALID = """ LOGGING_CONFIG = { diff --git a/tests/core/test_otel_tracer.py b/tests/core/test_otel_tracer.py index db675dd4293e..b9612c49dae9 100644 --- a/tests/core/test_otel_tracer.py +++ b/tests/core/test_otel_tracer.py @@ -26,7 +26,8 @@ from airflow.traces import TRACEPARENT, TRACESTATE, otel_tracer, utils from airflow.traces.tracer import Trace -from tests.test_utils.config import env_vars + +from dev.tests_common.test_utils.config import env_vars @pytest.fixture diff --git a/tests/core/test_sentry.py b/tests/core/test_sentry.py index b9a1ff7af8d7..c67b49980aae 100644 --- a/tests/core/test_sentry.py +++ b/tests/core/test_sentry.py @@ -30,7 +30,8 @@ from airflow.utils import timezone from airflow.utils.module_loading import import_string from airflow.utils.state import State -from tests.test_utils.config import conf_vars + +from dev.tests_common.test_utils.config import conf_vars EXECUTION_DATE = timezone.utcnow() SCHEDULE_INTERVAL = datetime.timedelta(days=1) diff --git a/tests/core/test_settings.py b/tests/core/test_settings.py index cdcf52df16d8..619d64630029 100644 --- a/tests/core/test_settings.py +++ b/tests/core/test_settings.py @@ -33,7 +33,8 @@ from airflow.exceptions import AirflowClusterPolicyViolation, AirflowConfigException from airflow.settings import _ENABLE_AIP_44, TracebackSession, is_usage_data_collection_enabled from airflow.utils.session import create_session -from tests.test_utils.config import conf_vars + +from dev.tests_common.test_utils.config import conf_vars SETTINGS_FILE_POLICY = """ def test_policy(task_instance): diff --git a/tests/core/test_sqlalchemy_config.py b/tests/core/test_sqlalchemy_config.py index 154eb0a5f893..5fed8745fd08 100644 --- a/tests/core/test_sqlalchemy_config.py +++ b/tests/core/test_sqlalchemy_config.py @@ -25,7 +25,8 @@ from airflow import settings from airflow.api_internal.internal_api_call import InternalApiConfig from airflow.exceptions import AirflowConfigException -from tests.test_utils.config import conf_vars + +from dev.tests_common.test_utils.config import conf_vars SQL_ALCHEMY_CONNECT_ARGS = {"test": 43503, "dict": {"is": 1, "supported": "too"}} diff --git a/tests/core/test_stats.py b/tests/core/test_stats.py index ec7ff0cab08c..9a218010a189 100644 --- a/tests/core/test_stats.py +++ b/tests/core/test_stats.py @@ -36,7 +36,8 @@ PatternAllowListValidator, PatternBlockListValidator, ) -from tests.test_utils.config import conf_vars + +from dev.tests_common.test_utils.config import conf_vars class CustomStatsd(statsd.StatsClient): diff --git a/tests/dag_processing/test_job_runner.py b/tests/dag_processing/test_job_runner.py index 1d3fefdf12d5..81b94dd18b18 100644 --- a/tests/dag_processing/test_job_runner.py +++ b/tests/dag_processing/test_job_runner.py @@ -60,9 +60,10 @@ from airflow.utils.session import create_session from tests.core.test_logging_config import SETTINGS_FILE_VALID, settings_context from tests.models import TEST_DAGS_FOLDER -from tests.test_utils.compat import ParseImportError -from tests.test_utils.config import conf_vars -from tests.test_utils.db import ( + +from dev.tests_common.test_utils.compat import ParseImportError +from 
dev.tests_common.test_utils.config import conf_vars +from dev.tests_common.test_utils.db import ( clear_db_callbacks, clear_db_dags, clear_db_import_errors, diff --git a/tests/dag_processing/test_processor.py b/tests/dag_processing/test_processor.py index d7b2b2116653..c3a0f5a26e32 100644 --- a/tests/dag_processing/test_processor.py +++ b/tests/dag_processing/test_processor.py @@ -39,10 +39,11 @@ from airflow.utils.session import create_session from airflow.utils.state import State from airflow.utils.types import DagRunType -from tests.test_utils.asserts import assert_queries_count -from tests.test_utils.compat import AIRFLOW_V_3_0_PLUS, ParseImportError -from tests.test_utils.config import conf_vars, env_vars -from tests.test_utils.db import ( + +from dev.tests_common.test_utils.asserts import assert_queries_count +from dev.tests_common.test_utils.compat import AIRFLOW_V_3_0_PLUS, ParseImportError +from dev.tests_common.test_utils.config import conf_vars, env_vars +from dev.tests_common.test_utils.db import ( clear_db_dags, clear_db_import_errors, clear_db_jobs, @@ -50,7 +51,7 @@ clear_db_runs, clear_db_serialized_dags, ) -from tests.test_utils.mock_executor import MockExecutor +from dev.tests_common.test_utils.mock_executor import MockExecutor if AIRFLOW_V_3_0_PLUS: from airflow.utils.types import DagRunTriggeredByType diff --git a/tests/dags/test_miscellaneous.py b/tests/dags/test_miscellaneous.py index 4a2c6b56a365..b08e61e3bbdd 100644 --- a/tests/dags/test_miscellaneous.py +++ b/tests/dags/test_miscellaneous.py @@ -23,7 +23,8 @@ from airflow.models.dag import DAG from airflow.operators.empty import EmptyOperator -from tests.test_utils.compat import BashOperator + +from dev.tests_common.test_utils.compat import BashOperator args = { "owner": "airflow", diff --git a/tests/dags/test_sensor.py b/tests/dags/test_sensor.py index 07c9cc7efdff..7c9616040266 100644 --- a/tests/dags/test_sensor.py +++ b/tests/dags/test_sensor.py @@ -21,7 +21,8 @@ from airflow.decorators import task from airflow.models.dag import DAG from airflow.utils import timezone -from tests.test_utils.compat import DateTimeSensor + +from dev.tests_common.test_utils.compat import DateTimeSensor with DAG( dag_id="test_sensor", start_date=datetime.datetime(2022, 1, 1), catchup=False, schedule="@once" diff --git a/tests/decorators/test_bash.py b/tests/decorators/test_bash.py index ba8948936eda..da79fb4cca0f 100644 --- a/tests/decorators/test_bash.py +++ b/tests/decorators/test_bash.py @@ -29,7 +29,8 @@ from airflow.models.renderedtifields import RenderedTaskInstanceFields from airflow.utils import timezone from airflow.utils.types import NOTSET -from tests.test_utils.db import clear_db_dags, clear_db_runs, clear_rendered_ti_fields + +from dev.tests_common.test_utils.db import clear_db_dags, clear_db_runs, clear_rendered_ti_fields DEFAULT_DATE = timezone.datetime(2023, 1, 1) diff --git a/tests/decorators/test_python.py b/tests/decorators/test_python.py index adbf96a0f41b..d34fbbf552b3 100644 --- a/tests/decorators/test_python.py +++ b/tests/decorators/test_python.py @@ -40,7 +40,8 @@ from airflow.utils.types import DagRunType from airflow.utils.xcom import XCOM_RETURN_KEY from tests.operators.test_python import BasePythonTest -from tests.test_utils.compat import AIRFLOW_V_3_0_PLUS + +from dev.tests_common.test_utils.compat import AIRFLOW_V_3_0_PLUS if AIRFLOW_V_3_0_PLUS: from airflow.utils.types import DagRunTriggeredByType diff --git a/tests/deprecations_ignore.yml b/tests/deprecations_ignore.yml index 
8be939227d81..c3d5ddd8eafa 100644 --- a/tests/deprecations_ignore.yml +++ b/tests/deprecations_ignore.yml @@ -51,116 +51,3 @@ - tests/www/views/test_views_rendered.py::test_rendered_task_detail_env_secret - tests/www/views/test_views_tasks.py::test_rendered_task_view - tests/www/views/test_views_tasks.py::test_views_get - - -# Providers -- tests/providers/amazon/aws/deferrable/hooks/test_base_aws.py::TestAwsBaseAsyncHook::test_get_client_async -- tests/providers/amazon/aws/deferrable/hooks/test_redshift_cluster.py::TestRedshiftAsyncHook::test_cluster_status -- tests/providers/amazon/aws/deferrable/hooks/test_redshift_cluster.py::TestRedshiftAsyncHook::test_get_cluster_status -- tests/providers/amazon/aws/deferrable/hooks/test_redshift_cluster.py::TestRedshiftAsyncHook::test_get_cluster_status_exception -- tests/providers/amazon/aws/deferrable/hooks/test_redshift_cluster.py::TestRedshiftAsyncHook::test_pause_cluster -- tests/providers/amazon/aws/deferrable/hooks/test_redshift_cluster.py::TestRedshiftAsyncHook::test_resume_cluster -- tests/providers/amazon/aws/deferrable/hooks/test_redshift_cluster.py::TestRedshiftAsyncHook::test_resume_cluster_exception -- tests/providers/amazon/aws/triggers/test_redshift_cluster.py::TestRedshiftClusterTrigger::test_redshift_cluster_sensor_trigger_exception -- tests/providers/amazon/aws/triggers/test_redshift_cluster.py::TestRedshiftClusterTrigger::test_redshift_cluster_sensor_trigger_resuming_status -- tests/providers/amazon/aws/triggers/test_redshift_cluster.py::TestRedshiftClusterTrigger::test_redshift_cluster_sensor_trigger_success -- tests/providers/google/common/auth_backend/test_google_openid.py::TestGoogleOpenID::test_success -- tests/providers/google/cloud/hooks/test_bigquery.py::TestBigQueryHookMethods::test_api_resource_configs -- tests/providers/google/cloud/hooks/test_bigquery.py::TestBigQueryHookMethods::test_api_resource_configs_duplication_warning -- tests/providers/google/cloud/hooks/test_bigquery.py::TestBigQueryHookMethods::test_cancel_queries -- tests/providers/google/cloud/hooks/test_bigquery.py::TestBigQueryHookMethods::test_cancel_query_cancel_completed -- tests/providers/google/cloud/hooks/test_bigquery.py::TestBigQueryHookMethods::test_cancel_query_cancel_timeout -- tests/providers/google/cloud/hooks/test_bigquery.py::TestBigQueryHookMethods::test_cancel_query_jobs_to_cancel -- tests/providers/google/cloud/hooks/test_bigquery.py::TestBigQueryHookMethods::test_get_dataset_tables_list -- tests/providers/google/cloud/hooks/test_bigquery.py::TestBigQueryHookMethods::test_invalid_schema_update_and_write_disposition -- tests/providers/google/cloud/hooks/test_bigquery.py::TestBigQueryHookMethods::test_invalid_schema_update_options -- tests/providers/google/cloud/hooks/test_bigquery.py::TestBigQueryHookMethods::test_invalid_source_format -- tests/providers/google/cloud/hooks/test_bigquery.py::TestBigQueryHookMethods::test_run_extract -- tests/providers/google/cloud/hooks/test_bigquery.py::TestBigQueryHookMethods::test_run_load_with_non_csv_as_src_fmt -- tests/providers/google/cloud/hooks/test_bigquery.py::TestBigQueryHookMethods::test_run_query_schema_update_options -- tests/providers/google/cloud/hooks/test_bigquery.py::TestBigQueryHookMethods::test_run_query_schema_update_options_incorrect -- tests/providers/google/cloud/hooks/test_bigquery.py::TestBigQueryHookMethods::test_run_query_sql_dialect -- tests/providers/google/cloud/hooks/test_bigquery.py::TestBigQueryHookMethods::test_run_query_sql_dialect_default -- 
tests/providers/google/cloud/hooks/test_bigquery.py::TestBigQueryHookMethods::test_run_query_sql_dialect_legacy_with_query_params -- tests/providers/google/cloud/hooks/test_bigquery.py::TestBigQueryHookMethods::test_run_query_sql_dialect_legacy_with_query_params_fails -- tests/providers/google/cloud/hooks/test_bigquery.py::TestBigQueryHookMethods::test_run_query_with_arg -- tests/providers/google/cloud/hooks/test_bigquery.py::TestBigQueryHookMethods::test_run_query_without_sql_fails -- tests/providers/google/cloud/hooks/test_bigquery.py::TestBigQueryHookMethods::test_run_table_delete -- tests/providers/google/cloud/hooks/test_bigquery.py::TestBigQueryHookRunWithConfiguration::test_run_with_configuration_location -- tests/providers/google/cloud/hooks/test_bigquery.py::TestBigQueryWithKMS::test_create_external_table_with_kms -- tests/providers/google/cloud/hooks/test_bigquery.py::TestBigQueryWithKMS::test_run_copy_with_kms -- tests/providers/google/cloud/hooks/test_bigquery.py::TestBigQueryWithKMS::test_run_load_with_kms -- tests/providers/google/cloud/hooks/test_bigquery.py::TestBigQueryWithKMS::test_run_query_with_kms -- tests/providers/google/cloud/hooks/test_bigquery.py::TestBigQueryWithLabelsAndDescription::test_create_external_table_description -- tests/providers/google/cloud/hooks/test_bigquery.py::TestBigQueryWithLabelsAndDescription::test_create_external_table_labels -- tests/providers/google/cloud/hooks/test_bigquery.py::TestBigQueryWithLabelsAndDescription::test_run_load_description -- tests/providers/google/cloud/hooks/test_bigquery.py::TestBigQueryWithLabelsAndDescription::test_run_load_labels -- tests/providers/google/cloud/hooks/test_bigquery.py::TestClusteringInRunJob::test_run_load_default -- tests/providers/google/cloud/hooks/test_bigquery.py::TestClusteringInRunJob::test_run_load_with_arg -- tests/providers/google/cloud/hooks/test_bigquery.py::TestClusteringInRunJob::test_run_query_default -- tests/providers/google/cloud/hooks/test_bigquery.py::TestClusteringInRunJob::test_run_query_with_arg -- tests/providers/google/cloud/hooks/test_bigquery.py::TestDatasetsOperations::test_patch_dataset -- tests/providers/google/cloud/hooks/test_bigquery.py::TestTableOperations::test_patch_table -- tests/providers/google/cloud/hooks/test_bigquery.py::TestTimePartitioningInRunJob::test_run_load_default -- tests/providers/google/cloud/hooks/test_bigquery.py::TestTimePartitioningInRunJob::test_run_load_with_arg -- tests/providers/google/cloud/hooks/test_bigquery.py::TestTimePartitioningInRunJob::test_run_query_with_arg -- tests/providers/google/cloud/hooks/test_bigquery.py::TestTimePartitioningInRunJob::test_run_with_auto_detect -- tests/providers/google/cloud/hooks/test_gcs.py::TestGCSHook::test_list__error_match_glob_and_invalid_delimiter -- tests/providers/google/cloud/hooks/test_life_sciences.py::TestLifeSciencesHookWithDefaultProjectIdFromConnection::test_error_operation -- tests/providers/google/cloud/hooks/test_life_sciences.py::TestLifeSciencesHookWithDefaultProjectIdFromConnection::test_life_science_client_creation -- tests/providers/google/cloud/hooks/test_life_sciences.py::TestLifeSciencesHookWithDefaultProjectIdFromConnection::test_run_pipeline_immediately_complete -- tests/providers/google/cloud/hooks/test_life_sciences.py::TestLifeSciencesHookWithDefaultProjectIdFromConnection::test_waiting_operation -- tests/providers/google/cloud/hooks/test_life_sciences.py::TestLifeSciencesHookWithPassedProjectId::test_delegate_to_runtime_error -- 
tests/providers/google/cloud/hooks/test_life_sciences.py::TestLifeSciencesHookWithPassedProjectId::test_error_operation -- tests/providers/google/cloud/hooks/test_life_sciences.py::TestLifeSciencesHookWithPassedProjectId::test_life_science_client_creation -- tests/providers/google/cloud/hooks/test_life_sciences.py::TestLifeSciencesHookWithPassedProjectId::test_location_path -- tests/providers/google/cloud/hooks/test_life_sciences.py::TestLifeSciencesHookWithPassedProjectId::test_run_pipeline_immediately_complete -- tests/providers/google/cloud/hooks/test_life_sciences.py::TestLifeSciencesHookWithPassedProjectId::test_waiting_operation -- tests/providers/google/cloud/hooks/test_life_sciences.py::TestLifeSciencesHookWithoutProjectId::test_life_science_client_creation -- tests/providers/google/cloud/hooks/test_life_sciences.py::TestLifeSciencesHookWithoutProjectId::test_run_pipeline -- tests/providers/google/cloud/hooks/vertex_ai/test_custom_job.py::TestCustomJobWithDefaultProjectIdHook::test_cancel_pipeline_job -- tests/providers/google/cloud/hooks/vertex_ai/test_custom_job.py::TestCustomJobWithDefaultProjectIdHook::test_create_pipeline_job -- tests/providers/google/cloud/hooks/vertex_ai/test_custom_job.py::TestCustomJobWithDefaultProjectIdHook::test_delete_pipeline_job -- tests/providers/google/cloud/hooks/vertex_ai/test_custom_job.py::TestCustomJobWithDefaultProjectIdHook::test_get_pipeline_job -- tests/providers/google/cloud/hooks/vertex_ai/test_custom_job.py::TestCustomJobWithDefaultProjectIdHook::test_list_pipeline_jobs -- tests/providers/google/cloud/hooks/vertex_ai/test_custom_job.py::TestCustomJobWithoutDefaultProjectIdHook::test_cancel_pipeline_job -- tests/providers/google/cloud/hooks/vertex_ai/test_custom_job.py::TestCustomJobWithoutDefaultProjectIdHook::test_create_pipeline_job -- tests/providers/google/cloud/hooks/vertex_ai/test_custom_job.py::TestCustomJobWithoutDefaultProjectIdHook::test_delete_pipeline_job -- tests/providers/google/cloud/hooks/vertex_ai/test_custom_job.py::TestCustomJobWithoutDefaultProjectIdHook::test_get_pipeline_job -- tests/providers/google/cloud/hooks/vertex_ai/test_custom_job.py::TestCustomJobWithoutDefaultProjectIdHook::test_list_pipeline_jobs -- tests/providers/google/cloud/operators/test_dataproc.py::TestDataProcHadoopOperator::test_execute -- tests/providers/google/cloud/operators/test_dataproc.py::TestDataProcHiveOperator::test_builder -- tests/providers/google/cloud/operators/test_dataproc.py::TestDataProcHiveOperator::test_execute -- tests/providers/google/cloud/operators/test_dataproc.py::TestDataProcPigOperator::test_builder -- tests/providers/google/cloud/operators/test_dataproc.py::TestDataProcPigOperator::test_execute -- tests/providers/google/cloud/operators/test_dataproc.py::TestDataProcPySparkOperator::test_execute -- tests/providers/google/cloud/operators/test_dataproc.py::TestDataProcSparkOperator::test_execute -- tests/providers/google/cloud/operators/test_dataproc.py::TestDataProcSparkSqlOperator::test_builder -- tests/providers/google/cloud/operators/test_dataproc.py::TestDataProcSparkSqlOperator::test_execute -- tests/providers/google/cloud/operators/test_dataproc.py::TestDataProcSparkSqlOperator::test_execute_override_project_id -- tests/providers/google/cloud/operators/test_dataproc.py::TestDataprocClusterScaleOperator::test_execute -- tests/providers/google/cloud/operators/test_dataproc.py::test_create_cluster_operator_extra_links -- tests/providers/google/cloud/operators/test_dataproc.py::test_scale_cluster_operator_extra_links 
-- tests/providers/google/cloud/operators/test_dataproc.py::test_submit_spark_job_operator_extra_links -- tests/providers/google/cloud/operators/test_gcs.py::TestGoogleCloudStorageListOperator::test_execute__delimiter -- tests/providers/google/cloud/operators/test_kubernetes_engine.py::TestGoogleCloudPlatformContainerOperator::test_create_execute_error_body -- tests/providers/google/cloud/operators/test_life_sciences.py::TestLifeSciencesRunPipelineOperator::test_executes -- tests/providers/google/cloud/operators/test_life_sciences.py::TestLifeSciencesRunPipelineOperator::test_executes_without_project_id -- tests/providers/google/cloud/transfers/test_gcs_to_gcs.py::TestGoogleCloudStorageToCloudStorageOperator::test_copy_files_into_a_folder -- tests/providers/google/cloud/transfers/test_gcs_to_gcs.py::TestGoogleCloudStorageToCloudStorageOperator::test_execute_last_modified_time -- tests/providers/google/cloud/transfers/test_gcs_to_gcs.py::TestGoogleCloudStorageToCloudStorageOperator::test_execute_more_than_1_wildcard -- tests/providers/google/cloud/transfers/test_gcs_to_gcs.py::TestGoogleCloudStorageToCloudStorageOperator::test_execute_no_prefix -- tests/providers/google/cloud/transfers/test_gcs_to_gcs.py::TestGoogleCloudStorageToCloudStorageOperator::test_execute_no_suffix -- tests/providers/google/cloud/transfers/test_gcs_to_gcs.py::TestGoogleCloudStorageToCloudStorageOperator::test_execute_prefix_and_suffix -- tests/providers/google/cloud/transfers/test_gcs_to_gcs.py::TestGoogleCloudStorageToCloudStorageOperator::test_execute_wildcard_empty_destination_object -- tests/providers/google/cloud/transfers/test_gcs_to_gcs.py::TestGoogleCloudStorageToCloudStorageOperator::test_execute_wildcard_with_destination_object -- tests/providers/google/cloud/transfers/test_gcs_to_gcs.py::TestGoogleCloudStorageToCloudStorageOperator::test_execute_wildcard_with_destination_object_retained_prefix -- tests/providers/google/cloud/transfers/test_gcs_to_gcs.py::TestGoogleCloudStorageToCloudStorageOperator::test_execute_wildcard_with_replace_flag_false -- tests/providers/google/cloud/transfers/test_gcs_to_gcs.py::TestGoogleCloudStorageToCloudStorageOperator::test_execute_wildcard_with_replace_flag_false_with_destination_object -- tests/providers/google/cloud/transfers/test_gcs_to_gcs.py::TestGoogleCloudStorageToCloudStorageOperator::test_execute_wildcard_without_destination_object -- tests/providers/google/cloud/transfers/test_gcs_to_gcs.py::TestGoogleCloudStorageToCloudStorageOperator::test_executes_with_a_delimiter -- tests/providers/google/cloud/transfers/test_gcs_to_gcs.py::TestGoogleCloudStorageToCloudStorageOperator::test_executes_with_delimiter_and_destination_object -- tests/providers/google/cloud/transfers/test_gcs_to_gcs.py::TestGoogleCloudStorageToCloudStorageOperator::test_executes_with_different_delimiter_and_destination_object -- tests/providers/google/cloud/transfers/test_gcs_to_gcs.py::TestGoogleCloudStorageToCloudStorageOperator::test_wc_with_last_modified_time_with_all_true_cond -- tests/providers/google/cloud/transfers/test_gcs_to_gcs.py::TestGoogleCloudStorageToCloudStorageOperator::test_wc_with_last_modified_time_with_one_true_cond -- tests/providers/google/cloud/transfers/test_gcs_to_gcs.py::TestGoogleCloudStorageToCloudStorageOperator::test_wc_with_no_last_modified_time diff --git a/tests/executors/test_base_executor.py b/tests/executors/test_base_executor.py index 04a70c5e8368..da7422737ac4 100644 --- a/tests/executors/test_base_executor.py +++ b/tests/executors/test_base_executor.py @@ 
-145,7 +145,7 @@ def test_gauge_executor_metrics_single_executor(mock_stats_gauge, mock_trigger_t @mock.patch("airflow.executors.sequential_executor.SequentialExecutor.sync") @mock.patch("airflow.executors.base_executor.BaseExecutor.trigger_tasks") @mock.patch("airflow.executors.base_executor.Stats.gauge") -@mock.patch("airflow.executors.executor_loader.ExecutorLoader.get_executor_names") +@mock.patch("airflow.executors.base_executor.ExecutorLoader.get_executor_names") def test_gauge_executor_metrics_with_multiple_executors( mock_get_executor_names, mock_stats_gauge, diff --git a/tests/executors/test_executor_loader.py b/tests/executors/test_executor_loader.py index 2192487a01cf..d9bf81dd3116 100644 --- a/tests/executors/test_executor_loader.py +++ b/tests/executors/test_executor_loader.py @@ -29,7 +29,8 @@ from airflow.executors.local_executor import LocalExecutor from airflow.providers.amazon.aws.executors.ecs.ecs_executor import AwsEcsExecutor from airflow.providers.celery.executors.celery_executor import CeleryExecutor -from tests.test_utils.config import conf_vars + +from dev.tests_common.test_utils.config import conf_vars pytestmark = pytest.mark.skip_if_database_isolation_mode diff --git a/tests/integration/cli/commands/test_celery_command.py b/tests/integration/cli/commands/test_celery_command.py index 61866184337d..186addedb116 100644 --- a/tests/integration/cli/commands/test_celery_command.py +++ b/tests/integration/cli/commands/test_celery_command.py @@ -25,7 +25,8 @@ from airflow.cli import cli_parser from airflow.cli.commands import celery_command from airflow.executors import executor_loader -from tests.test_utils.config import conf_vars + +from dev.tests_common.test_utils.config import conf_vars @pytest.mark.integration("celery") diff --git a/tests/integration/executors/test_celery_executor.py b/tests/integration/executors/test_celery_executor.py index 4ec1cc458c3a..169d3e91356c 100644 --- a/tests/integration/executors/test_celery_executor.py +++ b/tests/integration/executors/test_celery_executor.py @@ -45,7 +45,8 @@ from airflow.models.taskinstancekey import TaskInstanceKey from airflow.providers.standard.operators.bash import BashOperator from airflow.utils.state import State, TaskInstanceState -from tests.test_utils import db + +from dev.tests_common.test_utils import db logger = logging.getLogger(__name__) diff --git a/tests/integration/security/test_kerberos.py b/tests/integration/security/test_kerberos.py index 033b455b56ea..d16b8bc332c7 100644 --- a/tests/integration/security/test_kerberos.py +++ b/tests/integration/security/test_kerberos.py @@ -26,7 +26,8 @@ from airflow.security import kerberos from airflow.security.kerberos import renew_from_kt -from tests.test_utils.config import conf_vars + +from dev.tests_common.test_utils.config import conf_vars @pytest.mark.integration("kerberos") diff --git a/tests/jobs/test_backfill_job.py b/tests/jobs/test_backfill_job.py index 2a5a6689a98f..f1bb6f17d05c 100644 --- a/tests/jobs/test_backfill_job.py +++ b/tests/jobs/test_backfill_job.py @@ -59,16 +59,17 @@ from airflow.utils.types import DagRunType from tests.listeners import dag_listener from tests.models import TEST_DAGS_FOLDER -from tests.test_utils.compat import AIRFLOW_V_3_0_PLUS -from tests.test_utils.config import conf_vars -from tests.test_utils.db import ( + +from dev.tests_common.test_utils.compat import AIRFLOW_V_3_0_PLUS +from dev.tests_common.test_utils.config import conf_vars +from dev.tests_common.test_utils.db import ( clear_db_dags, clear_db_pools, 
clear_db_runs, clear_db_xcom, set_default_pool_slots, ) -from tests.test_utils.mock_executor import MockExecutor +from dev.tests_common.test_utils.mock_executor import MockExecutor if AIRFLOW_V_3_0_PLUS: from airflow.utils.types import DagRunTriggeredByType diff --git a/tests/jobs/test_base_job.py b/tests/jobs/test_base_job.py index e9c9fe94ce73..20079f4f235e 100644 --- a/tests/jobs/test_base_job.py +++ b/tests/jobs/test_base_job.py @@ -33,9 +33,10 @@ from airflow.utils.session import create_session from airflow.utils.state import State from tests.listeners import lifecycle_listener -from tests.test_utils.config import conf_vars from tests.utils.test_helpers import MockJobRunner, SchedulerJobRunner, TriggererJobRunner +from dev.tests_common.test_utils.config import conf_vars + if TYPE_CHECKING: from airflow.serialization.pydantic.job import JobPydantic diff --git a/tests/jobs/test_local_task_job.py b/tests/jobs/test_local_task_job.py index 1a0e186264a7..577fddd84d2b 100644 --- a/tests/jobs/test_local_task_job.py +++ b/tests/jobs/test_local_task_job.py @@ -52,11 +52,12 @@ from airflow.utils.state import State from airflow.utils.timeout import timeout from airflow.utils.types import DagRunType -from tests.test_utils import db -from tests.test_utils.asserts import assert_queries_count -from tests.test_utils.compat import AIRFLOW_V_3_0_PLUS -from tests.test_utils.config import conf_vars -from tests.test_utils.mock_executor import MockExecutor + +from dev.tests_common.test_utils import db +from dev.tests_common.test_utils.asserts import assert_queries_count +from dev.tests_common.test_utils.compat import AIRFLOW_V_3_0_PLUS +from dev.tests_common.test_utils.config import conf_vars +from dev.tests_common.test_utils.mock_executor import MockExecutor if AIRFLOW_V_3_0_PLUS: from airflow.utils.types import DagRunTriggeredByType diff --git a/tests/jobs/test_scheduler_job.py b/tests/jobs/test_scheduler_job.py index bc2be0a12c74..a5748ebaeef9 100644 --- a/tests/jobs/test_scheduler_job.py +++ b/tests/jobs/test_scheduler_job.py @@ -72,10 +72,12 @@ from tests.listeners import dag_listener from tests.listeners.test_listeners import get_listener_manager from tests.models import TEST_DAGS_FOLDER -from tests.test_utils.asserts import assert_queries_count -from tests.test_utils.compat import AIRFLOW_V_3_0_PLUS -from tests.test_utils.config import conf_vars, env_vars -from tests.test_utils.db import ( +from tests.utils.test_timezone import UTC + +from dev.tests_common.test_utils.asserts import assert_queries_count +from dev.tests_common.test_utils.compat import AIRFLOW_V_3_0_PLUS +from dev.tests_common.test_utils.config import conf_vars, env_vars +from dev.tests_common.test_utils.db import ( clear_db_assets, clear_db_backfills, clear_db_dags, @@ -87,9 +89,8 @@ clear_db_sla_miss, set_default_pool_slots, ) -from tests.test_utils.mock_executor import MockExecutor -from tests.test_utils.mock_operators import CustomOperator -from tests.utils.test_timezone import UTC +from dev.tests_common.test_utils.mock_executor import MockExecutor +from dev.tests_common.test_utils.mock_operators import CustomOperator if AIRFLOW_V_3_0_PLUS: from airflow.utils.types import DagRunTriggeredByType diff --git a/tests/jobs/test_triggerer_job.py b/tests/jobs/test_triggerer_job.py index 84b422342e4c..da8405e19731 100644 --- a/tests/jobs/test_triggerer_job.py +++ b/tests/jobs/test_triggerer_job.py @@ -47,7 +47,8 @@ from airflow.utils.state import State, TaskInstanceState from airflow.utils.types import DagRunType from 
tests.core.test_logging_config import reset_logging -from tests.test_utils.db import clear_db_dags, clear_db_runs + +from dev.tests_common.test_utils.db import clear_db_dags, clear_db_runs pytestmark = pytest.mark.db_test diff --git a/tests/jobs/test_triggerer_job_logging.py b/tests/jobs/test_triggerer_job_logging.py index a039c43fb48c..4f39047f050e 100644 --- a/tests/jobs/test_triggerer_job_logging.py +++ b/tests/jobs/test_triggerer_job_logging.py @@ -30,7 +30,8 @@ from airflow.utils.log.file_task_handler import FileTaskHandler from airflow.utils.log.logging_mixin import RedirectStdHandler from airflow.utils.log.trigger_handler import DropTriggerLogsFilter, TriggererHandlerWrapper -from tests.test_utils.config import conf_vars + +from dev.tests_common.test_utils.config import conf_vars def non_pytest_handlers(val): diff --git a/tests/lineage/test_hook.py b/tests/lineage/test_hook.py index c076b19aeced..3fbbfaa021ff 100644 --- a/tests/lineage/test_hook.py +++ b/tests/lineage/test_hook.py @@ -33,7 +33,8 @@ NoOpCollector, get_hook_lineage_collector, ) -from tests.test_utils.mock_plugins import mock_plugin_manager + +from dev.tests_common.test_utils.mock_plugins import mock_plugin_manager class TestHookLineageCollector: diff --git a/tests/lineage/test_lineage.py b/tests/lineage/test_lineage.py index c99c97ceb4e0..ac42b2ee7f81 100644 --- a/tests/lineage/test_lineage.py +++ b/tests/lineage/test_lineage.py @@ -30,7 +30,8 @@ from airflow.utils import timezone from airflow.utils.context import Context from airflow.utils.types import DagRunType -from tests.test_utils.config import conf_vars + +from dev.tests_common.test_utils.config import conf_vars pytestmark = pytest.mark.db_test diff --git a/tests/listeners/class_listener.py b/tests/listeners/class_listener.py index ececa853213a..2b9cefa178d7 100644 --- a/tests/listeners/class_listener.py +++ b/tests/listeners/class_listener.py @@ -19,7 +19,8 @@ from airflow.listeners import hookimpl from airflow.utils.state import DagRunState, TaskInstanceState -from tests.test_utils.compat import AIRFLOW_V_2_10_PLUS + +from dev.tests_common.test_utils.compat import AIRFLOW_V_2_10_PLUS if AIRFLOW_V_2_10_PLUS: diff --git a/tests/listeners/test_dag_import_error_listener.py b/tests/listeners/test_dag_import_error_listener.py index 57fd4f79dd52..886044d27b44 100644 --- a/tests/listeners/test_dag_import_error_listener.py +++ b/tests/listeners/test_dag_import_error_listener.py @@ -32,8 +32,9 @@ from airflow.models.errors import ParseImportError from airflow.utils import timezone from tests.listeners import dag_import_error_listener -from tests.test_utils.config import conf_vars, env_vars -from tests.test_utils.db import ( + +from dev.tests_common.test_utils.config import conf_vars, env_vars +from dev.tests_common.test_utils.db import ( clear_db_dags, clear_db_import_errors, clear_db_jobs, @@ -42,7 +43,7 @@ clear_db_serialized_dags, clear_db_sla_miss, ) -from tests.test_utils.mock_executor import MockExecutor +from dev.tests_common.test_utils.mock_executor import MockExecutor pytestmark = pytest.mark.db_test diff --git a/tests/models/test_backfill.py b/tests/models/test_backfill.py index c45625db335d..0f471fbd5654 100644 --- a/tests/models/test_backfill.py +++ b/tests/models/test_backfill.py @@ -33,7 +33,13 @@ ) from airflow.operators.python import PythonOperator from airflow.utils.state import DagRunState -from tests.test_utils.db import clear_db_backfills, clear_db_dags, clear_db_runs, clear_db_serialized_dags + +from dev.tests_common.test_utils.db import ( + 
clear_db_backfills, + clear_db_dags, + clear_db_runs, + clear_db_serialized_dags, +) pytestmark = [pytest.mark.db_test, pytest.mark.need_serialized_dag] diff --git a/tests/models/test_base.py b/tests/models/test_base.py index 3224616404d6..27eeba2912f5 100644 --- a/tests/models/test_base.py +++ b/tests/models/test_base.py @@ -19,7 +19,8 @@ import pytest from airflow.models.base import get_id_collation_args -from tests.test_utils.config import conf_vars + +from dev.tests_common.test_utils.config import conf_vars pytestmark = [pytest.mark.db_test, pytest.mark.skip_if_database_isolation_mode] diff --git a/tests/models/test_baseoperator.py b/tests/models/test_baseoperator.py index 3c5b7634d5a9..999529e14a99 100644 --- a/tests/models/test_baseoperator.py +++ b/tests/models/test_baseoperator.py @@ -50,7 +50,8 @@ from airflow.utils.trigger_rule import TriggerRule from airflow.utils.types import DagRunType from tests.models import DEFAULT_DATE -from tests.test_utils.mock_operators import DeprecatedOperator, MockOperator + +from dev.tests_common.test_utils.mock_operators import DeprecatedOperator, MockOperator if TYPE_CHECKING: from airflow.utils.context import Context diff --git a/tests/models/test_cleartasks.py b/tests/models/test_cleartasks.py index 380c3c28c514..810453053dd0 100644 --- a/tests/models/test_cleartasks.py +++ b/tests/models/test_cleartasks.py @@ -35,8 +35,9 @@ from airflow.utils.state import DagRunState, State, TaskInstanceState from airflow.utils.types import DagRunType from tests.models import DEFAULT_DATE -from tests.test_utils import db -from tests.test_utils.compat import AIRFLOW_V_3_0_PLUS + +from dev.tests_common.test_utils import db +from dev.tests_common.test_utils.compat import AIRFLOW_V_3_0_PLUS if AIRFLOW_V_3_0_PLUS: from airflow.utils.types import DagRunTriggeredByType diff --git a/tests/models/test_dag.py b/tests/models/test_dag.py index c79ca24e03a2..997ef06329fd 100644 --- a/tests/models/test_dag.py +++ b/tests/models/test_dag.py @@ -102,13 +102,19 @@ StaticTestPriorityWeightStrategy, TestPriorityWeightStrategyPlugin, ) -from tests.test_utils.asserts import assert_queries_count -from tests.test_utils.compat import AIRFLOW_V_3_0_PLUS -from tests.test_utils.config import conf_vars -from tests.test_utils.db import clear_db_assets, clear_db_dags, clear_db_runs, clear_db_serialized_dags -from tests.test_utils.mapping import expand_mapped_task -from tests.test_utils.mock_plugins import mock_plugin_manager -from tests.test_utils.timetables import cron_timetable, delta_timetable + +from dev.tests_common.test_utils.asserts import assert_queries_count +from dev.tests_common.test_utils.compat import AIRFLOW_V_3_0_PLUS +from dev.tests_common.test_utils.config import conf_vars +from dev.tests_common.test_utils.db import ( + clear_db_assets, + clear_db_dags, + clear_db_runs, + clear_db_serialized_dags, +) +from dev.tests_common.test_utils.mapping import expand_mapped_task +from dev.tests_common.test_utils.mock_plugins import mock_plugin_manager +from dev.tests_common.test_utils.timetables import cron_timetable, delta_timetable if AIRFLOW_V_3_0_PLUS: from airflow.utils.types import DagRunTriggeredByType @@ -2296,14 +2302,14 @@ def test_next_dagrun_info_on_29_feb(self): ) def test_access_control_format(self, fab_version, perms, expected_exception, expected_perms): if expected_exception: - with patch("airflow.models.dag.FAB_VERSION", fab_version): + with patch("airflow.providers.fab.__version__", fab_version): with pytest.raises( expected_exception, match="Please upgrade the FAB 
provider to a version >= 1.3.0 to allow use the Dag Level Access Control new format.", ): DAG(dag_id="dag_test", schedule=None, access_control=perms) else: - with patch("airflow.models.dag.FAB_VERSION", fab_version): + with patch("airflow.providers.fab.__version__", fab_version): dag = DAG(dag_id="dag_test", schedule=None, access_control=perms) assert dag.access_control == expected_perms diff --git a/tests/models/test_dagbag.py b/tests/models/test_dagbag.py index 0179fa865291..0b477cc24c63 100644 --- a/tests/models/test_dagbag.py +++ b/tests/models/test_dagbag.py @@ -46,9 +46,10 @@ from airflow.www.security_appless import ApplessAirflowSecurityManager from tests import cluster_policies from tests.models import TEST_DAGS_FOLDER -from tests.test_utils import db -from tests.test_utils.asserts import assert_queries_count -from tests.test_utils.config import conf_vars + +from dev.tests_common.test_utils import db +from dev.tests_common.test_utils.asserts import assert_queries_count +from dev.tests_common.test_utils.config import conf_vars pytestmark = pytest.mark.db_test diff --git a/tests/models/test_dagcode.py b/tests/models/test_dagcode.py index e566e9ceed08..0bc105cfaf42 100644 --- a/tests/models/test_dagcode.py +++ b/tests/models/test_dagcode.py @@ -30,7 +30,8 @@ # To move it to a shared module. from airflow.utils.file import open_maybe_zipped from airflow.utils.session import create_session -from tests.test_utils.db import clear_db_dag_code + +from dev.tests_common.test_utils.db import clear_db_dag_code pytestmark = [pytest.mark.db_test, pytest.mark.skip_if_database_isolation_mode] diff --git a/tests/models/test_dagrun.py b/tests/models/test_dagrun.py index dac2982b0ba3..46773232f77f 100644 --- a/tests/models/test_dagrun.py +++ b/tests/models/test_dagrun.py @@ -47,10 +47,11 @@ from airflow.utils.trigger_rule import TriggerRule from airflow.utils.types import DagRunType from tests.models import DEFAULT_DATE as _DEFAULT_DATE -from tests.test_utils import db -from tests.test_utils.compat import AIRFLOW_V_3_0_PLUS -from tests.test_utils.config import conf_vars -from tests.test_utils.mock_operators import MockOperator + +from dev.tests_common.test_utils import db +from dev.tests_common.test_utils.compat import AIRFLOW_V_3_0_PLUS +from dev.tests_common.test_utils.config import conf_vars +from dev.tests_common.test_utils.mock_operators import MockOperator if AIRFLOW_V_3_0_PLUS: from airflow.utils.types import DagRunTriggeredByType diff --git a/tests/models/test_dagwarning.py b/tests/models/test_dagwarning.py index d3ef85d351a3..9b98e0c31d74 100644 --- a/tests/models/test_dagwarning.py +++ b/tests/models/test_dagwarning.py @@ -25,7 +25,8 @@ from airflow.models import DagModel from airflow.models.dagwarning import DagWarning -from tests.test_utils.db import clear_db_dags + +from dev.tests_common.test_utils.db import clear_db_dags pytestmark = [pytest.mark.db_test, pytest.mark.skip_if_database_isolation_mode] diff --git a/tests/models/test_mappedoperator.py b/tests/models/test_mappedoperator.py index 0571e07e671f..927dbfd73a56 100644 --- a/tests/models/test_mappedoperator.py +++ b/tests/models/test_mappedoperator.py @@ -42,8 +42,13 @@ from airflow.utils.trigger_rule import TriggerRule from airflow.utils.xcom import XCOM_RETURN_KEY from tests.models import DEFAULT_DATE -from tests.test_utils.mapping import expand_mapped_task -from tests.test_utils.mock_operators import MockOperator, MockOperatorWithNestedFields, NestedFields + +from dev.tests_common.test_utils.mapping import expand_mapped_task +from 
dev.tests_common.test_utils.mock_operators import ( + MockOperator, + MockOperatorWithNestedFields, + NestedFields, +) pytestmark = pytest.mark.db_test diff --git a/tests/models/test_param.py b/tests/models/test_param.py index 3d85a957ec5d..2d324478deef 100644 --- a/tests/models/test_param.py +++ b/tests/models/test_param.py @@ -26,7 +26,8 @@ from airflow.serialization.serialized_objects import BaseSerialization from airflow.utils import timezone from airflow.utils.types import DagRunType -from tests.test_utils.db import clear_db_dags, clear_db_runs, clear_db_xcom + +from dev.tests_common.test_utils.db import clear_db_dags, clear_db_runs, clear_db_xcom class TestParam: diff --git a/tests/models/test_pool.py b/tests/models/test_pool.py index 3bd3b4f5ffb0..22852261671c 100644 --- a/tests/models/test_pool.py +++ b/tests/models/test_pool.py @@ -27,7 +27,13 @@ from airflow.utils import timezone from airflow.utils.session import create_session from airflow.utils.state import State -from tests.test_utils.db import clear_db_dags, clear_db_pools, clear_db_runs, set_default_pool_slots + +from dev.tests_common.test_utils.db import ( + clear_db_dags, + clear_db_pools, + clear_db_runs, + set_default_pool_slots, +) pytestmark = [pytest.mark.db_test, pytest.mark.skip_if_database_isolation_mode] diff --git a/tests/models/test_renderedtifields.py b/tests/models/test_renderedtifields.py index 1de83954ee4b..ea22d31871db 100644 --- a/tests/models/test_renderedtifields.py +++ b/tests/models/test_renderedtifields.py @@ -34,8 +34,9 @@ from airflow.providers.standard.operators.bash import BashOperator from airflow.utils.task_instance_session import set_current_task_instance_session from airflow.utils.timezone import datetime -from tests.test_utils.asserts import assert_queries_count -from tests.test_utils.db import clear_db_dags, clear_db_runs, clear_rendered_ti_fields + +from dev.tests_common.test_utils.asserts import assert_queries_count +from dev.tests_common.test_utils.db import clear_db_dags, clear_db_runs, clear_rendered_ti_fields pytestmark = pytest.mark.db_test diff --git a/tests/models/test_serialized_dag.py b/tests/models/test_serialized_dag.py index 93845e95832c..5e6714feda37 100644 --- a/tests/models/test_serialized_dag.py +++ b/tests/models/test_serialized_dag.py @@ -36,8 +36,9 @@ from airflow.settings import json from airflow.utils.hashlib_wrapper import md5 from airflow.utils.session import create_session -from tests.test_utils import db -from tests.test_utils.asserts import assert_queries_count + +from dev.tests_common.test_utils import db +from dev.tests_common.test_utils.asserts import assert_queries_count pytestmark = pytest.mark.db_test diff --git a/tests/models/test_skipmixin.py b/tests/models/test_skipmixin.py index c24bfb7c8644..0f2406c5737c 100644 --- a/tests/models/test_skipmixin.py +++ b/tests/models/test_skipmixin.py @@ -31,7 +31,8 @@ from airflow.utils import timezone from airflow.utils.state import State from airflow.utils.types import DagRunType -from tests.test_utils.db import clear_db_dags, clear_db_runs + +from dev.tests_common.test_utils.db import clear_db_dags, clear_db_runs pytestmark = pytest.mark.db_test diff --git a/tests/models/test_taskinstance.py b/tests/models/test_taskinstance.py index c09d3575d1ec..03566dca30bc 100644 --- a/tests/models/test_taskinstance.py +++ b/tests/models/test_taskinstance.py @@ -99,11 +99,12 @@ from airflow.utils.types import DagRunType from airflow.utils.xcom import XCOM_RETURN_KEY from tests.models import DEFAULT_DATE, TEST_DAGS_FOLDER -from 
tests.test_utils import db -from tests.test_utils.compat import AIRFLOW_V_3_0_PLUS -from tests.test_utils.config import conf_vars -from tests.test_utils.db import clear_db_connections, clear_db_runs -from tests.test_utils.mock_operators import MockOperator + +from dev.tests_common.test_utils import db +from dev.tests_common.test_utils.compat import AIRFLOW_V_3_0_PLUS +from dev.tests_common.test_utils.config import conf_vars +from dev.tests_common.test_utils.db import clear_db_connections, clear_db_runs +from dev.tests_common.test_utils.mock_operators import MockOperator if AIRFLOW_V_3_0_PLUS: from airflow.utils.types import DagRunTriggeredByType diff --git a/tests/models/test_timestamp.py b/tests/models/test_timestamp.py index a2d4a92f1666..4888c2fdac26 100644 --- a/tests/models/test_timestamp.py +++ b/tests/models/test_timestamp.py @@ -25,7 +25,8 @@ from airflow.utils import timezone from airflow.utils.session import provide_session from airflow.utils.state import State -from tests.test_utils.db import clear_db_dags, clear_db_logs, clear_db_runs + +from dev.tests_common.test_utils.db import clear_db_dags, clear_db_logs, clear_db_runs pytestmark = pytest.mark.db_test diff --git a/tests/models/test_trigger.py b/tests/models/test_trigger.py index 407d6edd753a..b44e4b9f6e88 100644 --- a/tests/models/test_trigger.py +++ b/tests/models/test_trigger.py @@ -42,7 +42,8 @@ from airflow.utils.session import create_session from airflow.utils.state import State from airflow.utils.xcom import XCOM_RETURN_KEY -from tests.test_utils.config import conf_vars + +from dev.tests_common.test_utils.config import conf_vars pytestmark = pytest.mark.db_test diff --git a/tests/models/test_variable.py b/tests/models/test_variable.py index e9509b4569db..dfe448092c84 100644 --- a/tests/models/test_variable.py +++ b/tests/models/test_variable.py @@ -27,8 +27,9 @@ from airflow.models import Variable, crypto, variable from airflow.secrets.cache import SecretCache from airflow.secrets.metastore import MetastoreBackend -from tests.test_utils import db -from tests.test_utils.config import conf_vars + +from dev.tests_common.test_utils import db +from dev.tests_common.test_utils.config import conf_vars pytestmark = pytest.mark.db_test diff --git a/tests/models/test_xcom.py b/tests/models/test_xcom.py index 07533ec944be..17ea5fa4ff11 100644 --- a/tests/models/test_xcom.py +++ b/tests/models/test_xcom.py @@ -35,8 +35,9 @@ from airflow.utils import timezone from airflow.utils.session import create_session from airflow.utils.xcom import XCOM_RETURN_KEY -from tests.test_utils.config import conf_vars -from tests.www.test_utils import is_db_isolation_mode + +from dev.tests_common.test_utils.config import conf_vars +from dev.tests_common.test_utils.db import is_db_isolation_mode pytestmark = pytest.mark.db_test diff --git a/tests/models/test_xcom_arg.py b/tests/models/test_xcom_arg.py index fcc2e546009c..b161020d1fb9 100644 --- a/tests/models/test_xcom_arg.py +++ b/tests/models/test_xcom_arg.py @@ -22,8 +22,9 @@ from airflow.operators.python import PythonOperator from airflow.providers.standard.operators.bash import BashOperator from airflow.utils.types import NOTSET -from tests.test_utils.config import conf_vars -from tests.test_utils.db import clear_db_dags, clear_db_runs + +from dev.tests_common.test_utils.config import conf_vars +from dev.tests_common.test_utils.db import clear_db_dags, clear_db_runs pytestmark = pytest.mark.db_test diff --git a/tests/operators/test_branch_operator.py b/tests/operators/test_branch_operator.py 
index 377473e0cc38..8943bf580e50 100644 --- a/tests/operators/test_branch_operator.py +++ b/tests/operators/test_branch_operator.py @@ -28,7 +28,8 @@ from airflow.utils.state import State from airflow.utils.task_group import TaskGroup from airflow.utils.types import DagRunType -from tests.test_utils.compat import AIRFLOW_V_3_0_PLUS + +from dev.tests_common.test_utils.compat import AIRFLOW_V_3_0_PLUS if AIRFLOW_V_3_0_PLUS: from airflow.utils.types import DagRunTriggeredByType diff --git a/tests/operators/test_email.py b/tests/operators/test_email.py index 04c4cf7fd8f6..c86e0f94f614 100644 --- a/tests/operators/test_email.py +++ b/tests/operators/test_email.py @@ -24,7 +24,8 @@ from airflow.operators.email import EmailOperator from airflow.utils import timezone -from tests.test_utils.config import conf_vars + +from dev.tests_common.test_utils.config import conf_vars pytestmark = pytest.mark.db_test diff --git a/tests/operators/test_generic_transfer.py b/tests/operators/test_generic_transfer.py index c877d7bed99c..e1281ed4b247 100644 --- a/tests/operators/test_generic_transfer.py +++ b/tests/operators/test_generic_transfer.py @@ -60,7 +60,7 @@ def teardown_method(self): ], ) def test_mysql_to_mysql(self, client): - from tests.providers.mysql.hooks.test_mysql import MySqlContext + from providers.tests.mysql.hooks.test_mysql import MySqlContext with MySqlContext(client): sql = "SELECT * FROM connection;" diff --git a/tests/operators/test_latest_only_operator.py b/tests/operators/test_latest_only_operator.py index 58a43ae7e66b..78fabc4ca922 100644 --- a/tests/operators/test_latest_only_operator.py +++ b/tests/operators/test_latest_only_operator.py @@ -30,8 +30,9 @@ from airflow.utils.state import State from airflow.utils.trigger_rule import TriggerRule from airflow.utils.types import DagRunType -from tests.test_utils.compat import AIRFLOW_V_3_0_PLUS -from tests.test_utils.db import clear_db_runs, clear_db_xcom + +from dev.tests_common.test_utils.compat import AIRFLOW_V_3_0_PLUS +from dev.tests_common.test_utils.db import clear_db_runs, clear_db_xcom if AIRFLOW_V_3_0_PLUS: from airflow.utils.types import DagRunTriggeredByType diff --git a/tests/operators/test_python.py b/tests/operators/test_python.py index c98dc7018a4a..3dd8f506ee98 100644 --- a/tests/operators/test_python.py +++ b/tests/operators/test_python.py @@ -68,9 +68,10 @@ from airflow.utils.state import DagRunState, State, TaskInstanceState from airflow.utils.trigger_rule import TriggerRule from airflow.utils.types import NOTSET, DagRunType -from tests.test_utils import AIRFLOW_MAIN_FOLDER -from tests.test_utils.compat import AIRFLOW_V_3_0_PLUS -from tests.test_utils.db import clear_db_runs + +from dev.tests_common.test_utils import AIRFLOW_MAIN_FOLDER +from dev.tests_common.test_utils.compat import AIRFLOW_V_3_0_PLUS +from dev.tests_common.test_utils.db import clear_db_runs if AIRFLOW_V_3_0_PLUS: from airflow.utils.types import DagRunTriggeredByType diff --git a/tests/plugins/test_plugin.py b/tests/plugins/test_plugin.py index 995b14e23e75..fd474766d41e 100644 --- a/tests/plugins/test_plugin.py +++ b/tests/plugins/test_plugin.py @@ -35,7 +35,8 @@ from airflow.timetables.interval import CronDataIntervalTimetable from tests.listeners import empty_listener from tests.listeners.class_listener import ClassBasedListener -from tests.test_utils.mock_operators import ( + +from dev.tests_common.test_utils.mock_operators import ( AirflowLink, AirflowLink2, CustomBaseIndexOpLink, diff --git a/tests/plugins/test_plugins_manager.py 
b/tests/plugins/test_plugins_manager.py index 7e4bedbfb8c1..1c54c3ebb195 100644 --- a/tests/plugins/test_plugins_manager.py +++ b/tests/plugins/test_plugins_manager.py @@ -33,8 +33,9 @@ from airflow.plugins_manager import AirflowPlugin from airflow.utils.module_loading import qualname from airflow.www import app as application -from tests.test_utils.config import conf_vars -from tests.test_utils.mock_plugins import mock_plugin_manager + +from dev.tests_common.test_utils.config import conf_vars +from dev.tests_common.test_utils.mock_plugins import mock_plugin_manager pytestmark = pytest.mark.db_test diff --git a/tests/secrets/test_cache.py b/tests/secrets/test_cache.py index 8d4508086869..40ab4aa290df 100644 --- a/tests/secrets/test_cache.py +++ b/tests/secrets/test_cache.py @@ -22,7 +22,8 @@ import pytest from airflow.secrets.cache import SecretCache -from tests.test_utils.config import conf_vars + +from dev.tests_common.test_utils.config import conf_vars def test_cache_disabled_by_default(): diff --git a/tests/security/test_kerberos.py b/tests/security/test_kerberos.py index b424edb82089..2b661fe9a147 100644 --- a/tests/security/test_kerberos.py +++ b/tests/security/test_kerberos.py @@ -25,7 +25,8 @@ from airflow.security import kerberos from airflow.security.kerberos import get_kerberos_principle, renew_from_kt -from tests.test_utils.config import conf_vars + +from dev.tests_common.test_utils.config import conf_vars pytestmark = pytest.mark.db_test diff --git a/tests/sensors/test_base.py b/tests/sensors/test_base.py index 43c8fa5a64bd..8a3b2b5c5c0f 100644 --- a/tests/sensors/test_base.py +++ b/tests/sensors/test_base.py @@ -57,7 +57,8 @@ from airflow.utils.session import create_session from airflow.utils.state import State from airflow.utils.timezone import datetime -from tests.test_utils import db + +from dev.tests_common.test_utils import db pytestmark = pytest.mark.db_test @@ -68,7 +69,7 @@ TEST_DAG_ID = "unit_test_dag" DUMMY_OP = "dummy_op" SENSOR_OP = "sensor_op" -DEV_NULL = "dev/null" +DEV_NULL = "/dev/null" @pytest.fixture diff --git a/tests/sensors/test_external_task_sensor.py b/tests/sensors/test_external_task_sensor.py index 9947a197a033..f1739e6b8441 100644 --- a/tests/sensors/test_external_task_sensor.py +++ b/tests/sensors/test_external_task_sensor.py @@ -52,9 +52,10 @@ from airflow.utils.timezone import datetime from airflow.utils.types import DagRunType from tests.models import TEST_DAGS_FOLDER -from tests.test_utils.compat import AIRFLOW_V_3_0_PLUS -from tests.test_utils.db import clear_db_runs -from tests.test_utils.mock_operators import MockOperator + +from dev.tests_common.test_utils.compat import AIRFLOW_V_3_0_PLUS +from dev.tests_common.test_utils.db import clear_db_runs +from dev.tests_common.test_utils.mock_operators import MockOperator if AIRFLOW_V_3_0_PLUS: from airflow.utils.types import DagRunTriggeredByType diff --git a/tests/serialization/test_dag_serialization.py b/tests/serialization/test_dag_serialization.py index f0f517042314..35f5e196a222 100644 --- a/tests/serialization/test_dag_serialization.py +++ b/tests/serialization/test_dag_serialization.py @@ -80,9 +80,14 @@ from airflow.utils.operator_resources import Resources from airflow.utils.task_group import TaskGroup from airflow.utils.xcom import XCOM_RETURN_KEY -from tests.test_utils.compat import BaseOperatorLink -from tests.test_utils.mock_operators import AirflowLink2, CustomOperator, GoogleLink, MockOperator -from tests.test_utils.timetables import CustomSerializationTimetable, cron_timetable, 
delta_timetable + +from dev.tests_common.test_utils.compat import BaseOperatorLink +from dev.tests_common.test_utils.mock_operators import AirflowLink2, CustomOperator, GoogleLink, MockOperator +from dev.tests_common.test_utils.timetables import ( + CustomSerializationTimetable, + cron_timetable, + delta_timetable, +) if TYPE_CHECKING: from airflow.utils.context import Context @@ -185,7 +190,9 @@ "max_retry_delay": 600.0, "downstream_task_ids": [], "_is_empty": False, - "_operator_extra_links": [{"tests.test_utils.mock_operators.CustomOpLink": {}}], + "_operator_extra_links": [ + {"dev.tests_common.test_utils.mock_operators.CustomOpLink": {}} + ], "ui_color": "#fff", "ui_fgcolor": "#000", "template_ext": [], @@ -193,7 +200,7 @@ "template_fields_renderers": {}, "_task_type": "CustomOperator", "_operator_name": "@custom", - "_task_module": "tests.test_utils.mock_operators", + "_task_module": "dev.tests_common.test_utils.mock_operators", "pool": "default_pool", "is_setup": False, "is_teardown": False, @@ -233,7 +240,7 @@ ) CUSTOM_TIMETABLE_SERIALIZED = { - "__type": "tests.test_utils.timetables.CustomSerializationTimetable", + "__type": "dev.tests_common.test_utils.timetables.CustomSerializationTimetable", "__var": {"value": "foo"}, } @@ -310,8 +317,8 @@ def get_excluded_patterns() -> Generator[str, None, None]: if python_version in provider_info.get("excluded-python-versions"): provider_path = provider.replace(".", "/") yield f"airflow/providers/{provider_path}/" - yield f"tests/providers/{provider_path}/" - yield f"tests/system/providers/{provider_path}/" + yield f"providers/tests/{provider_path}/" + yield f"providers/tests/system/{provider_path}/" def collect_dags(dag_folder=None): @@ -328,10 +335,10 @@ def collect_dags(dag_folder=None): else: patterns = [ "airflow/example_dags", - "airflow/providers/*/example_dags", # TODO: Remove once AIP-47 is completed - "airflow/providers/*/*/example_dags", # TODO: Remove once AIP-47 is completed - "tests/system/providers/*/", - "tests/system/providers/*/*/", + "providers/src/airflow/providers/*/example_dags", # TODO: Remove once AIP-47 is completed + "providers/src/airflow/providers/*/*/example_dags", # TODO: Remove once AIP-47 is completed + "providers/tests/system/*/", + "providers/tests/system/*/*/", ] excluded_patterns = [f"{ROOT_FOLDER}/{excluded_pattern}" for excluded_pattern in get_excluded_patterns()] for pattern in patterns: @@ -366,7 +373,7 @@ def timetable_plugin(monkeypatch): monkeypatch.setattr( plugins_manager, "timetable_classes", - {"tests.test_utils.timetables.CustomSerializationTimetable": CustomSerializationTimetable}, + {"dev.tests_common.test_utils.timetables.CustomSerializationTimetable": CustomSerializationTimetable}, ) @@ -462,7 +469,7 @@ def test_dag_serialization_unregistered_custom_timetable(self): message = ( "Failed to serialize DAG 'simple_dag': Timetable class " - "'tests.test_utils.timetables.CustomSerializationTimetable' " + "'dev.tests_common.test_utils.timetables.CustomSerializationTimetable' " "is not registered or " "you have a top level database access that disrupted the session. " "Please check the airflow best practices documentation." 
@@ -529,8 +536,8 @@ def test_deserialization_across_process(self): def test_roundtrip_provider_example_dags(self): dags = collect_dags( [ - "airflow/providers/*/example_dags", - "airflow/providers/*/*/example_dags", + "providers/src/airflow/providers/*/example_dags", + "providers/src/airflow/providers/*/*/example_dags", ] ) @@ -827,7 +834,7 @@ def test_deserialization_timetable_unregistered(self): SerializedDAG.from_dict(serialized) message = ( "Timetable class " - "'tests.test_utils.timetables.CustomSerializationTimetable' " + "'dev.tests_common.test_utils.timetables.CustomSerializationTimetable' " "is not registered or " "you have a top level database access that disrupted the session. " "Please check the airflow best practices documentation." @@ -979,15 +986,15 @@ def test_task_params_roundtrip(self, val, expected_val): [ pytest.param( "true", - [{"tests.test_utils.mock_operators.CustomOpLink": {}}], + [{"dev.tests_common.test_utils.mock_operators.CustomOpLink": {}}], {"Google Custom": "http://google.com/custom_base_link?search=true"}, id="non-indexed-link", ), pytest.param( ["echo", "true"], [ - {"tests.test_utils.mock_operators.CustomBaseIndexOpLink": {"index": 0}}, - {"tests.test_utils.mock_operators.CustomBaseIndexOpLink": {"index": 1}}, + {"dev.tests_common.test_utils.mock_operators.CustomBaseIndexOpLink": {"index": 0}}, + {"dev.tests_common.test_utils.mock_operators.CustomBaseIndexOpLink": {"index": 1}}, ], { "BigQuery Console #1": "https://console.cloud.google.com/bigquery?j=echo", @@ -1290,7 +1297,7 @@ def test_operator_deserialize_old_names(self): "template_fields": ["bash_command"], "template_fields_renderers": {}, "_task_type": "CustomOperator", - "_task_module": "tests.test_utils.mock_operators", + "_task_module": "dev.tests_common.test_utils.mock_operators", "pool": "default_pool", "ui_color": "#fff", "ui_fgcolor": "#000", @@ -2347,7 +2354,7 @@ def test_operator_expand_xcomarg_serde(): "_is_empty": False, "_is_mapped": True, "_needs_expansion": True, - "_task_module": "tests.test_utils.mock_operators", + "_task_module": "dev.tests_common.test_utils.mock_operators", "_task_type": "MockOperator", "downstream_task_ids": [], "expand_input": { @@ -2402,7 +2409,7 @@ def test_operator_expand_kwargs_literal_serde(strict): "_is_empty": False, "_is_mapped": True, "_needs_expansion": True, - "_task_module": "tests.test_utils.mock_operators", + "_task_module": "dev.tests_common.test_utils.mock_operators", "_task_type": "MockOperator", "downstream_task_ids": [], "expand_input": { @@ -2457,7 +2464,7 @@ def test_operator_expand_kwargs_xcomarg_serde(strict): "_is_empty": False, "_is_mapped": True, "_needs_expansion": True, - "_task_module": "tests.test_utils.mock_operators", + "_task_module": "dev.tests_common.test_utils.mock_operators", "_task_type": "MockOperator", "downstream_task_ids": [], "expand_input": { @@ -2820,7 +2827,7 @@ def operator_extra_links(self): "_disallow_kwargs_override": False, "_expand_input_attr": "expand_input", "downstream_task_ids": [], - "_operator_extra_links": [{"tests.test_utils.mock_operators.AirflowLink2": {}}], + "_operator_extra_links": [{"dev.tests_common.test_utils.mock_operators.AirflowLink2": {}}], "ui_color": "#fff", "ui_fgcolor": "#000", "template_ext": [], diff --git a/tests/serialization/test_pydantic_models.py b/tests/serialization/test_pydantic_models.py index 55c61ea22026..423dc41a3806 100644 --- a/tests/serialization/test_pydantic_models.py +++ b/tests/serialization/test_pydantic_models.py @@ -45,7 +45,8 @@ from airflow.utils.state import
State from airflow.utils.types import AttributeRemoved, DagRunType from tests.models import DEFAULT_DATE -from tests.test_utils.compat import AIRFLOW_V_3_0_PLUS + +from dev.tests_common.test_utils.compat import AIRFLOW_V_3_0_PLUS if AIRFLOW_V_3_0_PLUS: from airflow.utils.types import DagRunTriggeredByType diff --git a/tests/serialization/test_serde.py b/tests/serialization/test_serde.py index a36013d20cfa..5c915839c642 100644 --- a/tests/serialization/test_serde.py +++ b/tests/serialization/test_serde.py @@ -43,7 +43,8 @@ serialize, ) from airflow.utils.module_loading import import_string, iter_namespace, qualname -from tests.test_utils.config import conf_vars + +from dev.tests_common.test_utils.config import conf_vars @pytest.fixture diff --git a/tests/serialization/test_serialized_objects.py b/tests/serialization/test_serialized_objects.py index 0bc8a67ef879..67fdceeaf08d 100644 --- a/tests/serialization/test_serialized_objects.py +++ b/tests/serialization/test_serialized_objects.py @@ -471,7 +471,8 @@ def test_all_pydantic_models_round_trip(): @pytest.mark.db_test def test_serialized_mapped_operator_unmap(dag_maker): from airflow.serialization.serialized_objects import SerializedDAG - from tests.test_utils.mock_operators import MockOperator + + from dev.tests_common.test_utils.mock_operators import MockOperator with dag_maker(dag_id="dag") as dag: MockOperator(task_id="task1", arg1="x") diff --git a/tests/system/core/example_external_task_child_deferrable.py b/tests/system/core/example_external_task_child_deferrable.py index 781ad4ea5ef1..4e8eca3b15eb 100644 --- a/tests/system/core/example_external_task_child_deferrable.py +++ b/tests/system/core/example_external_task_child_deferrable.py @@ -34,7 +34,7 @@ ) -from tests.system.utils import get_test_run +from dev.tests_common.test_utils.system_tests import get_test_run # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) test_run = get_test_run(dag) diff --git a/tests/system/core/example_external_task_parent_deferrable.py b/tests/system/core/example_external_task_parent_deferrable.py index 8896fdc1b9be..1a64f7e98ae8 100644 --- a/tests/system/core/example_external_task_parent_deferrable.py +++ b/tests/system/core/example_external_task_parent_deferrable.py @@ -57,14 +57,14 @@ start >> [trigger_child_task, external_task_sensor] >> end - from tests.system.utils.watcher import watcher + from dev.tests_common.test_utils.watcher import watcher # This test needs watcher in order to properly mark success/failure # when "teardown" task with trigger rule is part of the DAG list(dag.tasks) >> watcher() -from tests.system.utils import get_test_run +from dev.tests_common.test_utils.system_tests import get_test_run # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) test_run = get_test_run(dag) diff --git a/tests/system/example_empty.py b/tests/system/example_empty.py index 143c79fc2f99..de316a11eee7 100644 --- a/tests/system/example_empty.py +++ b/tests/system/example_empty.py @@ -35,14 +35,14 @@ chain(task) - from tests.system.utils.watcher import watcher + from dev.tests_common.test_utils.watcher import watcher # This test needs watcher in order to properly mark success/failure # when "tearDown" task with trigger rule is part of the DAG list(dag.tasks) >> watcher() -from tests.system.utils import get_test_run # noqa: E402 +from dev.tests_common.test_utils.system_tests import get_test_run # noqa: E402 # Needed to run the example DAG with pytest (see: 
tests/system/README.md#run_via_pytest) test_run = get_test_run(dag) diff --git a/tests/system/providers/microsoft/azure/__init__.py b/tests/system/providers/microsoft/azure/__init__.py deleted file mode 100644 index 217e5db96078..000000000000 --- a/tests/system/providers/microsoft/azure/__init__.py +++ /dev/null @@ -1,17 +0,0 @@ -# -# Licensed to the Apache Software Foundation (ASF) under one -# or more contributor license agreements. See the NOTICE file -# distributed with this work for additional information -# regarding copyright ownership. The ASF licenses this file -# to you under the Apache License, Version 2.0 (the -# "License"); you may not use this file except in compliance -# with the License. You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, -# software distributed under the License is distributed on an -# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -# KIND, either express or implied. See the License for the -# specific language governing permissions and limitations -# under the License. diff --git a/tests/system/providers/papermill/__init__.py b/tests/system/providers/papermill/__init__.py deleted file mode 100644 index 217e5db96078..000000000000 --- a/tests/system/providers/papermill/__init__.py +++ /dev/null @@ -1,17 +0,0 @@ -# -# Licensed to the Apache Software Foundation (ASF) under one -# or more contributor license agreements. See the NOTICE file -# distributed with this work for additional information -# regarding copyright ownership. The ASF licenses this file -# to you under the Apache License, Version 2.0 (the -# "License"); you may not use this file except in compliance -# with the License. You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, -# software distributed under the License is distributed on an -# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -# KIND, either express or implied. See the License for the -# specific language governing permissions and limitations -# under the License. 
diff --git a/tests/task/task_runner/test_standard_task_runner.py b/tests/task/task_runner/test_standard_task_runner.py index 54b8a4d4173e..55e3d34192d6 100644 --- a/tests/task/task_runner/test_standard_task_runner.py +++ b/tests/task/task_runner/test_standard_task_runner.py @@ -42,8 +42,9 @@ from airflow.utils.timeout import timeout from tests.listeners import xcom_listener from tests.listeners.file_write_listener import FileWriteListener -from tests.test_utils.compat import AIRFLOW_V_3_0_PLUS -from tests.test_utils.db import clear_db_runs + +from dev.tests_common.test_utils.compat import AIRFLOW_V_3_0_PLUS +from dev.tests_common.test_utils.db import clear_db_runs if AIRFLOW_V_3_0_PLUS: from airflow.utils.types import DagRunTriggeredByType diff --git a/tests/ti_deps/deps/test_pool_slots_available_dep.py b/tests/ti_deps/deps/test_pool_slots_available_dep.py index c81657bbb648..d7fc33aa41bf 100644 --- a/tests/ti_deps/deps/test_pool_slots_available_dep.py +++ b/tests/ti_deps/deps/test_pool_slots_available_dep.py @@ -26,7 +26,8 @@ from airflow.ti_deps.deps.pool_slots_available_dep import PoolSlotsAvailableDep from airflow.utils.session import create_session from airflow.utils.state import TaskInstanceState -from tests.test_utils import db + +from dev.tests_common.test_utils import db pytestmark = [pytest.mark.db_test, pytest.mark.skip_if_database_isolation_mode] diff --git a/tests/ti_deps/deps/test_prev_dagrun_dep.py b/tests/ti_deps/deps/test_prev_dagrun_dep.py index 4354a9237d5f..638f4b69d7e9 100644 --- a/tests/ti_deps/deps/test_prev_dagrun_dep.py +++ b/tests/ti_deps/deps/test_prev_dagrun_dep.py @@ -29,8 +29,9 @@ from airflow.utils.state import DagRunState, TaskInstanceState from airflow.utils.timezone import convert_to_utc, datetime from airflow.utils.types import DagRunType -from tests.test_utils.compat import AIRFLOW_V_3_0_PLUS -from tests.test_utils.db import clear_db_runs + +from dev.tests_common.test_utils.compat import AIRFLOW_V_3_0_PLUS +from dev.tests_common.test_utils.db import clear_db_runs if AIRFLOW_V_3_0_PLUS: from airflow.utils.types import DagRunTriggeredByType diff --git a/tests/ti_deps/deps/test_ready_to_reschedule_dep.py b/tests/ti_deps/deps/test_ready_to_reschedule_dep.py index 568d6abf025c..d137d43bbf38 100644 --- a/tests/ti_deps/deps/test_ready_to_reschedule_dep.py +++ b/tests/ti_deps/deps/test_ready_to_reschedule_dep.py @@ -30,7 +30,8 @@ from airflow.utils import timezone from airflow.utils.session import create_session from airflow.utils.state import State -from tests.test_utils import db + +from dev.tests_common.test_utils import db pytestmark = [pytest.mark.db_test, pytest.mark.skip_if_database_isolation_mode] diff --git a/tests/utils/log/test_log_reader.py b/tests/utils/log/test_log_reader.py index 3216222909b7..53e702efd8a8 100644 --- a/tests/utils/log/test_log_reader.py +++ b/tests/utils/log/test_log_reader.py @@ -38,8 +38,9 @@ from airflow.utils.log.logging_mixin import ExternalLoggingMixin from airflow.utils.state import TaskInstanceState from airflow.utils.types import DagRunType -from tests.test_utils.config import conf_vars -from tests.test_utils.db import clear_db_dags, clear_db_runs + +from dev.tests_common.test_utils.config import conf_vars +from dev.tests_common.test_utils.db import clear_db_dags, clear_db_runs pytestmark = [pytest.mark.db_test, pytest.mark.skip_if_database_isolation_mode] diff --git a/tests/utils/log/test_secrets_masker.py b/tests/utils/log/test_secrets_masker.py index 5781139677df..f3fed73675cc 100644 --- 
a/tests/utils/log/test_secrets_masker.py +++ b/tests/utils/log/test_secrets_masker.py @@ -38,7 +38,8 @@ should_hide_value_for_key, ) from airflow.utils.state import DagRunState, JobState, State, TaskInstanceState -from tests.test_utils.config import conf_vars + +from dev.tests_common.test_utils.config import conf_vars pytestmark = pytest.mark.enable_redact p = "password" diff --git a/tests/utils/log/test_task_context_logger.py b/tests/utils/log/test_task_context_logger.py index deca4e55484b..3a08947ede09 100644 --- a/tests/utils/log/test_task_context_logger.py +++ b/tests/utils/log/test_task_context_logger.py @@ -24,8 +24,9 @@ from airflow.models.taskinstancekey import TaskInstanceKey from airflow.utils.log.task_context_logger import TaskContextLogger -from tests.test_utils.compat import AIRFLOW_V_3_0_PLUS -from tests.test_utils.config import conf_vars + +from dev.tests_common.test_utils.compat import AIRFLOW_V_3_0_PLUS +from dev.tests_common.test_utils.config import conf_vars if AIRFLOW_V_3_0_PLUS: from airflow.utils.types import DagRunTriggeredByType diff --git a/tests/utils/test_db.py b/tests/utils/test_db.py index ce77f80297fc..0f3a56a5d1c8 100644 --- a/tests/utils/test_db.py +++ b/tests/utils/test_db.py @@ -50,7 +50,8 @@ upgradedb, ) from airflow.utils.db_manager import RunDBManager -from tests.test_utils.config import conf_vars + +from dev.tests_common.test_utils.config import conf_vars pytestmark = [pytest.mark.db_test, pytest.mark.skip_if_database_isolation_mode] diff --git a/tests/utils/test_db_cleanup.py b/tests/utils/test_db_cleanup.py index 0a8cd9c90c96..3b5a3b222723 100644 --- a/tests/utils/test_db_cleanup.py +++ b/tests/utils/test_db_cleanup.py @@ -48,7 +48,13 @@ run_cleanup, ) from airflow.utils.session import create_session -from tests.test_utils.db import clear_db_assets, clear_db_dags, clear_db_runs, drop_tables_with_prefix + +from dev.tests_common.test_utils.db import ( + clear_db_assets, + clear_db_dags, + clear_db_runs, + drop_tables_with_prefix, +) pytestmark = [pytest.mark.db_test, pytest.mark.skip_if_database_isolation_mode] diff --git a/tests/utils/test_db_manager.py b/tests/utils/test_db_manager.py index 1c8a6c6c7dfc..975ea65499aa 100644 --- a/tests/utils/test_db_manager.py +++ b/tests/utils/test_db_manager.py @@ -25,7 +25,8 @@ from airflow.models import Base from airflow.utils.db import downgrade, initdb from airflow.utils.db_manager import BaseDBManager, RunDBManager -from tests.test_utils.config import conf_vars + +from dev.tests_common.test_utils.config import conf_vars pytestmark = [pytest.mark.db_test] diff --git a/tests/utils/test_dot_renderer.py b/tests/utils/test_dot_renderer.py index 0376848fce82..d1521ca6e0f6 100644 --- a/tests/utils/test_dot_renderer.py +++ b/tests/utils/test_dot_renderer.py @@ -29,8 +29,9 @@ from airflow.utils import dot_renderer, timezone from airflow.utils.state import State from airflow.utils.task_group import TaskGroup -from tests.test_utils.compat import BashOperator -from tests.test_utils.db import clear_db_dags + +from dev.tests_common.test_utils.compat import BashOperator +from dev.tests_common.test_utils.db import clear_db_dags START_DATE = timezone.utcnow() diff --git a/tests/utils/test_email.py b/tests/utils/test_email.py index bf5f3fc0a18f..b47dcbc87585 100644 --- a/tests/utils/test_email.py +++ b/tests/utils/test_email.py @@ -29,7 +29,8 @@ from airflow.configuration import conf from airflow.utils import email -from tests.test_utils.config import conf_vars + +from dev.tests_common.test_utils.config import conf_vars EMAILS 
= ["test1@example.com", "test2@example.com"] diff --git a/tests/utils/test_file.py b/tests/utils/test_file.py index 50cc37ca57ce..2ffb89ec3419 100644 --- a/tests/utils/test_file.py +++ b/tests/utils/test_file.py @@ -27,7 +27,8 @@ from airflow.utils import file as file_utils from airflow.utils.file import correct_maybe_zipped, find_path_from_directory, open_maybe_zipped from tests.models import TEST_DAGS_FOLDER -from tests.test_utils.config import conf_vars + +from dev.tests_common.test_utils.config import conf_vars def might_contain_dag(file_path: str, zip_file: zipfile.ZipFile | None = None): diff --git a/tests/utils/test_helpers.py b/tests/utils/test_helpers.py index 478604186e3a..0b0046b1ba08 100644 --- a/tests/utils/test_helpers.py +++ b/tests/utils/test_helpers.py @@ -37,8 +37,9 @@ validate_key, ) from airflow.utils.types import NOTSET -from tests.test_utils.config import conf_vars -from tests.test_utils.db import clear_db_dags, clear_db_runs + +from dev.tests_common.test_utils.config import conf_vars +from dev.tests_common.test_utils.db import clear_db_dags, clear_db_runs if TYPE_CHECKING: from airflow.jobs.job import Job diff --git a/tests/utils/test_log_handlers.py b/tests/utils/test_log_handlers.py index 8de3b03a1af1..4be3564567eb 100644 --- a/tests/utils/test_log_handlers.py +++ b/tests/utils/test_log_handlers.py @@ -54,8 +54,9 @@ from airflow.utils.state import State, TaskInstanceState from airflow.utils.timezone import datetime from airflow.utils.types import DagRunType -from tests.test_utils.compat import AIRFLOW_V_3_0_PLUS -from tests.test_utils.config import conf_vars + +from dev.tests_common.test_utils.compat import AIRFLOW_V_3_0_PLUS +from dev.tests_common.test_utils.config import conf_vars if AIRFLOW_V_3_0_PLUS: from airflow.utils.types import DagRunTriggeredByType diff --git a/tests/utils/test_net.py b/tests/utils/test_net.py index 50d44cf125b3..60823911b8b0 100644 --- a/tests/utils/test_net.py +++ b/tests/utils/test_net.py @@ -24,7 +24,8 @@ from airflow.exceptions import AirflowConfigException from airflow.utils import net -from tests.test_utils.config import conf_vars + +from dev.tests_common.test_utils.config import conf_vars def get_hostname(): diff --git a/tests/utils/test_serve_logs.py b/tests/utils/test_serve_logs.py index b6756502c10c..8d41202be254 100644 --- a/tests/utils/test_serve_logs.py +++ b/tests/utils/test_serve_logs.py @@ -28,7 +28,8 @@ from airflow.utils import timezone from airflow.utils.jwt_signer import JWTSigner from airflow.utils.serve_logs import create_app -from tests.test_utils.config import conf_vars + +from dev.tests_common.test_utils.config import conf_vars if TYPE_CHECKING: from flask.testing import FlaskClient diff --git a/tests/utils/test_sqlalchemy.py b/tests/utils/test_sqlalchemy.py index 4352898090fe..42717a4fb045 100644 --- a/tests/utils/test_sqlalchemy.py +++ b/tests/utils/test_sqlalchemy.py @@ -42,7 +42,8 @@ ) from airflow.utils.state import State from airflow.utils.timezone import utcnow -from tests.test_utils.compat import AIRFLOW_V_3_0_PLUS + +from dev.tests_common.test_utils.compat import AIRFLOW_V_3_0_PLUS if AIRFLOW_V_3_0_PLUS: from airflow.utils.types import DagRunTriggeredByType diff --git a/tests/utils/test_state.py b/tests/utils/test_state.py index 9356a9bbc54b..6447477ef589 100644 --- a/tests/utils/test_state.py +++ b/tests/utils/test_state.py @@ -26,7 +26,8 @@ from airflow.utils.state import DagRunState from airflow.utils.types import DagRunType from tests.models import DEFAULT_DATE -from tests.test_utils.compat import 
AIRFLOW_V_3_0_PLUS + +from dev.tests_common.test_utils.compat import AIRFLOW_V_3_0_PLUS if AIRFLOW_V_3_0_PLUS: from airflow.utils.types import DagRunTriggeredByType diff --git a/tests/utils/test_task_group.py b/tests/utils/test_task_group.py index a6008dc58c03..6ea0fcdc36e1 100644 --- a/tests/utils/test_task_group.py +++ b/tests/utils/test_task_group.py @@ -39,7 +39,8 @@ from airflow.utils.dag_edges import dag_edges from airflow.utils.task_group import TASKGROUP_ARGS_EXPECTED_TYPES, TaskGroup, task_group_to_dict from tests.models import DEFAULT_DATE -from tests.test_utils.compat import BashOperator + +from dev.tests_common.test_utils.compat import BashOperator def make_task(name, type_="classic"): @@ -1414,7 +1415,7 @@ def test_task_group_edge_modifier_chain(): def test_mapped_task_group_id_prefix_task_id(): - from tests.test_utils.mock_operators import MockOperator + from dev.tests_common.test_utils.mock_operators import MockOperator with DAG(dag_id="d", schedule=None, start_date=DEFAULT_DATE) as dag: t1 = MockOperator.partial(task_id="t1").expand(arg1=[]) diff --git a/tests/utils/test_task_handler_with_custom_formatter.py b/tests/utils/test_task_handler_with_custom_formatter.py index a328253b7f83..aee646d858b1 100644 --- a/tests/utils/test_task_handler_with_custom_formatter.py +++ b/tests/utils/test_task_handler_with_custom_formatter.py @@ -28,9 +28,10 @@ from airflow.utils.state import DagRunState from airflow.utils.timezone import datetime from airflow.utils.types import DagRunType -from tests.test_utils.compat import AIRFLOW_V_3_0_PLUS -from tests.test_utils.config import conf_vars -from tests.test_utils.db import clear_db_runs + +from dev.tests_common.test_utils.compat import AIRFLOW_V_3_0_PLUS +from dev.tests_common.test_utils.config import conf_vars +from dev.tests_common.test_utils.db import clear_db_runs if AIRFLOW_V_3_0_PLUS: from airflow.utils.types import DagRunTriggeredByType diff --git a/tests/utils/test_types.py b/tests/utils/test_types.py index a338f6e65887..cb5849b20599 100644 --- a/tests/utils/test_types.py +++ b/tests/utils/test_types.py @@ -26,7 +26,8 @@ from airflow.utils.state import State from airflow.utils.types import DagRunType from tests.models import DEFAULT_DATE -from tests.test_utils.compat import AIRFLOW_V_3_0_PLUS + +from dev.tests_common.test_utils.compat import AIRFLOW_V_3_0_PLUS if AIRFLOW_V_3_0_PLUS: from airflow.utils.types import DagRunTriggeredByType diff --git a/tests/www/test_app.py b/tests/www/test_app.py index 053fd1174dca..673abd371dfb 100644 --- a/tests/www/test_app.py +++ b/tests/www/test_app.py @@ -29,8 +29,9 @@ from airflow.exceptions import AirflowConfigException from airflow.www import app as application -from tests.test_utils.config import conf_vars -from tests.test_utils.decorators import dont_initialize_flask_app_submodules + +from dev.tests_common.test_utils.config import conf_vars +from dev.tests_common.test_utils.decorators import dont_initialize_flask_app_submodules pytestmark = [pytest.mark.db_test, pytest.mark.skip_if_database_isolation_mode] diff --git a/tests/www/test_utils.py b/tests/www/test_utils.py index 2ba136432894..f8f0ae983382 100644 --- a/tests/www/test_utils.py +++ b/tests/www/test_utils.py @@ -18,7 +18,6 @@ from __future__ import annotations import itertools -import os import re import time from datetime import datetime @@ -45,7 +44,8 @@ wrapped_markdown, ) from airflow.www.widgets import AirflowDateTimePickerROWidget, BS3TextAreaROWidget, BS3TextFieldROWidget -from tests.test_utils.compat import AIRFLOW_V_3_0_PLUS + 
+from dev.tests_common.test_utils.compat import AIRFLOW_V_3_0_PLUS if AIRFLOW_V_3_0_PLUS: from airflow.utils.types import DagRunTriggeredByType @@ -712,7 +712,3 @@ class TestForm(FlaskForm): assert 'readonly="true"' in html_output assert "form-control" in html_output - - -def is_db_isolation_mode(): - return os.environ.get("RUN_TESTS_WITH_DATABASE_ISOLATION", "false").lower() == "true" diff --git a/tests/www/views/conftest.py b/tests/www/views/conftest.py index a4b9145797c9..faa1cc23a8d3 100644 --- a/tests/www/views/conftest.py +++ b/tests/www/views/conftest.py @@ -27,10 +27,15 @@ from airflow import settings from airflow.models import DagBag from airflow.www.app import create_app -from tests.test_utils.api_connexion_utils import delete_user -from tests.test_utils.config import conf_vars -from tests.test_utils.decorators import dont_initialize_flask_app_submodules -from tests.test_utils.www import client_with_login, client_without_login, client_without_login_as_admin + +from dev.tests_common.test_utils.api_connexion_utils import delete_user +from dev.tests_common.test_utils.config import conf_vars +from dev.tests_common.test_utils.decorators import dont_initialize_flask_app_submodules +from dev.tests_common.test_utils.www import ( + client_with_login, + client_without_login, + client_without_login_as_admin, +) @pytest.fixture(autouse=True, scope="module") diff --git a/tests/www/views/test_anonymous_as_admin_role.py b/tests/www/views/test_anonymous_as_admin_role.py index b7603d1eae5b..700f03f8e63c 100644 --- a/tests/www/views/test_anonymous_as_admin_role.py +++ b/tests/www/views/test_anonymous_as_admin_role.py @@ -23,7 +23,8 @@ from airflow.models import Pool from airflow.utils.session import create_session -from tests.test_utils.www import check_content_in_response + +from dev.tests_common.test_utils.www import check_content_in_response pytestmark = pytest.mark.db_test diff --git a/tests/www/views/test_session.py b/tests/www/views/test_session.py index 0ec219aaeb4b..009917606bda 100644 --- a/tests/www/views/test_session.py +++ b/tests/www/views/test_session.py @@ -22,8 +22,9 @@ from airflow.exceptions import AirflowConfigException from airflow.www import app -from tests.test_utils.config import conf_vars -from tests.test_utils.decorators import dont_initialize_flask_app_submodules + +from dev.tests_common.test_utils.config import conf_vars +from dev.tests_common.test_utils.decorators import dont_initialize_flask_app_submodules pytestmark = pytest.mark.db_test diff --git a/tests/www/views/test_views.py b/tests/www/views/test_views.py index b51fd3ab1c0a..bae022316c73 100644 --- a/tests/www/views/test_views.py +++ b/tests/www/views/test_views.py @@ -42,10 +42,11 @@ get_task_stats_from_query, get_value_from_path, ) -from tests.test_utils.compat import AIRFLOW_V_3_0_PLUS -from tests.test_utils.config import conf_vars -from tests.test_utils.mock_plugins import mock_plugin_manager -from tests.test_utils.www import check_content_in_response, check_content_not_in_response + +from dev.tests_common.test_utils.compat import AIRFLOW_V_3_0_PLUS +from dev.tests_common.test_utils.config import conf_vars +from dev.tests_common.test_utils.mock_plugins import mock_plugin_manager +from dev.tests_common.test_utils.www import check_content_in_response, check_content_not_in_response if AIRFLOW_V_3_0_PLUS: from airflow.utils.types import DagRunTriggeredByType @@ -329,7 +330,8 @@ def test_mark_task_instance_state(test_app): from airflow.utils.timezone import datetime from airflow.utils.types import DagRunType from 
airflow.www.views import Airflow
-    from tests.test_utils.db import clear_db_runs
+
+    from dev.tests_common.test_utils.db import clear_db_runs

     clear_db_runs()
     start_date = datetime(2020, 1, 1)
@@ -422,7 +424,8 @@ def test_mark_task_group_state(test_app):
     from airflow.utils.timezone import datetime
     from airflow.utils.types import DagRunType
     from airflow.www.views import Airflow
-    from tests.test_utils.db import clear_db_runs
+
+    from dev.tests_common.test_utils.db import clear_db_runs

     clear_db_runs()
     start_date = datetime(2020, 1, 1)
diff --git a/tests/www/views/test_views_acl.py b/tests/www/views/test_views_acl.py
index 139644f67a6d..0f950a5d235f 100644
--- a/tests/www/views/test_views_acl.py
+++ b/tests/www/views/test_views_acl.py
@@ -30,11 +30,16 @@
 from airflow.utils.state import State
 from airflow.utils.types import DagRunType
 from airflow.www.views import FILTER_STATUS_COOKIE
-from tests.test_utils.api_connexion_utils import create_user_scope
-from tests.test_utils.compat import AIRFLOW_V_3_0_PLUS
-from tests.test_utils.db import clear_db_runs
-from tests.test_utils.permissions import _resource_name
-from tests.test_utils.www import check_content_in_response, check_content_not_in_response, client_with_login
+
+from dev.tests_common.test_utils.compat import AIRFLOW_V_3_0_PLUS
+from dev.tests_common.test_utils.db import clear_db_runs
+from dev.tests_common.test_utils.permissions import _resource_name
+from dev.tests_common.test_utils.www import (
+    check_content_in_response,
+    check_content_not_in_response,
+    client_with_login,
+)
+from providers.tests.fab.auth_manager.api_endpoints.api_connexion_utils import create_user_scope

 if AIRFLOW_V_3_0_PLUS:
     from airflow.utils.types import DagRunTriggeredByType
diff --git a/tests/www/views/test_views_base.py b/tests/www/views/test_views_base.py
index a125ca2d7283..692f0301e227 100644
--- a/tests/www/views/test_views_base.py
+++ b/tests/www/views/test_views_base.py
@@ -27,9 +27,10 @@
 from airflow.utils import timezone
 from airflow.utils.session import create_session
 from airflow.www import app as application
-from tests.test_utils.asserts import assert_queries_count
-from tests.test_utils.config import conf_vars
-from tests.test_utils.www import check_content_in_response, check_content_not_in_response
+
+from dev.tests_common.test_utils.asserts import assert_queries_count
+from dev.tests_common.test_utils.config import conf_vars
+from dev.tests_common.test_utils.www import check_content_in_response, check_content_not_in_response

 pytestmark = pytest.mark.db_test
diff --git a/tests/www/views/test_views_cluster_activity.py b/tests/www/views/test_views_cluster_activity.py
index cdcebc1c8cf3..1d12fc883c42 100644
--- a/tests/www/views/test_views_cluster_activity.py
+++ b/tests/www/views/test_views_cluster_activity.py
@@ -26,7 +26,8 @@
 from airflow.operators.empty import EmptyOperator
 from airflow.utils.state import DagRunState, TaskInstanceState
 from airflow.utils.types import DagRunType
-from tests.test_utils.db import clear_db_runs
+
+from dev.tests_common.test_utils.db import clear_db_runs

 pytestmark = pytest.mark.db_test
diff --git a/tests/www/views/test_views_configuration.py b/tests/www/views/test_views_configuration.py
index e874cf326791..90ab9b7faa93 100644
--- a/tests/www/views/test_views_configuration.py
+++ b/tests/www/views/test_views_configuration.py
@@ -21,8 +21,9 @@
 import pytest

 from airflow.configuration import conf
-from tests.test_utils.config import conf_vars
-from tests.test_utils.www import check_content_in_response, check_content_not_in_response
+
+from dev.tests_common.test_utils.config import conf_vars
+from dev.tests_common.test_utils.www import check_content_in_response, check_content_not_in_response

 pytestmark = pytest.mark.db_test
diff --git a/tests/www/views/test_views_connection.py b/tests/www/views/test_views_connection.py
index a209cdfc2be8..2e4c2d6707a4 100644
--- a/tests/www/views/test_views_connection.py
+++ b/tests/www/views/test_views_connection.py
@@ -27,7 +27,12 @@
 from airflow.models import Connection
 from airflow.utils.session import create_session
 from airflow.www.views import ConnectionFormWidget, ConnectionModelView
-from tests.test_utils.www import _check_last_log, _check_last_log_masked_connection, check_content_in_response
+
+from dev.tests_common.test_utils.www import (
+    _check_last_log,
+    _check_last_log_masked_connection,
+    check_content_in_response,
+)

 pytestmark = pytest.mark.db_test
diff --git a/tests/www/views/test_views_custom_user_views.py b/tests/www/views/test_views_custom_user_views.py
index 84947a8e5f36..49ecc2c4f30b 100644
--- a/tests/www/views/test_views_custom_user_views.py
+++ b/tests/www/views/test_views_custom_user_views.py
@@ -27,11 +27,16 @@
 from airflow import settings
 from airflow.security import permissions
 from airflow.www import app as application
-from tests.providers.fab.auth_manager.api_endpoints.api_connexion_utils import (
-    create_user as create_user,
+
+from dev.tests_common.test_utils.www import (
+    check_content_in_response,
+    check_content_not_in_response,
+    client_with_login,
+)
+from providers.tests.fab.auth_manager.api_endpoints.api_connexion_utils import (
+    create_user,
     delete_role,
 )
-from tests.test_utils.www import check_content_in_response, check_content_not_in_response, client_with_login

 pytestmark = pytest.mark.db_test
diff --git a/tests/www/views/test_views_dagrun.py b/tests/www/views/test_views_dagrun.py
index d95955246ac7..1b7cfe171fee 100644
--- a/tests/www/views/test_views_dagrun.py
+++ b/tests/www/views/test_views_dagrun.py
@@ -24,14 +24,19 @@
 from airflow.utils import timezone
 from airflow.utils.session import create_session
 from airflow.www.views import DagRunModelView
-from tests.providers.fab.auth_manager.api_endpoints.api_connexion_utils import (
+from tests.www.views.test_views_tasks import _get_appbuilder_pk_string
+
+from dev.tests_common.test_utils.compat import AIRFLOW_V_3_0_PLUS
+from dev.tests_common.test_utils.www import (
+    check_content_in_response,
+    check_content_not_in_response,
+    client_with_login,
+)
+from providers.tests.fab.auth_manager.api_endpoints.api_connexion_utils import (
     create_user,
     delete_roles,
     delete_user,
 )
-from tests.test_utils.compat import AIRFLOW_V_3_0_PLUS
-from tests.test_utils.www import check_content_in_response, check_content_not_in_response, client_with_login
-from tests.www.views.test_views_tasks import _get_appbuilder_pk_string

 if AIRFLOW_V_3_0_PLUS:
     from airflow.utils.types import DagRunTriggeredByType
diff --git a/tests/www/views/test_views_dataset.py b/tests/www/views/test_views_dataset.py
index 3d3351bb6493..1e0640db0922 100644
--- a/tests/www/views/test_views_dataset.py
+++ b/tests/www/views/test_views_dataset.py
@@ -24,8 +24,9 @@
 from airflow.assets import Asset
 from airflow.models.asset import AssetEvent, AssetModel
 from airflow.operators.empty import EmptyOperator
-from tests.test_utils.asserts import assert_queries_count
-from tests.test_utils.db import clear_db_assets
+
+from dev.tests_common.test_utils.asserts import assert_queries_count
+from dev.tests_common.test_utils.db import clear_db_assets

 pytestmark = pytest.mark.db_test
diff --git a/tests/www/views/test_views_decorators.py b/tests/www/views/test_views_decorators.py
index e1bc1db0efc4..0bbaa6d9ea1c 100644
--- a/tests/www/views/test_views_decorators.py
+++ b/tests/www/views/test_views_decorators.py
@@ -23,9 +23,14 @@
 from airflow.utils import timezone
 from airflow.utils.state import State
 from airflow.utils.types import DagRunType
-from tests.test_utils.compat import AIRFLOW_V_3_0_PLUS
-from tests.test_utils.db import clear_db_runs, clear_db_variables
-from tests.test_utils.www import _check_last_log, _check_last_log_masked_variable, check_content_in_response
+
+from dev.tests_common.test_utils.compat import AIRFLOW_V_3_0_PLUS
+from dev.tests_common.test_utils.db import clear_db_runs, clear_db_variables
+from dev.tests_common.test_utils.www import (
+    _check_last_log,
+    _check_last_log_masked_variable,
+    check_content_in_response,
+)

 if AIRFLOW_V_3_0_PLUS:
     from airflow.utils.types import DagRunTriggeredByType
diff --git a/tests/www/views/test_views_extra_links.py b/tests/www/views/test_views_extra_links.py
index 55b2424f3a9f..ad8219daebb9 100644
--- a/tests/www/views/test_views_extra_links.py
+++ b/tests/www/views/test_views_extra_links.py
@@ -28,9 +28,10 @@
 from airflow.utils import timezone
 from airflow.utils.state import DagRunState
 from airflow.utils.types import DagRunType
-from tests.test_utils.compat import AIRFLOW_V_3_0_PLUS, BaseOperatorLink
-from tests.test_utils.db import clear_db_runs
-from tests.test_utils.mock_operators import AirflowLink, Dummy2TestOperator, Dummy3TestOperator
+
+from dev.tests_common.test_utils.compat import AIRFLOW_V_3_0_PLUS, BaseOperatorLink
+from dev.tests_common.test_utils.db import clear_db_runs
+from dev.tests_common.test_utils.mock_operators import AirflowLink, Dummy2TestOperator, Dummy3TestOperator

 if AIRFLOW_V_3_0_PLUS:
     from airflow.utils.types import DagRunTriggeredByType
diff --git a/tests/www/views/test_views_grid.py b/tests/www/views/test_views_grid.py
index b4dd6f6082e5..6fbeb832f199 100644
--- a/tests/www/views/test_views_grid.py
+++ b/tests/www/views/test_views_grid.py
@@ -35,9 +35,10 @@
 from airflow.utils.task_group import TaskGroup
 from airflow.utils.types import DagRunType
 from airflow.www.views import dag_to_grid
-from tests.test_utils.asserts import assert_queries_count
-from tests.test_utils.db import clear_db_assets, clear_db_runs
-from tests.test_utils.mock_operators import MockOperator
+
+from dev.tests_common.test_utils.asserts import assert_queries_count
+from dev.tests_common.test_utils.db import clear_db_assets, clear_db_runs
+from dev.tests_common.test_utils.mock_operators import MockOperator

 pytestmark = pytest.mark.db_test
diff --git a/tests/www/views/test_views_home.py b/tests/www/views/test_views_home.py
index 44dda24feecb..7969a5d05b0c 100644
--- a/tests/www/views/test_views_home.py
+++ b/tests/www/views/test_views_home.py
@@ -27,10 +27,15 @@
 from airflow.utils.state import State
 from airflow.www.utils import UIAlert
 from airflow.www.views import FILTER_LASTRUN_COOKIE, FILTER_STATUS_COOKIE, FILTER_TAGS_COOKIE
-from tests.providers.fab.auth_manager.api_endpoints.api_connexion_utils import create_user
-from tests.test_utils.db import clear_db_dags, clear_db_import_errors, clear_db_serialized_dags
-from tests.test_utils.permissions import _resource_name
-from tests.test_utils.www import check_content_in_response, check_content_not_in_response, client_with_login
+
+from dev.tests_common.test_utils.db import clear_db_dags, clear_db_import_errors, clear_db_serialized_dags
+from dev.tests_common.test_utils.permissions import _resource_name
+from dev.tests_common.test_utils.www import (
+    check_content_in_response,
+    check_content_not_in_response,
+    client_with_login,
+)
+from providers.tests.fab.auth_manager.api_endpoints.api_connexion_utils import create_user

 pytestmark = pytest.mark.db_test
diff --git a/tests/www/views/test_views_log.py b/tests/www/views/test_views_log.py
index f59fe7418f39..b562c5d3d9bf 100644
--- a/tests/www/views/test_views_log.py
+++ b/tests/www/views/test_views_log.py
@@ -40,11 +40,12 @@
 from airflow.utils.state import DagRunState, TaskInstanceState
 from airflow.utils.types import DagRunType
 from airflow.www.app import create_app
-from tests.test_utils.compat import AIRFLOW_V_3_0_PLUS
-from tests.test_utils.config import conf_vars
-from tests.test_utils.db import clear_db_dags, clear_db_runs
-from tests.test_utils.decorators import dont_initialize_flask_app_submodules
-from tests.test_utils.www import client_with_login
+
+from dev.tests_common.test_utils.compat import AIRFLOW_V_3_0_PLUS
+from dev.tests_common.test_utils.config import conf_vars
+from dev.tests_common.test_utils.db import clear_db_dags, clear_db_runs
+from dev.tests_common.test_utils.decorators import dont_initialize_flask_app_submodules
+from dev.tests_common.test_utils.www import client_with_login

 if AIRFLOW_V_3_0_PLUS:
     from airflow.utils.types import DagRunTriggeredByType
diff --git a/tests/www/views/test_views_mount.py b/tests/www/views/test_views_mount.py
index f0c052294b60..e58675439ec1 100644
--- a/tests/www/views/test_views_mount.py
+++ b/tests/www/views/test_views_mount.py
@@ -22,7 +22,8 @@
 import werkzeug.wrappers

 from airflow.www.app import create_app
-from tests.test_utils.config import conf_vars
+
+from dev.tests_common.test_utils.config import conf_vars

 pytestmark = pytest.mark.db_test
diff --git a/tests/www/views/test_views_paused.py b/tests/www/views/test_views_paused.py
index 46b0a3aa03f1..551cdd180e07 100644
--- a/tests/www/views/test_views_paused.py
+++ b/tests/www/views/test_views_paused.py
@@ -19,7 +19,8 @@
 import pytest

 from airflow.models.log import Log
-from tests.test_utils.db import clear_db_dags
+
+from dev.tests_common.test_utils.db import clear_db_dags

 pytestmark = pytest.mark.db_test
diff --git a/tests/www/views/test_views_pool.py b/tests/www/views/test_views_pool.py
index 3fcacbbbf8be..5a101af35d1f 100644
--- a/tests/www/views/test_views_pool.py
+++ b/tests/www/views/test_views_pool.py
@@ -23,7 +23,8 @@
 from airflow.models import Pool
 from airflow.utils.session import create_session
-from tests.test_utils.www import check_content_in_response, check_content_not_in_response
+
+from dev.tests_common.test_utils.www import check_content_in_response, check_content_not_in_response

 pytestmark = pytest.mark.db_test
diff --git a/tests/www/views/test_views_rate_limit.py b/tests/www/views/test_views_rate_limit.py
index 1f3eaf11d2ad..d843c8ed54b0 100644
--- a/tests/www/views/test_views_rate_limit.py
+++ b/tests/www/views/test_views_rate_limit.py
@@ -20,9 +20,10 @@
 import pytest

 from airflow.www.app import create_app
-from tests.test_utils.config import conf_vars
-from tests.test_utils.decorators import dont_initialize_flask_app_submodules
-from tests.test_utils.www import client_with_login
+
+from dev.tests_common.test_utils.config import conf_vars
+from dev.tests_common.test_utils.decorators import dont_initialize_flask_app_submodules
+from dev.tests_common.test_utils.www import client_with_login

 pytestmark = pytest.mark.db_test
diff --git a/tests/www/views/test_views_rendered.py b/tests/www/views/test_views_rendered.py
index 2d1754af29f6..9cc4a590515a 100644
--- a/tests/www/views/test_views_rendered.py
+++ b/tests/www/views/test_views_rendered.py
@@ -34,10 +34,15 @@
 from airflow.utils.session import create_session
 from airflow.utils.state import DagRunState, TaskInstanceState
 from airflow.utils.types import DagRunType
-from tests.conftest import initial_db_init
-from tests.test_utils.compat import AIRFLOW_V_3_0_PLUS, BashOperator
-from tests.test_utils.db import clear_db_dags, clear_db_runs, clear_rendered_ti_fields
-from tests.test_utils.www import check_content_in_response, check_content_not_in_response
+
+from dev.tests_common.test_utils.compat import AIRFLOW_V_3_0_PLUS, BashOperator
+from dev.tests_common.test_utils.db import (
+    clear_db_dags,
+    clear_db_runs,
+    clear_rendered_ti_fields,
+    initial_db_init,
+)
+from dev.tests_common.test_utils.www import check_content_in_response, check_content_not_in_response

 if AIRFLOW_V_3_0_PLUS:
     from airflow.utils.types import DagRunTriggeredByType
diff --git a/tests/www/views/test_views_robots.py b/tests/www/views/test_views_robots.py
index 03d8547c04d4..41f963b3bf21 100644
--- a/tests/www/views/test_views_robots.py
+++ b/tests/www/views/test_views_robots.py
@@ -18,7 +18,7 @@
 import pytest

-from tests.test_utils.config import conf_vars
+from dev.tests_common.test_utils.config import conf_vars

 pytestmark = pytest.mark.db_test
diff --git a/tests/www/views/test_views_task_norun.py b/tests/www/views/test_views_task_norun.py
index 2a39b2a60134..2ff6ff9d7cd6 100644
--- a/tests/www/views/test_views_task_norun.py
+++ b/tests/www/views/test_views_task_norun.py
@@ -22,7 +22,7 @@
 import pytest

-from tests.test_utils.db import clear_db_runs
+from dev.tests_common.test_utils.db import clear_db_runs

 pytestmark = pytest.mark.db_test
diff --git a/tests/www/views/test_views_tasks.py b/tests/www/views/test_views_tasks.py
index 4dcb7252a365..4e9de5c00fc9 100644
--- a/tests/www/views/test_views_tasks.py
+++ b/tests/www/views/test_views_tasks.py
@@ -43,15 +43,20 @@
 from airflow.utils.state import DagRunState, State
 from airflow.utils.types import DagRunType
 from airflow.www.views import TaskInstanceModelView, _safe_parse_datetime
-from tests.providers.fab.auth_manager.api_endpoints.api_connexion_utils import (
+
+from dev.tests_common.test_utils.compat import AIRFLOW_V_3_0_PLUS, BashOperator
+from dev.tests_common.test_utils.config import conf_vars
+from dev.tests_common.test_utils.db import clear_db_runs, clear_db_xcom
+from dev.tests_common.test_utils.www import (
+    check_content_in_response,
+    check_content_not_in_response,
+    client_with_login,
+)
+from providers.tests.fab.auth_manager.api_endpoints.api_connexion_utils import (
     create_user,
     delete_roles,
     delete_user,
 )
-from tests.test_utils.compat import AIRFLOW_V_3_0_PLUS, BashOperator
-from tests.test_utils.config import conf_vars
-from tests.test_utils.db import clear_db_runs, clear_db_xcom
-from tests.test_utils.www import check_content_in_response, check_content_not_in_response, client_with_login

 if AIRFLOW_V_3_0_PLUS:
     from airflow.utils.types import DagRunTriggeredByType
diff --git a/tests/www/views/test_views_trigger_dag.py b/tests/www/views/test_views_trigger_dag.py
index 0c9384a195f5..d2c3817ede6d 100644
--- a/tests/www/views/test_views_trigger_dag.py
+++ b/tests/www/views/test_views_trigger_dag.py
@@ -32,9 +32,10 @@
 from airflow.utils.json import WebEncoder
 from airflow.utils.session import create_session
 from airflow.utils.types import DagRunType
-from tests.test_utils.api_connexion_utils import create_test_client
-from tests.test_utils.config import conf_vars
-from tests.test_utils.www import check_content_in_response
+
+from dev.tests_common.test_utils.config import conf_vars
+from dev.tests_common.test_utils.www import check_content_in_response
+from providers.tests.fab.auth_manager.api_endpoints.api_connexion_utils import create_test_client

 pytestmark = pytest.mark.db_test
diff --git a/tests/www/views/test_views_variable.py b/tests/www/views/test_views_variable.py
index b7fa8b37c52c..dae1f2e9b731 100644
--- a/tests/www/views/test_views_variable.py
+++ b/tests/www/views/test_views_variable.py
@@ -25,13 +25,14 @@
 from airflow.models import Variable
 from airflow.security import permissions
 from airflow.utils.session import create_session
-from tests.providers.fab.auth_manager.api_endpoints.api_connexion_utils import create_user
-from tests.test_utils.www import (
+
+from dev.tests_common.test_utils.www import (
     _check_last_log,
     check_content_in_response,
     check_content_not_in_response,
     client_with_login,
 )
+from providers.tests.fab.auth_manager.api_endpoints.api_connexion_utils import create_user

 pytestmark = pytest.mark.db_test

 VARIABLE = {