diff --git a/bigquery_etl/query_scheduling/generate_airflow_dags.py b/bigquery_etl/query_scheduling/generate_airflow_dags.py
index e0757fb7ffd..e126f9ab8eb 100644
--- a/bigquery_etl/query_scheduling/generate_airflow_dags.py
+++ b/bigquery_etl/query_scheduling/generate_airflow_dags.py
@@ -102,7 +102,9 @@ def get_dags(project_id, dags_config, sql_dir=None):
                         dag_collection=dag_collection,
                     )
                 )
-                tasks.append(bigeye_task)
+
+                if bigeye_task.monitoring_enabled:
+                    tasks.append(bigeye_task)
 
             if CHECKS_FILE in files:
                 checks_file = os.path.join(root, CHECKS_FILE)
diff --git a/bigquery_etl/query_scheduling/task.py b/bigquery_etl/query_scheduling/task.py
index 167cfb0b2f0..70002c63b1a 100644
--- a/bigquery_etl/query_scheduling/task.py
+++ b/bigquery_etl/query_scheduling/task.py
@@ -337,6 +337,7 @@ class Task:
     node_selector: Optional[Dict[str, str]] = attr.ib(None)
     startup_timeout_seconds: Optional[int] = attr.ib(None)
     secrets: Optional[List[Secret]] = attr.ib(None)
+    monitoring_enabled: Optional[bool] = attr.ib(False)
 
     @property
     def task_key(self):
@@ -489,6 +490,10 @@ def of_query(cls, query_file, metadata=None, dag_collection=None):
         # expose secret config
         task_config["secrets"] = metadata.scheduling.get("secrets", [])
 
+        # determine whether a Bigeye monitoring task should be generated
+        if metadata.monitoring:
+            task_config["monitoring_enabled"] = metadata.monitoring.enabled
+
         # data processed in task should be published
         if metadata.is_public_json():
             task_config["public_json"] = True
diff --git a/tests/data/test_sql/moz-fx-data-test-project/test/python_script_query_v1/metadata.yaml b/tests/data/test_sql/moz-fx-data-test-project/test/python_script_query_v1/metadata.yaml
index 44e50355c48..5500f8bedb7 100644
--- a/tests/data/test_sql/moz-fx-data-test-project/test/python_script_query_v1/metadata.yaml
+++ b/tests/data/test_sql/moz-fx-data-test-project/test/python_script_query_v1/metadata.yaml
@@ -10,3 +10,5 @@ scheduling:
   dag_name: "bqetl_core"
   depends_on_past: true
   arguments: ["--date", "{{ds}}"]
+monitoring:
+  enabled: true
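
Editorial note, not part of the patch: because the new attribute defaults to False via attr.ib(False), a table whose metadata.yaml has no monitoring block continues to produce no Bigeye task; only tables that opt in with monitoring.enabled: true get one scheduled. A minimal self-contained sketch of that gating pattern, using a stand-in class rather than the real Task:

from typing import Optional

import attr


@attr.s(auto_attribs=True)
class TaskSketch:
    """Stand-in for Task, modeling only the new flag."""

    name: str
    # mirrors the patch: defaults to False, so tables without a
    # monitoring block in their metadata.yaml are skipped
    monitoring_enabled: Optional[bool] = attr.ib(False)


tasks = []
for task in [
    TaskSketch("no_monitoring_block__v1"),
    TaskSketch("python_script_query__v1", monitoring_enabled=True),
]:
    # same gate as the change in get_dags()
    if task.monitoring_enabled:
        tasks.append(task)

assert [t.name for t in tasks] == ["python_script_query__v1"]

Defaulting the flag to False makes the rollout opt-in: existing tables keep their current behavior until their metadata explicitly enables monitoring.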
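
For completeness, a sketch of the end-to-end propagation the updated test fixture exercises. This assumes Metadata.of_query_file is the repo's metadata parser and that the fixture directory contains a query.py; neither is confirmed by the diff itself:

from bigquery_etl.metadata.parse_metadata import Metadata
from bigquery_etl.query_scheduling.task import Task

query_file = (
    "tests/data/test_sql/moz-fx-data-test-project/test/"
    "python_script_query_v1/query.py"
)

# metadata.yaml for this fixture now carries monitoring.enabled: true
metadata = Metadata.of_query_file(query_file)
task = Task.of_query(query_file, metadata=metadata)

# of_query copies metadata.monitoring.enabled into the task config,
# so generate_airflow_dags keeps the Bigeye task for this table
assert task.monitoring_enabled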