diff --git a/airflow/jobs/scheduler_job_runner.py b/airflow/jobs/scheduler_job_runner.py
index e05ad770a470..4eebf9967cd2 100644
--- a/airflow/jobs/scheduler_job_runner.py
+++ b/airflow/jobs/scheduler_job_runner.py
@@ -815,7 +815,7 @@ def _execute(self) -> int | None:
         processor_timeout_seconds: int = conf.getint("core", "dag_file_processor_timeout")
         processor_timeout = timedelta(seconds=processor_timeout_seconds)
-        if not self._standalone_dag_processor:
+        if not self._standalone_dag_processor and not self.processor_agent:
             self.processor_agent = DagFileProcessorAgent(
                 dag_directory=Path(self.subdir),
                 max_runs=self.num_times_parse_dags,
diff --git a/tests/jobs/test_scheduler_job.py b/tests/jobs/test_scheduler_job.py
index 195b9fce5e52..5e83c77a6463 100644
--- a/tests/jobs/test_scheduler_job.py
+++ b/tests/jobs/test_scheduler_job.py
@@ -3034,11 +3034,6 @@ def run_with_error(ti, ignore_ti_state=False):
         ti.refresh_from_db()
         assert ti.state == State.SUCCESS

-    # TODO: Investigate super-mysterious behaviour of this test hanging for sqlite. This test started
-    # To fail ONLY on sqlite but only on self-hosted runners and locally (not on public runners)
-    # We should uncomment it when we figure out what's going on
-    # Issue: https://github.com/apache/airflow/issues/35204
-    @pytest.mark.backend("mssql", "mysql", "postgres")
     def test_retry_handling_job(self):
         """
         Integration test of the scheduler not accidentally resetting