From 978bf0e4655a57bcd62d68af0a3ca2490d4dac9c Mon Sep 17 00:00:00 2001
From: Michal Charemza
Date: Sat, 22 Jul 2023 16:53:20 +0100
Subject: [PATCH 1/2] Allow FileTaskHandler to delegate to instances of logging.Handler

FileTaskHandler is the base class for task logging handlers, including
handlers that don't log to files by delegating to logging.FileHandler,
e.g. the CloudwatchTaskHandler at
https://github.com/apache/airflow/blob/2940b9fa55a6a72c60c2162e541631addec3d6b8/airflow/providers/amazon/aws/log/cloudwatch_task_handler.py#L67

It is suspected that type checking is not enabled in this part of the
CloudwatchTaskHandler, otherwise it would already have been failing.

This change adjusts the base class so that a task handler with type checking
enabled can delegate to a logging.Handler that is not a logging.FileHandler,
as the CloudwatchTaskHandler does, and still pass type checking.

This was originally part of https://github.com/apache/airflow/pull/31855 and
was split out.

related: https://github.com/apache/airflow/issues/31834
---
 airflow/providers/microsoft/azure/log/wasb_task_handler.py | 2 ++
 airflow/utils/log/file_task_handler.py                     | 2 +-
 2 files changed, 3 insertions(+), 1 deletion(-)

diff --git a/airflow/providers/microsoft/azure/log/wasb_task_handler.py b/airflow/providers/microsoft/azure/log/wasb_task_handler.py
index 96c87219ca13..97a8af5ae1d6 100644
--- a/airflow/providers/microsoft/azure/log/wasb_task_handler.py
+++ b/airflow/providers/microsoft/azure/log/wasb_task_handler.py
@@ -17,6 +17,7 @@
 # under the License.
 from __future__ import annotations
 
+import logging
 import os
 import shutil
 from functools import cached_property
@@ -62,6 +63,7 @@ def __init__(
         **kwargs,
     ) -> None:
         super().__init__(base_log_folder, filename_template)
+        self.handler: logging.FileHandler | None = None
         self.wasb_container = wasb_container
         self.remote_base = wasb_log_folder
         self.log_relative_path = ""
diff --git a/airflow/utils/log/file_task_handler.py b/airflow/utils/log/file_task_handler.py
index 5d791aaa0cca..fb540ed9ecb0 100644
--- a/airflow/utils/log/file_task_handler.py
+++ b/airflow/utils/log/file_task_handler.py
@@ -147,7 +147,7 @@ class FileTaskHandler(logging.Handler):
 
     def __init__(self, base_log_folder: str, filename_template: str | None = None):
         super().__init__()
-        self.handler: logging.FileHandler | None = None
+        self.handler: logging.Handler | None = None
         self.local_base = base_log_folder
         if filename_template is not None:
             warnings.warn(

From 9ca7921248a0cdd7791e717d34bf8af943eb23d9 Mon Sep 17 00:00:00 2001
From: Michal Charemza
Date: Mon, 12 Jun 2023 15:12:11 +0100
Subject: [PATCH 2/2] Add Redis task handler

This stores log lines in Redis up to a configured maximum number of log
lines, always keeping the most recent, up to a configured TTL. It deviates
from other existing task handlers in that it accepts a connection ID, which
allows it to be used in addition to other handlers, and so allows a
graceful/reversible transition from one logging system to another. This is
particularly useful in situations that use Redis as a message broker, where
additional infrastructure isn't desired.
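
A minimal usage sketch of constructing the handler directly (the
base_log_folder path and the connection ID below are illustrative, not values
taken from any existing deployment; max_lines and ttl_seconds shown are the
documented defaults):

    from airflow.providers.redis.log.redis_task_handler import RedisTaskHandler

    # Keep at most the 10000 most recent log lines per task try, expiring
    # each Redis key 28 days after the last write.
    handler = RedisTaskHandler(
        base_log_folder="/tmp/airflow/logs",
        max_lines=10000,
        ttl_seconds=60 * 60 * 24 * 28,
        conn_id="redis_default",
    )
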
closes: https://github.com/apache/airflow/issues/31834 --- airflow/providers/redis/log/__init__.py | 17 +++ .../providers/redis/log/redis_task_handler.py | 105 ++++++++++++++++++ airflow/providers/redis/provider.yaml | 3 + docs/apache-airflow-providers-redis/index.rst | 7 ++ .../logging/index.rst | 24 ++++ tests/providers/redis/log/__init__.py | 17 +++ .../redis/log/test_redis_task_handler.py | 94 ++++++++++++++++ 7 files changed, 267 insertions(+) create mode 100644 airflow/providers/redis/log/__init__.py create mode 100644 airflow/providers/redis/log/redis_task_handler.py create mode 100644 docs/apache-airflow-providers-redis/logging/index.rst create mode 100644 tests/providers/redis/log/__init__.py create mode 100644 tests/providers/redis/log/test_redis_task_handler.py diff --git a/airflow/providers/redis/log/__init__.py b/airflow/providers/redis/log/__init__.py new file mode 100644 index 000000000000..217e5db96078 --- /dev/null +++ b/airflow/providers/redis/log/__init__.py @@ -0,0 +1,17 @@ +# +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. diff --git a/airflow/providers/redis/log/redis_task_handler.py b/airflow/providers/redis/log/redis_task_handler.py new file mode 100644 index 000000000000..b2e4a8fc16de --- /dev/null +++ b/airflow/providers/redis/log/redis_task_handler.py @@ -0,0 +1,105 @@ +# +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. +from __future__ import annotations + +import logging +from functools import cached_property +from typing import Any + +from redis import Redis + +from airflow.configuration import conf +from airflow.models import TaskInstance +from airflow.providers.redis.hooks.redis import RedisHook +from airflow.utils.log.file_task_handler import FileTaskHandler +from airflow.utils.log.logging_mixin import LoggingMixin + + +class RedisTaskHandler(FileTaskHandler, LoggingMixin): + """ + RedisTaskHandler is a Python log handler that handles and reads task instance logs. + It extends airflow FileTaskHandler and uploads to and reads from Redis. 
+
+    :param base_log_folder:
+        Base folder to store logs locally.
+    :param max_lines:
+        Maximum number of lines of log to store.
+        If omitted, this is 10000.
+    :param ttl_seconds:
+        Maximum number of seconds to store logs.
+        If omitted, this is the equivalent of 28 days.
+    :param conn_id:
+        Airflow connection ID for the Redis hook to use.
+        If omitted or None, the ID specified in the option logging.remote_log_conn_id is used.
+    """
+
+    trigger_should_wrap = True
+
+    def __init__(
+        self,
+        base_log_folder: str,
+        max_lines: int = 10000,
+        ttl_seconds: int = 60 * 60 * 24 * 28,
+        conn_id: str | None = None,
+    ):
+        super().__init__(base_log_folder)
+        self.handler: _RedisHandler | None = None
+        self.max_lines = max_lines
+        self.ttl_seconds = ttl_seconds
+        self.conn_id = conn_id if conn_id is not None else conf.get("logging", "REMOTE_LOG_CONN_ID")
+
+    @cached_property
+    def conn(self):
+        return RedisHook(redis_conn_id=self.conn_id).get_conn()
+
+    def _read(
+        self,
+        ti: TaskInstance,
+        try_number: int,
+        metadata: dict[str, Any] | None = None,
+    ):
+        log_str = b"\n".join(
+            self.conn.lrange(self._render_filename(ti, try_number), start=0, end=-1)
+        ).decode()
+        return log_str, {"end_of_log": True}
+
+    def set_context(self, ti: TaskInstance):
+        super().set_context(ti)
+        self.handler = _RedisHandler(
+            self.conn,
+            key=self._render_filename(ti, ti.try_number),
+            max_lines=self.max_lines,
+            ttl_seconds=self.ttl_seconds,
+        )
+        self.handler.setFormatter(self.formatter)
+
+
+class _RedisHandler(logging.Handler):
+    def __init__(self, conn: Redis, key: str, max_lines: int, ttl_seconds: int):
+        super().__init__()
+        self.conn = conn
+        self.key = key
+        self.max_lines = max_lines
+        self.ttl_seconds = ttl_seconds
+
+    def emit(self, record):
+        p = self.conn.pipeline()
+        p.rpush(self.key, self.format(record))
+        p.ltrim(self.key, start=-self.max_lines, end=-1)
+        p.expire(self.key, time=self.ttl_seconds)
+        p.execute()
diff --git a/airflow/providers/redis/provider.yaml b/airflow/providers/redis/provider.yaml
index c679abe00e01..59becb7a6936 100644
--- a/airflow/providers/redis/provider.yaml
+++ b/airflow/providers/redis/provider.yaml
@@ -64,3 +64,6 @@ hooks:
 connection-types:
   - hook-class-name: airflow.providers.redis.hooks.redis.RedisHook
     connection-type: redis
+
+logging:
+  - airflow.providers.redis.log.redis_task_handler.RedisTaskHandler
diff --git a/docs/apache-airflow-providers-redis/index.rst b/docs/apache-airflow-providers-redis/index.rst
index 02bc13257f5d..d33b628b1a3e 100644
--- a/docs/apache-airflow-providers-redis/index.rst
+++ b/docs/apache-airflow-providers-redis/index.rst
@@ -29,6 +29,13 @@
     Changelog <changelog>
     Security <security>
 
+.. toctree::
+    :hidden:
+    :maxdepth: 1
+    :caption: Guides
+
+    Logging <logging/index>
+
 .. toctree::
     :hidden:
     :maxdepth: 1
diff --git a/docs/apache-airflow-providers-redis/logging/index.rst b/docs/apache-airflow-providers-redis/logging/index.rst
new file mode 100644
index 000000000000..6dea5fff2b99
--- /dev/null
+++ b/docs/apache-airflow-providers-redis/logging/index.rst
@@ -0,0 +1,24 @@
+ .. Licensed to the Apache Software Foundation (ASF) under one
+    or more contributor license agreements.  See the NOTICE file
+    distributed with this work for additional information
+    regarding copyright ownership.  The ASF licenses this file
+    to you under the Apache License, Version 2.0 (the
+    "License"); you may not use this file except in compliance
+    with the License.  You may obtain a copy of the License at
+
+ ..   http://www.apache.org/licenses/LICENSE-2.0
+
+ .. Unless required by applicable law or agreed to in writing,
+    software distributed under the License is distributed on an
+    "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+    KIND, either express or implied.  See the License for the
+    specific language governing permissions and limitations
+    under the License.
+
+.. _write-logs-redis:
+
+Writing logs to Redis
+---------------------
+
+Airflow can be configured to store log lines in Redis, keeping at most a configured maximum number of the most recent lines and expiring them after a configured TTL. This handler deviates from other existing task handlers in that it accepts a connection ID.
+This allows it to be used in addition to other handlers, and so allows a graceful/reversible transition from one logging system to another. It is particularly useful in situations that already use Redis as a message broker, where additional infrastructure isn't desired.
diff --git a/tests/providers/redis/log/__init__.py b/tests/providers/redis/log/__init__.py
new file mode 100644
index 000000000000..217e5db96078
--- /dev/null
+++ b/tests/providers/redis/log/__init__.py
@@ -0,0 +1,17 @@
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
diff --git a/tests/providers/redis/log/test_redis_task_handler.py b/tests/providers/redis/log/test_redis_task_handler.py
new file mode 100644
index 000000000000..3dc481074dd3
--- /dev/null
+++ b/tests/providers/redis/log/test_redis_task_handler.py
@@ -0,0 +1,94 @@
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+from __future__ import annotations + +import logging +from unittest.mock import patch + +import pytest + +from airflow.models import DAG, DagRun, TaskInstance +from airflow.operators.empty import EmptyOperator +from airflow.providers.redis.log.redis_task_handler import RedisTaskHandler +from airflow.utils.session import create_session +from airflow.utils.state import State +from airflow.utils.timezone import datetime +from tests.test_utils.config import conf_vars + + +class TestRedisTaskHandler: + @pytest.fixture + def ti(self): + date = datetime(2020, 1, 1) + dag = DAG(dag_id="dag_for_testing_redis_task_handler", start_date=date) + task = EmptyOperator(task_id="task_for_testing_redis_log_handler", dag=dag) + dag_run = DagRun(dag_id=dag.dag_id, execution_date=date, run_id="test", run_type="scheduled") + + with create_session() as session: + session.add(dag_run) + session.commit() + session.refresh(dag_run) + + ti = TaskInstance(task=task, run_id=dag_run.run_id) + ti.dag_run = dag_run + ti.try_number = 1 + ti.state = State.RUNNING + + yield ti + + with create_session() as session: + session.query(DagRun).delete() + + @conf_vars({("logging", "remote_log_conn_id"): "redis_default"}) + def test_write(self, ti): + handler = RedisTaskHandler("any", max_lines=5, ttl_seconds=2) + handler.set_context(ti) + logger = logging.getLogger(__name__) + logger.addHandler(handler) + + key = ( + "dag_id=dag_for_testing_redis_task_handler/run_id=test" + + "/task_id=task_for_testing_redis_log_handler/attempt=1.log" + ) + + with patch("redis.Redis.pipeline") as pipeline: + logger.info("Test log event") + + pipeline.return_value.rpush.assert_called_once_with(key, "Test log event") + pipeline.return_value.ltrim.assert_called_once_with(key, start=-5, end=-1) + pipeline.return_value.expire.assert_called_once_with(key, time=2) + pipeline.return_value.execute.assert_called_once_with() + + @conf_vars({("logging", "remote_log_conn_id"): "redis_default"}) + def test_read(self, ti): + handler = RedisTaskHandler("any") + handler.set_context(ti) + logger = logging.getLogger(__name__) + logger.addHandler(handler) + + key = ( + "dag_id=dag_for_testing_redis_task_handler/run_id=test" + + "/task_id=task_for_testing_redis_log_handler/attempt=1.log" + ) + + with patch("redis.Redis.lrange") as lrange: + lrange.return_value = [b"Line 1", b"Line 2"] + logs = handler.read(ti) + + assert logs == ([[("", "Line 1\nLine 2")]], [{"end_of_log": True}]) + lrange.assert_called_once_with(key, start=0, end=-1)
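
The logging guide added above describes what the handler stores but not how it
might be enabled. A minimal sketch of one way to wire it in, assuming the
standard Airflow custom logging config mechanism ([logging]
logging_config_class in airflow.cfg pointing at a module on PYTHONPATH); the
module name log_config.py, the local path, and the connection ID below are
illustrative:

    # log_config.py -- referenced from airflow.cfg via
    #   [logging]
    #   logging_config_class = log_config.LOGGING_CONFIG
    from copy import deepcopy

    from airflow.config_templates.airflow_local_settings import DEFAULT_LOGGING_CONFIG

    LOGGING_CONFIG = deepcopy(DEFAULT_LOGGING_CONFIG)

    # Replace the default file-based task handler with the Redis-backed one.
    # Keys other than "class", "level", "formatter" and "filters" are passed
    # to RedisTaskHandler.__init__ by logging.config.dictConfig.
    LOGGING_CONFIG["handlers"]["task"] = {
        "class": "airflow.providers.redis.log.redis_task_handler.RedisTaskHandler",
        "formatter": "airflow",
        "base_log_folder": "/tmp/airflow/logs",
        "conn_id": "redis_default",
        "max_lines": 10000,
        "ttl_seconds": 60 * 60 * 24 * 28,
    }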