Skip to content

Commit

Permalink
Add Redis task handler (#31855)
Browse files Browse the repository at this point in the history
* Allow FileTaskHandler to delegate to instances of logging.Handler

FileTaskHandler is the base class for logging handlers, including those that
don't log to files via delegating to logging.FileHandler, e.g. in the
CloudwatchTaskHandler at
https://github.com/apache/airflow/blob/2940b9fa55a6a72c60c2162e541631addec3d6b8/airflow/providers/amazon/aws/log/cloudwatch_task_handler.py#L67

It is suspected that type checking is not enabled in this part of the
CloudwatchTaskHandler otherwise it would have already been failing.

This change adjusts the base class so that if type checking is enabled in a task
handler that delegates to a logging.Handler that is not a logging.FileHandler,
such as the CloudwatchTaskHandler, the type checking passes.

This was originally part of apache/airflow#31855 and
split out.

related: apache/airflow#31834

* Add Redis task handler

This stores log lines in Redis up to a configured maximum log lines, always
keeping the most recent, up to a configured TTL.

This deviates from other existing task handlers in that it accepts a connection
ID. This allows it to be used in addition to other handlers, and so allows a
graceful/reversible transition from one logging system to another.

This is particularly useful in situations that use Redis as a message broker,
where additional infrastructure isn't desired.

closes: apache/airflow#31834
GitOrigin-RevId: 42b4b43c4c2ccf0b6e7eaa105c982df495768d01
  • Loading branch information
michalc authored and Cloud Composer Team committed May 15, 2024
1 parent 5c1c539 commit 0d4783b
Show file tree
Hide file tree
Showing 9 changed files with 270 additions and 1 deletion.
2 changes: 2 additions & 0 deletions airflow/providers/microsoft/azure/log/wasb_task_handler.py
Original file line number Diff line number Diff line change
Expand Up @@ -17,6 +17,7 @@
# under the License.
from __future__ import annotations

import logging
import os
import shutil
from functools import cached_property
Expand Down Expand Up @@ -62,6 +63,7 @@ def __init__(
**kwargs,
) -> None:
super().__init__(base_log_folder, filename_template)
self.handler: logging.FileHandler | None = None
self.wasb_container = wasb_container
self.remote_base = wasb_log_folder
self.log_relative_path = ""
Expand Down
17 changes: 17 additions & 0 deletions airflow/providers/redis/log/__init__.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,17 @@
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
105 changes: 105 additions & 0 deletions airflow/providers/redis/log/redis_task_handler.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,105 @@
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations

import logging
from functools import cached_property
from typing import Any

from redis import Redis

from airflow.configuration import conf
from airflow.models import TaskInstance
from airflow.providers.redis.hooks.redis import RedisHook
from airflow.utils.log.file_task_handler import FileTaskHandler
from airflow.utils.log.logging_mixin import LoggingMixin


class RedisTaskHandler(FileTaskHandler, LoggingMixin):
    """
    RedisTaskHandler is a Python log handler that handles and reads task instance logs.

    It extends airflow FileTaskHandler and uploads to and reads from Redis. Log
    lines are kept in a Redis list, capped at ``max_lines`` entries and expiring
    after ``ttl_seconds``.

    :param base_log_folder:
        base folder to store logs locally
    :param max_lines:
        Maximum number of lines of log to store
        If omitted, this is 10000.
    :param ttl_seconds:
        Maximum number of seconds to store logs
        If omitted, this is the equivalent of 28 days.
    :param conn_id:
        Airflow connection ID for the Redis hook to use
        If omitted or None, the ID specified in the option logging.remote_log_conn_id is used.
    """

    trigger_should_wrap = True

    def __init__(
        self,
        base_log_folder: str,
        max_lines: int = 10000,
        ttl_seconds: int = 60 * 60 * 24 * 28,
        conn_id: str | None = None,
    ):
        super().__init__(base_log_folder)
        # Populated by set_context; the actual record-writing handler.
        self.handler: _RedisHandler | None = None
        self.max_lines = max_lines
        self.ttl_seconds = ttl_seconds
        # Fall back to the globally-configured remote log connection.
        if conn_id is None:
            conn_id = conf.get("logging", "REMOTE_LOG_CONN_ID")
        self.conn_id = conn_id

    @cached_property
    def conn(self):
        """Redis client for the configured connection, created on first use."""
        return RedisHook(redis_conn_id=self.conn_id).get_conn()

    def _read(
        self,
        ti: TaskInstance,
        try_number: int,
        metadata: dict[str, Any] | None = None,
    ):
        # Fetch every stored line for this attempt; entries come back as bytes.
        key = self._render_filename(ti, try_number)
        lines = self.conn.lrange(key, start=0, end=-1)
        return b"\n".join(lines).decode(), {"end_of_log": True}

    def set_context(self, ti: TaskInstance):
        """Install a per-attempt Redis handler keyed on the rendered log filename."""
        super().set_context(ti)
        handler = _RedisHandler(
            self.conn,
            key=self._render_filename(ti, ti.try_number),
            max_lines=self.max_lines,
            ttl_seconds=self.ttl_seconds,
        )
        handler.setFormatter(self.formatter)
        self.handler = handler


class _RedisHandler(logging.Handler):
    """logging.Handler that appends formatted records to a capped Redis list.

    Each emitted record is RPUSHed onto ``key``; the list is then trimmed to
    the most recent ``max_lines`` entries and its TTL is refreshed to
    ``ttl_seconds``, so logs expire once no longer written to.
    """

    def __init__(self, conn: Redis, key: str, max_lines: int, ttl_seconds: int):
        super().__init__()
        self.conn = conn  # Redis client shared with the task handler
        self.key = key  # Redis list key, one per task attempt
        self.max_lines = max_lines  # keep only the newest N lines
        self.ttl_seconds = ttl_seconds  # expiry refreshed on every emit

    def emit(self, record):
        """Append one formatted record; trim and refresh TTL in one round trip."""
        try:
            # A pipeline sends append + trim + expire as a single round trip.
            p = self.conn.pipeline()
            p.rpush(self.key, self.format(record))
            p.ltrim(self.key, start=-self.max_lines, end=-1)
            p.expire(self.key, time=self.ttl_seconds)
            p.execute()
        except Exception:
            # logging.Handler contract: emit must never let errors propagate
            # into application code — report them via handleError instead.
            self.handleError(record)
3 changes: 3 additions & 0 deletions airflow/providers/redis/provider.yaml
Original file line number Diff line number Diff line change
Expand Up @@ -64,3 +64,6 @@ hooks:
connection-types:
- hook-class-name: airflow.providers.redis.hooks.redis.RedisHook
connection-type: redis

logging:
  - airflow.providers.redis.log.redis_task_handler.RedisTaskHandler
2 changes: 1 addition & 1 deletion airflow/utils/log/file_task_handler.py
Original file line number Diff line number Diff line change
Expand Up @@ -147,7 +147,7 @@ class FileTaskHandler(logging.Handler):

def __init__(self, base_log_folder: str, filename_template: str | None = None):
super().__init__()
self.handler: logging.FileHandler | None = None
self.handler: logging.Handler | None = None
self.local_base = base_log_folder
if filename_template is not None:
warnings.warn(
Expand Down
7 changes: 7 additions & 0 deletions docs/apache-airflow-providers-redis/index.rst
Original file line number Diff line number Diff line change
Expand Up @@ -29,6 +29,13 @@
Changelog <changelog>
Security <security>

.. toctree::
:hidden:
:maxdepth: 1
:caption: Guides

Logging <logging/index>

.. toctree::
:hidden:
:maxdepth: 1
Expand Down
24 changes: 24 additions & 0 deletions docs/apache-airflow-providers-redis/logging/index.rst
Original file line number Diff line number Diff line change
@@ -0,0 +1,24 @@
.. Licensed to the Apache Software Foundation (ASF) under one
or more contributor license agreements. See the NOTICE file
distributed with this work for additional information
regarding copyright ownership. The ASF licenses this file
to you under the Apache License, Version 2.0 (the
"License"); you may not use this file except in compliance
with the License. You may obtain a copy of the License at
.. http://www.apache.org/licenses/LICENSE-2.0
.. Unless required by applicable law or agreed to in writing,
software distributed under the License is distributed on an
"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
KIND, either express or implied. See the License for the
specific language governing permissions and limitations
under the License.
.. _write-logs-redis:

Writing logs to Redis
---------------------

Airflow can be configured to store log lines in Redis up to a configured maximum log lines, always keeping the most recent, up to a configured TTL. This deviates from other existing task handlers in that it accepts a connection ID.
This allows it to be used in addition to other handlers, and so allows a graceful/reversible transition from one logging system to another. This is particularly useful in situations that use Redis as a message broker, where additional infrastructure isn't desired.
17 changes: 17 additions & 0 deletions tests/providers/redis/log/__init__.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,17 @@
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
94 changes: 94 additions & 0 deletions tests/providers/redis/log/test_redis_task_handler.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,94 @@
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations

import logging
from unittest.mock import patch

import pytest

from airflow.models import DAG, DagRun, TaskInstance
from airflow.operators.empty import EmptyOperator
from airflow.providers.redis.log.redis_task_handler import RedisTaskHandler
from airflow.utils.session import create_session
from airflow.utils.state import State
from airflow.utils.timezone import datetime
from tests.test_utils.config import conf_vars


class TestRedisTaskHandler:
    """Tests for RedisTaskHandler: writes via a Redis pipeline, reads via LRANGE."""

    @pytest.fixture
    def ti(self):
        """Yield a running TaskInstance backed by a real DagRun row; delete it after."""
        execution_date = datetime(2020, 1, 1)
        dag = DAG(dag_id="dag_for_testing_redis_task_handler", start_date=execution_date)
        task = EmptyOperator(task_id="task_for_testing_redis_log_handler", dag=dag)
        dag_run = DagRun(
            dag_id=dag.dag_id,
            execution_date=execution_date,
            run_id="test",
            run_type="scheduled",
        )

        with create_session() as session:
            session.add(dag_run)
            session.commit()
            session.refresh(dag_run)

        task_instance = TaskInstance(task=task, run_id=dag_run.run_id)
        task_instance.dag_run = dag_run
        task_instance.try_number = 1
        task_instance.state = State.RUNNING

        yield task_instance

        with create_session() as session:
            session.query(DagRun).delete()

    @conf_vars({("logging", "remote_log_conn_id"): "redis_default"})
    def test_write(self, ti):
        handler = RedisTaskHandler("any", max_lines=5, ttl_seconds=2)
        handler.set_context(ti)
        log = logging.getLogger(__name__)
        log.addHandler(handler)

        expected_key = (
            "dag_id=dag_for_testing_redis_task_handler/run_id=test"
            "/task_id=task_for_testing_redis_log_handler/attempt=1.log"
        )

        with patch("redis.Redis.pipeline") as pipeline:
            log.info("Test log event")

        pipeline.return_value.rpush.assert_called_once_with(expected_key, "Test log event")
        pipeline.return_value.ltrim.assert_called_once_with(expected_key, start=-5, end=-1)
        pipeline.return_value.expire.assert_called_once_with(expected_key, time=2)
        pipeline.return_value.execute.assert_called_once_with()

    @conf_vars({("logging", "remote_log_conn_id"): "redis_default"})
    def test_read(self, ti):
        handler = RedisTaskHandler("any")
        handler.set_context(ti)
        log = logging.getLogger(__name__)
        log.addHandler(handler)

        expected_key = (
            "dag_id=dag_for_testing_redis_task_handler/run_id=test"
            "/task_id=task_for_testing_redis_log_handler/attempt=1.log"
        )

        with patch("redis.Redis.lrange") as lrange:
            lrange.return_value = [b"Line 1", b"Line 2"]
            logs = handler.read(ti)

        assert logs == ([[("", "Line 1\nLine 2")]], [{"end_of_log": True}])
        lrange.assert_called_once_with(expected_key, start=0, end=-1)

0 comments on commit 0d4783b

Please sign in to comment.