Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Add Redis task handler #31855

Merged
merged 2 commits into from
Jul 23, 2023
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
2 changes: 2 additions & 0 deletions airflow/providers/microsoft/azure/log/wasb_task_handler.py
Original file line number Diff line number Diff line change
Expand Up @@ -17,6 +17,7 @@
# under the License.
from __future__ import annotations

import logging
import os
import shutil
from functools import cached_property
Expand Down Expand Up @@ -62,6 +63,7 @@ def __init__(
**kwargs,
) -> None:
super().__init__(base_log_folder, filename_template)
self.handler: logging.FileHandler | None = None
self.wasb_container = wasb_container
self.remote_base = wasb_log_folder
self.log_relative_path = ""
Expand Down
17 changes: 17 additions & 0 deletions airflow/providers/redis/log/__init__.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,17 @@
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
105 changes: 105 additions & 0 deletions airflow/providers/redis/log/redis_task_handler.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,105 @@
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations

import logging
from functools import cached_property
from typing import Any

from redis import Redis

from airflow.configuration import conf
from airflow.models import TaskInstance
from airflow.providers.redis.hooks.redis import RedisHook
from airflow.utils.log.file_task_handler import FileTaskHandler
from airflow.utils.log.logging_mixin import LoggingMixin


class RedisTaskHandler(FileTaskHandler, LoggingMixin):
    """
    RedisTaskHandler is a Python log handler that handles and reads task instance logs.
    It extends airflow FileTaskHandler and uploads to and reads from Redis.

    :param base_log_folder: base folder to store logs locally
    :param max_lines: maximum number of log lines to keep per task attempt
        (defaults to 10000; older lines are trimmed away)
    :param ttl_seconds: maximum number of seconds to retain logs
        (defaults to the equivalent of 28 days)
    :param conn_id: Airflow connection ID for the Redis hook to use.
        If omitted or None, the ID specified in the option
        logging.remote_log_conn_id is used.
    """

    trigger_should_wrap = True

    def __init__(
        self,
        base_log_folder: str,
        max_lines: int = 10000,
        ttl_seconds: int = 60 * 60 * 24 * 28,
        conn_id: str | None = None,
    ):
        super().__init__(base_log_folder)
        # Per-task-instance handler; created lazily in set_context().
        self.handler: _RedisHandler | None = None
        self.max_lines = max_lines
        self.ttl_seconds = ttl_seconds
        # Fall back to the globally configured remote log connection when no
        # explicit connection ID was supplied.
        if conn_id is None:
            self.conn_id = conf.get("logging", "REMOTE_LOG_CONN_ID")
        else:
            self.conn_id = conn_id

    @cached_property
    def conn(self):
        """Redis client built from the configured Airflow connection (cached)."""
        return RedisHook(redis_conn_id=self.conn_id).get_conn()

    def _read(
        self,
        ti: TaskInstance,
        try_number: int,
        metadata: dict[str, Any] | None = None,
    ):
        # Fetch every stored line for this attempt and join them back into a
        # single string; Redis returns bytes, so decode after joining.
        key = self._render_filename(ti, try_number)
        lines = self.conn.lrange(key, start=0, end=-1)
        return b"\n".join(lines).decode(), {"end_of_log": True}

    def set_context(self, ti: TaskInstance):
        super().set_context(ti)
        # Route records for this task instance to its own Redis list, keyed by
        # the same rendered filename the read path uses.
        self.handler = _RedisHandler(
            self.conn,
            key=self._render_filename(ti, ti.try_number),
            max_lines=self.max_lines,
            ttl_seconds=self.ttl_seconds,
        )
        self.handler.setFormatter(self.formatter)


class _RedisHandler(logging.Handler):
def __init__(self, conn: Redis, key: str, max_lines: int, ttl_seconds: int):
super().__init__()
self.conn = conn
self.key = key
self.max_lines = max_lines
self.ttl_seconds = ttl_seconds

def emit(self, record):
p = self.conn.pipeline()
p.rpush(self.key, self.format(record))
p.ltrim(self.key, start=-self.max_lines, end=-1)
p.expire(self.key, time=self.ttl_seconds)
p.execute()
3 changes: 3 additions & 0 deletions airflow/providers/redis/provider.yaml
Original file line number Diff line number Diff line change
Expand Up @@ -64,3 +64,6 @@ hooks:
connection-types:
- hook-class-name: airflow.providers.redis.hooks.redis.RedisHook
connection-type: redis

logging:
- airflow.providers.redis.log.redis_task_handler.RedisTaskHandler
2 changes: 1 addition & 1 deletion airflow/utils/log/file_task_handler.py
Original file line number Diff line number Diff line change
Expand Up @@ -147,7 +147,7 @@ class FileTaskHandler(logging.Handler):

def __init__(self, base_log_folder: str, filename_template: str | None = None):
super().__init__()
self.handler: logging.FileHandler | None = None
self.handler: logging.Handler | None = None
Copy link
Contributor

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

This change looks out of place?

Copy link
Contributor Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

This is true - but it allows the lower level handler, _RedisHandler to not inherit from logging.FileHandler, which I think is appropriate since there is no file involved(?)

It looks like the codebase already doesn't enforce that the lower level handler extends from logging.FileHandler -

self.handler = watchtower.CloudWatchLogHandler(
which is defined in https://github.com/kislyuk/watchtower/blob/698ff2241befb6586269b6bbce0e305032c2d45c/watchtower/__init__.py#L139

This touches on the bits that I'm unsure about - it looks like FileTaskHandler has a lot of file-related things, but also it looks like it's expected to extend from it when not logging to a file. Not quite sure how to tackle that

Copy link
Contributor

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Should have been more specific initially. :)

What I meant was that maybe this change is not required by this PR? since you have anyway overridden the self.handler in class RedisTaskHandler. Also, FileTaskHandler is used in many places, and changing it here might affect those implementations.

About inheriting from FileTaskHandler I'm not sure either, maybe @bolkedebruin might have more context.

Copy link
Contributor Author

@michalc michalc Jun 14, 2023

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

What I meant was that maybe this change is not required by this PR? since you have anyway overridden the self.handler in class RedisTaskHandler

Ah - so if I change it back to how it was before this PR, I get a type error:

airflow/providers/redis/log/redis_task_handler.py:61: error: Incompatible types
in assignment (expression has type "Optional[_RedisHandler]", base class
"FileTaskHandler" defined the type as "Optional[FileHandler]")  [assignment]

So something else would have to change too. The easiest way that I've come up with is to de-facto disable type checking on self.handler by removing type annotations in a few places, to make it more like the Cloudwatch handler, but that seems a bit... cheat-y.

_RedisHandler could really inherit from logging.FileHandler to satisfy type checking, but its constructor seems to require a path to a file, which seems odd in this case.

Copy link
Member

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

This change is OK to me since the base class only really needs to inner handler to be a logging.Handler. It could be moved to a separate PR but since this is where the change becomes necessary (instead of just a generally good thing to do) it’s fine to be included here.

Copy link
Contributor

@eladkal eladkal Jun 27, 2023

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

If this specific change is not tightly coupled to this PR I would suggest to do it in a separated PR.
this is because utils/log goes to core release cycle while the other files goes to provider release cycle.
@michalc Is it possible to separate?

Copy link
Contributor

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

@michalc can you check my above comment?

Copy link
Contributor Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

@eladkal Sounds reasonable to separate to another PR. Will do that now

Copy link
Contributor Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

@eladkal Done in #32773

self.local_base = base_log_folder
if filename_template is not None:
warnings.warn(
Expand Down
7 changes: 7 additions & 0 deletions docs/apache-airflow-providers-redis/index.rst
Original file line number Diff line number Diff line change
Expand Up @@ -29,6 +29,13 @@
Changelog <changelog>
Security <security>

.. toctree::
:hidden:
:maxdepth: 1
:caption: Guides

Logging <logging/index>

.. toctree::
:hidden:
:maxdepth: 1
Expand Down
24 changes: 24 additions & 0 deletions docs/apache-airflow-providers-redis/logging/index.rst
Original file line number Diff line number Diff line change
@@ -0,0 +1,24 @@
.. Licensed to the Apache Software Foundation (ASF) under one
or more contributor license agreements. See the NOTICE file
distributed with this work for additional information
regarding copyright ownership. The ASF licenses this file
to you under the Apache License, Version 2.0 (the
"License"); you may not use this file except in compliance
with the License. You may obtain a copy of the License at

.. http://www.apache.org/licenses/LICENSE-2.0

.. Unless required by applicable law or agreed to in writing,
software distributed under the License is distributed on an
"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
KIND, either express or implied. See the License for the
specific language governing permissions and limitations
under the License.

.. _write-logs-redis:

Writing logs to Redis
---------------------

Airflow can be configured to store log lines in Redis up to a configured maximum number of log lines, always keeping the most recent, up to a configured TTL. This deviates from other existing task handlers in that it accepts a connection ID.
This allows it to be used in addition to other handlers, and so allows a graceful/reversible transition from one logging system to another. This is particularly useful in situations that use Redis as a message broker, where additional infrastructure isn't desired.
17 changes: 17 additions & 0 deletions tests/providers/redis/log/__init__.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,17 @@
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
94 changes: 94 additions & 0 deletions tests/providers/redis/log/test_redis_task_handler.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,94 @@
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations

import logging
from unittest.mock import patch

import pytest

from airflow.models import DAG, DagRun, TaskInstance
from airflow.operators.empty import EmptyOperator
from airflow.providers.redis.log.redis_task_handler import RedisTaskHandler
from airflow.utils.session import create_session
from airflow.utils.state import State
from airflow.utils.timezone import datetime
from tests.test_utils.config import conf_vars


class TestRedisTaskHandler:
    """Tests for RedisTaskHandler covering the write (emit) and read paths."""

    @pytest.fixture
    def ti(self):
        # Build a TaskInstance backed by a DagRun persisted to the test DB:
        # the handler's key rendering needs real dag_id/run_id/task_id/try_number.
        date = datetime(2020, 1, 1)
        dag = DAG(dag_id="dag_for_testing_redis_task_handler", start_date=date)
        task = EmptyOperator(task_id="task_for_testing_redis_log_handler", dag=dag)
        dag_run = DagRun(dag_id=dag.dag_id, execution_date=date, run_id="test", run_type="scheduled")

        with create_session() as session:
            session.add(dag_run)
            session.commit()
            # Refresh so the persisted DagRun carries DB-generated state.
            session.refresh(dag_run)

        ti = TaskInstance(task=task, run_id=dag_run.run_id)
        ti.dag_run = dag_run
        ti.try_number = 1
        ti.state = State.RUNNING

        yield ti

        # Teardown: remove the DagRun so tests stay isolated
        # (create_session commits on exit).
        with create_session() as session:
            session.query(DagRun).delete()

    @conf_vars({("logging", "remote_log_conn_id"): "redis_default"})
    def test_write(self, ti):
        # Emitting one record must push/trim/expire/execute through one pipeline.
        handler = RedisTaskHandler("any", max_lines=5, ttl_seconds=2)
        handler.set_context(ti)
        logger = logging.getLogger(__name__)
        logger.addHandler(handler)

        # Expected Redis key: the rendered log filename for attempt 1.
        key = (
            "dag_id=dag_for_testing_redis_task_handler/run_id=test"
            + "/task_id=task_for_testing_redis_log_handler/attempt=1.log"
        )

        with patch("redis.Redis.pipeline") as pipeline:
            logger.info("Test log event")

        pipeline.return_value.rpush.assert_called_once_with(key, "Test log event")
        # ltrim keeps only the newest max_lines (5) entries.
        pipeline.return_value.ltrim.assert_called_once_with(key, start=-5, end=-1)
        pipeline.return_value.expire.assert_called_once_with(key, time=2)
        pipeline.return_value.execute.assert_called_once_with()

    @conf_vars({("logging", "remote_log_conn_id"): "redis_default"})
    def test_read(self, ti):
        # Reading must lrange the whole list and join lines with newlines.
        handler = RedisTaskHandler("any")
        handler.set_context(ti)
        logger = logging.getLogger(__name__)
        logger.addHandler(handler)

        # Same rendered key as the write path, so reads find what writes stored.
        key = (
            "dag_id=dag_for_testing_redis_task_handler/run_id=test"
            + "/task_id=task_for_testing_redis_log_handler/attempt=1.log"
        )

        with patch("redis.Redis.lrange") as lrange:
            lrange.return_value = [b"Line 1", b"Line 2"]
            logs = handler.read(ti)

        # FileTaskHandler.read wraps the joined string; end_of_log is always True.
        assert logs == ([[("", "Line 1\nLine 2")]], [{"end_of_log": True}])
        lrange.assert_called_once_with(key, start=0, end=-1)