This is an automated email from the ASF dual-hosted git repository.

eladkal pushed a commit to branch main
in repository https://gitbox.apache.org/repos/asf/airflow.git


The following commit(s) were added to refs/heads/main by this push:
     new 42b4b43c4c Add Redis task handler (#31855)
42b4b43c4c is described below

commit 42b4b43c4c2ccf0b6e7eaa105c982df495768d01
Author: Michal Charemza <[email protected]>
AuthorDate: Sun Jul 23 07:43:35 2023 +0100

    Add Redis task handler (#31855)
    
    * Allow FileTaskHandler to delegate to instances of logging.Handler
    
    FileTaskHandler is the base class for logging handlers, including those that
    don't log to files via delegating to logging.FileHandler, e.g. in the
    CloudwatchTaskHandler at
    
https://github.com/apache/airflow/blob/2940b9fa55a6a72c60c2162e541631addec3d6b8/airflow/providers/amazon/aws/log/cloudwatch_task_handler.py#L67
    
    It is suspected that type checking is not enabled in this part of the
    CloudwatchTaskHandler otherwise it would have already been failing.
    
    This change adjusts the base class so that when type checking is enabled in
    a task handler that delegates to a logging.Handler which is not a
    logging.FileHandler — as the CloudwatchTaskHandler does — the type check
    passes.
    
    This was originally part of https://github.com/apache/airflow/pull/31855 and
    split out.
    
    related: https://github.com/apache/airflow/issues/31834
    
    * Add Redis task handler
    
    This stores log lines in Redis up to a configured maximum log lines, always
    keeping the most recent, up to a configured TTL.
    
    This deviates from other existing task handlers in that it accepts a
    connection ID. This allows it to be used in addition to other handlers, and
    so allows a graceful/reversible transition from one logging system to
    another.
    
    This is particularly useful in situations that use Redis as a message
    broker, where additional infrastructure isn't desired.
    
    closes: https://github.com/apache/airflow/issues/31834
---
 .../microsoft/azure/log/wasb_task_handler.py       |   2 +
 airflow/providers/redis/log/__init__.py            |  17 ++++
 airflow/providers/redis/log/redis_task_handler.py  | 105 +++++++++++++++++++++
 airflow/providers/redis/provider.yaml              |   3 +
 airflow/utils/log/file_task_handler.py             |   2 +-
 docs/apache-airflow-providers-redis/index.rst      |   7 ++
 .../logging/index.rst                              |  24 +++++
 tests/providers/redis/log/__init__.py              |  17 ++++
 .../providers/redis/log/test_redis_task_handler.py |  94 ++++++++++++++++++
 9 files changed, 270 insertions(+), 1 deletion(-)

diff --git a/airflow/providers/microsoft/azure/log/wasb_task_handler.py 
b/airflow/providers/microsoft/azure/log/wasb_task_handler.py
index 96c87219ca..97a8af5ae1 100644
--- a/airflow/providers/microsoft/azure/log/wasb_task_handler.py
+++ b/airflow/providers/microsoft/azure/log/wasb_task_handler.py
@@ -17,6 +17,7 @@
 # under the License.
 from __future__ import annotations
 
+import logging
 import os
 import shutil
 from functools import cached_property
@@ -62,6 +63,7 @@ class WasbTaskHandler(FileTaskHandler, LoggingMixin):
         **kwargs,
     ) -> None:
         super().__init__(base_log_folder, filename_template)
+        self.handler: logging.FileHandler | None = None
         self.wasb_container = wasb_container
         self.remote_base = wasb_log_folder
         self.log_relative_path = ""
diff --git a/airflow/providers/redis/log/__init__.py 
b/airflow/providers/redis/log/__init__.py
new file mode 100644
index 0000000000..217e5db960
--- /dev/null
+++ b/airflow/providers/redis/log/__init__.py
@@ -0,0 +1,17 @@
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
diff --git a/airflow/providers/redis/log/redis_task_handler.py 
b/airflow/providers/redis/log/redis_task_handler.py
new file mode 100644
index 0000000000..b2e4a8fc16
--- /dev/null
+++ b/airflow/providers/redis/log/redis_task_handler.py
@@ -0,0 +1,105 @@
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+from __future__ import annotations
+
+import logging
+from functools import cached_property
+from typing import Any
+
+from redis import Redis
+
+from airflow.configuration import conf
+from airflow.models import TaskInstance
+from airflow.providers.redis.hooks.redis import RedisHook
+from airflow.utils.log.file_task_handler import FileTaskHandler
+from airflow.utils.log.logging_mixin import LoggingMixin
+
+
+class RedisTaskHandler(FileTaskHandler, LoggingMixin):
+    """
+    RedisTaskHandler is a Python log handler that handles and reads task 
instance logs.
+    It extends airflow FileTaskHandler and uploads to and reads from Redis.
+
+    :param base_log_folder:
+        base folder to store logs locally
+    :param max_lines:
+        Maximum number of lines of log to store
+        If omitted, this is 10000.
+    :param ttl_seconds:
+        Maximum number of seconds to store logs
+        If omitted, this is the equivalent of 28 days.
+    :param conn_id:
+        Airflow connection ID for the Redis hook to use
+        If omitted or None, the ID specified in the option 
logging.remote_log_conn_id is used.
+    """
+
+    trigger_should_wrap = True
+
+    def __init__(
+        self,
+        base_log_folder: str,
+        max_lines: int = 10000,
+        ttl_seconds: int = 60 * 60 * 24 * 28,
+        conn_id: str | None = None,
+    ):
+        super().__init__(base_log_folder)
+        self.handler: _RedisHandler | None = None
+        self.max_lines = max_lines
+        self.ttl_seconds = ttl_seconds
+        self.conn_id = conn_id if conn_id is not None else conf.get("logging", 
"REMOTE_LOG_CONN_ID")
+
+    @cached_property
+    def conn(self):
+        return RedisHook(redis_conn_id=self.conn_id).get_conn()
+
+    def _read(
+        self,
+        ti: TaskInstance,
+        try_number: int,
+        metadata: dict[str, Any] | None = None,
+    ):
+        log_str = b"\n".join(
+            self.conn.lrange(self._render_filename(ti, try_number), start=0, 
end=-1)
+        ).decode()
+        return log_str, {"end_of_log": True}
+
+    def set_context(self, ti: TaskInstance):
+        super().set_context(ti)
+        self.handler = _RedisHandler(
+            self.conn,
+            key=self._render_filename(ti, ti.try_number),
+            max_lines=self.max_lines,
+            ttl_seconds=self.ttl_seconds,
+        )
+        self.handler.setFormatter(self.formatter)
+
+
+class _RedisHandler(logging.Handler):
+    def __init__(self, conn: Redis, key: str, max_lines: int, ttl_seconds: 
int):
+        super().__init__()
+        self.conn = conn
+        self.key = key
+        self.max_lines = max_lines
+        self.ttl_seconds = ttl_seconds
+
+    def emit(self, record):
+        p = self.conn.pipeline()
+        p.rpush(self.key, self.format(record))
+        p.ltrim(self.key, start=-self.max_lines, end=-1)
+        p.expire(self.key, time=self.ttl_seconds)
+        p.execute()
diff --git a/airflow/providers/redis/provider.yaml 
b/airflow/providers/redis/provider.yaml
index c679abe00e..59becb7a69 100644
--- a/airflow/providers/redis/provider.yaml
+++ b/airflow/providers/redis/provider.yaml
@@ -64,3 +64,6 @@ hooks:
 connection-types:
   - hook-class-name: airflow.providers.redis.hooks.redis.RedisHook
     connection-type: redis
+
+logging:
+  - airflow.providers.redis.log.redis_task_handler.RedisTaskHandler
diff --git a/airflow/utils/log/file_task_handler.py 
b/airflow/utils/log/file_task_handler.py
index 5d791aaa0c..fb540ed9ec 100644
--- a/airflow/utils/log/file_task_handler.py
+++ b/airflow/utils/log/file_task_handler.py
@@ -147,7 +147,7 @@ class FileTaskHandler(logging.Handler):
 
     def __init__(self, base_log_folder: str, filename_template: str | None = 
None):
         super().__init__()
-        self.handler: logging.FileHandler | None = None
+        self.handler: logging.Handler | None = None
         self.local_base = base_log_folder
         if filename_template is not None:
             warnings.warn(
diff --git a/docs/apache-airflow-providers-redis/index.rst 
b/docs/apache-airflow-providers-redis/index.rst
index 02bc13257f..d33b628b1a 100644
--- a/docs/apache-airflow-providers-redis/index.rst
+++ b/docs/apache-airflow-providers-redis/index.rst
@@ -29,6 +29,13 @@
     Changelog <changelog>
     Security <security>
 
+.. toctree::
+    :hidden:
+    :maxdepth: 1
+    :caption: Guides
+
+    Logging <logging/index>
+
 .. toctree::
     :hidden:
     :maxdepth: 1
diff --git a/docs/apache-airflow-providers-redis/logging/index.rst 
b/docs/apache-airflow-providers-redis/logging/index.rst
new file mode 100644
index 0000000000..6dea5fff2b
--- /dev/null
+++ b/docs/apache-airflow-providers-redis/logging/index.rst
@@ -0,0 +1,24 @@
+ .. Licensed to the Apache Software Foundation (ASF) under one
+    or more contributor license agreements.  See the NOTICE file
+    distributed with this work for additional information
+    regarding copyright ownership.  The ASF licenses this file
+    to you under the Apache License, Version 2.0 (the
+    "License"); you may not use this file except in compliance
+    with the License.  You may obtain a copy of the License at
+
+ ..   http://www.apache.org/licenses/LICENSE-2.0
+
+ .. Unless required by applicable law or agreed to in writing,
+    software distributed under the License is distributed on an
+    "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+    KIND, either express or implied.  See the License for the
+    specific language governing permissions and limitations
+    under the License.
+
+.. _write-logs-redis:
+
+Writing logs to Redis
+---------------------
+
+Airflow can be configured to store log lines in Redis up to a configured 
maximum log lines, always keeping the most recent, up to a configured TTL. This 
deviates from other existing task handlers in that it accepts a connection ID.
+This allows it to be used in addition to other handlers, and so allows a 
graceful/reversible transition from one logging system to another. This is 
particularly useful in situations that use Redis as a message broker, where 
additional infrastructure isn't desired.
diff --git a/tests/providers/redis/log/__init__.py 
b/tests/providers/redis/log/__init__.py
new file mode 100644
index 0000000000..217e5db960
--- /dev/null
+++ b/tests/providers/redis/log/__init__.py
@@ -0,0 +1,17 @@
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
diff --git a/tests/providers/redis/log/test_redis_task_handler.py 
b/tests/providers/redis/log/test_redis_task_handler.py
new file mode 100644
index 0000000000..3dc481074d
--- /dev/null
+++ b/tests/providers/redis/log/test_redis_task_handler.py
@@ -0,0 +1,94 @@
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+from __future__ import annotations
+
+import logging
+from unittest.mock import patch
+
+import pytest
+
+from airflow.models import DAG, DagRun, TaskInstance
+from airflow.operators.empty import EmptyOperator
+from airflow.providers.redis.log.redis_task_handler import RedisTaskHandler
+from airflow.utils.session import create_session
+from airflow.utils.state import State
+from airflow.utils.timezone import datetime
+from tests.test_utils.config import conf_vars
+
+
+class TestRedisTaskHandler:
+    @pytest.fixture
+    def ti(self):
+        date = datetime(2020, 1, 1)
+        dag = DAG(dag_id="dag_for_testing_redis_task_handler", start_date=date)
+        task = EmptyOperator(task_id="task_for_testing_redis_log_handler", 
dag=dag)
+        dag_run = DagRun(dag_id=dag.dag_id, execution_date=date, 
run_id="test", run_type="scheduled")
+
+        with create_session() as session:
+            session.add(dag_run)
+            session.commit()
+            session.refresh(dag_run)
+
+        ti = TaskInstance(task=task, run_id=dag_run.run_id)
+        ti.dag_run = dag_run
+        ti.try_number = 1
+        ti.state = State.RUNNING
+
+        yield ti
+
+        with create_session() as session:
+            session.query(DagRun).delete()
+
+    @conf_vars({("logging", "remote_log_conn_id"): "redis_default"})
+    def test_write(self, ti):
+        handler = RedisTaskHandler("any", max_lines=5, ttl_seconds=2)
+        handler.set_context(ti)
+        logger = logging.getLogger(__name__)
+        logger.addHandler(handler)
+
+        key = (
+            "dag_id=dag_for_testing_redis_task_handler/run_id=test"
+            + "/task_id=task_for_testing_redis_log_handler/attempt=1.log"
+        )
+
+        with patch("redis.Redis.pipeline") as pipeline:
+            logger.info("Test log event")
+
+        pipeline.return_value.rpush.assert_called_once_with(key, "Test log 
event")
+        pipeline.return_value.ltrim.assert_called_once_with(key, start=-5, 
end=-1)
+        pipeline.return_value.expire.assert_called_once_with(key, time=2)
+        pipeline.return_value.execute.assert_called_once_with()
+
+    @conf_vars({("logging", "remote_log_conn_id"): "redis_default"})
+    def test_read(self, ti):
+        handler = RedisTaskHandler("any")
+        handler.set_context(ti)
+        logger = logging.getLogger(__name__)
+        logger.addHandler(handler)
+
+        key = (
+            "dag_id=dag_for_testing_redis_task_handler/run_id=test"
+            + "/task_id=task_for_testing_redis_log_handler/attempt=1.log"
+        )
+
+        with patch("redis.Redis.lrange") as lrange:
+            lrange.return_value = [b"Line 1", b"Line 2"]
+            logs = handler.read(ti)
+
+        assert logs == ([[("", "Line 1\nLine 2")]], [{"end_of_log": True}])
+        lrange.assert_called_once_with(key, start=0, end=-1)

Reply via email to