jscheffl commented on code in PR #42047:
URL: https://github.com/apache/airflow/pull/42047#discussion_r1761653907


##########
airflow/providers/edge/models/edge_logs.py:
##########
@@ -0,0 +1,153 @@
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+from __future__ import annotations
+
+from datetime import datetime
+from functools import lru_cache
+from pathlib import Path
+from typing import TYPE_CHECKING
+
+from pydantic import BaseModel, ConfigDict
+from sqlalchemy import (
+    Column,
+    Integer,
+    Text,
+    text,
+)
+from sqlalchemy.dialects.mysql import MEDIUMTEXT
+
+from airflow.api_internal.internal_api_call import internal_api_call
+from airflow.configuration import conf
+from airflow.models.base import Base, StringID
+from airflow.models.taskinstance import TaskInstance
+from airflow.models.taskinstancekey import TaskInstanceKey
+from airflow.serialization.serialized_objects import add_pydantic_class_type_mapping
+from airflow.utils.log.logging_mixin import LoggingMixin
+from airflow.utils.session import NEW_SESSION, provide_session
+from airflow.utils.sqlalchemy import UtcDateTime
+
+if TYPE_CHECKING:
+    from sqlalchemy.orm.session import Session
+
+
+class EdgeLogsModel(Base, LoggingMixin):
+    """
+    Temporarily collected logs from an Edge Worker while a job runs on a remote site.
+
+    As the Edge Worker in most cases only has a local file system and the web UI has no
+    access to read files from the remote site, Edge Workers send incremental chunks of the
+    logs of running jobs to the central site. As log storage backends in most cloud setups
+    can not append to logs, this table is used as a buffer to receive them. Upon task
+    completion the logs can be flushed to the task log handler.
+
+    Log data therefore is collected in chunks and is only temporary.
+    """
+
+    __tablename__ = "edge_logs"
+    dag_id = Column(StringID(), primary_key=True, nullable=False)
+    task_id = Column(StringID(), primary_key=True, nullable=False)
+    run_id = Column(StringID(), primary_key=True, nullable=False)
+    map_index = Column(Integer, primary_key=True, nullable=False, server_default=text("-1"))
+    try_number = Column(Integer, primary_key=True, default=0)
+    log_chunk_time = Column(UtcDateTime, primary_key=True, nullable=False)
+    log_chunk_data = Column(Text().with_variant(MEDIUMTEXT(), "mysql"), nullable=False)
+
+    def __init__(
+        self,
+        dag_id: str,
+        task_id: str,
+        run_id: str,
+        map_index: int,
+        try_number: int,
+        log_chunk_time: datetime,
+        log_chunk_data: str,
+    ):
+        self.dag_id = dag_id
+        self.task_id = task_id
+        self.run_id = run_id
+        self.map_index = map_index
+        self.try_number = try_number
+        self.log_chunk_time = log_chunk_time
+        self.log_chunk_data = log_chunk_data
+        super().__init__()
+
+
+class EdgeLogs(BaseModel, LoggingMixin):
+    """Accessor for Edge Worker instances as logical model."""
+
+    dag_id: str
+    task_id: str
+    run_id: str
+    map_index: int
+    try_number: int
+    log_chunk_time: datetime
+    log_chunk_data: str
+    model_config = ConfigDict(from_attributes=True, arbitrary_types_allowed=True)
+
+    @staticmethod
+    @internal_api_call
+    @provide_session
+    def push_logs(
+        task: TaskInstanceKey | tuple,
+        log_chunk_time: datetime,
+        log_chunk_data: str,
+        session: Session = NEW_SESSION,
+    ) -> None:
+        """Push an incremental log chunk from Edge Worker to central site."""
+        if isinstance(task, tuple):
+            task = TaskInstanceKey(*task)
+        log_chunk = EdgeLogsModel(
+            dag_id=task.dag_id,
+            task_id=task.task_id,
+            run_id=task.run_id,
+            map_index=task.map_index,
+            try_number=task.try_number,
+            log_chunk_time=log_chunk_time,
+            log_chunk_data=log_chunk_data,
+        )
+        session.add(log_chunk)
+        # Write logs to local file to make them accessible
+        logfile_path = EdgeLogs.logfile_path(task)
+        if not logfile_path.exists():
+            new_folder_permissions = int(
+                conf.get("logging", 
"file_task_handler_new_folder_permissions", fallback="0o775"), 8
+            )
+            logfile_path.parent.mkdir(parents=True, exist_ok=True, mode=new_folder_permissions)
+        with logfile_path.open("a") as logfile:
+            logfile.write(log_chunk_data)

Review Comment:
   The target is to replicate the logs 1:1 from the "Edge" to the core site, as if a logger had written them there. If I passed them through a logger instance, we would get another level of prefix and timestamp. In this mode the original "logger" settings from the "Edge" apply, with the exact timestamps as the file was written on the edge. So this is just an incremental upload of file chunks from the Edge to the central site.
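   
   To make that concrete, the worker-side loop this enables would look roughly like the sketch below. This is purely illustrative; `push_new_chunk`, `task_key` and the offset bookkeeping are made-up names, not literal code from this PR:
   
   ```python
   # Illustrative worker-side upload loop - NOT part of this PR.
   # `task_key` is the TaskInstanceKey of the running job; the worker
   # remembers the byte offset of what was already shipped between polls.
   from pathlib import Path
   
   from airflow.providers.edge.models.edge_logs import EdgeLogs
   from airflow.utils import timezone
   
   
   def push_new_chunk(task_key, logfile: Path, last_offset: int) -> int:
       """Ship whatever the task appended since the last poll, unmodified."""
       with logfile.open("rb") as f:
           f.seek(last_offset)
           data = f.read()
       if data:
           # The chunk keeps the prefixes and timestamps the Edge logger
           # wrote, so the central site gets a 1:1 copy of the file content.
           EdgeLogs.push_logs(
               task=task_key,
               log_chunk_time=timezone.utcnow(),
               log_chunk_data=data.decode("utf-8", errors="replace"),
           )
       return last_offset + len(data)
   ```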
   
   I would see this as an MVP: it works, and log increments are displayed in the UI even for a long-running task. But of course, if a distributed log architecture such as S3 or Logstash is used, this would need a better integration to upload the chunks. I have not seen any API in the 2.10 code base that I could leverage for this. I'd propose to add it post-MVP in an increment.
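   
   Just to sketch the direction of such an increment (hypothetical, nothing of this exists in the code base yet): upon task completion the buffered chunks could be read in order, concatenated and handed to whatever remote log backend is configured, then dropped from the buffer table. `remote_write` is a placeholder for that backend call:
   
   ```python
   # Hypothetical post-MVP flush - a sketch only, no such integration exists yet.
   from sqlalchemy import delete, select
   
   from airflow.providers.edge.models.edge_logs import EdgeLogsModel
   
   
   def flush_buffered_logs(ti_key, remote_write, session):
       """Concatenate buffered chunks in arrival order and ship them once."""
       filters = (
           EdgeLogsModel.dag_id == ti_key.dag_id,
           EdgeLogsModel.task_id == ti_key.task_id,
           EdgeLogsModel.run_id == ti_key.run_id,
           EdgeLogsModel.map_index == ti_key.map_index,
           EdgeLogsModel.try_number == ti_key.try_number,
       )
       chunks = session.scalars(
           select(EdgeLogsModel).where(*filters).order_by(EdgeLogsModel.log_chunk_time)
       ).all()
       if chunks:
           # `remote_write` would be e.g. an S3 or Logstash upload callable.
           remote_write("".join(c.log_chunk_data for c in chunks))
           session.execute(delete(EdgeLogsModel).where(*filters))
   ```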
   
   I also assume that "really good" log shipping is something for which we would need the Task SDK in Airflow 3.0... which will take a moment. So I would see this code not as a long-term solution but as "make something running". And then improve.



-- 
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.

To unsubscribe, e-mail: [email protected]

For queries about this service, please contact Infrastructure at:
[email protected]

Reply via email to