This is an automated email from the ASF dual-hosted git repository.

uranusjr pushed a commit to branch main
in repository https://gitbox.apache.org/repos/asf/airflow.git


The following commit(s) were added to refs/heads/main by this push:
     new 97948ecae7 Move fallible ti.task.dag assignment back inside try/except block (#24533) (#24592)
97948ecae7 is described below

commit 97948ecae7fcbb7dfdfb169cfe653bd20a108def
Author: EJ Kreinar <[email protected]>
AuthorDate: Thu Jun 30 12:40:25 2022 -0400

    Move fallible ti.task.dag assignment back inside try/except block (#24533) (#24592)
    
    * Move fallible ti.task.dag assignment back inside try/except block
    
    It looks like ti.task.dag was originally protected inside try/except,
    but was moved out at commit 7be87d
    
    * Remove unneeded variable annotation
    
    Co-authored-by: EJ Kreinar <[email protected]>
    Co-authored-by: Tzu-ping Chung <[email protected]>
---
 airflow/providers/elasticsearch/log/es_task_handler.py |  7 ++++---
 airflow/utils/log/file_task_handler.py                 | 10 +++++-----
 2 files changed, 9 insertions(+), 8 deletions(-)

diff --git a/airflow/providers/elasticsearch/log/es_task_handler.py b/airflow/providers/elasticsearch/log/es_task_handler.py
index 64fce0df53..4707f523d6 100644
--- a/airflow/providers/elasticsearch/log/es_task_handler.py
+++ b/airflow/providers/elasticsearch/log/es_task_handler.py
@@ -125,12 +125,13 @@ class ElasticsearchTaskHandler(FileTaskHandler, ExternalLoggingMixin, LoggingMix
             else:
                 log_id_template = self.log_id_template
 
-        dag = ti.task.dag
-        assert dag is not None  # For Mypy.
         try:
-            data_interval: Tuple[datetime, datetime] = dag.get_run_data_interval(dag_run)
+            dag = ti.task.dag
         except AttributeError:  # ti.task is not always set.
             data_interval = (dag_run.data_interval_start, 
dag_run.data_interval_end)
+        else:
+            assert dag is not None  # For Mypy.
+            data_interval = dag.get_run_data_interval(dag_run)
 
         if self.json_format:
             data_interval_start = self._clean_date(data_interval[0])
diff --git a/airflow/utils/log/file_task_handler.py b/airflow/utils/log/file_task_handler.py
index 2c53529a72..471d5b95be 100644
--- a/airflow/utils/log/file_task_handler.py
+++ b/airflow/utils/log/file_task_handler.py
@@ -19,9 +19,8 @@
 import logging
 import os
 import warnings
-from datetime import datetime
 from pathlib import Path
-from typing import TYPE_CHECKING, Optional, Tuple
+from typing import TYPE_CHECKING, Optional
 
 from airflow.configuration import AirflowConfigException, conf
 from airflow.utils.context import Context
@@ -93,12 +92,13 @@ class FileTaskHandler(logging.Handler):
             context["try_number"] = try_number
             return render_template_to_string(jinja_tpl, context)
         elif str_tpl:
-            dag = ti.task.dag
-            assert dag is not None  # For Mypy.
             try:
-                data_interval: Tuple[datetime, datetime] = dag.get_run_data_interval(dag_run)
+                dag = ti.task.dag
             except AttributeError:  # ti.task is not always set.
                 data_interval = (dag_run.data_interval_start, dag_run.data_interval_end)
+            else:
+                assert dag is not None  # For Mypy.
+                data_interval = dag.get_run_data_interval(dag_run)
             if data_interval[0]:
                 data_interval_start = data_interval[0].isoformat()
             else:

Reply via email to