sunank200 commented on code in PR #43902:
URL: https://github.com/apache/airflow/pull/43902#discussion_r1841984866


##########
newsfragments/43902.significant.rst:
##########


Review Comment:
   Changed it



##########
providers/src/airflow/providers/google/cloud/operators/gcs.py:
##########
@@ -36,13 +36,18 @@
 
 from google.api_core.exceptions import Conflict
 from google.cloud.exceptions import GoogleCloudError
+from packaging.version import Version
 
+from airflow import __version__ as airflow_version
 from airflow.exceptions import AirflowException, AirflowProviderDeprecationWarning
 from airflow.providers.google.cloud.hooks.gcs import GCSHook
 from airflow.providers.google.cloud.operators.cloud_base import GoogleCloudBaseOperator
 from airflow.providers.google.common.links.storage import FileDetailsLink, StorageLink
 from airflow.utils import timezone
 
+AIRFLOW_VERSION = Version(airflow_version)
+AIRFLOW_V_3_0_PLUS = Version(AIRFLOW_VERSION.base_version) >= Version("3.0.0")

Review Comment:
   Removed it
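
   For context, the gate this hunk introduces (and which, per the reply, was dropped from `gcs.py` again) is the usual packaging-based version check. A minimal, self-contained sketch of the same pattern, assuming only `packaging` and an installed `apache-airflow`; the `DATE_KEY` constant is illustrative and not part of the PR:

   ```python
   from packaging.version import Version

   from airflow import __version__ as airflow_version

   # Compare against the base version so pre-releases such as "3.0.0b1"
   # or "3.0.0.dev0" are still treated as Airflow 3.0+.
   AIRFLOW_V_3_0_PLUS = Version(Version(airflow_version).base_version) >= Version("3.0.0")

   # Illustrative use of the flag: pick the context key that exists on
   # the running Airflow version.
   DATE_KEY = "logical_date" if AIRFLOW_V_3_0_PLUS else "execution_date"
   ```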



##########
providers/src/airflow/providers/presto/hooks/presto.py:
##########
@@ -22,32 +22,38 @@
 from typing import TYPE_CHECKING, Any, Iterable, Mapping, TypeVar
 
 import prestodb
+from packaging.version import Version
 from prestodb.exceptions import DatabaseError
 from prestodb.transaction import IsolationLevel
 
+from airflow import __version__ as airflow_version
 from airflow.configuration import conf
 from airflow.exceptions import AirflowException
 from airflow.providers.common.sql.hooks.sql import DbApiHook
 from airflow.utils.operator_helpers import AIRFLOW_VAR_NAME_FORMAT_MAPPING, DEFAULT_FORMAT_PREFIX
 
+AIRFLOW_VERSION = Version(airflow_version)
+AIRFLOW_V_3_0_PLUS = Version(AIRFLOW_VERSION.base_version) >= Version("3.0.0")
+
 if TYPE_CHECKING:
     from airflow.models import Connection
 
 T = TypeVar("T")
 
 
 def generate_presto_client_info() -> str:
-    """Return json string with dag_id, task_id, execution_date and 
try_number."""
+    """Return json string with dag_id, task_id, logical_date and try_number."""
     context_var = {
         format_map["default"].replace(DEFAULT_FORMAT_PREFIX, ""): 
os.environ.get(
             format_map["env_var_format"], ""
         )
         for format_map in AIRFLOW_VAR_NAME_FORMAT_MAPPING.values()
     }
+    date_key = "logical_date" if AIRFLOW_V_3_0_PLUS else "execution_date"
     task_info = {
         "dag_id": context_var["dag_id"],
         "task_id": context_var["task_id"],
-        "execution_date": context_var["execution_date"],
+        date_key: context_var["logical_date"] if AIRFLOW_V_3_0_PLUS else context_var["execution_date"],

Review Comment:
   Changed it
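
   For illustration, a simplified stand-in for the hunk above, not the provider's actual helper (which reads its values from the `AIRFLOW_CTX_*` environment variables via `AIRFLOW_VAR_NAME_FORMAT_MAPPING`); it shows only the key switch between `logical_date` and `execution_date`:

   ```python
   import json


   def _client_info(context_var: dict[str, str], airflow_3: bool) -> str:
       # Hypothetical, simplified stand-in for generate_presto_client_info():
       # same key switch, but takes the context values as a plain dict.
       date_key = "logical_date" if airflow_3 else "execution_date"
       task_info = {
           "dag_id": context_var.get("dag_id", ""),
           "task_id": context_var.get("task_id", ""),
           date_key: context_var.get(date_key, ""),
           "try_number": context_var.get("try_number", ""),
       }
       return json.dumps(task_info, sort_keys=True)


   # On Airflow 3 the JSON carries "logical_date"; on 2.x, "execution_date".
   print(_client_info({"dag_id": "d", "task_id": "t", "logical_date": "2024-01-01"}, airflow_3=True))
   ```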



##########
providers/src/airflow/providers/standard/operators/python.py:
##########
@@ -310,7 +309,7 @@ def get_tasks_to_skip():
             self.skip(
                 dag_run=dag_run,
                 tasks=to_skip,
-                execution_date=cast("DateTime", dag_run.execution_date),  # type: ignore[call-arg]
+                execution_date=cast("DateTime", dag_run.execution_date),  # type: ignore[call-arg, union-attr]

Review Comment:
   Changed it



##########
providers/src/airflow/providers/opensearch/log/os_task_handler.py:
##########
@@ -306,17 +313,32 @@ def _render_log_id(self, ti: TaskInstance | TaskInstanceKey, try_number: int) ->
                 data_interval_end = data_interval[1].isoformat()
             else:
                 data_interval_end = ""
-            execution_date = dag_run.execution_date.isoformat()
-
-        return log_id_template.format(
-            dag_id=ti.dag_id,
-            task_id=ti.task_id,
-            run_id=getattr(ti, "run_id", ""),
-            data_interval_start=data_interval_start,
-            data_interval_end=data_interval_end,
-            execution_date=execution_date,
-            try_number=try_number,
-            map_index=getattr(ti, "map_index", ""),
+            logical_date = (
+                dag_run.logical_date.isoformat() if AIRFLOW_V_3_0_PLUS else dag_run.execution_date.isoformat()
+            )

Review Comment:
   Changed it



##########
providers/src/airflow/providers/elasticsearch/log/es_task_handler.py:
##########
@@ -266,15 +271,18 @@ def _render_log_id(self, ti: TaskInstance | TaskInstanceKey, try_number: int) ->
                 data_interval_end = data_interval[1].isoformat()
             else:
                 data_interval_end = ""
-            execution_date = dag_run.execution_date.isoformat()
+            logical_date = (
+                dag_run.logical_date.isoformat() if AIRFLOW_V_3_0_PLUS else dag_run.execution_date.isoformat()
+            )

Review Comment:
   Changed it



##########
providers/src/airflow/providers/google/cloud/sensors/gcs.py:
##########
@@ -42,6 +44,8 @@
     from google.api_core.retry import Retry
 
     from airflow.utils.context import Context
+AIRFLOW_VERSION = Version(airflow_version)
+AIRFLOW_V_3_0_PLUS = Version(AIRFLOW_VERSION.base_version) >= Version("3.0.0")

Review Comment:
   Changed it



##########
airflow/utils/context.py:
##########
@@ -451,7 +426,6 @@ def context_copy_partial(source: Context, keys: Container[str]) -> Context:
     :meta private:
     """
     new = Context({k: v for k, v in source._context.items() if k in keys})
-    new._deprecation_replacements = source._deprecation_replacements.copy()

Review Comment:
   Changed it



##########
providers/src/airflow/providers/opensearch/log/os_task_handler.py:
##########
@@ -306,17 +313,32 @@ def _render_log_id(self, ti: TaskInstance | TaskInstanceKey, try_number: int) ->
                 data_interval_end = data_interval[1].isoformat()
             else:
                 data_interval_end = ""
-            execution_date = dag_run.execution_date.isoformat()
-
-        return log_id_template.format(
-            dag_id=ti.dag_id,
-            task_id=ti.task_id,
-            run_id=getattr(ti, "run_id", ""),
-            data_interval_start=data_interval_start,
-            data_interval_end=data_interval_end,
-            execution_date=execution_date,
-            try_number=try_number,
-            map_index=getattr(ti, "map_index", ""),
+            logical_date = (
+                dag_run.logical_date.isoformat() if AIRFLOW_V_3_0_PLUS else dag_run.execution_date.isoformat()
+            )
+
+        return (
+            log_id_template.format(
+                dag_id=ti.dag_id,
+                task_id=ti.task_id,
+                run_id=getattr(ti, "run_id", ""),
+                data_interval_start=data_interval_start,
+                data_interval_end=data_interval_end,
+                logical_date=logical_date,
+                try_number=try_number,
+                map_index=getattr(ti, "map_index", ""),
+            )
+            if AIRFLOW_V_3_0_PLUS
+            else log_id_template.format(
+                dag_id=ti.dag_id,
+                task_id=ti.task_id,
+                run_id=getattr(ti, "run_id", ""),
+                data_interval_start=data_interval_start,
+                data_interval_end=data_interval_end,
+                execution_date=logical_date,
+                try_number=try_number,
+                map_index=getattr(ti, "map_index", ""),
+            )
         )

Review Comment:
   Changed it
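
   For illustration only (not part of the PR), a standalone sketch of the same idea with the version branch reduced to the choice of date placeholder; the template string and values below are hypothetical:

   ```python
   def render_log_id(
       log_id_template: str,
       *,
       airflow_3: bool,
       date_value: str,
       **fields: object,
   ) -> str:
       # Pick the placeholder name that the installed Airflow's
       # log_id_template is expected to contain.
       if airflow_3:
           return log_id_template.format(logical_date=date_value, **fields)
       return log_id_template.format(execution_date=date_value, **fields)


   # Hypothetical Airflow 2-style template and made-up values:
   print(
       render_log_id(
           "{dag_id}-{task_id}-{execution_date}-{try_number}",
           airflow_3=False,
           date_value="2024-01-01T00:00:00+00:00",
           dag_id="example_dag",
           task_id="example_task",
           try_number=1,
       )
   )
   ```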



-- 
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.

To unsubscribe, e-mail: commits-unsubscr...@airflow.apache.org

For queries about this service, please contact Infrastructure at:
us...@infra.apache.org
