This is an automated email from the ASF dual-hosted git repository.

eladkal pushed a commit to branch main
in repository https://gitbox.apache.org/repos/asf/airflow.git


The following commit(s) were added to refs/heads/main by this push:
     new be870f6843d Remove Airflow 2.1 compat code in Google provider (#43952)
be870f6843d is described below

commit be870f6843d851a16bdfae17f17b2d4baf97f7e3
Author: Tzu-ping Chung <[email protected]>
AuthorDate: Wed Nov 13 18:42:21 2024 +0800

    Remove Airflow 2.1 compat code in Google provider (#43952)
    
    We dropped the old version a long time ago; this code is unreachable at
    this point.
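
    For context, on Airflow 2.2 and later (after AIP-39) the task context
    always carries "data_interval_end", so the pre-2.2 fallback removed below
    could never run. A minimal sketch of the surviving behaviour, assuming a
    2.2+ context (illustrative, not part of this commit):

        # Sketch, assuming Airflow >= 2.2 (AIP-39): the template context
        # always includes "data_interval_end", so no fallback that recomputes
        # the interval from execution_date is needed.
        def ts_function(context):
            return context["data_interval_end"]
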
---
 .../airflow/providers/google/cloud/sensors/gcs.py  | 17 +++------------
 providers/tests/google/cloud/sensors/test_gcs.py   | 25 ----------------------
 2 files changed, 3 insertions(+), 39 deletions(-)

diff --git a/providers/src/airflow/providers/google/cloud/sensors/gcs.py b/providers/src/airflow/providers/google/cloud/sensors/gcs.py
index 0d01f663fa3..2b5782c77ad 100644
--- a/providers/src/airflow/providers/google/cloud/sensors/gcs.py
+++ b/providers/src/airflow/providers/google/cloud/sensors/gcs.py
@@ -178,21 +178,10 @@ def ts_function(context):
     """
     Act as a default callback for the GoogleCloudStorageObjectUpdatedSensor.
 
-    The default behaviour is check for the object being updated after the data interval's end,
-    or execution_date + interval on Airflow versions prior to 2.2 (before AIP-39 implementation).
+    The default behaviour is to check for the object being updated after the
+    data interval's end.
     """
-    try:
-        return context["data_interval_end"]
-    except KeyError:
-        from airflow.utils import timezone
-
-        data_interval = context["dag"].infer_automated_data_interval(
-            timezone.coerce_datetime(context["execution_date"])
-        )
-        next_info = context["dag"].next_dagrun_info(data_interval, restricted=False)
-        if next_info is None:
-            return None
-        return next_info.data_interval.start
+    return context["data_interval_end"]
 
 
 class GCSObjectUpdateSensor(BaseSensorOperator):
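
    The docstring above describes ts_function as the default "ts_func"
    callback for GCSObjectUpdateSensor. A minimal usage sketch, with
    illustrative task, bucket, and object names (not part of this commit):

        from airflow.providers.google.cloud.sensors.gcs import GCSObjectUpdateSensor

        wait_for_update = GCSObjectUpdateSensor(
            task_id="wait_for_update",   # illustrative task id
            bucket="example-bucket",     # illustrative bucket name
            object="data/report.csv",    # illustrative object path
            # ts_func defaults to ts_function, so the sensor waits until the
            # object was updated after context["data_interval_end"].
        )
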
diff --git a/providers/tests/google/cloud/sensors/test_gcs.py b/providers/tests/google/cloud/sensors/test_gcs.py
index a00cecc264a..8e9a5d3bba8 100644
--- a/providers/tests/google/cloud/sensors/test_gcs.py
+++ b/providers/tests/google/cloud/sensors/test_gcs.py
@@ -20,7 +20,6 @@ from __future__ import annotations
 from datetime import datetime, timedelta
 from unittest import mock
 
-import pendulum
 import pytest
 from google.cloud.storage.retry import DEFAULT_RETRY
 
@@ -35,7 +34,6 @@ from airflow.providers.google.cloud.sensors.gcs import (
     GCSObjectsWithPrefixExistenceSensor,
     GCSObjectUpdateSensor,
     GCSUploadSessionCompleteSensor,
-    ts_function,
 )
 from airflow.providers.google.cloud.triggers.gcs import (
     GCSBlobTrigger,
@@ -43,7 +41,6 @@ from airflow.providers.google.cloud.triggers.gcs import (
     GCSPrefixBlobTrigger,
     GCSUploadSessionTrigger,
 )
-from airflow.utils import timezone
 
 TEST_BUCKET = "TEST_BUCKET"
 
@@ -253,28 +250,6 @@ class TestGoogleCloudStorageObjectAsyncSensor:
         mock_log_info.assert_called_with("File %s was found in bucket %s.", TEST_OBJECT, TEST_BUCKET)
 
 
-class TestTsFunction:
-    def test_should_support_datetime(self):
-        context = {
-            "dag": DAG(
-                dag_id=TEST_DAG_ID, schedule=timedelta(days=5), start_date=datetime(2019, 2, 14, 0, 0)
-            ),
-            "execution_date": datetime(2019, 2, 14, 0, 0),
-        }
-        result = ts_function(context)
-        assert datetime(2019, 2, 19, 0, 0, tzinfo=timezone.utc) == result
-
-    def test_should_support_cron(self):
-        dag = DAG(dag_id=TEST_DAG_ID, start_date=datetime(2019, 2, 19, 0, 0), schedule="@weekly")
-
-        context = {
-            "dag": dag,
-            "execution_date": datetime(2019, 2, 19),
-        }
-        result = ts_function(context)
-        assert pendulum.instance(datetime(2019, 2, 24)).isoformat() == result.isoformat()
-
-
 class TestGoogleCloudStorageObjectUpdatedSensor:
     @mock.patch("airflow.providers.google.cloud.sensors.gcs.GCSHook")
     def test_should_pass_argument_to_hook(self, mock_hook):
