This is an automated email from the ASF dual-hosted git repository.

potiuk pushed a commit to branch main
in repository https://gitbox.apache.org/repos/asf/airflow.git


The following commit(s) were added to refs/heads/main by this push:
     new 8e42a2e019 Remove broken deprecated fallback into the Google provider operators (#37740)
8e42a2e019 is described below

commit 8e42a2e019dc89d95ed6dce8dff75e18abf6e97a
Author: Andrey Anshin <andrey.ans...@taragol.is>
AuthorDate: Tue Feb 27 17:07:46 2024 +0400

    Remove broken deprecated fallback into the Google provider operators (#37740)
---
 .../cloud/operators/cloud_storage_transfer_service.py | 19 +++----------------
 .../providers/google/cloud/transfers/gcs_to_local.py  | 15 ++-------------
 2 files changed, 5 insertions(+), 29 deletions(-)

diff --git a/airflow/providers/google/cloud/operators/cloud_storage_transfer_service.py b/airflow/providers/google/cloud/operators/cloud_storage_transfer_service.py
index 718dc91ab8..5b12cbbc5c 100644
--- a/airflow/providers/google/cloud/operators/cloud_storage_transfer_service.py
+++ b/airflow/providers/google/cloud/operators/cloud_storage_transfer_service.py
@@ -22,7 +22,7 @@ from copy import deepcopy
 from datetime import date, time
 from typing import TYPE_CHECKING, Sequence
 
-from airflow.exceptions import AirflowException, AirflowProviderDeprecationWarning
+from airflow.exceptions import AirflowException
 from airflow.providers.amazon.aws.hooks.base_aws import AwsBaseHook
 from airflow.providers.google.cloud.hooks.cloud_storage_transfer_service import (
     ACCESS_KEY_ID,
@@ -537,37 +537,24 @@ class CloudDataTransferServiceListOperationsOperator(GoogleCloudBaseOperator):
 
     def __init__(
         self,
-        request_filter: dict | None = None,
+        request_filter: dict,
         project_id: str | None = None,
         gcp_conn_id: str = "google_cloud_default",
         api_version: str = "v1",
         google_impersonation_chain: str | Sequence[str] | None = None,
         **kwargs,
     ) -> None:
-        # To preserve backward compatibility
-        # TODO: remove one day
-        if request_filter is None:
-            if "filter" in kwargs:
-                request_filter = kwargs["filter"]
-                AirflowProviderDeprecationWarning(
-                    "Use 'request_filter' instead 'filter' to pass the argument."
-                )
-            else:
-                TypeError("__init__() missing 1 required positional argument: 'request_filter'")
-
         super().__init__(**kwargs)
         self.filter = request_filter
         self.project_id = project_id
         self.gcp_conn_id = gcp_conn_id
         self.api_version = api_version
         self.google_impersonation_chain = google_impersonation_chain
-        self._validate_inputs()
 
-    def _validate_inputs(self) -> None:
+    def execute(self, context: Context) -> list[dict]:
         if not self.filter:
             raise AirflowException("The required parameter 'filter' is empty or None")
 
-    def execute(self, context: Context) -> list[dict]:
         hook = CloudDataTransferServiceHook(
             api_version=self.api_version,
             gcp_conn_id=self.gcp_conn_id,
diff --git a/airflow/providers/google/cloud/transfers/gcs_to_local.py b/airflow/providers/google/cloud/transfers/gcs_to_local.py
index 0dcd47de48..b496a3cf6a 100644
--- a/airflow/providers/google/cloud/transfers/gcs_to_local.py
+++ b/airflow/providers/google/cloud/transfers/gcs_to_local.py
@@ -18,7 +18,7 @@ from __future__ import annotations
 
 from typing import TYPE_CHECKING, Sequence
 
-from airflow.exceptions import AirflowException, AirflowProviderDeprecationWarning
+from airflow.exceptions import AirflowException
 from airflow.models import BaseOperator
 from airflow.models.xcom import MAX_XCOM_SIZE
 from airflow.providers.google.cloud.hooks.gcs import GCSHook
@@ -77,7 +77,7 @@ class GCSToLocalFilesystemOperator(BaseOperator):
         self,
         *,
         bucket: str,
-        object_name: str | None = None,
+        object_name: str,
         filename: str | None = None,
         store_to_xcom_key: str | None = None,
         gcp_conn_id: str = "google_cloud_default",
@@ -85,19 +85,8 @@ class GCSToLocalFilesystemOperator(BaseOperator):
         file_encoding: str = "utf-8",
         **kwargs,
     ) -> None:
-        # To preserve backward compatibility
-        # TODO: Remove one day
-        if object_name is None:
-            object_name = kwargs.get("object")
-            if object_name is not None:
-                self.object_name = object_name
-                AirflowProviderDeprecationWarning("Use 'object_name' instead of 'object'.")
-            else:
-                TypeError("__init__() missing 1 required positional argument: 'object_name'")
-
         if filename is not None and store_to_xcom_key is not None:
             raise ValueError("Either filename or store_to_xcom_key can be set")
-
         super().__init__(**kwargs)
         self.bucket = bucket
         self.filename = filename

Reply via email to