This is an automated email from the ASF dual-hosted git repository.
vincbeck pushed a commit to branch main
in repository https://gitbox.apache.org/repos/asf/airflow.git
The following commit(s) were added to refs/heads/main by this push:
new a2db179d9c Init checks for aws gcs_to_s3 (#37662)
a2db179d9c is described below
commit a2db179d9c75e4963323852732b6d22a25485615
Author: Albert Olweny <[email protected]>
AuthorDate: Sat Feb 24 01:17:04 2024 +0300
Init checks for aws gcs_to_s3 (#37662)
---
.pre-commit-config.yaml | 1 -
airflow/providers/amazon/aws/transfers/gcs_to_s3.py | 8 +++-----
2 files changed, 3 insertions(+), 6 deletions(-)
diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml
index b0e1a27700..434aaea43f 100644
--- a/.pre-commit-config.yaml
+++ b/.pre-commit-config.yaml
@@ -325,7 +325,6 @@ repos:
exclude: |
(?x)^(
^.*__init__\.py$|
- ^airflow\/providers\/amazon\/aws\/transfers\/gcs_to_s3\.py$|
^airflow\/providers\/databricks\/operators\/databricks\.py$|
^airflow\/providers\/amazon\/aws\/transfers\/redshift_to_s3\.py$|
^airflow\/providers\/google\/cloud\/operators\/compute\.py$|
diff --git a/airflow/providers/amazon/aws/transfers/gcs_to_s3.py b/airflow/providers/amazon/aws/transfers/gcs_to_s3.py
index cdf0dcdc44..c76087faae 100644
--- a/airflow/providers/amazon/aws/transfers/gcs_to_s3.py
+++ b/airflow/providers/amazon/aws/transfers/gcs_to_s3.py
@@ -118,7 +118,6 @@ class GCSToS3Operator(BaseOperator):
**kwargs,
) -> None:
super().__init__(**kwargs)
-
if bucket:
warnings.warn(
"The ``bucket`` parameter is deprecated and will be removed in a future version. "
@@ -126,11 +125,10 @@ class GCSToS3Operator(BaseOperator):
AirflowProviderDeprecationWarning,
stacklevel=2,
)
- self.gcs_bucket = bucket
- if gcs_bucket:
- self.gcs_bucket = gcs_bucket
+ self.gcs_bucket = gcs_bucket or bucket
if not (bucket or gcs_bucket):
raise ValueError("You must pass either ``bucket`` or ``gcs_bucket``.")
+
self.prefix = prefix
self.gcp_conn_id = gcp_conn_id
self.dest_aws_conn_id = dest_aws_conn_id
@@ -219,7 +217,7 @@ class GCSToS3Operator(BaseOperator):
if gcs_files:
for file in gcs_files:
with gcs_hook.provide_file(
-                        object_name=file, bucket_name=self.gcs_bucket, user_project=self.gcp_user_project
+                        object_name=file, bucket_name=str(self.gcs_bucket), user_project=self.gcp_user_project
) as local_tmp_file:
dest_key = os.path.join(self.dest_s3_key, file)
self.log.info("Saving file to %s", dest_key)