This is an automated email from the ASF dual-hosted git repository.

potiuk pushed a commit to branch main
in repository https://gitbox.apache.org/repos/asf/airflow.git


The following commit(s) were added to refs/heads/main by this push:
     new 47bd5dd0e1 Remove useless string join from providers (#33968)
47bd5dd0e1 is described below

commit 47bd5dd0e1e13af45206b94dd5518ada278a9552
Author: Hussein Awala <[email protected]>
AuthorDate: Sun Sep 3 11:48:46 2023 +0200

    Remove useless string join from providers (#33968)
    
    
    Co-authored-by: Wei Lee <[email protected]>
---
 airflow/providers/amazon/aws/log/s3_task_handler.py        |  2 +-
 airflow/providers/cncf/kubernetes/decorators/kubernetes.py | 12 +++++-------
 airflow/providers/google/cloud/log/gcs_task_handler.py     |  2 +-
 airflow/providers/microsoft/azure/log/wasb_task_handler.py |  2 +-
 4 files changed, 8 insertions(+), 10 deletions(-)

diff --git a/airflow/providers/amazon/aws/log/s3_task_handler.py b/airflow/providers/amazon/aws/log/s3_task_handler.py
index ea7266c28c..d949a8b7b2 100644
--- a/airflow/providers/amazon/aws/log/s3_task_handler.py
+++ b/airflow/providers/amazon/aws/log/s3_task_handler.py
@@ -201,7 +201,7 @@ class S3TaskHandler(FileTaskHandler, LoggingMixin):
         try:
             if append and self.s3_log_exists(remote_log_location):
                 old_log = self.s3_read(remote_log_location)
-                log = "\n".join([old_log, log]) if old_log else log
+                log = f"{old_log}\n{log}" if old_log else log
         except Exception:
             self.log.exception("Could not verify previous log to append")
             return False
diff --git a/airflow/providers/cncf/kubernetes/decorators/kubernetes.py b/airflow/providers/cncf/kubernetes/decorators/kubernetes.py
index 337a54797d..5b8399ea13 100644
--- a/airflow/providers/cncf/kubernetes/decorators/kubernetes.py
+++ b/airflow/providers/cncf/kubernetes/decorators/kubernetes.py
@@ -100,13 +100,11 @@ class _KubernetesDecoratedOperator(DecoratedOperator, KubernetesPodOperator):
         return [
             "bash",
             "-cx",
-            " && ".join(
-                [
-                    write_local_script_file_cmd,
-                    write_local_input_file_cmd,
-                    make_xcom_dir_cmd,
-                    exec_python_cmd,
-                ]
+            (
+                f"{write_local_script_file_cmd} && "
+                f"{write_local_input_file_cmd} && "
+                f"{make_xcom_dir_cmd} && "
+                f"{exec_python_cmd}"
             ),
         ]
 
diff --git a/airflow/providers/google/cloud/log/gcs_task_handler.py b/airflow/providers/google/cloud/log/gcs_task_handler.py
index 5cede21863..e6c8b1833f 100644
--- a/airflow/providers/google/cloud/log/gcs_task_handler.py
+++ b/airflow/providers/google/cloud/log/gcs_task_handler.py
@@ -241,7 +241,7 @@ class GCSTaskHandler(FileTaskHandler, LoggingMixin):
         try:
             blob = storage.Blob.from_string(remote_log_location, self.client)
             old_log = blob.download_as_bytes().decode()
-            log = "\n".join([old_log, log]) if old_log else log
+            log = f"{old_log}\n{log}" if old_log else log
         except Exception as e:
             if not self.no_log_found(e):
                 log += self._add_message(
diff --git a/airflow/providers/microsoft/azure/log/wasb_task_handler.py b/airflow/providers/microsoft/azure/log/wasb_task_handler.py
index bee51108e4..25f864f6ff 100644
--- a/airflow/providers/microsoft/azure/log/wasb_task_handler.py
+++ b/airflow/providers/microsoft/azure/log/wasb_task_handler.py
@@ -238,7 +238,7 @@ class WasbTaskHandler(FileTaskHandler, LoggingMixin):
         """
         if append and self.wasb_log_exists(remote_log_location):
             old_log = self.wasb_read(remote_log_location)
-            log = "\n".join([old_log, log]) if old_log else log
+            log = f"{old_log}\n{log}" if old_log else log
 
         try:
             self.hook.load_string(log, self.wasb_container, remote_log_location, overwrite=True)

Reply via email to