This is an automated email from the ASF dual-hosted git repository.

potiuk pushed a commit to branch main
in repository https://gitbox.apache.org/repos/asf/airflow.git


The following commit(s) were added to refs/heads/main by this push:
     new c90eec9365 Use f-string instead of .format in Airflow providers (#33752)
c90eec9365 is described below

commit c90eec936583d482a35f0dc8bfc13afc58a9c322
Author: Hussein Awala <[email protected]>
AuthorDate: Sat Aug 26 08:46:51 2023 +0200

    Use f-string instead of .format in Airflow providers (#33752)
---
 airflow/providers/apache/hive/sensors/metastore_partition.py |  6 ++----
 airflow/providers/databricks/hooks/databricks.py             |  8 +++-----
 airflow/providers/elasticsearch/hooks/elasticsearch.py       |  4 ++--
 airflow/providers/google/cloud/hooks/bigquery.py             | 10 ++++------
 airflow/providers/google/cloud/operators/functions.py        |  6 +++---
 5 files changed, 14 insertions(+), 20 deletions(-)

diff --git a/airflow/providers/apache/hive/sensors/metastore_partition.py b/airflow/providers/apache/hive/sensors/metastore_partition.py
index 043334e459..f25c84bf9c 100644
--- a/airflow/providers/apache/hive/sensors/metastore_partition.py
+++ b/airflow/providers/apache/hive/sensors/metastore_partition.py
@@ -72,7 +72,7 @@ class MetastorePartitionSensor(SqlSensor):
             self.first_poke = False
             if "." in self.table:
                 self.schema, self.table = self.table.split(".")
-            self.sql = """
+            self.sql = f"""
             SELECT 'X'
             FROM PARTITIONS A0
             LEFT OUTER JOIN TBLS B0 ON A0.TBL_ID = B0.TBL_ID
@@ -81,7 +81,5 @@ class MetastorePartitionSensor(SqlSensor):
                 B0.TBL_NAME = '{self.table}' AND
                 C0.NAME = '{self.schema}' AND
                 A0.PART_NAME = '{self.partition_name}';
-            """.format(
-                self=self
-            )
+            """
         return super().poke(context)
diff --git a/airflow/providers/databricks/hooks/databricks.py b/airflow/providers/databricks/hooks/databricks.py
index 64ac9b341d..2a7a8b91ff 100644
--- a/airflow/providers/databricks/hooks/databricks.py
+++ b/airflow/providers/databricks/hooks/databricks.py
@@ -75,11 +75,9 @@ class RunState:
         """True if the current state is a terminal state."""
         if self.life_cycle_state not in RUN_LIFE_CYCLE_STATES:
             raise AirflowException(
-                (
-                    "Unexpected life cycle state: {}: If the state has "
-                    "been introduced recently, please check the Databricks user "
-                    "guide for troubleshooting information"
-                ).format(self.life_cycle_state)
+                f"Unexpected life cycle state: {self.life_cycle_state}: If the state has "
+                "been introduced recently, please check the Databricks user "
+                "guide for troubleshooting information"
             )
         return self.life_cycle_state in ("TERMINATED", "SKIPPED", "INTERNAL_ERROR")
 
diff --git a/airflow/providers/elasticsearch/hooks/elasticsearch.py b/airflow/providers/elasticsearch/hooks/elasticsearch.py
index c3a792eb69..13b2573093 100644
--- a/airflow/providers/elasticsearch/hooks/elasticsearch.py
+++ b/airflow/providers/elasticsearch/hooks/elasticsearch.py
@@ -116,11 +116,11 @@ class ElasticsearchSQLHook(DbApiHook):
 
         login = ""
         if conn.login:
-            login = "{conn.login}:{conn.password}@".format(conn=conn)
+            login = f"{conn.login}:{conn.password}@"
         host = conn.host
         if conn.port is not None:
             host += f":{conn.port}"
-        uri = "{conn.conn_type}+{conn.schema}://{login}{host}/".format(conn=conn, login=login, host=host)
+        uri = f"{conn.conn_type}+{conn.schema}://{login}{host}/"
 
         extras_length = len(conn.extra_dejson)
         if not extras_length:
diff --git a/airflow/providers/google/cloud/hooks/bigquery.py b/airflow/providers/google/cloud/hooks/bigquery.py
index 9f4be0d4f8..f5f532b154 100644
--- a/airflow/providers/google/cloud/hooks/bigquery.py
+++ b/airflow/providers/google/cloud/hooks/bigquery.py
@@ -3033,12 +3033,10 @@ def _api_resource_configs_duplication_check(
 ) -> None:
     if key in config_dict and value != config_dict[key]:
         raise ValueError(
-            "Values of {param_name} param are duplicated. "
-            "{dict_name} contained {param_name} param "
-            "in `query` config and {param_name} was also provided "
-            "with arg to run_query() method. Please remove duplicates.".format(
-                param_name=key, dict_name=config_dict_name
-            )
+            f"Values of {key} param are duplicated. "
+            f"{config_dict_name} contained {key} param "
+            f"in `query` config and {key} was also provided "
+            "with arg to run_query() method. Please remove duplicates."
         )
 
 
diff --git a/airflow/providers/google/cloud/operators/functions.py b/airflow/providers/google/cloud/operators/functions.py
index 1131790cab..f58be0a017 100644
--- a/airflow/providers/google/cloud/operators/functions.py
+++ b/airflow/providers/google/cloud/operators/functions.py
@@ -282,9 +282,9 @@ class ZipPathPreprocessor:
         if self._is_present_and_empty(self.body, GCF_SOURCE_UPLOAD_URL):
             if not self.zip_path:
                 raise AirflowException(
-                    "Parameter '{url}' is empty in the body and argument '{path}' "
-                    "is missing or empty. You need to have non empty '{path}' "
-                    "when '{url}' is present and empty.".format(url=GCF_SOURCE_UPLOAD_URL, path=GCF_ZIP_PATH)
+                    f"Parameter '{GCF_SOURCE_UPLOAD_URL}' is empty in the body and argument '{GCF_ZIP_PATH}' "
+                    f"is missing or empty. You need to have non empty '{GCF_ZIP_PATH}' "
+                    f"when '{GCF_SOURCE_UPLOAD_URL}' is present and empty."
                 )
 
     def _verify_upload_url_and_zip_path(self) -> None:

Reply via email to