This is an automated email from the ASF dual-hosted git repository.

potiuk pushed a commit to branch v2-3-test
in repository https://gitbox.apache.org/repos/asf/airflow.git

commit c658a48d4332b0496208df81fc9df2888b333a0d
Author: Josh Fell <[email protected]>
AuthorDate: Mon May 9 13:44:41 2022 -0400

    Clean up in-line f-string concatenation (#23591)
    
    (cherry picked from commit 428a43995390b3623a51aa7bac7e21da69a8db22)
---
 airflow/providers/amazon/aws/utils/redshift.py                      | 2 +-
 airflow/providers/apache/drill/hooks/drill.py                       | 2 +-
 airflow/providers/databricks/operators/databricks_repos.py          | 2 +-
 airflow/providers/google/cloud/hooks/datafusion.py                  | 2 +-
 airflow/providers/google/cloud/utils/credentials_provider.py        | 2 +-
 airflow/providers/hashicorp/_internal_client/vault_client.py        | 2 +-
 airflow/utils/cli.py                                                | 4 ++--
 dev/assign_cherry_picked_prs_with_milestone.py                      | 2 +-
 .../commands/configuration_and_maintenance_commands.py              | 6 +++---
 dev/breeze/src/airflow_breeze/utils/custom_param_types.py           | 4 +---
 dev/breeze/src/airflow_breeze/utils/run_utils.py                    | 2 +-
 dev/prepare_release_issue.py                                        | 2 +-
 dev/provider_packages/prepare_provider_packages.py                  | 4 ++--
 docs/apache-airflow/security/webserver.rst                          | 4 +---
 scripts/ci/pre_commit/pre_commit_check_pre_commit_hooks.py          | 2 +-
 tests/cli/test_cli_parser.py                                        | 6 +++---
 tests/system/providers/google/bigquery/example_bigquery_tables.py   | 2 +-
 17 files changed, 23 insertions(+), 27 deletions(-)
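
For reference, the pattern removed throughout this diff is implicit concatenation of
adjacent f-string literals. A minimal sketch of the before/after (the names host, port
and url are illustrative only, not taken from the commit):

    # Python joins adjacent string literals at compile time, so splitting one
    # f-string into two pieces on the same line adds noise without any benefit.
    host, port = "localhost", 8080  # hypothetical values for the example

    # Before: two adjacent f-string literals, implicitly concatenated
    url = f"http://{host}:" f"{port}/health"

    # After: a single f-string carries the whole template
    url = f"http://{host}:{port}/health"

    assert url == "http://localhost:8080/health"

Most of the removed splits look like leftovers of earlier line wrapping; once the line
fits, the single literal is easier to read and to grep for.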

diff --git a/airflow/providers/amazon/aws/utils/redshift.py b/airflow/providers/amazon/aws/utils/redshift.py
index d02dcc5977..bb64c9b46f 100644
--- a/airflow/providers/amazon/aws/utils/redshift.py
+++ b/airflow/providers/amazon/aws/utils/redshift.py
@@ -43,7 +43,7 @@ def build_credentials_block(credentials: ReadOnlyCredentials) -> str:
 
     else:
         credentials_line = (
-            f"aws_access_key_id={credentials.access_key};" f"aws_secret_access_key={credentials.secret_key}"
+            f"aws_access_key_id={credentials.access_key};aws_secret_access_key={credentials.secret_key}"
         )
 
     return credentials_line
diff --git a/airflow/providers/apache/drill/hooks/drill.py b/airflow/providers/apache/drill/hooks/drill.py
index 820f936905..a15658e9e3 100644
--- a/airflow/providers/apache/drill/hooks/drill.py
+++ b/airflow/providers/apache/drill/hooks/drill.py
@@ -72,7 +72,7 @@ class DrillHook(DbApiHook):
         conn_type = 'drill' if not conn_md.conn_type else conn_md.conn_type
         dialect_driver = conn_md.extra_dejson.get('dialect_driver', 'drill+sadrill')
         storage_plugin = conn_md.extra_dejson.get('storage_plugin', 'dfs')
-        return f'{conn_type}://{host}/{storage_plugin}' f'?dialect_driver={dialect_driver}'
+        return f'{conn_type}://{host}/{storage_plugin}?dialect_driver={dialect_driver}'
 
     def set_autocommit(self, conn: Connection, autocommit: bool) -> NotImplementedError:
         raise NotImplementedError("There are no transactions in Drill.")
diff --git a/airflow/providers/databricks/operators/databricks_repos.py b/airflow/providers/databricks/operators/databricks_repos.py
index 15543cc509..982adcfee2 100644
--- a/airflow/providers/databricks/operators/databricks_repos.py
+++ b/airflow/providers/databricks/operators/databricks_repos.py
@@ -90,7 +90,7 @@ class DatabricksReposCreateOperator(BaseOperator):
             self.git_provider = self.__detect_repo_provider__(git_url)
             if self.git_provider is None:
                 raise AirflowException(
-                    "git_provider isn't specified and couldn't be guessed" f" for URL {git_url}"
+                    "git_provider isn't specified and couldn't be guessed for URL {git_url}"
                 )
         else:
             self.git_provider = git_provider
diff --git a/airflow/providers/google/cloud/hooks/datafusion.py b/airflow/providers/google/cloud/hooks/datafusion.py
index f12d3339cf..8068c3ece2 100644
--- a/airflow/providers/google/cloud/hooks/datafusion.py
+++ b/airflow/providers/google/cloud/hooks/datafusion.py
@@ -115,7 +115,7 @@ class DataFusionHook(GoogleBaseHook):
                 return
             if current_state in failure_states:
                 raise AirflowException(
-                    f"Pipeline {pipeline_name} state {current_state} is not " f"one of {success_states}"
+                    f"Pipeline {pipeline_name} state {current_state} is not one of {success_states}"
                 )
             sleep(30)
 
diff --git a/airflow/providers/google/cloud/utils/credentials_provider.py b/airflow/providers/google/cloud/utils/credentials_provider.py
index 0af03d2459..ac1cfb42a3 100644
--- a/airflow/providers/google/cloud/utils/credentials_provider.py
+++ b/airflow/providers/google/cloud/utils/credentials_provider.py
@@ -369,5 +369,5 @@ def _get_project_id_from_service_account_email(service_account_email: str) -> st
         return service_account_email.split('@')[1].split('.')[0]
     except IndexError:
         raise AirflowException(
-            f"Could not extract project_id from service account's email: " f"{service_account_email}."
+            f"Could not extract project_id from service account's email: {service_account_email}."
         )
diff --git a/airflow/providers/hashicorp/_internal_client/vault_client.py b/airflow/providers/hashicorp/_internal_client/vault_client.py
index 9eecf26f87..ee36c21f7e 100644
--- a/airflow/providers/hashicorp/_internal_client/vault_client.py
+++ b/airflow/providers/hashicorp/_internal_client/vault_client.py
@@ -123,7 +123,7 @@ class _VaultClient(LoggingMixin):
             )
         if auth_type not in VALID_AUTH_TYPES:
             raise VaultError(
-                f"The auth_type is not supported: {auth_type}. " f"It should be one of {VALID_AUTH_TYPES}"
+                f"The auth_type is not supported: {auth_type}. It should be one of {VALID_AUTH_TYPES}"
             )
         if auth_type == "token" and not token and not token_path:
             raise VaultError("The 'token' authentication type requires 'token' or 'token_path'")
diff --git a/airflow/utils/cli.py b/airflow/utils/cli.py
index 496a411444..93de9ef92b 100644
--- a/airflow/utils/cli.py
+++ b/airflow/utils/cli.py
@@ -49,7 +49,7 @@ def _check_cli_args(args):
         raise ValueError("Args should be set")
     if not isinstance(args[0], Namespace):
         raise ValueError(
-            "1st positional argument should be argparse.Namespace instance," f"but is {type(args[0])}"
+            f"1st positional argument should be argparse.Namespace instance, but is {type(args[0])}"
 
 
@@ -148,7 +148,7 @@ def _build_metrics(func_name, namespace):
 
     if not isinstance(namespace, Namespace):
         raise ValueError(
-            "namespace argument should be argparse.Namespace instance," f"but is {type(namespace)}"
+            f"namespace argument should be argparse.Namespace instance, but is {type(namespace)}"
         )
     tmp_dic = vars(namespace)
     metrics['dag_id'] = tmp_dic.get('dag_id')
diff --git a/dev/assign_cherry_picked_prs_with_milestone.py b/dev/assign_cherry_picked_prs_with_milestone.py
index 58bbd38d67..6ed956a372 100755
--- a/dev/assign_cherry_picked_prs_with_milestone.py
+++ b/dev/assign_cherry_picked_prs_with_milestone.py
@@ -294,7 +294,7 @@ def assign_prs(
             continue
         console.print('-' * 80)
         console.print(
-            f"\n >>>> Retrieving PR#{pr_number}: " f"https://github.com/apache/airflow/pull/{pr_number}"
+            f"\n >>>> Retrieving PR#{pr_number}: https://github.com/apache/airflow/pull/{pr_number}"
         )
         pr: PullRequest
         try:
diff --git a/dev/breeze/src/airflow_breeze/commands/configuration_and_maintenance_commands.py b/dev/breeze/src/airflow_breeze/commands/configuration_and_maintenance_commands.py
index 2267cdd5a7..a9b3180292 100644
--- a/dev/breeze/src/airflow_breeze/commands/configuration_and_maintenance_commands.py
+++ b/dev/breeze/src/airflow_breeze/commands/configuration_and_maintenance_commands.py
@@ -305,10 +305,10 @@ def version(verbose: bool, python: str):
             f"{get_installation_sources_config_metadata_hash()}[/]"
         )
         get_console().print(
-            f"[info]Used sources config hash         : " f"{get_used_sources_setup_metadata_hash()}[/]"
+            f"[info]Used sources config hash         : {get_used_sources_setup_metadata_hash()}[/]"
         )
         get_console().print(
-            f"[info]Package config hash              : " f"{(get_package_setup_metadata_hash())}[/]\n"
+            f"[info]Package config hash              : {(get_package_setup_metadata_hash())}[/]\n"
         )
 
 
@@ -497,7 +497,7 @@ def write_to_shell(command_to_execute: str, dry_run: bool, script_path: str, for
     else:
         get_console().print(f"[info]The autocomplete script would be added to {script_path}[/]")
     get_console().print(
-        f"\n[warning]Please exit and re-enter your shell or run:[/]" f"\n\n   source {script_path}\n"
+        f"\n[warning]Please exit and re-enter your shell or run:[/]\n\n   source {script_path}\n"
     )
     return True
 
diff --git a/dev/breeze/src/airflow_breeze/utils/custom_param_types.py b/dev/breeze/src/airflow_breeze/utils/custom_param_types.py
index 1851a95467..0dd2b87d67 100644
--- a/dev/breeze/src/airflow_breeze/utils/custom_param_types.py
+++ b/dev/breeze/src/airflow_breeze/utils/custom_param_types.py
@@ -89,9 +89,7 @@ class CacheableChoice(click.Choice):
         if isinstance(value, CacheableDefault):
             is_cached, new_value = read_and_validate_value_from_cache(param_name, value.value)
             if not is_cached:
-                get_console().print(
-                    f"\n[info]Default value of {param.name} " f"parameter {new_value} used.[/]\n"
-                )
+                get_console().print(f"\n[info]Default value of {param.name} parameter {new_value} used.[/]\n")
         else:
             allowed, allowed_values = check_if_values_allowed(param_name, value)
             if allowed:
diff --git a/dev/breeze/src/airflow_breeze/utils/run_utils.py b/dev/breeze/src/airflow_breeze/utils/run_utils.py
index e4d4101939..e02196c724 100644
--- a/dev/breeze/src/airflow_breeze/utils/run_utils.py
+++ b/dev/breeze/src/airflow_breeze/utils/run_utils.py
@@ -199,7 +199,7 @@ def get_filesystem_type(filepath):
 
 def instruct_build_image(python: str):
     """Print instructions to the user that they should build the image"""
-    get_console().print(f'[warning]\nThe CI image for ' f'python version {python} may be outdated[/]\n')
+    get_console().print(f'[warning]\nThe CI image for Python version {python} may be outdated[/]\n')
     get_console().print(
         f"\n[info]Please run at the earliest convenience:[/]\n\nbreeze build-image --python {python}\n\n"
     )
diff --git a/dev/prepare_release_issue.py b/dev/prepare_release_issue.py
index e61fad7445..37f89cdede 100755
--- a/dev/prepare_release_issue.py
+++ b/dev/prepare_release_issue.py
@@ -274,7 +274,7 @@ def generate_issue_content(
         for i in range(count_prs):
             pr_number = prs[i]
             progress.console.print(
-                f"Retrieving PR#{pr_number}: " f"https://github.com/apache/airflow/pull/{pr_number}"
+                f"Retrieving PR#{pr_number}: https://github.com/apache/airflow/pull/{pr_number}"
             )
 
             pr: PullRequestOrIssue
diff --git a/dev/provider_packages/prepare_provider_packages.py b/dev/provider_packages/prepare_provider_packages.py
index e018b38379..2316cdaa7d 100755
--- a/dev/provider_packages/prepare_provider_packages.py
+++ b/dev/provider_packages/prepare_provider_packages.py
@@ -1773,7 +1773,7 @@ def generate_new_changelog(package_id, provider_details, changelog_path, changes
         )
     else:
         console.print(
-            f"[green]Appending the provider {package_id} changelog for" f"`{latest_version}` version.[/]"
+            f"[green]Appending the provider {package_id} changelog for `{latest_version}` version.[/]"
         )
     with open(changelog_path, "wt") as changelog:
         changelog.write("\n".join(new_changelog_lines))
@@ -1914,7 +1914,7 @@ def generate_issue_content(
             for i in range(len(pr_list)):
                 pr_number = pr_list[i]
                 progress.console.print(
-                    f"Retrieving PR#{pr_number}: " f"https://github.com/apache/airflow/pull/{pr_number}"
+                    f"Retrieving PR#{pr_number}: https://github.com/apache/airflow/pull/{pr_number}"
                 )
                 try:
                     pull_requests[pr_number] = repo.get_pull(pr_number)
diff --git a/docs/apache-airflow/security/webserver.rst b/docs/apache-airflow/security/webserver.rst
index 3ec4ea8716..c8f0ad5f3b 100644
--- a/docs/apache-airflow/security/webserver.rst
+++ b/docs/apache-airflow/security/webserver.rst
@@ -234,9 +234,7 @@ webserver_config.py itself if you wish.
             team_data = remote_app.get("user/teams")
             teams = team_parser(team_data.json())
             roles = map_roles(teams)
-            log.debug(
-                f"User info from Github: {user_data}\n" f"Team info from Github: {teams}"
-            )
+            log.debug(f"User info from Github: {user_data}\nTeam info from Github: {teams}")
             return {"username": "github_" + user_data.get("login"), "role_keys": roles}
 
 
diff --git a/scripts/ci/pre_commit/pre_commit_check_pre_commit_hooks.py b/scripts/ci/pre_commit/pre_commit_check_pre_commit_hooks.py
index 1eb368c86e..d007ab0998 100755
--- a/scripts/ci/pre_commit/pre_commit_check_pre_commit_hooks.py
+++ b/scripts/ci/pre_commit/pre_commit_check_pre_commit_hooks.py
@@ -64,7 +64,7 @@ def get_errors_and_hooks(content: Any, max_length: int) -> Tuple[List[str], Dict
             name = hook['name']
             if len(name) > max_length:
                 errors.append(
-                    f"Name is too long for hook {hook_id} in {PRE_COMMIT_YAML_FILE}. " f"Please shorten it!"
+                    f"Name is too long for hook {hook_id} in {PRE_COMMIT_YAML_FILE}. Please shorten it!"
                 )
                 continue
             hooks[hook_id].append(name)
diff --git a/tests/cli/test_cli_parser.py b/tests/cli/test_cli_parser.py
index af5788c83c..b2de80543a 100644
--- a/tests/cli/test_cli_parser.py
+++ b/tests/cli/test_cli_parser.py
@@ -85,9 +85,9 @@ class TestCli(TestCase):
         for group, command in subcommand.items():
             for com in command:
                 conflict_arg = [arg for arg, count in Counter(com.args).items() if count > 1]
-                assert [] == conflict_arg, (
-                    f"Command group {group} function {com.name} have " f"conflict args name {conflict_arg}"
-                )
+                assert (
+                    [] == conflict_arg
+                ), f"Command group {group} function {com.name} have conflict args name {conflict_arg}"
 
     def test_subcommand_arg_flag_conflict(self):
         """
diff --git a/tests/system/providers/google/bigquery/example_bigquery_tables.py b/tests/system/providers/google/bigquery/example_bigquery_tables.py
index be7e909d55..fd62dcd5ce 100644
--- a/tests/system/providers/google/bigquery/example_bigquery_tables.py
+++ b/tests/system/providers/google/bigquery/example_bigquery_tables.py
@@ -101,7 +101,7 @@ with models.DAG(
         dataset_id=DATASET_NAME,
         table_id="test_materialized_view",
         materialized_view={
-            "query": f"SELECT SUM(salary) AS sum_salary " f"FROM `{PROJECT_ID}.{DATASET_NAME}.test_table`",
+            "query": f"SELECT SUM(salary) AS sum_salary FROM `{PROJECT_ID}.{DATASET_NAME}.test_table`",
             "enableRefresh": True,
             "refreshIntervalMs": 2000000,
         },
