This is an automated email from the ASF dual-hosted git repository.

potiuk pushed a commit to branch main
in repository https://gitbox.apache.org/repos/asf/airflow.git


The following commit(s) were added to refs/heads/main by this push:
     new 92ddcf4ac6 Introduce `flake8-implicit-str-concat` plugin to static checks (#23873)
92ddcf4ac6 is described below

commit 92ddcf4ac6fa452c5056b1f7cad1fca4d5759802
Author: Josh Fell <[email protected]>
AuthorDate: Fri May 27 12:53:34 2022 -0400

    Introduce `flake8-implicit-str-concat` plugin to static checks (#23873)
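
    Background: Python concatenates adjacent string literals at parse time
    ("implicit string concatenation"), and the plugin flags places where
    that behavior is accidental or where an explicit "+" between literals
    is just noise. A minimal sketch of both patterns (hypothetical
    snippet, not taken from this commit):

        # Flagged: adjacent literals on one line usually hide a missing
        # comma -- this list has two elements, not three.
        items = ["alpha" "beta", "gamma"]

        # Flagged: "+" between literals is redundant; the fix applied
        # throughout this commit is a single literal, or implicit
        # concatenation across parenthesized lines:
        msg = (
            "This hook is deprecated. "
            "Please use the new hook."
        )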
---
 airflow/configuration.py                                     |  2 +-
 airflow/providers/amazon/aws/hooks/eks.py                    |  2 +-
 airflow/providers/amazon/aws/hooks/ses.py                    |  2 +-
 airflow/providers/amazon/aws/hooks/sns.py                    |  2 +-
 airflow/providers/amazon/aws/hooks/sqs.py                    |  2 +-
 airflow/providers/databricks/operators/databricks_repos.py   |  2 +-
 airflow/providers/google/cloud/hooks/kubernetes_engine.py    |  6 ++++--
 airflow/providers/google/cloud/utils/credentials_provider.py |  2 +-
 airflow/providers/microsoft/psrp/operators/psrp.py           |  2 +-
 airflow/utils/email.py                                       |  2 +-
 airflow/utils/file.py                                        |  2 +-
 airflow/www/fab_security/manager.py                          |  8 ++------
 airflow/www/views.py                                         |  2 +-
 dev/breeze/src/airflow_breeze/utils/docker_command_utils.py  |  2 +-
 dev/breeze/src/airflow_breeze/utils/image.py                 |  4 ++--
 dev/breeze/src/airflow_breeze/utils/run_utils.py             |  2 +-
 dev/system_tests/update_issue_status.py                      |  2 +-
 setup.py                                                     |  1 +
 tests/always/test_connection.py                              |  2 +-
 tests/api_connexion/endpoints/test_task_instance_endpoint.py |  4 ++--
 tests/core/test_providers_manager.py                         |  2 +-
 tests/providers/amazon/aws/hooks/test_glacier.py             | 12 ++++++------
 tests/providers/databricks/operators/test_databricks.py      |  4 ++--
 tests/providers/google/cloud/hooks/test_datacatalog.py       |  4 ++--
 tests/providers/google/cloud/operators/test_datacatalog.py   |  4 ++--
 tests/providers/google/cloud/operators/test_mlengine.py      |  4 ++--
 tests/providers/google/cloud/transfers/test_mysql_to_gcs.py  |  6 ++++--
 .../google/cloud/utils/test_credentials_provider.py          |  8 ++++----
 tests/providers/google/common/hooks/test_base_google.py      |  2 +-
 tests/providers/ssh/hooks/test_ssh.py                        |  2 +-
 .../providers/google/bigquery/example_bigquery_sensors.py    |  2 +-
 tests/utils/test_file.py                                     |  7 +++----
 32 files changed, 55 insertions(+), 55 deletions(-)

diff --git a/airflow/configuration.py b/airflow/configuration.py
index a33b3f3998..729e780f74 100644
--- a/airflow/configuration.py
+++ b/airflow/configuration.py
@@ -391,7 +391,7 @@ class AirflowConfigParser(ConfigParser):
                 if value not in enum_options:
                     raise AirflowConfigException(
                         f"`[{section_key}] {option_key}` should not be "
-                        + f"{value!r}. Possible values: {', '.join(enum_options)}."
+                        f"{value!r}. Possible values: {', '.join(enum_options)}."
                     )
 
     def _validate_config_dependencies(self):
diff --git a/airflow/providers/amazon/aws/hooks/eks.py b/airflow/providers/amazon/aws/hooks/eks.py
index 1475d3d1f5..d2a795e498 100644
--- a/airflow/providers/amazon/aws/hooks/eks.py
+++ b/airflow/providers/amazon/aws/hooks/eks.py
@@ -638,7 +638,7 @@ class EKSHook(EksHook):
 
     def __init__(self, *args, **kwargs):
         warnings.warn(
-            "This hook is deprecated. " "Please use `airflow.providers.amazon.aws.hooks.eks.EksHook`.",
+            "This hook is deprecated. Please use `airflow.providers.amazon.aws.hooks.eks.EksHook`.",
             DeprecationWarning,
             stacklevel=2,
         )
diff --git a/airflow/providers/amazon/aws/hooks/ses.py b/airflow/providers/amazon/aws/hooks/ses.py
index 21efa14107..92dcce7ecb 100644
--- a/airflow/providers/amazon/aws/hooks/ses.py
+++ b/airflow/providers/amazon/aws/hooks/ses.py
@@ -106,7 +106,7 @@ class SESHook(SesHook):
 
     def __init__(self, *args, **kwargs):
         warnings.warn(
-            "This hook is deprecated. " "Please use :class:`airflow.providers.amazon.aws.hooks.ses.SesHook`.",
+            "This hook is deprecated. Please use :class:`airflow.providers.amazon.aws.hooks.ses.SesHook`.",
             DeprecationWarning,
             stacklevel=2,
         )
diff --git a/airflow/providers/amazon/aws/hooks/sns.py b/airflow/providers/amazon/aws/hooks/sns.py
index 94c83d8d31..fc009d9f9b 100644
--- a/airflow/providers/amazon/aws/hooks/sns.py
+++ b/airflow/providers/amazon/aws/hooks/sns.py
@@ -100,7 +100,7 @@ class AwsSnsHook(SnsHook):
 
     def __init__(self, *args, **kwargs):
         warnings.warn(
-            "This hook is deprecated. " "Please use :class:`airflow.providers.amazon.aws.hooks.sns.SnsHook`.",
+            "This hook is deprecated. Please use :class:`airflow.providers.amazon.aws.hooks.sns.SnsHook`.",
             DeprecationWarning,
             stacklevel=2,
         )
diff --git a/airflow/providers/amazon/aws/hooks/sqs.py b/airflow/providers/amazon/aws/hooks/sqs.py
index c6de9f9048..b94756f63a 100644
--- a/airflow/providers/amazon/aws/hooks/sqs.py
+++ b/airflow/providers/amazon/aws/hooks/sqs.py
@@ -88,7 +88,7 @@ class SQSHook(SqsHook):
 
     def __init__(self, *args, **kwargs):
         warnings.warn(
-            "This hook is deprecated. " "Please use :class:`airflow.providers.amazon.aws.hooks.sqs.SqsHook`.",
+            "This hook is deprecated. Please use :class:`airflow.providers.amazon.aws.hooks.sqs.SqsHook`.",
             DeprecationWarning,
             stacklevel=2,
         )
diff --git a/airflow/providers/databricks/operators/databricks_repos.py b/airflow/providers/databricks/operators/databricks_repos.py
index 982adcfee2..97b46b8e24 100644
--- a/airflow/providers/databricks/operators/databricks_repos.py
+++ b/airflow/providers/databricks/operators/databricks_repos.py
@@ -90,7 +90,7 @@ class DatabricksReposCreateOperator(BaseOperator):
             self.git_provider = self.__detect_repo_provider__(git_url)
             if self.git_provider is None:
                 raise AirflowException(
-                    "git_provider isn't specified and couldn't be guessed for URL {git_url}"
+                    f"git_provider isn't specified and couldn't be guessed for URL {git_url}"
                 )
         else:
             self.git_provider = git_provider
diff --git a/airflow/providers/google/cloud/hooks/kubernetes_engine.py b/airflow/providers/google/cloud/hooks/kubernetes_engine.py
index 4cfb4fe6fb..31c6c6c1fd 100644
--- a/airflow/providers/google/cloud/hooks/kubernetes_engine.py
+++ b/airflow/providers/google/cloud/hooks/kubernetes_engine.py
@@ -123,8 +123,10 @@ class GKEHook(GoogleBaseHook):
         :return: The new, updated operation from Google Cloud
         """
         return self.get_cluster_manager_client().get_operation(
-            name=f'projects/{project_id or self.project_id}'
-            + f'/locations/{self.location}/operations/{operation_name}'
+            name=(
+                f'projects/{project_id or self.project_id}'
+                f'/locations/{self.location}/operations/{operation_name}'
+            )
         )
 
     @staticmethod
diff --git a/airflow/providers/google/cloud/utils/credentials_provider.py b/airflow/providers/google/cloud/utils/credentials_provider.py
index ac1cfb42a3..0a8143ceae 100644
--- a/airflow/providers/google/cloud/utils/credentials_provider.py
+++ b/airflow/providers/google/cloud/utils/credentials_provider.py
@@ -201,7 +201,7 @@ class _CredentialProvider(LoggingMixin):
         key_options = [key_path, key_secret_name, keyfile_dict]
         if len([x for x in key_options if x]) > 1:
             raise AirflowException(
-                "The `keyfile_dict`, `key_path`, and `key_secret_name` fields"
+                "The `keyfile_dict`, `key_path`, and `key_secret_name` fields "
                 "are all mutually exclusive. Please provide only one value."
             )
         self.key_path = key_path
diff --git a/airflow/providers/microsoft/psrp/operators/psrp.py b/airflow/providers/microsoft/psrp/operators/psrp.py
index 5ec70f292f..ea07ee9115 100644
--- a/airflow/providers/microsoft/psrp/operators/psrp.py
+++ b/airflow/providers/microsoft/psrp/operators/psrp.py
@@ -164,7 +164,7 @@ class PsrpOperator(BaseOperator):
         def securestring(value: str):
             if not native:
                 raise AirflowException(
-                    "Filter 'securestring' not applicable to non-native " "templating environment"
+                    "Filter 'securestring' not applicable to non-native templating environment"
                 )
             return TaggedValue("SS", value)
 
diff --git a/airflow/utils/email.py b/airflow/utils/email.py
index ec0095e983..868574379c 100644
--- a/airflow/utils/email.py
+++ b/airflow/utils/email.py
@@ -99,7 +99,7 @@ def send_email_smtp(
     else:
         if from_email is None:
             raise Exception(
-                "You should set from email - either by smtp/smtp_mail_from config or " "`from_email parameter"
+                "You should set from email - either by smtp/smtp_mail_from config or `from_email` parameter"
             )
         mail_from = from_email
 
diff --git a/airflow/utils/file.py b/airflow/utils/file.py
index 5a3db7fd48..db786a5d88 100644
--- a/airflow/utils/file.py
+++ b/airflow/utils/file.py
@@ -241,7 +241,7 @@ def _find_path_from_directory(
             if dirpath in patterns_by_dir:
                 raise RuntimeError(
                     "Detected recursive loop when walking DAG directory "
-                    + f"{base_dir_path}: {dirpath} has appeared more than once."
+                    f"{base_dir_path}: {dirpath} has appeared more than once."
                 )
             patterns_by_dir.update({dirpath: patterns.copy()})
 
diff --git a/airflow/www/fab_security/manager.py b/airflow/www/fab_security/manager.py
index f56e9fec6e..34c288c084 100644
--- a/airflow/www/fab_security/manager.py
+++ b/airflow/www/fab_security/manager.py
@@ -221,9 +221,7 @@ class BaseSecurityManager:
         # LDAP Config
         if self.auth_type == AUTH_LDAP:
             if "AUTH_LDAP_SERVER" not in app.config:
-                raise Exception(
-                    "No AUTH_LDAP_SERVER defined on config" " with AUTH_LDAP authentication type."
-                )
+                raise Exception("No AUTH_LDAP_SERVER defined on config with AUTH_LDAP authentication type.")
             app.config.setdefault("AUTH_LDAP_SEARCH", "")
             app.config.setdefault("AUTH_LDAP_SEARCH_FILTER", "")
             app.config.setdefault("AUTH_LDAP_APPEND_DOMAIN", "")
@@ -971,9 +969,7 @@ class BaseSecurityManager:
             con.simple_bind_s(self.auth_ldap_bind_user, self.auth_ldap_bind_password)
             log.debug("LDAP bind indirect SUCCESS with username: %r", self.auth_ldap_bind_user)
         except ldap.INVALID_CREDENTIALS as ex:
-            log.error(
-                "AUTH_LDAP_BIND_USER and AUTH_LDAP_BIND_PASSWORD are" " not valid LDAP bind credentials"
-            )
+            log.error("AUTH_LDAP_BIND_USER and AUTH_LDAP_BIND_PASSWORD are not valid LDAP bind credentials")
             raise ex
 
     @staticmethod
diff --git a/airflow/www/views.py b/airflow/www/views.py
index 29e15bfe16..04b955089f 100644
--- a/airflow/www/views.py
+++ b/airflow/www/views.py
@@ -1144,7 +1144,7 @@ class Airflow(AirflowBaseView):
         except Exception as e:
             all_errors += (
                 "Exception encountered during "
-                + f"dag_id retrieval/dag retrieval fallback/code highlighting:\n\n{e}\n"
+                f"dag_id retrieval/dag retrieval fallback/code highlighting:\n\n{e}\n"
             )
             html_code = Markup('<p>Failed to load DAG file Code.</p><p>Details: {}</p>').format(
                 escape(all_errors)
diff --git a/dev/breeze/src/airflow_breeze/utils/docker_command_utils.py b/dev/breeze/src/airflow_breeze/utils/docker_command_utils.py
index 784290294b..b066d3827d 100644
--- a/dev/breeze/src/airflow_breeze/utils/docker_command_utils.py
+++ b/dev/breeze/src/airflow_breeze/utils/docker_command_utils.py
@@ -370,7 +370,7 @@ def prepare_docker_build_cache_command(
     )
     final_command.extend(["--platform", image_params.platform])
     final_command.extend(
-        [f"--cache-to=type=registry,ref={image_params.get_cache(image_params.platform)}," f"mode=max"]
+        [f"--cache-to=type=registry,ref={image_params.get_cache(image_params.platform)},mode=max"]
     )
     cmd = ['docker', 'buildx', 'inspect', 'airflow_cache']
     buildx_command_result = run_command(cmd, verbose=verbose, dry_run=dry_run, text=True)
diff --git a/dev/breeze/src/airflow_breeze/utils/image.py b/dev/breeze/src/airflow_breeze/utils/image.py
index 4131b1c0bb..1962951f94 100644
--- a/dev/breeze/src/airflow_breeze/utils/image.py
+++ b/dev/breeze/src/airflow_breeze/utils/image.py
@@ -159,7 +159,7 @@ def run_pull_image(
 def tag_image_as_latest(image_params: _CommonBuildParams, dry_run: bool, verbose: bool) -> RunCommandResult:
     if image_params.airflow_image_name_with_tag == image_params.airflow_image_name:
         get_console().print(
-            f"[info]Skip tagging {image_params.airflow_image_name} " "as latest as it is already 'latest'[/]"
+            f"[info]Skip tagging {image_params.airflow_image_name} as latest as it is already 'latest'[/]"
         )
         return subprocess.CompletedProcess(returncode=0, args=[])
     return run_command(
@@ -251,7 +251,7 @@ def find_available_ci_image(github_repository: str, dry_run: bool, verbose: bool
         )
         if inspect_command_result.returncode == 0:
             get_console().print(
-                "[info]Running fix_ownership " f"with {shell_params.airflow_image_name_with_tag}.[/]"
+                f"[info]Running fix_ownership with {shell_params.airflow_image_name_with_tag}.[/]"
             )
             return shell_params
     shell_params, _ = just_pull_ci_image(
diff --git a/dev/breeze/src/airflow_breeze/utils/run_utils.py b/dev/breeze/src/airflow_breeze/utils/run_utils.py
index 241407a1af..b811980f72 100644
--- a/dev/breeze/src/airflow_breeze/utils/run_utils.py
+++ b/dev/breeze/src/airflow_breeze/utils/run_utils.py
@@ -174,7 +174,7 @@ def assert_pre_commit_installed(verbose: bool):
                 sys.exit(1)
         else:
             get_console().print(
-                "\n[warning]Could not determine version of pre-commit. " "You might need to update it![/]\n"
+                "\n[warning]Could not determine version of pre-commit. You might need to update it![/]\n"
             )
     else:
         get_console().print("\n[error]Error checking for pre-commit-installation:[/]\n")
diff --git a/dev/system_tests/update_issue_status.py b/dev/system_tests/update_issue_status.py
index bc109fe1e5..d1bd9af566 100755
--- a/dev/system_tests/update_issue_status.py
+++ b/dev/system_tests/update_issue_status.py
@@ -176,7 +176,7 @@ def update_issue_status(
     console.print(f"   Re-added file number: {total_re_added}")
     console.print(f"   Completed file number: {total_completed}")
     console.print(
-        f"   Done {total_count_done}/{total_count_all} = " f"{(total_count_done * 100/ total_count_all):.2f}%"
+        f"   Done {total_count_done}/{total_count_all} = {(total_count_done * 100/ total_count_all):.2f}%"
     )
     console.print()
 
diff --git a/setup.py b/setup.py
index 26ee186ec5..74f6131e62 100644
--- a/setup.py
+++ b/setup.py
@@ -617,6 +617,7 @@ devel_only = [
     'filelock',
     'flake8>=3.6.0',
     'flake8-colors',
+    'flake8-implicit-str-concat',
     'flaky',
     'freezegun',
     # Github3 version 3.1.2 requires PyJWT>=2.3.0 which clashes with Flask App Builder where PyJWT is <2.0.0
diff --git a/tests/always/test_connection.py b/tests/always/test_connection.py
index 1368d4d03d..79327f2f90 100644
--- a/tests/always/test_connection.py
+++ b/tests/always/test_connection.py
@@ -143,7 +143,7 @@ class TestConnection(unittest.TestCase):
             description='with extras',
         ),
         UriTestCaseConfig(
-            test_conn_uri='scheme://user:password@host%2Flocation:1234/schema?' '__extra__=single+value',
+            test_conn_uri='scheme://user:password@host%2Flocation:1234/schema?__extra__=single+value',
             test_conn_attributes=dict(
                 conn_type='scheme',
                 host='host/location',
diff --git a/tests/api_connexion/endpoints/test_task_instance_endpoint.py b/tests/api_connexion/endpoints/test_task_instance_endpoint.py
index a7ffd6e803..b4fd87ae2e 100644
--- a/tests/api_connexion/endpoints/test_task_instance_endpoint.py
+++ b/tests/api_connexion/endpoints/test_task_instance_endpoint.py
@@ -314,7 +314,7 @@ class TestGetTaskInstance(TestTaskInstanceEndpoint):
         for map_index in (1, 2):
             response = self.client.get(
                 "/api/v1/dags/example_python_operator/dagRuns/TEST_DAG_RUN_ID/taskInstances"
-                + f"/print_the_context/{map_index}",
+                f"/print_the_context/{map_index}",
                 environ_overrides={"REMOTE_USER": "test"},
             )
             assert response.status_code == 200
@@ -480,7 +480,7 @@ class TestGetTaskInstances(TestTaskInstanceEndpoint):
                     {"state": State.NONE},
                 ],
                 False,
-                ("/api/v1/dags/example_python_operator/dagRuns/" "TEST_DAG_RUN_ID/taskInstances"),
+                ("/api/v1/dags/example_python_operator/dagRuns/TEST_DAG_RUN_ID/taskInstances"),
                 4,
             ),
             (
diff --git a/tests/core/test_providers_manager.py b/tests/core/test_providers_manager.py
index 4d97eb2fa0..a98dc9534e 100644
--- a/tests/core/test_providers_manager.py
+++ b/tests/core/test_providers_manager.py
@@ -238,5 +238,5 @@ class TestProviderManager:
                 hook_class_name=None, provider_info=None, package_name=None, connection_type="test_connection"
             )
             assert [
-                "Optional provider feature disabled when importing 'HookClass' from " "'test_package' package"
+                "Optional provider feature disabled when importing 'HookClass' from 'test_package' package"
             ] == self._caplog.messages
diff --git a/tests/providers/amazon/aws/hooks/test_glacier.py b/tests/providers/amazon/aws/hooks/test_glacier.py
index c22620f3c0..864a29a554 100644
--- a/tests/providers/amazon/aws/hooks/test_glacier.py
+++ b/tests/providers/amazon/aws/hooks/test_glacier.py
@@ -58,11 +58,11 @@ class TestAmazonGlacierHook(unittest.TestCase):
                 log.output,
                 [
                     'INFO:airflow.providers.amazon.aws.hooks.glacier.GlacierHook:'
-                    + f"Retrieving inventory for vault: {VAULT_NAME}",
+                    f"Retrieving inventory for vault: {VAULT_NAME}",
                     'INFO:airflow.providers.amazon.aws.hooks.glacier.GlacierHook:'
-                    + f"Initiated inventory-retrieval job for: {VAULT_NAME}",
+                    f"Initiated inventory-retrieval job for: {VAULT_NAME}",
                     'INFO:airflow.providers.amazon.aws.hooks.glacier.GlacierHook:'
-                    + f"Retrieval Job ID: {job_id.get('jobId')}",
+                    f"Retrieval Job ID: {job_id.get('jobId')}",
                 ],
             )
 
@@ -86,7 +86,7 @@ class TestAmazonGlacierHook(unittest.TestCase):
                 log.output,
                 [
                     'INFO:airflow.providers.amazon.aws.hooks.glacier.GlacierHook:'
-                    + f"Retrieving the job results for vault: {VAULT_NAME}...",
+                    f"Retrieving the job results for vault: {VAULT_NAME}...",
                 ],
             )
 
@@ -110,8 +110,8 @@ class TestAmazonGlacierHook(unittest.TestCase):
                 log.output,
                 [
                     'INFO:airflow.providers.amazon.aws.hooks.glacier.GlacierHook:'
-                    + f"Retrieving status for vault: {VAULT_NAME} and job {JOB_ID}",
+                    f"Retrieving status for vault: {VAULT_NAME} and job {JOB_ID}",
                     'INFO:airflow.providers.amazon.aws.hooks.glacier.GlacierHook:'
-                    + f"Job status: {JOB_STATUS.get('Action')}, code status: {JOB_STATUS.get('StatusCode')}",
+                    f"Job status: {JOB_STATUS.get('Action')}, code status: {JOB_STATUS.get('StatusCode')}",
                 ],
             )
diff --git a/tests/providers/databricks/operators/test_databricks.py b/tests/providers/databricks/operators/test_databricks.py
index 97551eda03..b6cd10e073 100644
--- a/tests/providers/databricks/operators/test_databricks.py
+++ b/tests/providers/databricks/operators/test_databricks.py
@@ -201,7 +201,7 @@ class TestDatabricksSubmitRunOperator(unittest.TestCase):
         # Looks a bit weird since we have to escape regex reserved symbols.
         exception_message = (
             r'Type \<(type|class) \'datetime.datetime\'\> used '
-            + r'for parameter json\[test\] is not a number or a string'
+            r'for parameter json\[test\] is not a number or a string'
         )
         with pytest.raises(AirflowException, match=exception_message):
             DatabricksSubmitRunOperator(task_id=TASK_ID, json=json)
@@ -516,7 +516,7 @@ class TestDatabricksRunNowOperator(unittest.TestCase):
         # Looks a bit weird since we have to escape regex reserved symbols.
         exception_message = (
             r'Type \<(type|class) \'datetime.datetime\'\> used '
-            + r'for parameter json\[test\] is not a number or a string'
+            r'for parameter json\[test\] is not a number or a string'
         )
         with pytest.raises(AirflowException, match=exception_message):
             DatabricksRunNowOperator(task_id=TASK_ID, job_id=JOB_ID, json=json)
diff --git a/tests/providers/google/cloud/hooks/test_datacatalog.py b/tests/providers/google/cloud/hooks/test_datacatalog.py
index bcc2078a8f..f2d7e1db64 100644
--- a/tests/providers/google/cloud/hooks/test_datacatalog.py
+++ b/tests/providers/google/cloud/hooks/test_datacatalog.py
@@ -70,11 +70,11 @@ TEST_ENTRY_GROUP_PATH: str = f"projects/{{}}/locations/{TEST_LOCATION}/entryGrou
 TEST_TAG_TEMPLATE_PATH: str = f"projects/{{}}/locations/{TEST_LOCATION}/tagTemplates/{TEST_TAG_TEMPLATE_ID}"
 TEST_TAG_TEMPLATE_FIELD_PATH: str = (
     f"projects/{{}}/locations/{TEST_LOCATION}/tagTemplates/"
-    + f"{TEST_TAG_TEMPLATE_ID}/fields/{TEST_TAG_TEMPLATE_FIELD_ID}"
+    f"{TEST_TAG_TEMPLATE_ID}/fields/{TEST_TAG_TEMPLATE_FIELD_ID}"
 )
 TEST_TAG_PATH: str = (
     f"projects/{{}}/locations/{TEST_LOCATION}/entryGroups/{TEST_ENTRY_GROUP_ID}"
-    + f"/entries/{TEST_ENTRY_ID}/tags/{TEST_TAG_ID}"
+    f"/entries/{TEST_ENTRY_ID}/tags/{TEST_TAG_ID}"
 )
 TEST_PROJECT_ID_1 = "example-project-1"
 TEST_PROJECT_ID_2 = "example-project-2"
diff --git a/tests/providers/google/cloud/operators/test_datacatalog.py b/tests/providers/google/cloud/operators/test_datacatalog.py
index 24c5cc99b8..ff6f14a10f 100644
--- a/tests/providers/google/cloud/operators/test_datacatalog.py
+++ b/tests/providers/google/cloud/operators/test_datacatalog.py
@@ -76,7 +76,7 @@ TEST_ORDER_BY: str = "test-order-by"
 TEST_UPDATE_MASK: Dict = {"fields": ["name"]}
 TEST_ENTRY_PATH: str = (
     f"projects/{TEST_PROJECT_ID}/locations/{TEST_LOCATION}"
-    + f"/entryGroups/{TEST_ENTRY_GROUP_ID}/entries/{TEST_ENTRY_ID}"
+    f"/entryGroups/{TEST_ENTRY_GROUP_ID}/entries/{TEST_ENTRY_ID}"
 )
 TEST_ENTRY_GROUP_PATH: str = (
     f"projects/{TEST_PROJECT_ID}/locations/{TEST_LOCATION}/entryGroups/{TEST_ENTRY_GROUP_ID}"
@@ -86,7 +86,7 @@ TEST_TAG_TEMPLATE_PATH: str = (
 )
 TEST_TAG_PATH: str = (
     f"projects/{TEST_PROJECT_ID}/locations/{TEST_LOCATION}/entryGroups/"
-    + f"{TEST_ENTRY_GROUP_ID}/entries/{TEST_ENTRY_ID}/tags/{TEST_TAG_ID}"
+    f"{TEST_ENTRY_GROUP_ID}/entries/{TEST_ENTRY_ID}/tags/{TEST_TAG_ID}"
 )
 
 TEST_ENTRY: Entry = Entry(name=TEST_ENTRY_PATH)
diff --git a/tests/providers/google/cloud/operators/test_mlengine.py b/tests/providers/google/cloud/operators/test_mlengine.py
index af7a487f42..36cd5818f9 100644
--- a/tests/providers/google/cloud/operators/test_mlengine.py
+++ b/tests/providers/google/cloud/operators/test_mlengine.py
@@ -230,7 +230,7 @@ class TestMLEngineBatchPredictionOperator(unittest.TestCase):
         task_args['model_name'] = 'fake_model'
         with pytest.raises(AirflowException) as ctx:
             MLEngineStartBatchPredictionJobOperator(**task_args).execute(None)
-        assert 'Ambiguous model origin: Both uri and ' 'model/version name are provided.' == str(ctx.value)
+        assert 'Ambiguous model origin: Both uri and model/version name are provided.' == str(ctx.value)
 
         # Test that both uri and model/version is given
         task_args = self.BATCH_PREDICTION_DEFAULT_ARGS.copy()
@@ -239,7 +239,7 @@ class TestMLEngineBatchPredictionOperator(unittest.TestCase):
         task_args['version_name'] = 'fake_version'
         with pytest.raises(AirflowException) as ctx:
             MLEngineStartBatchPredictionJobOperator(**task_args).execute(None)
-        assert 'Ambiguous model origin: Both uri and ' 'model/version name are provided.' == str(ctx.value)
+        assert 'Ambiguous model origin: Both uri and model/version name are provided.' == str(ctx.value)
 
         # Test that a version is given without a model
         task_args = self.BATCH_PREDICTION_DEFAULT_ARGS.copy()
diff --git a/tests/providers/google/cloud/transfers/test_mysql_to_gcs.py b/tests/providers/google/cloud/transfers/test_mysql_to_gcs.py
index d6ddd4ddd2..c006c230d3 100644
--- a/tests/providers/google/cloud/transfers/test_mysql_to_gcs.py
+++ b/tests/providers/google/cloud/transfers/test_mysql_to_gcs.py
@@ -47,12 +47,14 @@ NDJSON_LINES = [
     b'{"some_num": 44, "some_str": "mock_row_content_3"}\n',
 ]
 CSV_LINES = [
-    b'some_str,some_num\r\n' b'mock_row_content_1,42\r\n',
+    b'some_str,some_num\r\n',
+    b'mock_row_content_1,42\r\n',
     b'mock_row_content_2,43\r\n',
     b'mock_row_content_3,44\r\n',
 ]
 CSV_LINES_PIPE_DELIMITED = [
-    b'some_str|some_num\r\n' b'mock_row_content_1|42\r\n',
+    b'some_str|some_num\r\n',
+    b'mock_row_content_1|42\r\n',
     b'mock_row_content_2|43\r\n',
     b'mock_row_content_3|44\r\n',
 ]
diff --git a/tests/providers/google/cloud/utils/test_credentials_provider.py b/tests/providers/google/cloud/utils/test_credentials_provider.py
index d392b17093..c1fdd7268c 100644
--- a/tests/providers/google/cloud/utils/test_credentials_provider.py
+++ b/tests/providers/google/cloud/utils/test_credentials_provider.py
@@ -167,7 +167,7 @@ class TestGetGcpCredentialsAndProjectId(unittest.TestCase):
         assert mock_auth_default.return_value == result
 
     @mock.patch(
-        'airflow.providers.google.cloud.utils.credentials_provider.' 'impersonated_credentials.Credentials'
+        'airflow.providers.google.cloud.utils.credentials_provider.impersonated_credentials.Credentials'
     )
     @mock.patch('google.auth.default')
     def test_get_credentials_and_project_id_with_default_auth_and_target_principal(
@@ -189,7 +189,7 @@ class TestGetGcpCredentialsAndProjectId(unittest.TestCase):
         assert (mock_impersonated_credentials.return_value, ANOTHER_PROJECT_ID) == result
 
     @mock.patch(
-        'airflow.providers.google.cloud.utils.credentials_provider.' 'impersonated_credentials.Credentials'
+        'airflow.providers.google.cloud.utils.credentials_provider.impersonated_credentials.Credentials'
     )
     @mock.patch('google.auth.default')
     def test_get_credentials_and_project_id_with_default_auth_and_scopes_and_target_principal(
@@ -212,7 +212,7 @@ class TestGetGcpCredentialsAndProjectId(unittest.TestCase):
         assert (mock_impersonated_credentials.return_value, self.test_project_id) == result
 
     @mock.patch(
-        'airflow.providers.google.cloud.utils.credentials_provider.' 'impersonated_credentials.Credentials'
+        'airflow.providers.google.cloud.utils.credentials_provider.impersonated_credentials.Credentials'
     )
     @mock.patch('google.auth.default')
     def test_get_credentials_and_project_id_with_default_auth_and_target_principal_and_delegates(
@@ -312,7 +312,7 @@ class TestGetGcpCredentialsAndProjectId(unittest.TestCase):
         with pytest.raises(
             AirflowException,
             match=re.escape(
-                'The `keyfile_dict`, `key_path`, and `key_secret_name` fieldsare all mutually exclusive.'
+                'The `keyfile_dict`, `key_path`, and `key_secret_name` fields are all mutually exclusive.'
             ),
         ):
             get_credentials_and_project_id(key_path='KEY.json', keyfile_dict={'private_key': 'PRIVATE_KEY'})
diff --git a/tests/providers/google/common/hooks/test_base_google.py b/tests/providers/google/common/hooks/test_base_google.py
index bfb422965f..f833469412 100644
--- a/tests/providers/google/common/hooks/test_base_google.py
+++ b/tests/providers/google/common/hooks/test_base_google.py
@@ -448,7 +448,7 @@ class TestGoogleBaseHook(unittest.TestCase):
         with pytest.raises(
             AirflowException,
             match=re.escape(
-                "The `keyfile_dict`, `key_path`, and `key_secret_name` fields" "are all mutually exclusive. "
+                "The `keyfile_dict`, `key_path`, and `key_secret_name` fields are all mutually exclusive. "
             ),
         ):
             self.instance._get_credentials_and_project_id()
diff --git a/tests/providers/ssh/hooks/test_ssh.py b/tests/providers/ssh/hooks/test_ssh.py
index b17e3170a8..c248ebf45d 100644
--- a/tests/providers/ssh/hooks/test_ssh.py
+++ b/tests/providers/ssh/hooks/test_ssh.py
@@ -742,7 +742,7 @@ class TestSSHHook(unittest.TestCase):
 
     def test_oneline_key(self):
         with pytest.raises(Exception):
-            TEST_ONELINE_KEY = "-----BEGIN OPENSSH" + "PRIVATE KEY-----asdfg-----END OPENSSH PRIVATE KEY-----"
+            TEST_ONELINE_KEY = "-----BEGIN OPENSSHPRIVATE KEY-----asdfg-----END OPENSSHPRIVATE KEY-----"
             session = settings.Session()
             try:
                 conn = Connection(
diff --git a/tests/system/providers/google/bigquery/example_bigquery_sensors.py b/tests/system/providers/google/bigquery/example_bigquery_sensors.py
index 21d9f530c0..edd9db51ba 100644
--- a/tests/system/providers/google/bigquery/example_bigquery_sensors.py
+++ b/tests/system/providers/google/bigquery/example_bigquery_sensors.py
@@ -48,7 +48,7 @@ INSERT_DATE = datetime.now().strftime("%Y-%m-%d")
 
 PARTITION_NAME = "{{ ds_nodash }}"
 
-INSERT_ROWS_QUERY = f"INSERT {DATASET_NAME}.{TABLE_NAME} VALUES " "(42, '{{ ds }}')"
+INSERT_ROWS_QUERY = f"INSERT {DATASET_NAME}.{TABLE_NAME} VALUES (42, '{{ ds }}')"
 
 SCHEMA = [
     {"name": "value", "type": "INTEGER", "mode": "REQUIRED"},
diff --git a/tests/utils/test_file.py b/tests/utils/test_file.py
index 99f7e90a7d..3d94f36228 100644
--- a/tests/utils/test_file.py
+++ b/tests/utils/test_file.py
@@ -159,8 +159,7 @@ class TestListPyFilesPath:
             list(find_path_from_directory(test_dir, ignore_list_file, ignore_file_syntax="glob"))
             assert False, "Walking a self-recursive tree should fail"
         except RuntimeError as err:
-            assert (
-                str(err)
-                == f"Detected recursive loop when walking DAG directory {test_dir}: "
-                + f"{Path(recursing_tgt).resolve()} has appeared more than once."
+            assert str(err) == (
+                f"Detected recursive loop when walking DAG directory {test_dir}: "
+                f"{Path(recursing_tgt).resolve()} has appeared more than once."
             )
