This is an automated email from the ASF dual-hosted git repository.

ephraimanierobi pushed a commit to branch v2-3-test
in repository https://gitbox.apache.org/repos/asf/airflow.git

commit 019d14c12e92f070c087dca3081a4798f4181341
Author: Kamil Breguła <[email protected]>
AuthorDate: Wed May 4 00:37:30 2022 +0200

    Bump pre-commit hook versions (#22887)
    
    (cherry picked from commit 2d109401b3566aef613501691d18cf7e4c776cd2)
---
 .pre-commit-config.yaml                                | 11 ++++++-----
 airflow/cli/commands/connection_command.py             |  8 ++------
 .../example_branch_python_dop_operator_3.py            |  4 +---
 airflow/operators/subdag.py                            | 11 +++--------
 airflow/providers/amazon/aws/operators/ecs.py          | 10 ++++------
 airflow/providers/amazon/aws/sensors/emr.py            | 10 ++++++----
 airflow/providers/apache/hive/operators/hive_stats.py  |  5 +++--
 .../dingding/example_dags/example_dingding.py          | 10 ++++------
 .../cloud/hooks/cloud_storage_transfer_service.py      |  5 ++---
 airflow/providers/influxdb/hooks/influxdb.py           |  8 +++-----
 .../microsoft/azure/operators/container_instances.py   |  2 +-
 airflow/providers/mongo/hooks/mongo.py                 | 10 +++-------
 airflow/providers/neo4j/hooks/neo4j.py                 |  7 +------
 dev/assign_cherry_picked_prs_with_milestone.py         |  2 +-
 dev/prepare_release_issue.py                           |  2 +-
 dev/provider_packages/prepare_provider_packages.py     |  8 ++++----
 scripts/ci/libraries/_md5sum.sh                        |  2 +-
 tests/executors/test_kubernetes_executor.py            |  7 +++----
 tests/models/test_dagbag.py                            | 18 ++++++------------
 tests/providers/amazon/aws/hooks/test_sagemaker.py     |  7 ++-----
 tests/providers/amazon/aws/sensors/test_emr_base.py    |  5 +++--
 tests/sensors/test_external_task_sensor.py             | 10 +++++-----
 22 files changed, 65 insertions(+), 97 deletions(-)

diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml
index 47147bec8e..22b5a5dc2d 100644
--- a/.pre-commit-config.yaml
+++ b/.pre-commit-config.yaml
@@ -39,7 +39,7 @@ repos:
           - "--maxlevel"
           - "2"
   - repo: https://github.com/Lucas-C/pre-commit-hooks
-    rev: v1.1.10
+    rev: v1.1.13
     hooks:
       - id: forbid-tabs
         name: Fail if tabs are used in the project
@@ -146,6 +146,7 @@ repos:
           - --fuzzy-match-generates-todo
        files: >
          \.cfg$|\.conf$|\.ini$|\.ldif$|\.properties$|\.readthedocs$|\.service$|\.tf$|Dockerfile.*$
+  # Keep version of black in sync with blacken-docs and pre-commit-hook-names
   - repo: https://github.com/psf/black
     rev: 22.3.0
     hooks:
@@ -161,7 +162,7 @@ repos:
         alias: black
         additional_dependencies: [black==22.3.0]
   - repo: https://github.com/pre-commit/pre-commit-hooks
-    rev: v4.1.0
+    rev: v4.2.0
     hooks:
       - id: check-merge-conflict
         name: Check that merge conflicts are not being committed
@@ -203,7 +204,7 @@ repos:
         pass_filenames: true
   # TODO: Bump to Python 3.7 when support for Python 3.6 is dropped in Airflow 2.3.
   - repo: https://github.com/asottile/pyupgrade
-    rev: v2.31.0
+    rev: v2.32.0
     hooks:
       - id: pyupgrade
         name: Upgrade Python code automatically
@@ -264,7 +265,7 @@ repos:
           ^airflow/_vendor/
         additional_dependencies: ['flake8>=4.0.1']
   - repo: https://github.com/ikamensh/flynt
-    rev: '0.69'
+    rev: '0.76'
     hooks:
       - id: flynt
         name: Run flynt string format converter for Python
@@ -546,7 +547,7 @@ repos:
       - id: run-shellcheck
         name: Check Shell scripts syntax correctness
         language: docker_image
-        entry: koalaman/shellcheck:v0.7.2 -x -a
+        entry: koalaman/shellcheck:v0.8.0 -x -a
        files: ^breeze-legacy$|^breeze-complete$|\.sh$|^hooks/build$|^hooks/push$|\.bash$
         exclude: ^dev/breeze/autocomplete/.*$
       - id: lint-css
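
Most of the Python changes below are the kind of rewrite the flynt hook bumped
above (0.69 -> 0.76) performs: '%'-interpolation and str.format() calls
converted to f-strings. A minimal sketch of the equivalence, with an
illustrative variable rather than code taken from this diff:

    index = 3
    old = 'mount-{index}'.format(index=index)  # str.format with a named placeholder
    new = f'mount-{index}'                     # the f-string form flynt produces
    assert old == new == 'mount-3'
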
diff --git a/airflow/cli/commands/connection_command.py b/airflow/cli/commands/connection_command.py
index 9fbd524ff6..8a0c0a3acb 100644
--- a/airflow/cli/commands/connection_command.py
+++ b/airflow/cli/commands/connection_command.py
@@ -270,12 +270,8 @@ def connections_add(args):
                 or urlunparse(
                     (
                         new_conn.conn_type,
-                        '{login}:{password}@{host}:{port}'.format(
-                            login=new_conn.login or '',
-                            password='******' if new_conn.password else '',
-                            host=new_conn.host or '',
-                            port=new_conn.port or '',
-                        ),
+                        f"{new_conn.login or ''}:{'******' if new_conn.password else ''}"
+                        f"@{new_conn.host or ''}:{new_conn.port or ''}",
                         new_conn.schema or '',
                         '',
                         '',
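
For context, urllib.parse.urlunparse assembles a URI from a 6-tuple of
(scheme, netloc, path, params, query, fragment), and the rewritten f-string
above builds the netloc with the password masked. A minimal sketch, using
made-up values in place of the Airflow Connection fields:

    from urllib.parse import urlunparse

    conn_type, login, password, host, port, schema = (
        'postgres', 'user', 'secret', 'db.example.com', 5432, 'mydb'
    )
    netloc = f"{login or ''}:{'******' if password else ''}@{host or ''}:{port or ''}"
    print(urlunparse((conn_type, netloc, schema or '', '', '', '')))
    # postgres://user:******@db.example.com:5432/mydb
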
diff --git a/airflow/example_dags/example_branch_python_dop_operator_3.py b/airflow/example_dags/example_branch_python_dop_operator_3.py
index a73c3bd755..a8e0ce2c1c 100644
--- a/airflow/example_dags/example_branch_python_dop_operator_3.py
+++ b/airflow/example_dags/example_branch_python_dop_operator_3.py
@@ -36,9 +36,7 @@ def should_run(**kwargs):
     :rtype: str
     """
     print(
-        '------------- exec dttm = {} and minute = {}'.format(
-            kwargs['execution_date'], kwargs['execution_date'].minute
-        )
+        f"------------- exec dttm = {kwargs['execution_date']} and minute = {kwargs['execution_date'].minute}"
     )
     if kwargs['execution_date'].minute % 2 == 0:
         return "empty_task_1"
diff --git a/airflow/operators/subdag.py b/airflow/operators/subdag.py
index 217ec5c7cd..bd81314dda 100644
--- a/airflow/operators/subdag.py
+++ b/airflow/operators/subdag.py
@@ -115,14 +115,9 @@ class SubDagOperator(BaseSensorOperator):
                pool = session.query(Pool).filter(Pool.slots == 1).filter(Pool.pool == self.pool).first()
                if pool and any(t.pool == self.pool for t in self.subdag.tasks):
                     raise AirflowException(
-                        'SubDagOperator {sd} and subdag task{plural} {t} both '
-                        'use pool {p}, but the pool only has 1 slot. The '
-                        'subdag tasks will never run.'.format(
-                            sd=self.task_id,
-                            plural=len(conflicts) > 1,
-                            t=', '.join(t.task_id for t in conflicts),
-                            p=self.pool,
-                        )
+                        f"SubDagOperator {self.task_id} and subdag task{'s' if len(conflicts) > 1 else ''} "
+                        f"{', '.join(t.task_id for t in conflicts)} both use pool {self.pool}, "
+                        f"but the pool only has 1 slot. The subdag tasks will never run."
                     )
 
     def _get_dagrun(self, execution_date):
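
Besides readability, this rewrite fixes a small bug: the old message
interpolated plural=len(conflicts) > 1 directly, so it rendered as "taskTrue"
or "taskFalse" rather than pluralizing. A quick demonstration with a stand-in
conflicts list:

    conflicts = ['task_a', 'task_b']  # stand-in for the conflicting subdag tasks
    old = 'task{plural}'.format(plural=len(conflicts) > 1)
    new = f"task{'s' if len(conflicts) > 1 else ''}"
    print(old)  # taskTrue  (the bug)
    print(new)  # tasks     (the fix)
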
diff --git a/airflow/providers/amazon/aws/operators/ecs.py b/airflow/providers/amazon/aws/operators/ecs.py
index b6bae92388..7b23d77de7 100644
--- a/airflow/providers/amazon/aws/operators/ecs.py
+++ b/airflow/providers/amazon/aws/operators/ecs.py
@@ -466,9 +466,8 @@ class EcsOperator(BaseOperator):
            # https://docs.aws.amazon.com/AmazonECS/latest/developerguide/stopped-task-errors.html
            if re.match(r'Host EC2 \(instance .+?\) (stopped|terminated)\.', task.get('stoppedReason', '')):
                 raise AirflowException(
-                    'The task was stopped because the host instance terminated: {}'.format(
-                        task.get('stoppedReason', '')
-                    )
+                    f"The task was stopped because the host instance terminated:"
+                    f" {task.get('stoppedReason', '')}"
                 )
             containers = task['containers']
             for container in containers:
@@ -487,9 +486,8 @@ class EcsOperator(BaseOperator):
                    raise AirflowException(f'This task is still pending {task}')
                 elif 'error' in container.get('reason', '').lower():
                     raise AirflowException(
-                        'This containers encounter an error during launching : {}'.format(
-                            container.get('reason', '').lower()
-                        )
+                        f"This containers encounter an error during launching: "
+                        f"{container.get('reason', '').lower()}"
                     )
 
     def get_hook(self) -> AwsBaseHook:
diff --git a/airflow/providers/amazon/aws/sensors/emr.py b/airflow/providers/amazon/aws/sensors/emr.py
index 2204efcfae..c1f4a449a4 100644
--- a/airflow/providers/amazon/aws/sensors/emr.py
+++ b/airflow/providers/amazon/aws/sensors/emr.py
@@ -258,8 +258,9 @@ class EmrJobFlowSensor(EmrBaseSensor):
         cluster_status = response['Cluster']['Status']
         state_change_reason = cluster_status.get('StateChangeReason')
         if state_change_reason:
-            return 'for code: {} with message {}'.format(
-                state_change_reason.get('Code', 'No code'), state_change_reason.get('Message', 'Unknown')
+            return (
+                f"for code: {state_change_reason.get('Code', 'No code')} "
+                f"with message {state_change_reason.get('Message', 'Unknown')}"
             )
         return None
 
@@ -338,7 +339,8 @@ class EmrStepSensor(EmrBaseSensor):
         """
         fail_details = response['Step']['Status'].get('FailureDetails')
         if fail_details:
-            return 'for reason {} with message {} and log file {}'.format(
-                fail_details.get('Reason'), fail_details.get('Message'), fail_details.get('LogFile')
+            return (
+                f"for reason {fail_details.get('Reason')} "
+                f"with message {fail_details.get('Message')} and log file {fail_details.get('LogFile')}"
             )
         return None
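
The recurring pattern in these hunks, several f-strings stacked inside one
pair of parentheses, relies on Python joining adjacent string literals at
compile time, so the pieces form a single message. A small illustration with
placeholder values:

    code, message = 'CLUSTER_FAILED', 'bootstrap error'  # placeholder values
    text = (
        f"for code: {code} "
        f"with message {message}"
    )
    assert text == 'for code: CLUSTER_FAILED with message bootstrap error'
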
diff --git a/airflow/providers/apache/hive/operators/hive_stats.py b/airflow/providers/apache/hive/operators/hive_stats.py
index 56c0f304af..a1b5539622 100644
--- a/airflow/providers/apache/hive/operators/hive_stats.py
+++ b/airflow/providers/apache/hive/operators/hive_stats.py
@@ -76,8 +76,9 @@ class HiveStatsCollectionOperator(BaseOperator):
     ) -> None:
         if 'col_blacklist' in kwargs:
             warnings.warn(
-                'col_blacklist kwarg passed to {c} (task_id: {t}) is deprecated, please rename it to '
-                'excluded_columns instead'.format(c=self.__class__.__name__, t=kwargs.get('task_id')),
+                f"col_blacklist kwarg passed to {self.__class__.__name__} "
+                f"(task_id: {kwargs.get('task_id')}) is deprecated, "
+                f"please rename it to excluded_columns instead",
                 category=FutureWarning,
                 stacklevel=2,
             )
diff --git a/airflow/providers/dingding/example_dags/example_dingding.py b/airflow/providers/dingding/example_dags/example_dingding.py
index 7740d157ce..e57409e740 100644
--- a/airflow/providers/dingding/example_dags/example_dingding.py
+++ b/airflow/providers/dingding/example_dags/example_dingding.py
@@ -32,12 +32,10 @@ def failure_callback(context):
     :param context: The context of the executed task.
     """
     message = (
-        'AIRFLOW TASK FAILURE TIPS:\n'
-        'DAG:    {}\n'
-        'TASKS:  {}\n'
-        'Reason: {}\n'.format(
-            context['task_instance'].dag_id, context['task_instance'].task_id, context['exception']
-        )
+        f"AIRFLOW TASK FAILURE TIPS:\n"
+        f"DAG:    {context['task_instance'].dag_id}\n"
+        f"TASKS:  {context['task_instance'].task_id}\n"
+        f"Reason: {context['exception']}\n"
     )
     return DingdingOperator(
         task_id='dingding_success_callback',
diff --git a/airflow/providers/google/cloud/hooks/cloud_storage_transfer_service.py b/airflow/providers/google/cloud/hooks/cloud_storage_transfer_service.py
index ea6764bd75..04b57db0b4 100644
--- a/airflow/providers/google/cloud/hooks/cloud_storage_transfer_service.py
+++ b/airflow/providers/google/cloud/hooks/cloud_storage_transfer_service.py
@@ -495,8 +495,7 @@ class CloudDataTransferServiceHook(GoogleBaseHook):
 
         if len(NEGATIVE_STATUSES - current_statuses) != len(NEGATIVE_STATUSES):
             raise AirflowException(
-                'An unexpected operation status was encountered. Expected: {}'.format(
-                    ", ".join(expected_statuses_set)
-                )
+                f"An unexpected operation status was encountered. "
+                f"Expected: {', '.join(expected_statuses_set)}"
             )
         return False
diff --git a/airflow/providers/influxdb/hooks/influxdb.py b/airflow/providers/influxdb/hooks/influxdb.py
index c70369d87e..a72f862ba5 100644
--- a/airflow/providers/influxdb/hooks/influxdb.py
+++ b/airflow/providers/influxdb/hooks/influxdb.py
@@ -68,11 +68,9 @@ class InfluxDBHook(BaseHook):
         based on SSL or other InfluxDB host requirements
 
         """
-        return '{scheme}://{host}:{port}'.format(
-            scheme='https' if conn.schema is None else f'{conn.schema}',
-            host=conn.host,
-            port='7687' if conn.port is None else f'{conn.port}',
-        )
+        conn_scheme = 'https' if conn.schema is None else conn.schema
+        conn_port = 7687 if conn.port is None else conn.port
+        return f"{conn_scheme}://{conn.host}:{conn_port}"
 
     def get_conn(self) -> InfluxDBClient:
         """
diff --git a/airflow/providers/microsoft/azure/operators/container_instances.py b/airflow/providers/microsoft/azure/operators/container_instances.py
index b234fc68c4..519ce8fe41 100644
--- a/airflow/providers/microsoft/azure/operators/container_instances.py
+++ b/airflow/providers/microsoft/azure/operators/container_instances.py
@@ -212,7 +212,7 @@ class AzureContainerInstancesOperator(BaseOperator):
        for conn_id, account_name, share_name, mount_path, read_only in self.volumes:
             hook = AzureContainerVolumeHook(conn_id)
 
-            mount_name = "mount-%d" % len(volumes)
+            mount_name = f"mount-{len(volumes)}"
            volumes.append(hook.get_file_volume(mount_name, share_name, account_name, read_only))
            volume_mounts.append(VolumeMount(name=mount_name, mount_path=mount_path, read_only=read_only))
 
diff --git a/airflow/providers/mongo/hooks/mongo.py b/airflow/providers/mongo/hooks/mongo.py
index 8f273e4789..96a5ec8003 100644
--- a/airflow/providers/mongo/hooks/mongo.py
+++ b/airflow/providers/mongo/hooks/mongo.py
@@ -60,13 +60,9 @@ class MongoHook(BaseHook):
         srv = self.extras.pop('srv', False)
         scheme = 'mongodb+srv' if srv else 'mongodb'
 
-        self.uri = '{scheme}://{creds}{host}{port}/{database}'.format(
-            scheme=scheme,
-            creds=f'{self.connection.login}:{self.connection.password}@' if self.connection.login else '',
-            host=self.connection.host,
-            port='' if self.connection.port is None else f':{self.connection.port}',
-            database=self.connection.schema,
-        )
+        creds = f'{self.connection.login}:{self.connection.password}@' if self.connection.login else ''
+        port = '' if self.connection.port is None else f':{self.connection.port}'
+        self.uri = f'{scheme}://{creds}{self.connection.host}{port}/{self.connection.schema}'
 
     def __enter__(self):
         return self
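
The Mongo URI has two optional pieces, and representing each as an empty
string when absent lets a single f-string cover every combination. A sketch
with hypothetical connection values:

    login, password, host, port, schema = 'user', 'pw', 'mongo.example.com', None, 'mydb'
    scheme = 'mongodb'
    creds = f'{login}:{password}@' if login else ''
    port_part = '' if port is None else f':{port}'
    print(f'{scheme}://{creds}{host}{port_part}/{schema}')
    # mongodb://user:pw@mongo.example.com/mydb
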
diff --git a/airflow/providers/neo4j/hooks/neo4j.py b/airflow/providers/neo4j/hooks/neo4j.py
index 399be2c63a..ad9ce4ac3c 100644
--- a/airflow/providers/neo4j/hooks/neo4j.py
+++ b/airflow/providers/neo4j/hooks/neo4j.py
@@ -91,12 +91,7 @@ class Neo4jHook(BaseHook):
         elif trusted_ca:
             encryption_scheme = '+s'
 
-        return '{scheme}{encryption_scheme}://{host}:{port}'.format(
-            scheme=scheme,
-            encryption_scheme=encryption_scheme,
-            host=conn.host,
-            port='7687' if conn.port is None else f'{conn.port}',
-        )
+        return f"{scheme}{encryption_scheme}://{conn.host}:{7687 if conn.port is None else conn.port}"
 
     def run(self, query) -> Result:
         """
diff --git a/dev/assign_cherry_picked_prs_with_milestone.py b/dev/assign_cherry_picked_prs_with_milestone.py
index e34ff53e32..58bbd38d67 100755
--- a/dev/assign_cherry_picked_prs_with_milestone.py
+++ b/dev/assign_cherry_picked_prs_with_milestone.py
@@ -222,7 +222,7 @@ def get_changes(verbose: bool, previous_release: str, current_release: str) -> L
     change_strings = subprocess.check_output(
        get_git_log_command(verbose, from_commit=previous_release, to_commit=current_release),
         cwd=SOURCE_DIR_PATH,
-        universal_newlines=True,
+        text=True,
     )
     return [get_change_from_line(line) for line in change_strings.split("\n")]
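
For reference, text=True is the modern spelling of universal_newlines=True:
both are accepted since Python 3.7, and both make subprocess return str
rather than bytes. For example:

    import subprocess

    out = subprocess.check_output(['git', '--version'], text=True)
    print(type(out))  # <class 'str'>; without text=True this would be bytes
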
 
diff --git a/dev/prepare_release_issue.py b/dev/prepare_release_issue.py
index 58bb865b16..e61fad7445 100755
--- a/dev/prepare_release_issue.py
+++ b/dev/prepare_release_issue.py
@@ -166,7 +166,7 @@ def get_changes(
            verbose, from_commit=previous_release, to_commit=current_release, is_helm_chart=is_helm_chart
         ),
         cwd=SOURCE_DIR_PATH,
-        universal_newlines=True,
+        text=True,
     )
     return [get_change_from_line(line) for line in change_strings.split("\n")]
 
diff --git a/dev/provider_packages/prepare_provider_packages.py b/dev/provider_packages/prepare_provider_packages.py
index 319f4447fd..4aecdfc308 100755
--- a/dev/provider_packages/prepare_provider_packages.py
+++ b/dev/provider_packages/prepare_provider_packages.py
@@ -1318,7 +1318,7 @@ def get_all_changes_for_package(
         changes = subprocess.check_output(
            get_git_log_command(verbose, HEAD_OF_HTTPS_REMOTE, current_tag_no_suffix),
             cwd=source_provider_package_path,
-            universal_newlines=True,
+            text=True,
         )
         if changes:
             provider_details = get_provider_details(provider_package_id)
@@ -1332,7 +1332,7 @@ def get_all_changes_for_package(
                    changes_since_last_doc_only_check = subprocess.check_output(
                        get_git_log_command(verbose, HEAD_OF_HTTPS_REMOTE, last_doc_only_hash),
                         cwd=source_provider_package_path,
-                        universal_newlines=True,
+                        text=True,
                     )
                     if not changes_since_last_doc_only_check:
                         console.print()
@@ -1385,7 +1385,7 @@ def get_all_changes_for_package(
         changes = subprocess.check_output(
             get_git_log_command(verbose, next_version_tag, version_tag),
             cwd=source_provider_package_path,
-            universal_newlines=True,
+            text=True,
         )
        changes_table_for_version, array_of_changes_for_version = convert_git_changes_to_table(
            current_version, changes, base_url="https://github.com/apache/airflow/commit/", markdown=False
@@ -1397,7 +1397,7 @@ def get_all_changes_for_package(
     changes = subprocess.check_output(
         get_git_log_command(verbose, next_version_tag),
         cwd=source_provider_package_path,
-        universal_newlines=True,
+        text=True,
     )
    changes_table_for_version, array_of_changes_for_version = convert_git_changes_to_table(
        current_version, changes, base_url="https://github.com/apache/airflow/commit/", markdown=False
diff --git a/scripts/ci/libraries/_md5sum.sh b/scripts/ci/libraries/_md5sum.sh
index 0a90288952..1838936fc9 100644
--- a/scripts/ci/libraries/_md5sum.sh
+++ b/scripts/ci/libraries/_md5sum.sh
@@ -35,7 +35,7 @@ function md5sum::calculate_file_md5sum {
     echo "${md5sum}" > "${md5sum_file_new}"
     local ret_code=0
     if [[ ! -f "${md5sum_file}" ]]; then
-        verbosity::print_info "Missing md5sum for ${file#${AIRFLOW_SOURCES}} (${md5sum_file#${AIRFLOW_SOURCES}})"
+        verbosity::print_info "Missing md5sum for ${file#"${AIRFLOW_SOURCES}"} (${md5sum_file#"${AIRFLOW_SOURCES}"})"
         ret_code=1
     else
         diff "${md5sum_file_new}" "${md5sum_file}" >/dev/null
diff --git a/tests/executors/test_kubernetes_executor.py b/tests/executors/test_kubernetes_executor.py
index 018a94c4d7..541e8453b8 100644
--- a/tests/executors/test_kubernetes_executor.py
+++ b/tests/executors/test_kubernetes_executor.py
@@ -917,8 +917,7 @@ class TestKubernetesJobWatcher(unittest.TestCase):
         self.events.append({"type": "ERROR", "object": self.pod, "raw_object": 
raw_object})
         with self.assertRaises(AirflowException) as e:
             self._run()
-        assert str(e.exception) == 'Kubernetes failure for {} with code {} and message: {}'.format(
-            raw_object['reason'],
-            raw_object['code'],
-            raw_object['message'],
+        assert str(e.exception) == (
+            f"Kubernetes failure for {raw_object['reason']} "
+            f"with code {raw_object['code']} and message: {raw_object['message']}"
         )
diff --git a/tests/models/test_dagbag.py b/tests/models/test_dagbag.py
index 8017be66f0..e4320d1068 100644
--- a/tests/models/test_dagbag.py
+++ b/tests/models/test_dagbag.py
@@ -436,19 +436,13 @@ class TestDagBag:
 
         for dag_id in expected_dag_ids:
             actual_dagbag.log.info(f'validating {dag_id}')
-            assert (
-                dag_id in actual_found_dag_ids
-            ) == should_be_found, 'dag "{}" should {}have been found after processing dag "{}"'.format(
-                dag_id,
-                '' if should_be_found else 'not ',
-                expected_parent_dag.dag_id,
+            assert (dag_id in actual_found_dag_ids) == should_be_found, (
+                f"dag \"{dag_id}\" should {'' if should_be_found else 'not '}"
+                f"have been found after processing dag \"{expected_parent_dag.dag_id}\""
             )
-            assert (
-                dag_id in actual_dagbag.dags
-            ) == should_be_found, 'dag "{}" should {}be in dagbag.dags after processing dag "{}"'.format(
-                dag_id,
-                '' if should_be_found else 'not ',
-                expected_parent_dag.dag_id,
+            assert (dag_id in actual_dagbag.dags) == should_be_found, (
+                f"dag \"{dag_id}\" should {'' if should_be_found else 'not '}"
+                f"be in dagbag.dags after processing dag \"{expected_parent_dag.dag_id}\""
             )
 
     def test_load_subdags(self):
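
A note on the quoting in this hunk: before Python 3.12 (PEP 701), a
replacement field could not reuse the f-string's own quote character, so the
'' if should_be_found else 'not ' expression has to use single quotes inside
the double-quoted f-string, while the literal double quotes around the dag id
are backslash-escaped. A compact example:

    dag_id, found = 'my_dag', False  # illustrative values
    msg = f"dag \"{dag_id}\" should {'' if found else 'not '}have been found"
    print(msg)  # dag "my_dag" should not have been found
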
diff --git a/tests/providers/amazon/aws/hooks/test_sagemaker.py b/tests/providers/amazon/aws/hooks/test_sagemaker.py
index dff84f91ed..0ae53f0fce 100644
--- a/tests/providers/amazon/aws/hooks/test_sagemaker.py
+++ b/tests/providers/amazon/aws/hooks/test_sagemaker.py
@@ -528,11 +528,8 @@ class TestSageMakerHook(unittest.TestCase):
     def test_secondary_training_status_message_status_changed(self):
         now = datetime.now(tzlocal())
         SECONDARY_STATUS_DESCRIPTION_1['LastModifiedTime'] = now
-        expected = '{} {} - {}'.format(
-            datetime.utcfromtimestamp(time.mktime(now.timetuple())).strftime('%Y-%m-%d %H:%M:%S'),
-            status,
-            message,
-        )
+        expected_time = datetime.utcfromtimestamp(time.mktime(now.timetuple())).strftime('%Y-%m-%d %H:%M:%S')
+        expected = f"{expected_time} {status} - {message}"
         assert (
            secondary_training_status_message(SECONDARY_STATUS_DESCRIPTION_1, SECONDARY_STATUS_DESCRIPTION_2)
             == expected
diff --git a/tests/providers/amazon/aws/sensors/test_emr_base.py b/tests/providers/amazon/aws/sensors/test_emr_base.py
index 53bdf1a86d..4ca1bb5cc8 100644
--- a/tests/providers/amazon/aws/sensors/test_emr_base.py
+++ b/tests/providers/amazon/aws/sensors/test_emr_base.py
@@ -52,8 +52,9 @@ class EmrBaseSensorSubclass(EmrBaseSensor):
     def failure_message_from_response(response):
         change_reason = response['SomeKey'].get('StateChangeReason')
         if change_reason:
-            return 'for code: {} with message {}'.format(
-                change_reason.get('Code', EMPTY_CODE), change_reason.get('Message', 'Unknown')
+            return (
+                f"for code: {change_reason.get('Code', EMPTY_CODE)} "
+                f"with message {change_reason.get('Message', 'Unknown')}"
             )
         return None
 
diff --git a/tests/sensors/test_external_task_sensor.py b/tests/sensors/test_external_task_sensor.py
index 658fe69a9c..8725d76081 100644
--- a/tests/sensors/test_external_task_sensor.py
+++ b/tests/sensors/test_external_task_sensor.py
@@ -125,8 +125,8 @@ class TestExternalTaskSensor(unittest.TestCase):
             with pytest.raises(AirflowException) as ctx:
                op.run(start_date=DEFAULT_DATE, end_date=DEFAULT_DATE, ignore_ti_state=True)
             assert (
-                'INFO:airflow.task.operators:Poking for tasks [\'time_sensor_check\']'
-                ' in dag unit_test_dag on %s ... ' % DEFAULT_DATE.isoformat() in cm.output
+                f'INFO:airflow.task.operators:Poking for tasks [\'time_sensor_check\'] '
+                f'in dag unit_test_dag on {DEFAULT_DATE.isoformat()} ... ' in cm.output
             )
             assert (
                 str(ctx.value) == "Some of the external tasks "
@@ -191,9 +191,9 @@ class TestExternalTaskSensor(unittest.TestCase):
             with pytest.raises(AirflowException) as ctx:
                op.run(start_date=DEFAULT_DATE, end_date=DEFAULT_DATE, ignore_ti_state=True)
             assert (
-                'INFO:airflow.task.operators:Poking for tasks '
-                '[\'time_sensor_check\', \'time_sensor_check_alternate\'] '
-                'in dag unit_test_dag on %s ... ' % DEFAULT_DATE.isoformat() in cm.output
+                f'INFO:airflow.task.operators:Poking for tasks '
+                f'[\'time_sensor_check\', \'time_sensor_check_alternate\'] '
+                f'in dag unit_test_dag on {DEFAULT_DATE.isoformat()} ... ' in cm.output
             )
             assert (
                 str(ctx.value) == "Some of the external tasks "
