This is an automated email from the ASF dual-hosted git repository.

potiuk pushed a commit to branch main
in repository https://gitbox.apache.org/repos/asf/airflow.git


The following commit(s) were added to refs/heads/main by this push:
     new c585ad51c5 Upgrade ruff to 0.0.262 (#30809)
c585ad51c5 is described below

commit c585ad51c522c6e9f3bbbf7ae6e0132e25a3a378
Author: Jarek Potiuk <[email protected]>
AuthorDate: Sat Apr 22 15:13:58 2023 +0200

    Upgrade ruff to 0.0.262 (#30809)
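
    Most of this diff unquotes forward-referenced annotations such as
    AsyncIterator["TriggerEvent"], drops "# noqa" markers that are no longer
    needed, and fixes one broken "%"-formatted error message in the Bigtable
    hook. Removing the quotes is safe where the module relies on postponed
    evaluation of annotations (PEP 563). A minimal sketch of the pattern, using
    a hypothetical ExampleTrigger that is not part of this commit:

        # Hypothetical trigger, for illustration only. With
        # "from __future__ import annotations" in effect, annotations are not
        # evaluated at runtime, so quoting TriggerEvent is redundant -- the kind
        # of simplification the upgraded ruff reports and this commit applies.
        from __future__ import annotations

        from typing import Any, AsyncIterator

        from airflow.triggers.base import BaseTrigger, TriggerEvent


        class ExampleTrigger(BaseTrigger):
            def serialize(self) -> tuple[str, dict[str, Any]]:
                return ("example_module.ExampleTrigger", {})

            async def run(self) -> AsyncIterator[TriggerEvent]:  # was: AsyncIterator["TriggerEvent"]
                yield TriggerEvent({"status": "success"})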
---
 .pre-commit-config.yaml                                    |  2 +-
 airflow/kubernetes/pod.py                                  |  2 +-
 airflow/providers/amazon/aws/transfers/dynamodb_to_s3.py   |  2 +-
 airflow/providers/amazon/aws/triggers/redshift_cluster.py  |  2 +-
 airflow/providers/apache/kafka/hooks/base.py               |  2 +-
 airflow/providers/apache/kafka/hooks/client.py             |  2 +-
 airflow/providers/apache/livy/triggers/livy.py             |  2 +-
 airflow/providers/cncf/kubernetes/triggers/pod.py          |  2 +-
 airflow/providers/dbt/cloud/operators/dbt.py               |  2 +-
 airflow/providers/dbt/cloud/triggers/dbt.py                |  2 +-
 airflow/providers/google/cloud/hooks/bigtable.py           |  2 +-
 airflow/providers/google/cloud/triggers/bigquery.py        | 14 +++++++-------
 airflow/providers/google/cloud/triggers/cloud_build.py     |  2 +-
 airflow/providers/google/cloud/triggers/datafusion.py      |  2 +-
 airflow/providers/google/cloud/triggers/dataproc.py        |  6 +++---
 airflow/providers/google/cloud/triggers/gcs.py             |  6 +++---
 .../providers/google/cloud/triggers/kubernetes_engine.py   |  2 +-
 airflow/providers/google/cloud/triggers/mlengine.py        |  2 +-
 airflow/providers/http/hooks/http.py                       |  2 +-
 airflow/providers/microsoft/azure/triggers/data_factory.py |  4 ++--
 airflow/providers/microsoft/azure/triggers/wasb.py         |  4 ++--
 airflow/stats.py                                           |  2 +-
 airflow/triggers/base.py                                   |  2 +-
 airflow/triggers/external_task.py                          |  4 ++--
 airflow/triggers/file.py                                   |  2 +-
 docs/exts/exampleinclude.py                                |  2 +-
 scripts/ci/pre_commit/pre_commit_insert_extras.py          |  4 ++--
 scripts/ci/pre_commit/pre_commit_local_yml_mounts.py       |  6 +++---
 tests/cli/conftest.py                                      |  2 +-
 .../providers/google/cloud/bigtable/example_bigtable.py    |  2 +-
 30 files changed, 46 insertions(+), 46 deletions(-)

diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml
index dbc8229360..c8000c6abb 100644
--- a/.pre-commit-config.yaml
+++ b/.pre-commit-config.yaml
@@ -179,7 +179,7 @@ repos:
         # Since ruff makes use of multiple cores we _purposefully_ don't run this in docker so it can use the
         # host CPU to it's fullest
         entry: ruff --fix --no-update-check --force-exclude
-        additional_dependencies: ['ruff==0.0.226']
+        additional_dependencies: ['ruff==0.0.262']
         files: \.pyi?$
         exclude: ^.*/.*_vendor/|^tests/dags/test_imports.py
   - repo: https://github.com/asottile/blacken-docs
diff --git a/airflow/kubernetes/pod.py b/airflow/kubernetes/pod.py
index 5b946b2e3a..629cbad17c 100644
--- a/airflow/kubernetes/pod.py
+++ b/airflow/kubernetes/pod.py
@@ -29,7 +29,7 @@ from airflow.exceptions import RemovedInAirflow3Warning
 
 with warnings.catch_warnings():
     warnings.simplefilter("ignore", RemovedInAirflow3Warning)
-    from airflow.providers.cncf.kubernetes.backcompat.pod import Port, Resources  # noqa: autoflake
+    from airflow.providers.cncf.kubernetes.backcompat.pod import Port, Resources
 
 warnings.warn(
     "This module is deprecated. Please use `kubernetes.client.models` for 
`V1ResourceRequirements` and `Port`.",
diff --git a/airflow/providers/amazon/aws/transfers/dynamodb_to_s3.py b/airflow/providers/amazon/aws/transfers/dynamodb_to_s3.py
index 4067217d97..bd2034893e 100644
--- a/airflow/providers/amazon/aws/transfers/dynamodb_to_s3.py
+++ b/airflow/providers/amazon/aws/transfers/dynamodb_to_s3.py
@@ -87,7 +87,7 @@ class DynamoDBToS3Operator(AwsToAwsBaseOperator):
     :param dynamodb_scan_kwargs: kwargs pass to <https://boto3.amazonaws.com/v1/documentation/api/latest/reference/services/dynamodb.html#DynamoDB.Table.scan>
     :param s3_key_prefix: Prefix of s3 object key
     :param process_func: How we transforms a dynamodb item to bytes. By default we dump the json
-    """  # noqa: E501
+    """
 
     template_fields: Sequence[str] = (
         *AwsToAwsBaseOperator.template_fields,
diff --git a/airflow/providers/amazon/aws/triggers/redshift_cluster.py b/airflow/providers/amazon/aws/triggers/redshift_cluster.py
index a32a6efa19..9e5cac6254 100644
--- a/airflow/providers/amazon/aws/triggers/redshift_cluster.py
+++ b/airflow/providers/amazon/aws/triggers/redshift_cluster.py
@@ -55,7 +55,7 @@ class RedshiftClusterTrigger(BaseTrigger):
             },
         )
 
-    async def run(self) -> AsyncIterator["TriggerEvent"]:
+    async def run(self) -> AsyncIterator[TriggerEvent]:
         hook = RedshiftAsyncHook(aws_conn_id=self.aws_conn_id)
         while self.attempts >= 1:
             self.attempts = self.attempts - 1
diff --git a/airflow/providers/apache/kafka/hooks/base.py b/airflow/providers/apache/kafka/hooks/base.py
index bd3a2d3cc7..eb5429a6a5 100644
--- a/airflow/providers/apache/kafka/hooks/base.py
+++ b/airflow/providers/apache/kafka/hooks/base.py
@@ -58,7 +58,7 @@ class KafkaBaseHook(BaseHook):
 
     @cached_property
     def get_conn(self) -> Any:
-        """get the configuration object"""
+        """Get the configuration object"""
         config = self.get_connection(self.kafka_config_id).extra_dejson
 
         if not (config.get("bootstrap.servers", None)):
diff --git a/airflow/providers/apache/kafka/hooks/client.py b/airflow/providers/apache/kafka/hooks/client.py
index 7613bfab22..1043a3edb5 100644
--- a/airflow/providers/apache/kafka/hooks/client.py
+++ b/airflow/providers/apache/kafka/hooks/client.py
@@ -41,7 +41,7 @@ class KafkaAdminClientHook(KafkaBaseHook):
         self,
         topics: Sequence[Sequence[Any]],
     ) -> None:
-        """creates a topic
+        """Creates a topic
 
         :param topics: a list of topics to create including the number of partitions for the topic
           and the replication factor. Format: [ ("topic_name", number of partitions, replication factor)]
diff --git a/airflow/providers/apache/livy/triggers/livy.py b/airflow/providers/apache/livy/triggers/livy.py
index cfcbde53b9..17c5567679 100644
--- a/airflow/providers/apache/livy/triggers/livy.py
+++ b/airflow/providers/apache/livy/triggers/livy.py
@@ -78,7 +78,7 @@ class LivyTrigger(BaseTrigger):
             },
         )
 
-    async def run(self) -> AsyncIterator["TriggerEvent"]:
+    async def run(self) -> AsyncIterator[TriggerEvent]:
         """
         Checks if the _polling_interval > 0, in that case it pools Livy for
         batch termination asynchronously.
diff --git a/airflow/providers/cncf/kubernetes/triggers/pod.py b/airflow/providers/cncf/kubernetes/triggers/pod.py
index e024e7786a..b4a8816253 100644
--- a/airflow/providers/cncf/kubernetes/triggers/pod.py
+++ b/airflow/providers/cncf/kubernetes/triggers/pod.py
@@ -116,7 +116,7 @@ class KubernetesPodTrigger(BaseTrigger):
             },
         )
 
-    async def run(self) -> AsyncIterator["TriggerEvent"]:  # type: ignore[override]
+    async def run(self) -> AsyncIterator[TriggerEvent]:  # type: ignore[override]
         """Gets current pod status and yields a TriggerEvent"""
         hook = self._get_async_hook()
         self.log.info("Checking pod %r in namespace %r.", self.pod_name, 
self.pod_namespace)
diff --git a/airflow/providers/dbt/cloud/operators/dbt.py b/airflow/providers/dbt/cloud/operators/dbt.py
index 754c0cbdbf..de110e9865 100644
--- a/airflow/providers/dbt/cloud/operators/dbt.py
+++ b/airflow/providers/dbt/cloud/operators/dbt.py
@@ -173,7 +173,7 @@ class DbtCloudRunJobOperator(BaseOperator):
                 )
             return self.run_id
 
-    def execute_complete(self, context: "Context", event: dict[str, Any]) -> int:
+    def execute_complete(self, context: Context, event: dict[str, Any]) -> int:
         """
         Callback for when the trigger fires - returns immediately.
         Relies on trigger to throw an exception, otherwise it assumes execution was
diff --git a/airflow/providers/dbt/cloud/triggers/dbt.py b/airflow/providers/dbt/cloud/triggers/dbt.py
index 9bad789a52..3111073412 100644
--- a/airflow/providers/dbt/cloud/triggers/dbt.py
+++ b/airflow/providers/dbt/cloud/triggers/dbt.py
@@ -64,7 +64,7 @@ class DbtCloudRunJobTrigger(BaseTrigger):
             },
         )
 
-    async def run(self) -> AsyncIterator["TriggerEvent"]:
+    async def run(self) -> AsyncIterator[TriggerEvent]:
         """Make async connection to Dbt, polls for the pipeline run status"""
         hook = DbtCloudHook(self.conn_id)
         try:
diff --git a/airflow/providers/google/cloud/hooks/bigtable.py b/airflow/providers/google/cloud/hooks/bigtable.py
index c51c1a990f..999d141547 100644
--- a/airflow/providers/google/cloud/hooks/bigtable.py
+++ b/airflow/providers/google/cloud/hooks/bigtable.py
@@ -248,7 +248,7 @@ class BigtableHook(GoogleBaseHook):
         """
         instance = self.get_instance(instance_id=instance_id, project_id=project_id)
         if instance is None:
-            raise RuntimeError("Instance %s did not exist; unable to delete 
table %s" % instance_id, table_id)
+            raise RuntimeError(f"Instance {instance_id} did not exist; unable 
to delete table {table_id}")
         table = instance.table(table_id=table_id)
         table.delete()
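
The Bigtable hook hunk above is a behavior fix, not only a style cleanup: in the removed line the "%" operator binds only to the string literal and instance_id, leaving the second "%s" unfilled and passing table_id as a separate positional argument to RuntimeError, so that path raised a formatting TypeError instead of the intended message. A small standalone illustration of the precedence pitfall (the values are made up):

    # Standalone illustration of the pitfall fixed above; values are made up.
    instance_id, table_id = "my-instance", "my-table"

    # Old form: "%" applies only to the literal and instance_id, so the two "%s"
    # placeholders cannot both be filled ("not enough arguments for format string")
    # and table_id ends up as a second positional argument to RuntimeError.
    # RuntimeError("Instance %s did not exist; unable to delete table %s" % instance_id, table_id)

    # New form: one f-string interpolates both values into a single message.
    err = RuntimeError(f"Instance {instance_id} did not exist; unable to delete table {table_id}")
    print(err)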
 
diff --git a/airflow/providers/google/cloud/triggers/bigquery.py b/airflow/providers/google/cloud/triggers/bigquery.py
index e8d8892219..ba4ce8c19b 100644
--- a/airflow/providers/google/cloud/triggers/bigquery.py
+++ b/airflow/providers/google/cloud/triggers/bigquery.py
@@ -71,7 +71,7 @@ class BigQueryInsertJobTrigger(BaseTrigger):
             },
         )
 
-    async def run(self) -> AsyncIterator["TriggerEvent"]:  # type: ignore[override]
+    async def run(self) -> AsyncIterator[TriggerEvent]:  # type: ignore[override]
         """Gets current job execution status and yields a TriggerEvent"""
         hook = self._get_async_hook()
         while True:
@@ -122,7 +122,7 @@ class BigQueryCheckTrigger(BigQueryInsertJobTrigger):
             },
         )
 
-    async def run(self) -> AsyncIterator["TriggerEvent"]:  # type: ignore[override]
+    async def run(self) -> AsyncIterator[TriggerEvent]:  # type: ignore[override]
         """Gets current job execution status and yields a TriggerEvent"""
         hook = self._get_async_hook()
         while True:
@@ -181,7 +181,7 @@ class BigQueryGetDataTrigger(BigQueryInsertJobTrigger):
             },
         )
 
-    async def run(self) -> AsyncIterator["TriggerEvent"]:  # type: ignore[override]
+    async def run(self) -> AsyncIterator[TriggerEvent]:  # type: ignore[override]
         """Gets current job execution status and yields a TriggerEvent with response data"""
         hook = self._get_async_hook()
         while True:
@@ -286,7 +286,7 @@ class BigQueryIntervalCheckTrigger(BigQueryInsertJobTrigger):
             },
         )
 
-    async def run(self) -> AsyncIterator["TriggerEvent"]:  # type: ignore[override]
+    async def run(self) -> AsyncIterator[TriggerEvent]:  # type: ignore[override]
         """Gets current job execution status and yields a TriggerEvent"""
         hook = self._get_async_hook()
         while True:
@@ -414,7 +414,7 @@ class BigQueryValueCheckTrigger(BigQueryInsertJobTrigger):
             },
         )
 
-    async def run(self) -> AsyncIterator["TriggerEvent"]:  # type: ignore[override]
+    async def run(self) -> AsyncIterator[TriggerEvent]:  # type: ignore[override]
         """Gets current job execution status and yields a TriggerEvent"""
         hook = self._get_async_hook()
         while True:
@@ -487,7 +487,7 @@ class BigQueryTableExistenceTrigger(BaseTrigger):
     def _get_async_hook(self) -> BigQueryTableAsyncHook:
         return BigQueryTableAsyncHook(gcp_conn_id=self.gcp_conn_id)
 
-    async def run(self) -> AsyncIterator["TriggerEvent"]:  # type: ignore[override]
+    async def run(self) -> AsyncIterator[TriggerEvent]:  # type: ignore[override]
         """Will run until the table exists in the Google Big Query."""
         while True:
             try:
@@ -562,7 +562,7 @@ class BigQueryTablePartitionExistenceTrigger(BigQueryTableExistenceTrigger):
             },
         )
 
-    async def run(self) -> AsyncIterator["TriggerEvent"]:  # type: ignore[override]
+    async def run(self) -> AsyncIterator[TriggerEvent]:  # type: ignore[override]
         """Will run until the table exists in the Google Big Query."""
         hook = BigQueryAsyncHook(gcp_conn_id=self.gcp_conn_id)
         job_id = None
diff --git a/airflow/providers/google/cloud/triggers/cloud_build.py b/airflow/providers/google/cloud/triggers/cloud_build.py
index c40588b5ca..3187dc0c1d 100644
--- a/airflow/providers/google/cloud/triggers/cloud_build.py
+++ b/airflow/providers/google/cloud/triggers/cloud_build.py
@@ -75,7 +75,7 @@ class CloudBuildCreateBuildTrigger(BaseTrigger):
             },
         )
 
-    async def run(self) -> AsyncIterator["TriggerEvent"]:  # type: ignore[override]
+    async def run(self) -> AsyncIterator[TriggerEvent]:  # type: ignore[override]
         """Gets current build execution status and yields a TriggerEvent"""
         hook = self._get_async_hook()
         while True:
diff --git a/airflow/providers/google/cloud/triggers/datafusion.py b/airflow/providers/google/cloud/triggers/datafusion.py
index 5619d1a26f..34fa7d0258 100644
--- a/airflow/providers/google/cloud/triggers/datafusion.py
+++ b/airflow/providers/google/cloud/triggers/datafusion.py
@@ -80,7 +80,7 @@ class DataFusionStartPipelineTrigger(BaseTrigger):
             },
         )
 
-    async def run(self) -> AsyncIterator["TriggerEvent"]:  # type: ignore[override]
+    async def run(self) -> AsyncIterator[TriggerEvent]:  # type: ignore[override]
         """Gets current pipeline status and yields a TriggerEvent"""
         hook = self._get_async_hook()
         while True:
diff --git a/airflow/providers/google/cloud/triggers/dataproc.py b/airflow/providers/google/cloud/triggers/dataproc.py
index d3736afca3..c340b44efe 100644
--- a/airflow/providers/google/cloud/triggers/dataproc.py
+++ b/airflow/providers/google/cloud/triggers/dataproc.py
@@ -143,7 +143,7 @@ class DataprocClusterTrigger(DataprocBaseTrigger):
             },
         )
 
-    async def run(self) -> AsyncIterator["TriggerEvent"]:
+    async def run(self) -> AsyncIterator[TriggerEvent]:
         while True:
             cluster = await self.get_async_hook().get_cluster(
                 project_id=self.project_id, region=self.region, cluster_name=self.cluster_name
@@ -261,7 +261,7 @@ class DataprocDeleteClusterTrigger(DataprocBaseTrigger):
             },
         )
 
-    async def run(self) -> AsyncIterator["TriggerEvent"]:
+    async def run(self) -> AsyncIterator[TriggerEvent]:
         """Wait until cluster is deleted completely"""
         while self.end_time > time.time():
             try:
@@ -309,7 +309,7 @@ class DataprocWorkflowTrigger(DataprocBaseTrigger):
             },
         )
 
-    async def run(self) -> AsyncIterator["TriggerEvent"]:
+    async def run(self) -> AsyncIterator[TriggerEvent]:
         hook = self.get_async_hook()
         while True:
             try:
diff --git a/airflow/providers/google/cloud/triggers/gcs.py b/airflow/providers/google/cloud/triggers/gcs.py
index 7003df5942..13a8fcbde2 100644
--- a/airflow/providers/google/cloud/triggers/gcs.py
+++ b/airflow/providers/google/cloud/triggers/gcs.py
@@ -66,8 +66,8 @@ class GCSBlobTrigger(BaseTrigger):
             },
         )
 
-    async def run(self) -> AsyncIterator["TriggerEvent"]:
-        """loop until the relevant file/folder is found."""
+    async def run(self) -> AsyncIterator[TriggerEvent]:
+        """Loop until the relevant file/folder is found."""
         try:
             hook = self._get_async_hook()
             while True:
@@ -144,7 +144,7 @@ class GCSCheckBlobUpdateTimeTrigger(BaseTrigger):
             },
         )
 
-    async def run(self) -> AsyncIterator["TriggerEvent"]:
+    async def run(self) -> AsyncIterator[TriggerEvent]:
         """Loop until the object updated time is greater than target 
datetime"""
         try:
             hook = self._get_async_hook()
diff --git a/airflow/providers/google/cloud/triggers/kubernetes_engine.py b/airflow/providers/google/cloud/triggers/kubernetes_engine.py
index cd71e0d9a3..237a88e352 100644
--- a/airflow/providers/google/cloud/triggers/kubernetes_engine.py
+++ b/airflow/providers/google/cloud/triggers/kubernetes_engine.py
@@ -157,7 +157,7 @@ class GKEOperationTrigger(BaseTrigger):
             },
         )
 
-    async def run(self) -> AsyncIterator["TriggerEvent"]:  # type: ignore[override]
+    async def run(self) -> AsyncIterator[TriggerEvent]:  # type: ignore[override]
         """Gets operation status and yields corresponding event."""
         hook = self._get_hook()
         while True:
diff --git a/airflow/providers/google/cloud/triggers/mlengine.py b/airflow/providers/google/cloud/triggers/mlengine.py
index 8d42216dcd..c221308f2a 100644
--- a/airflow/providers/google/cloud/triggers/mlengine.py
+++ b/airflow/providers/google/cloud/triggers/mlengine.py
@@ -88,7 +88,7 @@ class MLEngineStartTrainingJobTrigger(BaseTrigger):
             },
         )
 
-    async def run(self) -> AsyncIterator["TriggerEvent"]:  # type: ignore[override]
+    async def run(self) -> AsyncIterator[TriggerEvent]:  # type: ignore[override]
         """Gets current job execution status and yields a TriggerEvent"""
         hook = self._get_async_hook()
         while True:
diff --git a/airflow/providers/http/hooks/http.py b/airflow/providers/http/hooks/http.py
index ed1d6dd893..ef09713a8e 100644
--- a/airflow/providers/http/hooks/http.py
+++ b/airflow/providers/http/hooks/http.py
@@ -306,7 +306,7 @@ class HttpAsyncHook(BaseHook):
         data: dict[str, Any] | str | None = None,
         headers: dict[str, Any] | None = None,
         extra_options: dict[str, Any] | None = None,
-    ) -> "ClientResponse":
+    ) -> ClientResponse:
         r"""
         Performs an asynchronous HTTP request call
 
diff --git a/airflow/providers/microsoft/azure/triggers/data_factory.py b/airflow/providers/microsoft/azure/triggers/data_factory.py
index 04f5638683..c4c02ee9ff 100644
--- a/airflow/providers/microsoft/azure/triggers/data_factory.py
+++ b/airflow/providers/microsoft/azure/triggers/data_factory.py
@@ -67,7 +67,7 @@ class ADFPipelineRunStatusSensorTrigger(BaseTrigger):
             },
         )
 
-    async def run(self) -> AsyncIterator["TriggerEvent"]:
+    async def run(self) -> AsyncIterator[TriggerEvent]:
         """Make async connection to Azure Data Factory, polls for the pipeline 
run status"""
         hook = 
AzureDataFactoryAsyncHook(azure_data_factory_conn_id=self.azure_data_factory_conn_id)
         try:
@@ -140,7 +140,7 @@ class AzureDataFactoryTrigger(BaseTrigger):
             },
         )
 
-    async def run(self) -> AsyncIterator["TriggerEvent"]:
+    async def run(self) -> AsyncIterator[TriggerEvent]:
         """Make async connection to Azure Data Factory, polls for the pipeline 
run status"""
         hook = 
AzureDataFactoryAsyncHook(azure_data_factory_conn_id=self.azure_data_factory_conn_id)
         try:
diff --git a/airflow/providers/microsoft/azure/triggers/wasb.py b/airflow/providers/microsoft/azure/triggers/wasb.py
index 6630534c58..2b81df2bbc 100644
--- a/airflow/providers/microsoft/azure/triggers/wasb.py
+++ b/airflow/providers/microsoft/azure/triggers/wasb.py
@@ -63,7 +63,7 @@ class WasbBlobSensorTrigger(BaseTrigger):
             },
         )
 
-    async def run(self) -> AsyncIterator["TriggerEvent"]:
+    async def run(self) -> AsyncIterator[TriggerEvent]:
         """Makes async connection to Azure WASB and polls for existence of the 
given blob name."""
         blob_exists = False
         hook = WasbAsyncHook(wasb_conn_id=self.wasb_conn_id, public_read=self.public_read)
@@ -138,7 +138,7 @@ class WasbPrefixSensorTrigger(BaseTrigger):
             },
         )
 
-    async def run(self) -> AsyncIterator["TriggerEvent"]:
+    async def run(self) -> AsyncIterator[TriggerEvent]:
         """Makes async connection to Azure WASB and polls for existence of a 
blob with given prefix."""
         prefix_exists = False
         hook = WasbAsyncHook(wasb_conn_id=self.wasb_conn_id, public_read=self.public_read)
diff --git a/airflow/stats.py b/airflow/stats.py
index da31e7d913..d8eb1d0f64 100644
--- a/airflow/stats.py
+++ b/airflow/stats.py
@@ -301,7 +301,7 @@ def prepare_stat_with_tags(fn: T) -> T:
             if stat is not None and tags is not None:
                 for k, v in tags.items():
                     if self.metric_tags_validator.test(k):
-                        if all((c not in [",", "="] for c in v + k)):
+                        if all(c not in [",", "="] for c in v + k):
                             stat += f",{k}={v}"
                         else:
                             log.error("Dropping invalid tag: %s=%s.", k, v)
diff --git a/airflow/triggers/base.py b/airflow/triggers/base.py
index fa968ebe9e..314d97b0ee 100644
--- a/airflow/triggers/base.py
+++ b/airflow/triggers/base.py
@@ -60,7 +60,7 @@ class BaseTrigger(abc.ABC, LoggingMixin):
         raise NotImplementedError("Triggers must implement serialize()")
 
     @abc.abstractmethod
-    async def run(self) -> AsyncIterator["TriggerEvent"]:
+    async def run(self) -> AsyncIterator[TriggerEvent]:
         """
         Runs the trigger in an asynchronous context.
 
diff --git a/airflow/triggers/external_task.py b/airflow/triggers/external_task.py
index 883753401c..6099dc0a37 100644
--- a/airflow/triggers/external_task.py
+++ b/airflow/triggers/external_task.py
@@ -72,7 +72,7 @@ class TaskStateTrigger(BaseTrigger):
             },
         )
 
-    async def run(self) -> typing.AsyncIterator["TriggerEvent"]:
+    async def run(self) -> typing.AsyncIterator[TriggerEvent]:
         """
         Checks periodically in the database to see if the task exists, and has
         hit one of the states yet, or not.
@@ -136,7 +136,7 @@ class DagStateTrigger(BaseTrigger):
             },
         )
 
-    async def run(self) -> typing.AsyncIterator["TriggerEvent"]:
+    async def run(self) -> typing.AsyncIterator[TriggerEvent]:
         """
         Checks periodically in the database to see if the dag run exists, and has
         hit one of the states yet, or not.
diff --git a/airflow/triggers/file.py b/airflow/triggers/file.py
index e85658249f..4128b09814 100644
--- a/airflow/triggers/file.py
+++ b/airflow/triggers/file.py
@@ -58,7 +58,7 @@ class FileTrigger(BaseTrigger):
             },
         )
 
-    async def run(self) -> typing.AsyncIterator["TriggerEvent"]:
+    async def run(self) -> typing.AsyncIterator[TriggerEvent]:
         """Loop until the relevant files are found."""
         while True:
             for path in glob(self.filepath, recursive=self.recursive):
diff --git a/docs/exts/exampleinclude.py b/docs/exts/exampleinclude.py
index 30ca52057d..7a4498d4e7 100644
--- a/docs/exts/exampleinclude.py
+++ b/docs/exts/exampleinclude.py
@@ -36,7 +36,7 @@ from sphinx.util.docutils import SphinxDirective
 from sphinx.util.nodes import set_source_info
 
 try:
-    import sphinx_airflow_theme  # noqa: autoflake
+    import sphinx_airflow_theme
 
     airflow_theme_is_available = True
 except ImportError:
diff --git a/scripts/ci/pre_commit/pre_commit_insert_extras.py b/scripts/ci/pre_commit/pre_commit_insert_extras.py
index 3e08bd674d..fac926f611 100755
--- a/scripts/ci/pre_commit/pre_commit_insert_extras.py
+++ b/scripts/ci/pre_commit/pre_commit_insert_extras.py
@@ -27,8 +27,8 @@ sys.path.insert(0, str(Path(__file__).parent.resolve()))  # make sure common_pre
 sys.path.insert(0, str(AIRFLOW_SOURCES_DIR))  # make sure setup is imported from Airflow
 # flake8: noqa: F401
 
-from common_precommit_utils import insert_documentation  # isort: skip # noqa E402
-from setup import EXTRAS_DEPENDENCIES  # isort:skip # noqa
+from common_precommit_utils import insert_documentation  # isort: skip
+from setup import EXTRAS_DEPENDENCIES  # isort:skip
 
 sys.path.append(str(AIRFLOW_SOURCES_DIR))
 
diff --git a/scripts/ci/pre_commit/pre_commit_local_yml_mounts.py b/scripts/ci/pre_commit/pre_commit_local_yml_mounts.py
index 6efba5a6aa..e39fe50e0e 100755
--- a/scripts/ci/pre_commit/pre_commit_local_yml_mounts.py
+++ b/scripts/ci/pre_commit/pre_commit_local_yml_mounts.py
@@ -22,16 +22,16 @@ from pathlib import Path
 
 sys.path.insert(0, str(Path(__file__).parent.resolve()))  # make sure common_precommit_utils is imported
 
-from common_precommit_utils import AIRFLOW_SOURCES_ROOT_PATH  # isort: skip # noqa E402
+from common_precommit_utils import AIRFLOW_SOURCES_ROOT_PATH  # isort: skip
 
 sys.path.insert(0, str(AIRFLOW_SOURCES_ROOT_PATH))  # make sure setup is imported from Airflow
 sys.path.insert(
     0, str(AIRFLOW_SOURCES_ROOT_PATH / "dev" / "breeze" / "src")
 )  # make sure setup is imported from Airflow
 # flake8: noqa: F401
-from airflow_breeze.utils.docker_command_utils import VOLUMES_FOR_SELECTED_MOUNTS  # isort: skip # noqa E402
+from airflow_breeze.utils.docker_command_utils import VOLUMES_FOR_SELECTED_MOUNTS  # isort: skip
 
-from common_precommit_utils import insert_documentation  # isort: skip # noqa E402
+from common_precommit_utils import insert_documentation  # isort: skip
 
 sys.path.append(str(AIRFLOW_SOURCES_ROOT_PATH))
 
diff --git a/tests/cli/conftest.py b/tests/cli/conftest.py
index 9a9f7c0415..0a0e2ec5cb 100644
--- a/tests/cli/conftest.py
+++ b/tests/cli/conftest.py
@@ -27,7 +27,7 @@ from airflow.executors import celery_executor, celery_kubernetes_executor
 from tests.test_utils.config import conf_vars
 
 # Create custom executors here because conftest is imported first
-custom_executor_module = type(sys)("custom_executor")  # noqa
+custom_executor_module = type(sys)("custom_executor")
 custom_executor_module.CustomCeleryExecutor = type(  # type:  ignore
     "CustomCeleryExecutor", (celery_executor.CeleryExecutor,), {}
 )
diff --git a/tests/system/providers/google/cloud/bigtable/example_bigtable.py b/tests/system/providers/google/cloud/bigtable/example_bigtable.py
index b105634118..4f06c9c2fa 100644
--- a/tests/system/providers/google/cloud/bigtable/example_bigtable.py
+++ b/tests/system/providers/google/cloud/bigtable/example_bigtable.py
@@ -40,7 +40,7 @@ This DAG relies on the following environment variables:
     See https://googleapis.github.io/google-cloud-python/latest/bigtable/instance.html#google.cloud.bigtable.instance.Instance.cluster
 * CBT_TABLE_ID - desired ID of the Table
 * CBT_POKE_INTERVAL - number of seconds between every attempt of Sensor check
-"""  # noqa: E501
+"""
 from __future__ import annotations
 
 import os
