This is an automated email from the ASF dual-hosted git repository.

uranusjr pushed a commit to branch main
in repository https://gitbox.apache.org/repos/asf/airflow.git


The following commit(s) were added to refs/heads/main by this push:
     new b59b2057f7 Enable `TCH004` and `TCH005` rules (#35475)
b59b2057f7 is described below

commit b59b2057f7c9aacb7f52a983b31b28e7b1b576d2
Author: Andrey Anshin <[email protected]>
AuthorDate: Tue Nov 7 06:59:05 2023 +0400

    Enable `TCH004` and `TCH005` rules (#35475)
---
 airflow/dag_processing/processor.py         | 3 +--
 airflow/models/__init__.py                  | 2 ++
 airflow/serialization/pydantic/job.py       | 9 +++++----
 airflow/serialization/serialized_objects.py | 4 ++--
 pyproject.toml                              | 6 +++---
 5 files changed, 13 insertions(+), 11 deletions(-)

diff --git a/airflow/dag_processing/processor.py b/airflow/dag_processing/processor.py
index 5242dad956..5209e6bfd2 100644
--- a/airflow/dag_processing/processor.py
+++ b/airflow/dag_processing/processor.py
@@ -40,7 +40,7 @@ from airflow.callbacks.callback_requests import (
 from airflow.configuration import conf
 from airflow.exceptions import AirflowException, TaskNotFound
 from airflow.models import SlaMiss, errors
-from airflow.models.dag import DagModel
+from airflow.models.dag import DAG, DagModel
 from airflow.models.dagbag import DagBag
 from airflow.models.dagrun import DagRun as DR
 from airflow.models.dagwarning import DagWarning, DagWarningType
@@ -63,7 +63,6 @@ if TYPE_CHECKING:
     from sqlalchemy.orm.session import Session
 
     from airflow.callbacks.callback_requests import CallbackRequest
-    from airflow.models.dag import DAG
     from airflow.models.operator import Operator
 
 
diff --git a/airflow/models/__init__.py b/airflow/models/__init__.py
index 4d15dfe5c7..cb08730f53 100644
--- a/airflow/models/__init__.py
+++ b/airflow/models/__init__.py
@@ -91,6 +91,7 @@ __lazy_imports = {
     "DagPickle": "airflow.models.dagpickle",
     "DagRun": "airflow.models.dagrun",
     "DagTag": "airflow.models.dag",
+    "DagWarning": "airflow.models.dagwarning",
     "DbCallbackRequest": "airflow.models.db_callback_request",
     "ImportError": "airflow.models.errors",
     "Log": "airflow.models.log",
@@ -120,6 +121,7 @@ if TYPE_CHECKING:
     from airflow.models.dagbag import DagBag
     from airflow.models.dagpickle import DagPickle
     from airflow.models.dagrun import DagRun
+    from airflow.models.dagwarning import DagWarning
     from airflow.models.db_callback_request import DbCallbackRequest
     from airflow.models.errors import ImportError
     from airflow.models.log import Log
diff --git a/airflow/serialization/pydantic/job.py b/airflow/serialization/pydantic/job.py
index 39627f9a99..eb92411090 100644
--- a/airflow/serialization/pydantic/job.py
+++ b/airflow/serialization/pydantic/job.py
@@ -16,16 +16,13 @@
 # under the License.
 import datetime
 from functools import cached_property
-from typing import TYPE_CHECKING, Optional
+from typing import Optional
 
 from pydantic import BaseModel as BaseModelPydantic
 
 from airflow.executors.executor_loader import ExecutorLoader
 from airflow.jobs.base_job_runner import BaseJobRunner
 
-if TYPE_CHECKING:
-    from airflow.jobs.job import Job
-
 
 def check_runner_initialized(job_runner: Optional[BaseJobRunner], job_type: str) -> BaseJobRunner:
     if job_runner is None:
@@ -59,11 +56,15 @@ class JobPydantic(BaseModelPydantic):
 
     @cached_property
     def heartrate(self) -> float:
+        from airflow.jobs.job import Job
+
         assert self.job_type is not None
         return Job._heartrate(self.job_type)
 
     def is_alive(self, grace_multiplier=2.1) -> bool:
         """Is this job currently alive."""
+        from airflow.jobs.job import Job
+
         return Job._is_alive(
             job_type=self.job_type,
             heartrate=self.heartrate,
diff --git a/airflow/serialization/serialized_objects.py b/airflow/serialization/serialized_objects.py
index 31c0ec6d72..ef35d843b4 100644
--- a/airflow/serialization/serialized_objects.py
+++ b/airflow/serialization/serialized_objects.py
@@ -80,9 +80,9 @@ if TYPE_CHECKING:
 
     HAS_KUBERNETES: bool
     try:
-        from kubernetes.client import models as k8s
+        from kubernetes.client import models as k8s  # noqa: TCH004
 
-        from airflow.providers.cncf.kubernetes.pod_generator import PodGenerator
+        from airflow.providers.cncf.kubernetes.pod_generator import PodGenerator  # noqa: TCH004
     except ImportError:
         pass
 
diff --git a/pyproject.toml b/pyproject.toml
index 268bf6b71a..9722a0d7ac 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -59,8 +59,7 @@ extend-select = [
     "D403",
     "D412",
     "D419",
-    "TCH001",  # typing-only-first-party-import
-    "TCH002",  # typing-only-third-party-import
+    "TCH",  # Rules around TYPE_CHECKING blocks
     "TID251",  # Specific modules or module members that may not be imported 
or accessed
     "TID253",  # Ban certain modules from being imported at module level
 ]
@@ -71,6 +70,7 @@ extend-ignore = [
     "D214",
     "D215",
     "E731",
+    "TCH003",  # Do not move imports from stdlib to TYPE_CHECKING block
 ]
 
 namespace-packages = ["airflow/providers"]
@@ -113,7 +113,7 @@ required-imports = ["from __future__ import annotations"]
 combine-as-imports = true
 
 [tool.ruff.per-file-ignores]
-"airflow/models/__init__.py" = ["F401"]
+"airflow/models/__init__.py" = ["F401", "TCH004"]
 "airflow/models/sqla_models.py" = ["F401"]
 
 # The test_python.py is needed because adding __future__.annotations breaks runtime checks that are

Reply via email to