This is an automated email from the ASF dual-hosted git repository.

kaxilnaik pushed a commit to branch main
in repository https://gitbox.apache.org/repos/asf/airflow.git


The following commit(s) were added to refs/heads/main by this push:
     new c54c1a97220 Add comprehensive compatibility imports for Airflow 2 to 3 
migration (#56790)
c54c1a97220 is described below

commit c54c1a97220756f5af4914333a4ab9941f32e8b1
Author: Kaxil Naik <[email protected]>
AuthorDate: Sat Oct 18 08:30:04 2025 +0100

    Add comprehensive compatibility imports for Airflow 2 to 3 migration 
(#56790)
---
 .pre-commit-config.yaml                            |   6 +
 providers/common/compat/pyproject.toml             |   1 -
 .../airflow/providers/common/compat/lazy_compat.py | 306 ++++++++++++++
 .../providers/common/compat/lazy_compat.pyi        | 230 +++++++++++
 .../providers/common/compat/standard/operators.py  |  38 +-
 .../providers/common/compat/standard/triggers.py   |  12 +-
 .../providers/common/compat/standard/utils.py      |  12 +-
 .../tests/unit/common/compat/test_lazy_compat.py   |  54 +++
 .../ci/prek/check_common_compat_lazy_imports.py    | 438 +++++++++++++++++++++
 9 files changed, 1046 insertions(+), 51 deletions(-)

diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml
index 94d117d409b..44c06b9f19c 100644
--- a/.pre-commit-config.yaml
+++ b/.pre-commit-config.yaml
@@ -465,6 +465,12 @@ repos:
         entry: ./scripts/ci/prek/check_airflow_imports.py
           --pattern '^openlineage\.client\.(facet|run)'
           --message "You should import from 
`airflow.providers.common.compat.openlineage.facet` instead."
+      - id: check-common-compat-lazy-imports-in-sync
+        name: Check common.compat lazy_compat.pyi is in sync
+        language: python
+        files: 
^providers/common/compat/src/airflow/providers/common/compat/lazy_compat\.(py|pyi)$
+        pass_filenames: false
+        entry: ./scripts/ci/prek/check_common_compat_lazy_imports.py
       - id: check-airflow-providers-bug-report-template
         name: Sort airflow-bug-report provider list
         language: python
diff --git a/providers/common/compat/pyproject.toml 
b/providers/common/compat/pyproject.toml
index f5e9b26ee20..569e312e5ee 100644
--- a/providers/common/compat/pyproject.toml
+++ b/providers/common/compat/pyproject.toml
@@ -76,7 +76,6 @@ dev = [
     "apache-airflow-task-sdk",
     "apache-airflow-devel-common",
     "apache-airflow-providers-openlineage",
-    "apache-airflow-providers-standard",
     # Additional devel dependencies (do not remove this line and add extra 
development dependencies)
 ]
 
diff --git 
a/providers/common/compat/src/airflow/providers/common/compat/lazy_compat.py 
b/providers/common/compat/src/airflow/providers/common/compat/lazy_compat.py
new file mode 100644
index 00000000000..1d7907a1202
--- /dev/null
+++ b/providers/common/compat/src/airflow/providers/common/compat/lazy_compat.py
@@ -0,0 +1,306 @@
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+"""
+Airflow compatibility imports for seamless migration from Airflow 2 to Airflow 
3.
+
+This module provides lazy imports that automatically try Airflow 3 paths first,
+then fall back to Airflow 2 paths, enabling code to work across both versions.
+"""
+
+from __future__ import annotations
+
+from typing import Any
+
+# Rename map for classes that changed names between Airflow 2.x and 3.x
+# Format: new_name -> (new_path, old_path, old_name)
+_RENAME_MAP: dict[str, tuple[str, str, str]] = {
+    # Assets: Dataset -> Asset rename in Airflow 3.0
+    "Asset": ("airflow.sdk", "airflow.datasets", "Dataset"),
+    "AssetAlias": ("airflow.sdk", "airflow.datasets", "DatasetAlias"),
+    "AssetAll": ("airflow.sdk", "airflow.datasets", "DatasetAll"),
+    "AssetAny": ("airflow.sdk", "airflow.datasets", "DatasetAny"),
+}
+
+# Import map for classes/functions/constants
+# Format: class_name -> module_path(s)
+# - str: single module path (no fallback)
+# - tuple[str, ...]: multiple module paths (try in order, newest first)
+_IMPORT_MAP: dict[str, str | tuple[str, ...]] = {
+    # 
============================================================================
+    # Hooks
+    # 
============================================================================
+    "BaseHook": ("airflow.sdk", "airflow.hooks.base"),
+    "FSHook": ("airflow.providers.standard.hooks.filesystem", 
"airflow.hooks.filesystem"),
+    "SubprocessHook": ("airflow.providers.standard.hooks.subprocess", 
"airflow.hooks.subprocess"),
+    "PackageIndexHook": (
+        "airflow.providers.standard.hooks.package_index",
+        "airflow.hooks.package_index",
+    ),
+    # 
============================================================================
+    # Sensors
+    # 
============================================================================
+    "BaseSensorOperator": ("airflow.sdk", "airflow.sensors.base"),
+    "PokeReturnValue": ("airflow.sdk", "airflow.sensors.base"),
+    "poke_mode_only": ("airflow.sdk.bases.sensor", "airflow.sensors.base"),
+    "PythonSensor": ("airflow.providers.standard.sensors.python", 
"airflow.sensors.python"),
+    "BashSensor": ("airflow.providers.standard.sensors.bash", 
"airflow.sensors.bash"),
+    "DateTimeSensor": ("airflow.providers.standard.sensors.date_time", 
"airflow.sensors.date_time"),
+    "DateTimeSensorAsync": ("airflow.providers.standard.sensors.date_time", 
"airflow.sensors.date_time"),
+    "TimeSensor": ("airflow.providers.standard.sensors.time", 
"airflow.sensors.time_sensor"),
+    "TimeSensorAsync": ("airflow.providers.standard.sensors.time", 
"airflow.sensors.time_sensor"),
+    "TimeDeltaSensor": ("airflow.providers.standard.sensors.time_delta", 
"airflow.sensors.time_delta"),
+    "TimeDeltaSensorAsync": (
+        "airflow.providers.standard.sensors.time_delta",
+        "airflow.sensors.time_delta",
+    ),
+    "FileSensor": ("airflow.providers.standard.sensors.filesystem", 
"airflow.sensors.filesystem"),
+    "ExternalTaskSensor": (
+        "airflow.providers.standard.sensors.external_task",
+        "airflow.sensors.external_task",
+    ),
+    "ExternalTaskMarker": (
+        "airflow.providers.standard.sensors.external_task",
+        "airflow.sensors.external_task",
+    ),
+    "ExternalDagLink": ("airflow.providers.standard.sensors.external_task", 
"airflow.sensors.external_task"),
+    "DayOfWeekSensor": ("airflow.providers.standard.sensors.weekday", 
"airflow.sensors.weekday"),
+    # 
============================================================================
+    # Operators
+    # 
============================================================================
+    "BaseOperator": ("airflow.sdk", "airflow.models.baseoperator"),
+    "PythonOperator": ("airflow.providers.standard.operators.python", 
"airflow.operators.python"),
+    "BranchPythonOperator": ("airflow.providers.standard.operators.python", 
"airflow.operators.python"),
+    "ShortCircuitOperator": ("airflow.providers.standard.operators.python", 
"airflow.operators.python"),
+    "_SERIALIZERS": ("airflow.providers.standard.operators.python", 
"airflow.operators.python"),
+    "PythonVirtualenvOperator": 
("airflow.providers.standard.operators.python", "airflow.operators.python"),
+    "ExternalPythonOperator": ("airflow.providers.standard.operators.python", 
"airflow.operators.python"),
+    "BranchExternalPythonOperator": (
+        "airflow.providers.standard.operators.python",
+        "airflow.operators.python",
+    ),
+    "BranchPythonVirtualenvOperator": (
+        "airflow.providers.standard.operators.python",
+        "airflow.operators.python",
+    ),
+    "BashOperator": ("airflow.providers.standard.operators.bash", 
"airflow.operators.bash"),
+    "EmptyOperator": ("airflow.providers.standard.operators.empty", 
"airflow.operators.empty"),
+    "LatestOnlyOperator": (
+        "airflow.providers.standard.operators.latest_only",
+        "airflow.operators.latest_only",
+    ),
+    "TriggerDagRunOperator": (
+        "airflow.providers.standard.operators.trigger_dagrun",
+        "airflow.operators.trigger_dagrun",
+    ),
+    "BranchDateTimeOperator": 
("airflow.providers.standard.operators.datetime", "airflow.operators.datetime"),
+    "BranchDayOfWeekOperator": 
("airflow.providers.standard.operators.weekday", "airflow.operators.weekday"),
+    "BranchMixIn": ("airflow.providers.standard.operators.branch", 
"airflow.operators.branch"),
+    "BaseBranchOperator": ("airflow.providers.standard.operators.branch", 
"airflow.operators.branch"),
+    "SmoothOperator": ("airflow.providers.standard.operators.smooth", 
"airflow.operators.smooth"),
+    # 
============================================================================
+    # Decorators
+    # 
============================================================================
+    "task": ("airflow.sdk", "airflow.decorators"),
+    "dag": ("airflow.sdk", "airflow.decorators"),
+    "task_group": ("airflow.sdk", "airflow.decorators"),
+    "setup": ("airflow.sdk", "airflow.decorators"),
+    "teardown": ("airflow.sdk", "airflow.decorators"),
+    "TaskDecorator": ("airflow.sdk.bases.decorator", "airflow.decorators"),
+    # 
============================================================================
+    # Triggers
+    # 
============================================================================
+    "TimeDeltaTrigger": ("airflow.providers.standard.triggers.temporal", 
"airflow.triggers.temporal"),
+    # 
============================================================================
+    # Models
+    # 
============================================================================
+    "Connection": ("airflow.sdk", "airflow.models.connection"),
+    "Variable": ("airflow.sdk", "airflow.models.variable"),
+    "XCom": ("airflow.sdk.execution_time.xcom", "airflow.models.xcom"),
+    "DAG": ("airflow.sdk", "airflow.models.dag"),
+    "DagRun": "airflow.models.dagrun",
+    "TaskInstance": "airflow.models.taskinstance",
+    "Param": ("airflow.sdk", "airflow.models.param"),
+    "XComArg": ("airflow.sdk", "airflow.models.xcom_arg"),
+    "MappedOperator": "airflow.models.mappedoperator",
+    "DecoratedOperator": ("airflow.sdk.bases.decorator", 
"airflow.decorators.base"),
+    "DecoratedMappedOperator": ("airflow.sdk.bases.decorator", 
"airflow.decorators.base"),
+    # 
============================================================================
+    # Exceptions
+    # 
============================================================================
+    "AirflowException": "airflow.exceptions",
+    "AirflowSkipException": "airflow.exceptions",
+    "AirflowFailException": "airflow.exceptions",
+    "AirflowSensorTimeout": "airflow.exceptions",
+    "AirflowTaskTimeout": "airflow.exceptions",
+    "AirflowTaskTerminated": "airflow.exceptions",
+    "AirflowNotFoundException": "airflow.exceptions",
+    "AirflowConfigException": "airflow.exceptions",
+    "AirflowBadRequest": "airflow.exceptions",
+    # 
============================================================================
+    # Assets (Dataset → Asset rename in Airflow 3.0)
+    # 
============================================================================
+    # Note: Asset, AssetAlias, AssetAll, AssetAny are handled by _RENAME_MAP
+    # Metadata moved from airflow.datasets.metadata (2.x) to airflow.sdk (3.x)
+    "Metadata": ("airflow.sdk", "airflow.datasets.metadata"),
+    # 
============================================================================
+    # Notifiers
+    # 
============================================================================
+    "BaseNotifier": ("airflow.sdk", "airflow.notifications.basenotifier"),
+    # 
============================================================================
+    # Operator Links & Task Groups
+    # 
============================================================================
+    "BaseOperatorLink": ("airflow.sdk", "airflow.models.baseoperatorlink"),
+    "TaskGroup": ("airflow.sdk", "airflow.utils.task_group"),
+    # 
============================================================================
+    # Operator Utilities (chain, cross_downstream, etc.)
+    # 
============================================================================
+    "chain": ("airflow.sdk", "airflow.models.baseoperator"),
+    "chain_linear": ("airflow.sdk", "airflow.models.baseoperator"),
+    "cross_downstream": ("airflow.sdk", "airflow.models.baseoperator"),
+    # 
============================================================================
+    # Edge Modifiers & Labels
+    # 
============================================================================
+    "EdgeModifier": ("airflow.sdk", "airflow.utils.edgemodifier"),
+    "Label": ("airflow.sdk", "airflow.utils.edgemodifier"),
+    # 
============================================================================
+    # State Enums
+    # 
============================================================================
+    "DagRunState": ("airflow.sdk", "airflow.utils.state"),
+    "TaskInstanceState": ("airflow.sdk", "airflow.utils.state"),
+    "TriggerRule": ("airflow.sdk", "airflow.utils.trigger_rule"),
+    "WeightRule": ("airflow.sdk", "airflow.utils.weight_rule"),
+    # 
============================================================================
+    # IO & Storage
+    # 
============================================================================
+    "ObjectStoragePath": ("airflow.sdk", "airflow.io.path"),
+    # 
============================================================================
+    # Template Utilities
+    # 
============================================================================
+    "literal": ("airflow.sdk.definitions.template", "airflow.utils.template"),
+    # 
============================================================================
+    # Context & Utilities
+    # 
============================================================================
+    "Context": ("airflow.sdk", "airflow.utils.context"),
+    "get_current_context": ("airflow.sdk", "airflow.operators.python"),
+    "get_parsing_context": ("airflow.sdk", 
"airflow.utils.dag_parsing_context"),
+    # 
============================================================================
+    # Python Virtualenv Utilities
+    # 
============================================================================
+    "prepare_virtualenv": (
+        "airflow.providers.standard.utils.python_virtualenv",
+        "airflow.utils.python_virtualenv",
+    ),
+    "write_python_script": (
+        "airflow.providers.standard.utils.python_virtualenv",
+        "airflow.utils.python_virtualenv",
+    ),
+    # 
============================================================================
+    # XCom & Task Communication
+    # 
============================================================================
+    "XCOM_RETURN_KEY": "airflow.models.xcom",
+}
+
+# Module map: module_name -> module_path(s)
+# For entire modules that have been moved (e.g., timezone)
+# Usage: from airflow.providers.common.compat.lazy_compat import timezone
+_MODULE_MAP: dict[str, str | tuple[str, ...]] = {
+    "timezone": ("airflow.sdk.timezone", "airflow.utils.timezone"),
+    "io": ("airflow.sdk.io", "airflow.io"),
+}
+
+
+def __getattr__(name: str) -> Any:
+    """
+    Lazy import compatibility layer.
+
+    Tries to import from Airflow 3 paths first, falls back to Airflow 2 paths.
+    This enables code to work across both Airflow 2.x and 3.x versions.
+
+    Supports:
+    - Renamed classes from _RENAME_MAP: classes that changed names (e.g., 
Dataset -> Asset)
+    - Attributes from _IMPORT_MAP: classes, functions, constants
+    - Modules from _MODULE_MAP: entire modules that have moved
+
+    :param name: Name of the class/function/module to import
+    :return: The imported class/function/module
+    :raises AttributeError: If the name is not in any map
+    :raises ImportError: If all import paths fail
+    """
+    # Check if this is a renamed class
+    if name in _RENAME_MAP:
+        new_path, old_path, old_name = _RENAME_MAP[name]
+
+        rename_error: ImportError | ModuleNotFoundError | AttributeError | 
None = None
+        # Try new path with new name first (Airflow 3.x)
+        try:
+            module = __import__(new_path, fromlist=[name])
+            return getattr(module, name)
+        except (ImportError, ModuleNotFoundError, AttributeError) as e:
+            rename_error = e
+
+        # Fall back to old path with old name (Airflow 2.x)
+        try:
+            module = __import__(old_path, fromlist=[old_name])
+            return getattr(module, old_name)
+        except (ImportError, ModuleNotFoundError, AttributeError):
+            if rename_error:
+                raise ImportError(
+                    f"Could not import {name!r} from {new_path!r} or 
{old_name!r} from {old_path!r}"
+                ) from rename_error
+            raise
+
+    # Check if this is a module import
+    if name in _MODULE_MAP:
+        import importlib
+
+        paths = _MODULE_MAP[name]
+        if isinstance(paths, str):
+            paths = (paths,)
+
+        module_error: ImportError | ModuleNotFoundError | None = None
+        for module_path in paths:
+            try:
+                return importlib.import_module(module_path)
+            except (ImportError, ModuleNotFoundError) as e:
+                module_error = e
+                continue
+
+        if module_error:
+            raise ImportError(f"Could not import module {name!r} from any of: 
{paths}") from module_error
+
+    # Check if this is an attribute import
+    if name in _IMPORT_MAP:
+        paths = _IMPORT_MAP[name]
+        if isinstance(paths, str):
+            paths = (paths,)
+
+        attr_error: ImportError | ModuleNotFoundError | AttributeError | None 
= None
+        for module_path in paths:
+            try:
+                module = __import__(module_path, fromlist=[name])
+                return getattr(module, name)
+            except (ImportError, ModuleNotFoundError, AttributeError) as e:
+                attr_error = e
+                continue
+
+        if attr_error:
+            raise ImportError(f"Could not import {name!r} from any of: 
{paths}") from attr_error
+
+    raise AttributeError(f"module {__name__!r} has no attribute {name!r}")
+
+
+__all__ = list(_RENAME_MAP.keys()) + list(_IMPORT_MAP.keys()) + 
list(_MODULE_MAP.keys())
diff --git 
a/providers/common/compat/src/airflow/providers/common/compat/lazy_compat.pyi 
b/providers/common/compat/src/airflow/providers/common/compat/lazy_compat.pyi
new file mode 100644
index 00000000000..bacf20bde38
--- /dev/null
+++ 
b/providers/common/compat/src/airflow/providers/common/compat/lazy_compat.pyi
@@ -0,0 +1,230 @@
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+"""
+Type stubs for IDE autocomplete - always uses Airflow 3 paths.
+
+This file is auto-generated from lazy_compat.py. Do not edit manually;
+run scripts/ci/prek/check_common_compat_lazy_imports.py --generate instead.
+"""
+
+import airflow.sdk.io as io
+import airflow.sdk.timezone as timezone
+from airflow.exceptions import (
+    AirflowBadRequest as AirflowBadRequest,
+    AirflowConfigException as AirflowConfigException,
+    AirflowException as AirflowException,
+    AirflowFailException as AirflowFailException,
+    AirflowNotFoundException as AirflowNotFoundException,
+    AirflowSensorTimeout as AirflowSensorTimeout,
+    AirflowSkipException as AirflowSkipException,
+    AirflowTaskTerminated as AirflowTaskTerminated,
+    AirflowTaskTimeout as AirflowTaskTimeout,
+)
+from airflow.models.dagrun import DagRun as DagRun
+from airflow.models.mappedoperator import MappedOperator as MappedOperator
+from airflow.models.taskinstance import TaskInstance as TaskInstance
+from airflow.models.xcom import XCOM_RETURN_KEY as XCOM_RETURN_KEY
+from airflow.providers.standard.hooks.filesystem import FSHook as FSHook
+from airflow.providers.standard.hooks.package_index import PackageIndexHook as 
PackageIndexHook
+from airflow.providers.standard.hooks.subprocess import SubprocessHook as 
SubprocessHook
+from airflow.providers.standard.operators.bash import BashOperator as 
BashOperator
+from airflow.providers.standard.operators.branch import (
+    BaseBranchOperator as BaseBranchOperator,
+    BranchMixIn as BranchMixIn,
+)
+from airflow.providers.standard.operators.datetime import 
BranchDateTimeOperator as BranchDateTimeOperator
+from airflow.providers.standard.operators.empty import EmptyOperator as 
EmptyOperator
+from airflow.providers.standard.operators.latest_only import 
LatestOnlyOperator as LatestOnlyOperator
+from airflow.providers.standard.operators.python import (
+    _SERIALIZERS as _SERIALIZERS,
+    BranchExternalPythonOperator as BranchExternalPythonOperator,
+    BranchPythonOperator as BranchPythonOperator,
+    BranchPythonVirtualenvOperator as BranchPythonVirtualenvOperator,
+    ExternalPythonOperator as ExternalPythonOperator,
+    PythonOperator as PythonOperator,
+    PythonVirtualenvOperator as PythonVirtualenvOperator,
+    ShortCircuitOperator as ShortCircuitOperator,
+)
+from airflow.providers.standard.operators.smooth import SmoothOperator as 
SmoothOperator
+from airflow.providers.standard.operators.trigger_dagrun import 
TriggerDagRunOperator as TriggerDagRunOperator
+from airflow.providers.standard.operators.weekday import 
BranchDayOfWeekOperator as BranchDayOfWeekOperator
+from airflow.providers.standard.sensors.bash import BashSensor as BashSensor
+from airflow.providers.standard.sensors.date_time import (
+    DateTimeSensor as DateTimeSensor,
+    DateTimeSensorAsync as DateTimeSensorAsync,
+)
+from airflow.providers.standard.sensors.external_task import (
+    ExternalDagLink as ExternalDagLink,
+    ExternalTaskMarker as ExternalTaskMarker,
+    ExternalTaskSensor as ExternalTaskSensor,
+)
+from airflow.providers.standard.sensors.filesystem import FileSensor as 
FileSensor
+from airflow.providers.standard.sensors.python import PythonSensor as 
PythonSensor
+from airflow.providers.standard.sensors.time import (
+    TimeSensor as TimeSensor,
+    TimeSensorAsync as TimeSensorAsync,
+)
+from airflow.providers.standard.sensors.time_delta import (
+    TimeDeltaSensor as TimeDeltaSensor,
+    TimeDeltaSensorAsync as TimeDeltaSensorAsync,
+)
+from airflow.providers.standard.sensors.weekday import DayOfWeekSensor as 
DayOfWeekSensor
+from airflow.providers.standard.triggers.temporal import TimeDeltaTrigger as 
TimeDeltaTrigger
+from airflow.providers.standard.utils.python_virtualenv import (
+    prepare_virtualenv as prepare_virtualenv,
+    write_python_script as write_python_script,
+)
+from airflow.sdk import (
+    DAG as DAG,
+    Asset as Asset,
+    AssetAlias as AssetAlias,
+    AssetAll as AssetAll,
+    AssetAny as AssetAny,
+    BaseHook as BaseHook,
+    BaseNotifier as BaseNotifier,
+    BaseOperator as BaseOperator,
+    BaseOperatorLink as BaseOperatorLink,
+    BaseSensorOperator as BaseSensorOperator,
+    Connection as Connection,
+    Context as Context,
+    DagRunState as DagRunState,
+    EdgeModifier as EdgeModifier,
+    Label as Label,
+    Metadata as Metadata,
+    ObjectStoragePath as ObjectStoragePath,
+    Param as Param,
+    PokeReturnValue as PokeReturnValue,
+    TaskGroup as TaskGroup,
+    TaskInstanceState as TaskInstanceState,
+    TriggerRule as TriggerRule,
+    Variable as Variable,
+    WeightRule as WeightRule,
+    XComArg as XComArg,
+    chain as chain,
+    chain_linear as chain_linear,
+    cross_downstream as cross_downstream,
+    dag as dag,
+    get_current_context as get_current_context,
+    get_parsing_context as get_parsing_context,
+    setup as setup,
+    task as task,
+    task_group as task_group,
+    teardown as teardown,
+)
+from airflow.sdk.bases.decorator import (
+    DecoratedMappedOperator as DecoratedMappedOperator,
+    DecoratedOperator as DecoratedOperator,
+    TaskDecorator as TaskDecorator,
+)
+from airflow.sdk.bases.sensor import poke_mode_only as poke_mode_only
+from airflow.sdk.definitions.template import literal as literal
+from airflow.sdk.execution_time.xcom import XCom as XCom
+
+__all__: list[str] = [
+    "AirflowBadRequest",
+    "AirflowConfigException",
+    "AirflowException",
+    "AirflowFailException",
+    "AirflowNotFoundException",
+    "AirflowSensorTimeout",
+    "AirflowSkipException",
+    "AirflowTaskTerminated",
+    "AirflowTaskTimeout",
+    "Asset",
+    "AssetAlias",
+    "AssetAll",
+    "AssetAny",
+    "BaseBranchOperator",
+    "BaseHook",
+    "BaseNotifier",
+    "BaseOperator",
+    "BaseOperatorLink",
+    "BaseSensorOperator",
+    "BashOperator",
+    "BashSensor",
+    "BranchDateTimeOperator",
+    "BranchDayOfWeekOperator",
+    "BranchExternalPythonOperator",
+    "BranchMixIn",
+    "BranchPythonOperator",
+    "BranchPythonVirtualenvOperator",
+    "Connection",
+    "Context",
+    "DAG",
+    "DagRun",
+    "DagRunState",
+    "DateTimeSensor",
+    "DateTimeSensorAsync",
+    "DayOfWeekSensor",
+    "DecoratedMappedOperator",
+    "DecoratedOperator",
+    "EdgeModifier",
+    "EmptyOperator",
+    "ExternalDagLink",
+    "ExternalPythonOperator",
+    "ExternalTaskMarker",
+    "ExternalTaskSensor",
+    "FSHook",
+    "FileSensor",
+    "Label",
+    "LatestOnlyOperator",
+    "MappedOperator",
+    "Metadata",
+    "ObjectStoragePath",
+    "PackageIndexHook",
+    "Param",
+    "PokeReturnValue",
+    "PythonOperator",
+    "PythonSensor",
+    "PythonVirtualenvOperator",
+    "ShortCircuitOperator",
+    "SmoothOperator",
+    "SubprocessHook",
+    "TaskDecorator",
+    "TaskGroup",
+    "TaskInstance",
+    "TaskInstanceState",
+    "TimeDeltaSensor",
+    "TimeDeltaSensorAsync",
+    "TimeDeltaTrigger",
+    "TimeSensor",
+    "TimeSensorAsync",
+    "TriggerDagRunOperator",
+    "TriggerRule",
+    "Variable",
+    "WeightRule",
+    "XCOM_RETURN_KEY",
+    "XCom",
+    "XComArg",
+    "_SERIALIZERS",
+    "chain",
+    "chain_linear",
+    "cross_downstream",
+    "dag",
+    "get_current_context",
+    "get_parsing_context",
+    "io",
+    "literal",
+    "poke_mode_only",
+    "prepare_virtualenv",
+    "setup",
+    "task",
+    "task_group",
+    "teardown",
+    "timezone",
+    "write_python_script",
+]
diff --git 
a/providers/common/compat/src/airflow/providers/common/compat/standard/operators.py
 
b/providers/common/compat/src/airflow/providers/common/compat/standard/operators.py
index b3d35f1aa14..4190a1a0ace 100644
--- 
a/providers/common/compat/src/airflow/providers/common/compat/standard/operators.py
+++ 
b/providers/common/compat/src/airflow/providers/common/compat/standard/operators.py
@@ -17,35 +17,13 @@
 
 from __future__ import annotations
 
-from typing import TYPE_CHECKING
-
-if TYPE_CHECKING:
-    from airflow.providers.standard.operators.python import (
-        _SERIALIZERS,
-        PythonOperator,
-        ShortCircuitOperator,
-        get_current_context,
-    )
-else:
-    try:
-        from airflow.providers.standard.operators.python import (
-            _SERIALIZERS,
-            PythonOperator,
-            ShortCircuitOperator,
-            get_current_context,
-        )
-    except ModuleNotFoundError:
-        from airflow.operators.python import (
-            _SERIALIZERS,
-            PythonOperator,
-            ShortCircuitOperator,
-        )
-
-    try:
-        from airflow.sdk import get_current_context
-    except (ImportError, ModuleNotFoundError):
-        from airflow.providers.standard.operators.python import 
get_current_context
-
-from airflow.providers.common.compat.version_compat import BaseOperator
+# Re-export from lazy_compat for backward compatibility
+from airflow.providers.common.compat.lazy_compat import (
+    _SERIALIZERS,
+    BaseOperator,
+    PythonOperator,
+    ShortCircuitOperator,
+    get_current_context,
+)
 
 __all__ = ["BaseOperator", "PythonOperator", "_SERIALIZERS", 
"ShortCircuitOperator", "get_current_context"]
diff --git 
a/providers/common/compat/src/airflow/providers/common/compat/standard/triggers.py
 
b/providers/common/compat/src/airflow/providers/common/compat/standard/triggers.py
index 1f7f524e886..8c697ff84cf 100644
--- 
a/providers/common/compat/src/airflow/providers/common/compat/standard/triggers.py
+++ 
b/providers/common/compat/src/airflow/providers/common/compat/standard/triggers.py
@@ -17,15 +17,7 @@
 
 from __future__ import annotations
 
-from typing import TYPE_CHECKING
-
-if TYPE_CHECKING:
-    from airflow.providers.standard.triggers.temporal import TimeDeltaTrigger
-else:
-    try:
-        from airflow.providers.standard.triggers.temporal import 
TimeDeltaTrigger
-    except ModuleNotFoundError:
-        from airflow.triggers.temporal import TimeDeltaTrigger
-
+# Re-export from lazy_compat for backward compatibility
+from airflow.providers.common.compat.lazy_compat import TimeDeltaTrigger
 
 __all__ = ["TimeDeltaTrigger"]
diff --git 
a/providers/common/compat/src/airflow/providers/common/compat/standard/utils.py 
b/providers/common/compat/src/airflow/providers/common/compat/standard/utils.py
index bfa263d1be9..badad0e8ae0 100644
--- 
a/providers/common/compat/src/airflow/providers/common/compat/standard/utils.py
+++ 
b/providers/common/compat/src/airflow/providers/common/compat/standard/utils.py
@@ -17,15 +17,7 @@
 
 from __future__ import annotations
 
-from typing import TYPE_CHECKING
-
-if TYPE_CHECKING:
-    from airflow.providers.standard.utils.python_virtualenv import 
prepare_virtualenv, write_python_script
-else:
-    try:
-        from airflow.providers.standard.utils.python_virtualenv import 
prepare_virtualenv, write_python_script
-    except ModuleNotFoundError:
-        from airflow.utils.python_virtualenv import prepare_virtualenv, 
write_python_script
-
+# Re-export from lazy_compat for backward compatibility
+from airflow.providers.common.compat.lazy_compat import prepare_virtualenv, 
write_python_script
 
 __all__ = ["write_python_script", "prepare_virtualenv"]
diff --git 
a/providers/common/compat/tests/unit/common/compat/test_lazy_compat.py 
b/providers/common/compat/tests/unit/common/compat/test_lazy_compat.py
new file mode 100644
index 00000000000..c48a9360c34
--- /dev/null
+++ b/providers/common/compat/tests/unit/common/compat/test_lazy_compat.py
@@ -0,0 +1,54 @@
+#!/usr/bin/env python3
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+
+from __future__ import annotations
+
+import pytest
+
+
def test_all_compat_imports_work():
    """
    Test that all items in _IMPORT_MAP can be successfully imported.

    For each item, validates that at least one of the specified import paths works,
    ensuring the fallback mechanism is functional.
    """
    from airflow.providers.common.compat import lazy_compat

    failed_imports = []

    for attr_name in lazy_compat.__all__:
        try:
            resolved = getattr(lazy_compat, attr_name)
            assert resolved is not None, f"{attr_name} imported as None"
        except (ImportError, AttributeError) as exc:
            failed_imports.append((attr_name, str(exc)))

    if failed_imports:
        # Report every failure at once rather than stopping at the first one.
        details = "".join(f"  - {attr_name}: {error}\n" for attr_name, error in failed_imports)
        pytest.fail("The following imports failed:\n" + details)
+
+
def test_invalid_import_raises_attribute_error():
    """Test that importing non-existent attribute raises AttributeError."""
    from airflow.providers.common.compat import lazy_compat

    # The lazy __getattr__ must reject unknown names, not return a dummy.
    with pytest.raises(AttributeError, match="has no attribute 'NonExistentClass'"):
        getattr(lazy_compat, "NonExistentClass")
diff --git a/scripts/ci/prek/check_common_compat_lazy_imports.py 
b/scripts/ci/prek/check_common_compat_lazy_imports.py
new file mode 100755
index 00000000000..2319c6d7de6
--- /dev/null
+++ b/scripts/ci/prek/check_common_compat_lazy_imports.py
@@ -0,0 +1,438 @@
+#!/usr/bin/env python3
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+
+"""
+Check and generate lazy_compat.pyi from lazy_compat.py.
+
+This script can be used as:
+1. Pre-commit hook - checks if .pyi is in sync with _IMPORT_MAP
+2. Manual generation - generates .pyi file from _IMPORT_MAP
+
+Usage:
+    python scripts/ci/prek/check_common_compat_lazy_imports.py                
# Check only (pre-commit)
+    python scripts/ci/prek/check_common_compat_lazy_imports.py --generate     
# Generate .pyi file
+    python scripts/ci/prek/check_common_compat_lazy_imports.py --validate     
# Generate with import validation
+"""
+
+from __future__ import annotations
+
+import ast
+import sys
+from pathlib import Path
+
+
def _extract_module_level_literal(py_file: Path, map_name: str) -> dict:
    """
    Return the literal dict assigned to *map_name* at module level of *py_file*.

    Handles both plain assignments (``_X = {...}``) and annotated assignments
    (``_X: dict[...] = {...}``). Shared implementation for the three
    ``extract_*_map`` helpers below, which previously duplicated this walk.

    :param py_file: Path to lazy_compat.py
    :param map_name: Name of the module-level mapping to look up
    :raises ValueError: if no module-level assignment to *map_name* is found
    """
    # Only inspect top-level statements so a name bound inside a function or
    # class body cannot be picked up by mistake.
    tree = ast.parse(py_file.read_text())

    for node in tree.body:
        if isinstance(node, ast.AnnAssign) and isinstance(node.target, ast.Name):
            # Annotated form: `_IMPORT_MAP: dict[...] = {...}`
            if node.target.id == map_name and node.value:
                return ast.literal_eval(node.value)
        elif isinstance(node, ast.Assign):
            # Plain form: `_IMPORT_MAP = {...}` (possibly chained targets)
            for target in node.targets:
                if isinstance(target, ast.Name) and target.id == map_name:
                    return ast.literal_eval(node.value)

    raise ValueError(f"Could not find {map_name} in lazy_compat.py")


def extract_import_map(py_file: Path) -> dict[str, str | tuple[str, ...]]:
    """
    Extract _IMPORT_MAP from lazy_compat.py.

    :param py_file: Path to lazy_compat.py
    :return: Dictionary mapping class names to module paths
    :raises ValueError: if _IMPORT_MAP is not found
    """
    return _extract_module_level_literal(py_file, "_IMPORT_MAP")


def extract_rename_map(py_file: Path) -> dict[str, tuple[str, str, str]]:
    """
    Extract _RENAME_MAP from lazy_compat.py.

    :param py_file: Path to lazy_compat.py
    :return: Dictionary mapping new class names to (new_path, old_path, old_name)
    :raises ValueError: if _RENAME_MAP is not found
    """
    return _extract_module_level_literal(py_file, "_RENAME_MAP")


def extract_module_map(py_file: Path) -> dict[str, str | tuple[str, ...]]:
    """
    Extract _MODULE_MAP from lazy_compat.py.

    :param py_file: Path to lazy_compat.py
    :return: Dictionary mapping module names to module paths
    :raises ValueError: if _MODULE_MAP is not found
    """
    return _extract_module_level_literal(py_file, "_MODULE_MAP")
+
+
+def generate_pyi_content(
+    rename_map: dict[str, tuple[str, str, str]],
+    import_map: dict[str, str | tuple[str, ...]],
+    module_map: dict[str, str | tuple[str, ...]],
+) -> str:
+    """
+    Generate .pyi stub content from rename, import and module maps.
+
+    :param rename_map: Dictionary mapping new names to (new_path, old_path, 
old_name)
+    :param import_map: Dictionary mapping class names to module paths
+    :param module_map: Dictionary mapping module names to module paths
+    :return: Content for the .pyi file
+    """
+    header = '''# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+"""
+Type stubs for IDE autocomplete - always uses Airflow 3 paths.
+
+This file is auto-generated from lazy_compat.py.
+    - run scripts/ci/prek/check_common_compat_lazy_imports.py --generate 
instead.
+"""
+
+'''
+
+    imports_by_module: dict[str, list[str]] = {}
+
+    # Process renamed imports from _RENAME_MAP (use new names and new paths 
for type hints)
+    for new_name, (new_path, _old_path, _old_name) in 
sorted(rename_map.items()):
+        if new_path not in imports_by_module:
+            imports_by_module[new_path] = []
+        imports_by_module[new_path].append(new_name)
+
+    # Process regular imports from _IMPORT_MAP
+    for name, paths in sorted(import_map.items()):
+        module_path = paths[0] if isinstance(paths, tuple) else paths
+        if module_path not in imports_by_module:
+            imports_by_module[module_path] = []
+        imports_by_module[module_path].append(name)
+
+    lines = []
+
+    # Generate regular imports (always use multiline for ruff compatibility)
+    for module_path in sorted(imports_by_module.keys()):
+        names = sorted(imports_by_module[module_path])
+        # Always use multiline format for ruff
+        lines.append(f"from {module_path} import (")
+        for i, name in enumerate(names):
+            comma = "," if i < len(names) - 1 else ""
+            lines.append(f"    {name} as {name}{comma}")
+        lines.append(")")
+
+    # Generate module imports (import the module itself)
+    for module_name, paths in sorted(module_map.items()):
+        module_path = paths[0] if isinstance(paths, tuple) else paths
+        lines.append(f"import {module_path} as {module_name}")
+
+    # Generate __all__ (include renamed, attributes and modules)
+    all_names = sorted(list(rename_map.keys()) + list(import_map.keys()) + 
list(module_map.keys()))
+    lines.append("")
+    lines.append(f"__all__: list[str] = {all_names!r}")
+
+    return header + "\n".join(lines) + "\n"
+
+
def _attr_import_error(module_path: str, attr_name: str) -> str | None:
    """Probe one attribute import; return ``None`` on success, else an error string."""
    import importlib

    try:
        module = importlib.import_module(module_path)
    except (ImportError, ModuleNotFoundError) as e:
        return str(e)
    if hasattr(module, attr_name):
        return None
    return f"Module {module_path} does not have attribute {attr_name}"


def _module_import_error(module_path: str) -> str | None:
    """Probe one module import; return ``None`` on success, else an error string."""
    import importlib

    try:
        importlib.import_module(module_path)
    except (ImportError, ModuleNotFoundError) as e:
        return str(e)
    return None


def validate_imports(
    rename_map: dict[str, tuple[str, str, str]],
    import_map: dict[str, str | tuple[str, ...]],
    module_map: dict[str, str | tuple[str, ...]],
    skip_on_error: bool = False,
) -> list[str]:
    """
    Validate that all imports in the maps are actually importable.

    This is optional and only runs if skip_on_error=False. It requires Airflow
    and all providers to be installed, so it's meant for manual validation only.

    :param rename_map: The rename map to validate
    :param import_map: The import map to validate
    :param module_map: The module map to validate
    :param skip_on_error: If True, skip validation and return empty list
    :return: List of errors (empty if all valid or skipped)
    """
    if skip_on_error:
        print("\nSkipping import validation (requires full Airflow installation)")
        return []

    errors = []
    print("\nValidating imports (requires Airflow + providers installed)...")

    # Renamed imports: try the Airflow 3 name/path first, then the 2.x fallback.
    for new_name, (new_path, old_path, old_name) in rename_map.items():
        last_error = _attr_import_error(new_path, new_name)
        if last_error is not None:
            # New location failed; the reported error is from the fallback attempt.
            last_error = _attr_import_error(old_path, old_name)
        if last_error is not None:
            errors.append(
                f"  ✗ {new_name} (renamed from {old_name}): Could not import. Last error: {last_error}"
            )
        else:
            print(f"  ✓ {new_name} (renamed from {old_name})")

    # Attribute imports: succeeding from any one candidate path is enough.
    for name, paths in import_map.items():
        candidates = (paths,) if isinstance(paths, str) else paths
        importable = False
        last_error = None
        for module_path in candidates:
            last_error = _attr_import_error(module_path, name)
            if last_error is None:
                importable = True
                break
        if not importable:
            errors.append(f"  ✗ {name}: Could not import from any path. Last error: {last_error}")
        else:
            print(f"  ✓ {name}")

    # Module imports: the module itself just has to be importable.
    for module_name, paths in module_map.items():
        candidates = (paths,) if isinstance(paths, str) else paths
        importable = False
        last_error = None
        for module_path in candidates:
            last_error = _module_import_error(module_path)
            if last_error is None:
                importable = True
                break
        if not importable:
            errors.append(
                f"  ✗ {module_name} (module): Could not import from any path. Last error: {last_error}"
            )
        else:
            print(f"  ✓ {module_name} (module)")

    return errors
+
+
def main() -> int:
    """Generate and check lazy_compat.pyi.

    Modes, selected by flags in ``sys.argv``:

    * no flags:     check-only (pre-commit) — verify the .pyi is in sync
    * ``--generate``: regenerate the .pyi file from the maps
    * ``--validate``: regenerate, first validating every import
      (requires a full Airflow + providers installation)

    :return: process exit code — 0 on success, 1 on any failure
    """
    # This script lives in scripts/ci/prek/, so the repo root is 4 levels up.
    repo_root = Path(__file__).parent.parent.parent.parent
    lazy_compat_py = (
        repo_root
        / "providers"
        / "common"
        / "compat"
        / "src"
        / "airflow"
        / "providers"
        / "common"
        / "compat"
        / "lazy_compat.py"
    )
    lazy_compat_pyi = lazy_compat_py.with_suffix(".pyi")

    if not lazy_compat_py.exists():
        print(f"ERROR: Could not find {lazy_compat_py}")
        return 1

    # --validate implies --generate (it always rewrites the stub afterwards).
    should_generate = "--generate" in sys.argv or "--validate" in sys.argv
    should_validate = "--validate" in sys.argv

    # Extract the three maps; each failure is reported separately so the
    # pre-commit output pinpoints which map is malformed.
    try:
        rename_map = extract_rename_map(lazy_compat_py)
        print(f"Found {len(rename_map)} renames in _RENAME_MAP")
    except Exception as e:
        print(f"ERROR: Failed to extract _RENAME_MAP: {e}")
        return 1

    try:
        import_map = extract_import_map(lazy_compat_py)
        print(f"Found {len(import_map)} imports in _IMPORT_MAP")
    except Exception as e:
        print(f"ERROR: Failed to extract _IMPORT_MAP: {e}")
        return 1

    try:
        module_map = extract_module_map(lazy_compat_py)
        print(f"Found {len(module_map)} modules in _MODULE_MAP")
    except Exception as e:
        print(f"ERROR: Failed to extract _MODULE_MAP: {e}")
        return 1

    total_imports = len(rename_map) + len(import_map) + len(module_map)

    # Import validation is a no-op unless --validate was requested.
    errors = validate_imports(rename_map, import_map, module_map, skip_on_error=not should_validate)
    if errors:
        print("\n❌ Import validation failed:")
        for error in errors:
            print(error)
        print("\nPlease fix the import paths in _IMPORT_MAP and _MODULE_MAP and try again.")
        return 1

    if should_validate:
        print(f"\n✓ All {total_imports} imports validated successfully")

    # Check if .pyi exists and is in sync
    if not should_generate:
        # Check-only mode (pre-commit)
        if not lazy_compat_pyi.exists():
            print(f"ERROR: {lazy_compat_pyi.name} does not exist")
            print("Run: python scripts/ci/prek/check_common_compat_lazy_imports.py --generate")
            return 1

        pyi_content = lazy_compat_pyi.read_text()

        # Count total imports in .pyi (each "X as X" pattern + "import X as Y", excluding __all__)
        import re

        # Match "import X.Y.Z as module_name" pattern (standalone module imports)
        module_import_pattern = r"^import\s+[\w.]+\s+as\s+(\w+)"
        pyi_module_imports = set(re.findall(module_import_pattern, pyi_content, re.MULTILINE))

        # Remove __all__ and standalone import lines to avoid false matches
        pyi_for_attr_search = pyi_content
        pyi_for_attr_search = re.sub(r"__all__:.*", "", pyi_for_attr_search, flags=re.DOTALL)
        pyi_for_attr_search = re.sub(
            r"^import\s+[\w.]+\s+as\s+\w+.*$", "", pyi_for_attr_search, flags=re.MULTILINE
        )

        # Match all "Name as Name" patterns
        attr_import_pattern = r"(\w+)\s+as\s+\1"
        pyi_attr_imports = set(re.findall(attr_import_pattern, pyi_for_attr_search))

        # Combine all expected imports
        map_renames = set(rename_map.keys())
        map_attrs = set(import_map.keys())
        map_modules = set(module_map.keys())
        all_expected_attrs = map_renames | map_attrs

        # Check for discrepancies
        missing_attrs = all_expected_attrs - pyi_attr_imports
        extra_attrs = pyi_attr_imports - all_expected_attrs
        missing_modules = map_modules - pyi_module_imports
        extra_modules = pyi_module_imports - map_modules

        if not (missing_attrs or extra_attrs or missing_modules or extra_modules):
            print(f"✓ lazy_compat.pyi is in sync with lazy_compat.py ({total_imports} imports)")
            return 0

        # Out of sync — report every category of drift, capping attribute
        # listings at 10 entries to keep pre-commit output readable.
        if missing_attrs:
            print(
                f"ERROR: lazy_compat.pyi is missing {len(missing_attrs)} attributes from "
                "_RENAME_MAP/_IMPORT_MAP:"
            )
            for name in sorted(missing_attrs)[:10]:
                print(f"  - {name}")
            if len(missing_attrs) > 10:
                print(f"  ... and {len(missing_attrs) - 10} more")

        if extra_attrs:
            print(
                f"ERROR: lazy_compat.pyi has {len(extra_attrs)} extra attributes not in "
                "_RENAME_MAP/_IMPORT_MAP:"
            )
            for name in sorted(extra_attrs)[:10]:
                print(f"  + {name}")
            if len(extra_attrs) > 10:
                print(f"  ... and {len(extra_attrs) - 10} more")

        if missing_modules:
            print(f"ERROR: lazy_compat.pyi is missing {len(missing_modules)} modules from _MODULE_MAP:")
            for name in sorted(missing_modules):
                print(f"  - {name} (module)")

        if extra_modules:
            print(f"ERROR: lazy_compat.pyi has {len(extra_modules)} extra modules not in _MODULE_MAP:")
            for name in sorted(extra_modules):
                print(f"  + {name} (module)")

        print("\nRun: python scripts/ci/prek/check_common_compat_lazy_imports.py --generate")
        return 1

    # Generate mode
    new_pyi_content = generate_pyi_content(rename_map, import_map, module_map)
    lazy_compat_pyi.write_text(new_pyi_content)
    print(f"✓ Generated {lazy_compat_pyi.name} with {total_imports} imports")
    return 0
+
+
if __name__ == "__main__":
    # Propagate main()'s return value as the process exit code.
    raise SystemExit(main())

Reply via email to