This is an automated email from the ASF dual-hosted git repository.

potiuk pushed a commit to branch v3-1-test
in repository https://gitbox.apache.org/repos/asf/airflow.git


The following commit(s) were added to refs/heads/v3-1-test by this push:
     new 66b6c9b807d [v3-1-test] Remove global from breeze 
PROVIDER_DEPENDENCIES (#58857) (#58865)
66b6c9b807d is described below

commit 66b6c9b807d1248775d918c737b736819fc5b1f1
Author: Jens Scheffler <[email protected]>
AuthorDate: Sun Nov 30 10:56:04 2025 +0100

    [v3-1-test] Remove global from breeze PROVIDER_DEPENDENCIES (#58857) 
(#58865)
    
    * Remove global from breeze PROVIDER_DEPENDENCIES
    
    * Consolidate constants
    
    * Refactor provider package tooling from global constants to provider 
dependencies
    
    * Add type annotation for mypy
    (cherry picked from commit 9e6c15fd18bf14d24953070a9ba39228f8b8eb2a)
---
 .../src/airflow_breeze/commands/main_command.py    |  2 +-
 .../commands/release_management_commands.py        |  6 +-
 .../src/airflow_breeze/commands/sbom_commands.py   |  4 +-
 dev/breeze/src/airflow_breeze/global_constants.py  | 95 +--------------------
 .../src/airflow_breeze/utils/md5_build_check.py    |  7 +-
 dev/breeze/src/airflow_breeze/utils/packages.py    | 20 +++--
 dev/breeze/src/airflow_breeze/utils/path_utils.py  |  1 +
 .../airflow_breeze/utils/provider_dependencies.py  | 98 ++++++++++++++++++++--
 .../src/airflow_breeze/utils/selective_checks.py   |  6 +-
 devel-common/src/tests_common/pytest_plugin.py     | 16 ++--
 10 files changed, 124 insertions(+), 131 deletions(-)

diff --git a/dev/breeze/src/airflow_breeze/commands/main_command.py 
b/dev/breeze/src/airflow_breeze/commands/main_command.py
index 1b10f91d00c..f07eebbfd44 100644
--- a/dev/breeze/src/airflow_breeze/commands/main_command.py
+++ b/dev/breeze/src/airflow_breeze/commands/main_command.py
@@ -44,12 +44,12 @@ from airflow_breeze.commands.common_options import (
     option_verbose,
 )
 from airflow_breeze.configure_rich_click import click
-from airflow_breeze.global_constants import 
generate_provider_dependencies_if_needed
 from airflow_breeze.utils.click_utils import BreezeGroup
 from airflow_breeze.utils.confirm import Answer, user_confirm
 from airflow_breeze.utils.console import get_console
 from airflow_breeze.utils.docker_command_utils import remove_docker_networks, 
remove_docker_volumes
 from airflow_breeze.utils.path_utils import AIRFLOW_HOME_PATH, BUILD_CACHE_PATH
+from airflow_breeze.utils.provider_dependencies import 
generate_provider_dependencies_if_needed
 from airflow_breeze.utils.run_utils import run_command
 from airflow_breeze.utils.shared_options import get_dry_run
 
diff --git 
a/dev/breeze/src/airflow_breeze/commands/release_management_commands.py 
b/dev/breeze/src/airflow_breeze/commands/release_management_commands.py
index e4086e86c00..c512103bbe0 100644
--- a/dev/breeze/src/airflow_breeze/commands/release_management_commands.py
+++ b/dev/breeze/src/airflow_breeze/commands/release_management_commands.py
@@ -166,9 +166,9 @@ from airflow_breeze.utils.path_utils import (
     cleanup_python_generated_files,
 )
 from airflow_breeze.utils.provider_dependencies import (
-    DEPENDENCIES,
     generate_providers_metadata_for_provider,
     get_all_constraint_files_and_airflow_releases,
+    get_provider_dependencies,
     get_related_providers,
     load_constraints,
 )
@@ -2437,7 +2437,7 @@ def generate_issue_content_providers(
         suffix: str
 
     if not provider_distributions:
-        provider_distributions = list(DEPENDENCIES.keys())
+        provider_distributions = list(get_provider_dependencies().keys())
     with ci_group("Generates GitHub issue content with people who can test 
it"):
         if excluded_pr_list:
             excluded_prs = [int(pr) for pr in excluded_pr_list.split(",")]
@@ -2878,7 +2878,7 @@ def generate_providers_metadata(
         airflow_release_dates=airflow_release_dates,
         current_metadata=current_metadata,
     )
-    package_ids = DEPENDENCIES.keys()
+    package_ids = get_provider_dependencies().keys()
     with Pool() as pool:
         results = pool.map(
             partial_generate_providers_metadata,
diff --git a/dev/breeze/src/airflow_breeze/commands/sbom_commands.py 
b/dev/breeze/src/airflow_breeze/commands/sbom_commands.py
index 111995db18d..ef50c1f41a8 100644
--- a/dev/breeze/src/airflow_breeze/commands/sbom_commands.py
+++ b/dev/breeze/src/airflow_breeze/commands/sbom_commands.py
@@ -44,7 +44,6 @@ from airflow_breeze.global_constants import (
     AIRFLOW_PYTHON_COMPATIBILITY_MATRIX,
     ALL_HISTORICAL_PYTHON_VERSIONS,
     DEVEL_DEPS_PATH,
-    PROVIDER_DEPENDENCIES,
 )
 from airflow_breeze.utils.cdxgen import (
     CHECK_DOCS,
@@ -90,6 +89,7 @@ from airflow_breeze.utils.projects_google_spreadsheet import (
     read_metadata_from_google_spreadsheet,
     write_sbom_information_to_google_spreadsheet,
 )
+from airflow_breeze.utils.provider_dependencies import 
get_provider_dependencies
 from airflow_breeze.utils.recording import generating_command_images
 from airflow_breeze.utils.shared_options import get_dry_run, get_verbose
 
@@ -624,7 +624,7 @@ def build_all_airflow_images(
 @option_historical_python_versions
 @click.option(
     "--provider-id",
-    type=BetterChoice(list(PROVIDER_DEPENDENCIES.keys())),
+    type=BetterChoice(list(get_provider_dependencies().keys())),
     required=False,
     help="Provider id to generate the requirements for",
 )
diff --git a/dev/breeze/src/airflow_breeze/global_constants.py 
b/dev/breeze/src/airflow_breeze/global_constants.py
index ad0cc1c8d83..a14b4beb0ce 100644
--- a/dev/breeze/src/airflow_breeze/global_constants.py
+++ b/dev/breeze/src/airflow_breeze/global_constants.py
@@ -20,20 +20,15 @@ Global constants that are used by all other Breeze 
components.
 
 from __future__ import annotations
 
-import json
 import platform
-import subprocess
-from collections.abc import Generator
 from enum import Enum
 from pathlib import Path
-from threading import Lock
 
 from airflow_breeze.utils.functools_cache import clearable_cache
 from airflow_breeze.utils.host_info_utils import Architecture
 from airflow_breeze.utils.path_utils import (
     AIRFLOW_CORE_SOURCES_PATH,
     AIRFLOW_CTL_SOURCES_PATH,
-    AIRFLOW_PYPROJECT_TOML_FILE_PATH,
     AIRFLOW_ROOT_PATH,
     AIRFLOW_TASK_SDK_SOURCES_PATH,
 )
@@ -647,103 +642,15 @@ def get_airflow_extras():
 
 # Initialize integrations
 PROVIDER_RUNTIME_DATA_SCHEMA_PATH = AIRFLOW_CORE_SOURCES_PATH / "airflow" / 
"provider_info.schema.json"
-AIRFLOW_GENERATED_PROVIDER_DEPENDENCIES_PATH = AIRFLOW_ROOT_PATH / "generated" 
/ "provider_dependencies.json"
-AIRFLOW_GENERATED_PROVIDER_DEPENDENCIES_HASH_PATH = (
-    AIRFLOW_ROOT_PATH / "generated" / "provider_dependencies.json.sha256sum"
-)
-
-ALL_PYPROJECT_TOML_FILES = []
-
-
-def get_all_provider_pyproject_toml_provider_yaml_files() -> Generator[Path, 
None, None]:
-    pyproject_toml_content = 
AIRFLOW_PYPROJECT_TOML_FILE_PATH.read_text().splitlines()
-    in_workspace = False
-    for line in pyproject_toml_content:
-        trimmed_line = line.strip()
-        if not in_workspace and trimmed_line.startswith("[tool.uv.workspace]"):
-            in_workspace = True
-        elif in_workspace:
-            if trimmed_line.startswith("#"):
-                continue
-            if trimmed_line.startswith('"'):
-                path = trimmed_line.split('"')[1]
-                ALL_PYPROJECT_TOML_FILES.append(AIRFLOW_ROOT_PATH / path / 
"pyproject.toml")
-                if trimmed_line.startswith('"providers/'):
-                    yield AIRFLOW_ROOT_PATH / path / "pyproject.toml"
-                    yield AIRFLOW_ROOT_PATH / path / "provider.yaml"
-            elif trimmed_line.startswith("]"):
-                break
 
+ALL_PYPROJECT_TOML_FILES: list[Path] = []
 
-_regenerate_provider_deps_lock = Lock()
-_has_regeneration_of_providers_run = False
 
 UPDATE_PROVIDER_DEPENDENCIES_SCRIPT = (
     AIRFLOW_ROOT_PATH / "scripts" / "ci" / "prek" / 
"update_providers_dependencies.py"
 )
 
 
-def regenerate_provider_dependencies_once() -> None:
-    """Run provider dependencies regeneration once per interpreter execution.
-
-    This function is safe to call multiple times from different modules; the
-    underlying command will only run once. If the underlying command fails the
-    CalledProcessError is propagated to the caller.
-    """
-    global _has_regeneration_of_providers_run
-    with _regenerate_provider_deps_lock:
-        if _has_regeneration_of_providers_run:
-            return
-        # Run the regeneration command from the repository root to ensure 
correct
-        # relative paths if the script expects to be run from 
AIRFLOW_ROOT_PATH.
-        subprocess.check_call(
-            ["uv", "run", UPDATE_PROVIDER_DEPENDENCIES_SCRIPT.as_posix()], 
cwd=AIRFLOW_ROOT_PATH
-        )
-        _has_regeneration_of_providers_run = True
-
-
-def _calculate_provider_deps_hash():
-    import hashlib
-
-    hasher = hashlib.sha256()
-    for file in sorted(get_all_provider_pyproject_toml_provider_yaml_files()):
-        hasher.update(file.read_bytes())
-    return hasher.hexdigest()
-
-
-def _run_provider_dependencies_generation(calculated_hash=None) -> dict:
-    if calculated_hash is None:
-        calculated_hash = _calculate_provider_deps_hash()
-    
AIRFLOW_GENERATED_PROVIDER_DEPENDENCIES_HASH_PATH.write_text(calculated_hash)
-    # We use regular print there as rich console might not be initialized yet 
here
-    print("Regenerating provider dependencies file")
-    regenerate_provider_dependencies_once()
-    return json.loads(AIRFLOW_GENERATED_PROVIDER_DEPENDENCIES_PATH.read_text())
-
-
-if not AIRFLOW_GENERATED_PROVIDER_DEPENDENCIES_PATH.exists():
-    PROVIDER_DEPENDENCIES = _run_provider_dependencies_generation()
-else:
-    PROVIDER_DEPENDENCIES = 
json.loads(AIRFLOW_GENERATED_PROVIDER_DEPENDENCIES_PATH.read_text())
-
-
-def generate_provider_dependencies_if_needed():
-    regenerate_provider_dependencies = False
-    if (
-        not AIRFLOW_GENERATED_PROVIDER_DEPENDENCIES_PATH.exists()
-        or not AIRFLOW_GENERATED_PROVIDER_DEPENDENCIES_HASH_PATH.exists()
-    ):
-        regenerate_provider_dependencies = True
-        calculated_hash = _calculate_provider_deps_hash()
-    else:
-        calculated_hash = _calculate_provider_deps_hash()
-        if calculated_hash.strip() != 
AIRFLOW_GENERATED_PROVIDER_DEPENDENCIES_HASH_PATH.read_text().strip():
-            regenerate_provider_dependencies = True
-    if regenerate_provider_dependencies:
-        global PROVIDER_DEPENDENCIES
-        PROVIDER_DEPENDENCIES = 
_run_provider_dependencies_generation(calculated_hash)
-
-
 DEVEL_DEPS_PATH = AIRFLOW_ROOT_PATH / "generated" / "devel_deps.txt"
 
 
diff --git a/dev/breeze/src/airflow_breeze/utils/md5_build_check.py 
b/dev/breeze/src/airflow_breeze/utils/md5_build_check.py
index 6c0c00c4621..c39a3f39ceb 100644
--- a/dev/breeze/src/airflow_breeze/utils/md5_build_check.py
+++ b/dev/breeze/src/airflow_breeze/utils/md5_build_check.py
@@ -25,13 +25,10 @@ import os
 from pathlib import Path
 from typing import TYPE_CHECKING
 
-from airflow_breeze.global_constants import (
-    ALL_PYPROJECT_TOML_FILES,
-    FILES_FOR_REBUILD_CHECK,
-    regenerate_provider_dependencies_once,
-)
+from airflow_breeze.global_constants import ALL_PYPROJECT_TOML_FILES, 
FILES_FOR_REBUILD_CHECK
 from airflow_breeze.utils.console import get_console
 from airflow_breeze.utils.path_utils import AIRFLOW_ROOT_PATH
+from airflow_breeze.utils.provider_dependencies import 
regenerate_provider_dependencies_once
 from airflow_breeze.utils.shared_options import get_verbose
 
 if TYPE_CHECKING:
diff --git a/dev/breeze/src/airflow_breeze/utils/packages.py 
b/dev/breeze/src/airflow_breeze/utils/packages.py
index 4e5285e3d06..2587c8f089a 100644
--- a/dev/breeze/src/airflow_breeze/utils/packages.py
+++ b/dev/breeze/src/airflow_breeze/utils/packages.py
@@ -35,7 +35,6 @@ from rich.syntax import Syntax
 from airflow_breeze.global_constants import (
     ALLOWED_PYTHON_MAJOR_MINOR_VERSIONS,
     DEFAULT_PYTHON_MAJOR_MINOR_VERSION,
-    PROVIDER_DEPENDENCIES,
     PROVIDER_RUNTIME_DATA_SCHEMA_PATH,
     REGULAR_DOC_PACKAGES,
 )
@@ -47,7 +46,6 @@ from airflow_breeze.utils.path_utils import (
     BREEZE_SOURCES_PATH,
     DOCS_ROOT,
     PREVIOUS_AIRFLOW_PROVIDERS_NS_PACKAGE_PATH,
-    PROVIDER_DEPENDENCIES_JSON_PATH,
 )
 from airflow_breeze.utils.publish_docs_helpers import (
     PROVIDER_DATA_SCHEMA_PATH,
@@ -317,7 +315,10 @@ def get_available_distributions(
     :param include_all_providers: whether "all-providers" should be included 
ni the list.
 
     """
-    provider_dependencies = 
json.loads(PROVIDER_DEPENDENCIES_JSON_PATH.read_text())
+    # Need lazy import to prevent circular dependencies
+    from airflow_breeze.utils.provider_dependencies import 
get_provider_dependencies
+
+    provider_dependencies = get_provider_dependencies()
 
     valid_states = set()
     if include_not_ready:
@@ -657,7 +658,11 @@ def convert_optional_dependencies_to_table(
 def get_cross_provider_dependent_packages(provider_id: str) -> list[str]:
     if provider_id in get_removed_provider_ids():
         return []
-    return PROVIDER_DEPENDENCIES[provider_id]["cross-providers-deps"]
+
+    # Need lazy import to prevent circular dependencies
+    from airflow_breeze.utils.provider_dependencies import 
get_provider_dependencies
+
+    return get_provider_dependencies()[provider_id]["cross-providers-deps"]
 
 
 def get_license_files(provider_id: str) -> str:
@@ -859,6 +864,9 @@ def get_latest_provider_tag(provider_id: str, suffix: str) 
-> str:
 def regenerate_pyproject_toml(
     context: dict[str, Any], provider_details: ProviderPackageDetails, 
version_suffix: str | None
 ):
+    # Need lazy import to prevent circular dependencies
+    from airflow_breeze.utils.provider_dependencies import 
get_provider_dependencies
+
     get_pyproject_toml_path = provider_details.root_provider_path / 
"pyproject.toml"
     # we want to preserve comments in dependencies - both required and 
additional,
     # so we should not really parse the toml file but extract dependencies "as 
is" in text form and pass
@@ -919,7 +927,9 @@ def regenerate_pyproject_toml(
     context["AIRFLOW_DOC_URL"] = (
         "https://airflow.staged.apache.org"; if version_suffix else 
"https://airflow.apache.org";
     )
-    cross_provider_ids = 
set(PROVIDER_DEPENDENCIES.get(provider_details.provider_id)["cross-providers-deps"])
+    cross_provider_ids = set(
+        
get_provider_dependencies()[provider_details.provider_id]["cross-providers-deps"]
+    )
     cross_provider_dependencies = []
     # Add cross-provider dependencies to the optional dependencies if they are 
missing
     for provider_id in sorted(cross_provider_ids):
diff --git a/dev/breeze/src/airflow_breeze/utils/path_utils.py 
b/dev/breeze/src/airflow_breeze/utils/path_utils.py
index 2a04c30c0d2..098d7776e20 100644
--- a/dev/breeze/src/airflow_breeze/utils/path_utils.py
+++ b/dev/breeze/src/airflow_breeze/utils/path_utils.py
@@ -271,6 +271,7 @@ BUILD_CACHE_PATH = AIRFLOW_ROOT_PATH / ".build"
 GENERATED_PATH = AIRFLOW_ROOT_PATH / "generated"
 CONSTRAINTS_CACHE_PATH = BUILD_CACHE_PATH / "constraints"
 PROVIDER_DEPENDENCIES_JSON_PATH = GENERATED_PATH / "provider_dependencies.json"
+PROVIDER_DEPENDENCIES_JSON_HASH_PATH = GENERATED_PATH / 
"provider_dependencies.json.sha256sum"
 PROVIDER_METADATA_JSON_PATH = GENERATED_PATH / "provider_metadata.json"
 UI_CACHE_PATH = BUILD_CACHE_PATH / "ui"
 AIRFLOW_TMP_PATH = AIRFLOW_ROOT_PATH / "tmp"
diff --git a/dev/breeze/src/airflow_breeze/utils/provider_dependencies.py 
b/dev/breeze/src/airflow_breeze/utils/provider_dependencies.py
index 9d6e48acaed..7bdb374e82d 100644
--- a/dev/breeze/src/airflow_breeze/utils/provider_dependencies.py
+++ b/dev/breeze/src/airflow_breeze/utils/provider_dependencies.py
@@ -20,20 +20,104 @@ from __future__ import annotations
 import json
 import re
 import shutil
+import subprocess
 import sys
-from functools import partial
+from collections.abc import Generator
+from functools import cache, partial
 from multiprocessing import Pool
+from pathlib import Path
+from threading import Lock
 from typing import NamedTuple
 
-from airflow_breeze.global_constants import ALL_HISTORICAL_PYTHON_VERSIONS, 
PYTHON_TO_MIN_AIRFLOW_MAPPING
+from airflow_breeze.global_constants import (
+    ALL_HISTORICAL_PYTHON_VERSIONS,
+    ALL_PYPROJECT_TOML_FILES,
+    PYTHON_TO_MIN_AIRFLOW_MAPPING,
+    UPDATE_PROVIDER_DEPENDENCIES_SCRIPT,
+)
 from airflow_breeze.utils.ci_group import ci_group
 from airflow_breeze.utils.console import get_console
 from airflow_breeze.utils.github import download_constraints_file, 
get_active_airflow_versions, get_tag_date
 from airflow_breeze.utils.packages import get_provider_distributions_metadata
-from airflow_breeze.utils.path_utils import CONSTRAINTS_CACHE_PATH, 
PROVIDER_DEPENDENCIES_JSON_PATH
+from airflow_breeze.utils.path_utils import (
+    AIRFLOW_PYPROJECT_TOML_FILE_PATH,
+    AIRFLOW_ROOT_PATH,
+    CONSTRAINTS_CACHE_PATH,
+    PROVIDER_DEPENDENCIES_JSON_HASH_PATH,
+    PROVIDER_DEPENDENCIES_JSON_PATH,
+)
 from airflow_breeze.utils.shared_options import get_verbose
 
-DEPENDENCIES = json.loads(PROVIDER_DEPENDENCIES_JSON_PATH.read_text())
+_regenerate_provider_deps_lock = Lock()
+
+
+def get_all_provider_pyproject_toml_provider_yaml_files() -> Generator[Path, 
None, None]:
+    pyproject_toml_content = 
AIRFLOW_PYPROJECT_TOML_FILE_PATH.read_text().splitlines()
+    in_workspace = False
+    for line in pyproject_toml_content:
+        trimmed_line = line.strip()
+        if not in_workspace and trimmed_line.startswith("[tool.uv.workspace]"):
+            in_workspace = True
+        elif in_workspace:
+            if trimmed_line.startswith("#"):
+                continue
+            if trimmed_line.startswith('"'):
+                path = trimmed_line.split('"')[1]
+                ALL_PYPROJECT_TOML_FILES.append(AIRFLOW_ROOT_PATH / path / 
"pyproject.toml")
+                if trimmed_line.startswith('"providers/'):
+                    yield AIRFLOW_ROOT_PATH / path / "pyproject.toml"
+                    yield AIRFLOW_ROOT_PATH / path / "provider.yaml"
+            elif trimmed_line.startswith("]"):
+                break
+
+
+@cache  # Note: using functools.cache to avoid multiple dumps in the same run
+def regenerate_provider_dependencies_once() -> None:
+    """Run provider dependencies regeneration once per interpreter execution.
+
+    This function is safe to call multiple times from different modules; the
+    underlying command will only run once. If the underlying command fails the
+    CalledProcessError is propagated to the caller.
+    """
+    with _regenerate_provider_deps_lock:
+        # Run the regeneration command from the repository root to ensure 
correct
+        # relative paths if the script expects to be run from 
AIRFLOW_ROOT_PATH.
+        subprocess.check_call(
+            ["uv", "run", UPDATE_PROVIDER_DEPENDENCIES_SCRIPT.as_posix()], 
cwd=AIRFLOW_ROOT_PATH
+        )
+
+
+def _calculate_provider_deps_hash():
+    import hashlib
+
+    hasher = hashlib.sha256()
+    for file in sorted(get_all_provider_pyproject_toml_provider_yaml_files()):
+        hasher.update(file.read_bytes())
+    return hasher.hexdigest()
+
+
+@cache
+def get_provider_dependencies() -> dict:
+    if not PROVIDER_DEPENDENCIES_JSON_PATH.exists():
+        calculated_hash = _calculate_provider_deps_hash()
+        PROVIDER_DEPENDENCIES_JSON_HASH_PATH.write_text(calculated_hash)
+        # We use regular print there as rich console might not be initialized 
yet here
+        print("Regenerating provider dependencies file")
+        regenerate_provider_dependencies_once()
+    return json.loads(PROVIDER_DEPENDENCIES_JSON_PATH.read_text())
+
+
+def generate_provider_dependencies_if_needed():
+    if not PROVIDER_DEPENDENCIES_JSON_PATH.exists() or not 
PROVIDER_DEPENDENCIES_JSON_HASH_PATH.exists():
+        get_provider_dependencies.cache_clear()
+        get_provider_dependencies()
+    else:
+        calculated_hash = _calculate_provider_deps_hash()
+        if calculated_hash.strip() != 
PROVIDER_DEPENDENCIES_JSON_HASH_PATH.read_text().strip():
+            # Force re-generation
+            PROVIDER_DEPENDENCIES_JSON_PATH.unlink(missing_ok=True)
+            get_provider_dependencies.cache_clear()
+            get_provider_dependencies()
 
 
 def get_related_providers(
@@ -54,12 +138,12 @@ def get_related_providers(
     related_providers = set()
     if upstream_dependencies:
         # Providers that use this provider
-        for provider, provider_info in DEPENDENCIES.items():
+        for provider, provider_info in get_provider_dependencies().items():
             if provider_to_check in provider_info["cross-providers-deps"]:
                 related_providers.add(provider)
     # and providers we use directly
     if downstream_dependencies:
-        for dep_name in 
DEPENDENCIES[provider_to_check]["cross-providers-deps"]:
+        for dep_name in 
get_provider_dependencies()[provider_to_check]["cross-providers-deps"]:
             related_providers.add(dep_name)
     return related_providers
 
@@ -204,7 +288,7 @@ def generate_providers_metadata_for_provider(
     current_metadata: dict[str, dict[str, dict[str, str]]],
 ) -> dict[str, dict[str, str]]:
     get_console().print(f"[info]Generating metadata for {provider_id}")
-    provider_yaml_dict = get_provider_distributions_metadata().get(provider_id)
+    provider_yaml_dict = get_provider_distributions_metadata()[provider_id]
     provider_metadata: dict[str, dict[str, str]] = {}
     package_name = "apache-airflow-providers-" + provider_id.replace(".", "-")
     provider_versions = list(reversed(provider_yaml_dict["versions"]))
diff --git a/dev/breeze/src/airflow_breeze/utils/selective_checks.py 
b/dev/breeze/src/airflow_breeze/utils/selective_checks.py
index 76e27683c61..9e71370e247 100644
--- a/dev/breeze/src/airflow_breeze/utils/selective_checks.py
+++ b/dev/breeze/src/airflow_breeze/utils/selective_checks.py
@@ -71,7 +71,7 @@ from airflow_breeze.utils.path_utils import (
     AIRFLOW_PROVIDERS_ROOT_PATH,
     AIRFLOW_ROOT_PATH,
 )
-from airflow_breeze.utils.provider_dependencies import DEPENDENCIES, 
get_related_providers
+from airflow_breeze.utils.provider_dependencies import 
get_provider_dependencies, get_related_providers
 from airflow_breeze.utils.run_utils import run_command
 
 ALL_VERSIONS_LABEL = "all versions"
@@ -1387,7 +1387,7 @@ class SelectiveChecks:
     @cached_property
     def excluded_providers_as_string(self) -> str:
         providers_to_exclude = defaultdict(list)
-        for provider, provider_info in DEPENDENCIES.items():
+        for provider, provider_info in get_provider_dependencies().items():
             if "excluded-python-versions" in provider_info:
                 for python_version in 
provider_info["excluded-python-versions"]:
                     providers_to_exclude[python_version].append(provider)
@@ -1439,7 +1439,7 @@ class SelectiveChecks:
             if provider == "Providers":
                 all_providers_affected = True
             elif provider is not None:
-                if provider not in DEPENDENCIES:
+                if provider not in get_provider_dependencies():
                     suspended_providers.add(provider)
                 else:
                     affected_providers.add(provider)
diff --git a/devel-common/src/tests_common/pytest_plugin.py 
b/devel-common/src/tests_common/pytest_plugin.py
index 2ea881a259e..ebe160137b3 100644
--- a/devel-common/src/tests_common/pytest_plugin.py
+++ b/devel-common/src/tests_common/pytest_plugin.py
@@ -152,8 +152,8 @@ AIRFLOW_PYPROJECT_TOML_FILE_PATH = AIRFLOW_ROOT_PATH / 
"pyproject.toml"
 AIRFLOW_CORE_SOURCES_PATH = AIRFLOW_ROOT_PATH / "airflow-core" / "src"
 AIRFLOW_CORE_TESTS_PATH = AIRFLOW_ROOT_PATH / "airflow-core" / "tests"
 AIRFLOW_PROVIDERS_ROOT_PATH = AIRFLOW_ROOT_PATH / "providers"
-AIRFLOW_GENERATED_PROVIDER_DEPENDENCIES_PATH = AIRFLOW_ROOT_PATH / "generated" 
/ "provider_dependencies.json"
-AIRFLOW_GENERATED_PROVIDER_DEPENDENCIES_HASH_PATH = (
+PROVIDER_DEPENDENCIES_JSON_PATH = AIRFLOW_ROOT_PATH / "generated" / 
"provider_dependencies.json"
+PROVIDER_DEPENDENCIES_JSON_HASH_PATH = (
     AIRFLOW_ROOT_PATH / "generated" / "provider_dependencies.json.sha256sum"
 )
 UPDATE_PROVIDER_DEPENDENCIES_SCRIPT = (
@@ -194,19 +194,13 @@ def _calculate_provider_deps_hash():
     return hasher.hexdigest()
 
 
-if (
-    not AIRFLOW_GENERATED_PROVIDER_DEPENDENCIES_PATH.exists()
-    or not AIRFLOW_GENERATED_PROVIDER_DEPENDENCIES_HASH_PATH.exists()
-):
+if not PROVIDER_DEPENDENCIES_JSON_PATH.exists() or not 
PROVIDER_DEPENDENCIES_JSON_HASH_PATH.exists():
     subprocess.check_call(["uv", "run", 
UPDATE_PROVIDER_DEPENDENCIES_SCRIPT.as_posix()])
 else:
     calculated_provider_deps_hash = _calculate_provider_deps_hash()
-    if (
-        calculated_provider_deps_hash.strip()
-        != 
AIRFLOW_GENERATED_PROVIDER_DEPENDENCIES_HASH_PATH.read_text().strip()
-    ):
+    if calculated_provider_deps_hash.strip() != 
PROVIDER_DEPENDENCIES_JSON_HASH_PATH.read_text().strip():
         subprocess.check_call(["uv", "run", 
UPDATE_PROVIDER_DEPENDENCIES_SCRIPT.as_posix()])
-        
AIRFLOW_GENERATED_PROVIDER_DEPENDENCIES_HASH_PATH.write_text(calculated_provider_deps_hash)
+        
PROVIDER_DEPENDENCIES_JSON_HASH_PATH.write_text(calculated_provider_deps_hash)
 # End of copied code from breeze
 
 os.environ["AIRFLOW__CORE__ALLOWED_DESERIALIZATION_CLASSES"] = 
"airflow.*\nunit.*\n"

Reply via email to