This is an automated email from the ASF dual-hosted git repository.
rahulvats pushed a commit to branch main
in repository https://gitbox.apache.org/repos/asf/airflow.git
The following commit(s) were added to refs/heads/main by this push:
new c8aa74a19e3 Source conf from `airflow.sdk.configuration.conf` for apache providers (#59966)
c8aa74a19e3 is described below
commit c8aa74a19e39be75800ebd13289bf0713b9718b4
Author: Ankit Chaurasia <[email protected]>
AuthorDate: Wed Dec 31 21:23:01 2025 +0545
Source conf from `airflow.sdk.configuration.conf` for apache providers (#59966)
* Migrate apache providers to use airflow.sdk.configuration.conf
* Add 'use next version' comment for common-compat dependency
---
providers/apache/beam/pyproject.toml | 2 +-
.../apache/beam/src/airflow/providers/apache/beam/operators/beam.py | 3 +--
providers/apache/hdfs/pyproject.toml | 2 +-
.../apache/hdfs/src/airflow/providers/apache/hdfs/hooks/webhdfs.py | 3 +--
.../hdfs/src/airflow/providers/apache/hdfs/log/hdfs_task_handler.py | 2 +-
providers/apache/hive/pyproject.toml | 2 +-
.../apache/hive/src/airflow/providers/apache/hive/hooks/hive.py | 2 +-
.../apache/hive/src/airflow/providers/apache/hive/operators/hive.py | 2 +-
providers/apache/hive/tests/unit/apache/hive/operators/test_hive.py | 2 +-
providers/apache/livy/pyproject.toml | 2 +-
.../apache/livy/src/airflow/providers/apache/livy/operators/livy.py | 3 +--
providers/apache/spark/pyproject.toml | 2 +-
.../spark/src/airflow/providers/apache/spark/hooks/spark_submit.py | 3 +--
.../src/airflow/providers/apache/spark/operators/spark_submit.py | 3 +--
providers/common/compat/src/airflow/providers/common/compat/sdk.py | 5 +++++
15 files changed, 19 insertions(+), 19 deletions(-)
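
Every provider hunk below makes the same one-line import swap; as a minimal sketch (the getboolean call is only an illustrative usage, not taken from this patch):

# Before: conf imported straight from Airflow core.
from airflow.configuration import conf

# After: conf re-exported by the common-compat shim, which maps it to
# airflow.sdk.configuration on Airflow 3 and falls back to airflow.configuration
# on Airflow 2 (see the sdk.py hunk at the end of this patch).
from airflow.providers.common.compat.sdk import conf

# Illustrative usage; provider code like this stays unchanged apart from the import.
deferrable = conf.getboolean("operators", "default_deferrable", fallback=False)
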
diff --git a/providers/apache/beam/pyproject.toml b/providers/apache/beam/pyproject.toml
index be90204c9e1..bc4e5b44adb 100644
--- a/providers/apache/beam/pyproject.toml
+++ b/providers/apache/beam/pyproject.toml
@@ -58,7 +58,7 @@ requires-python = ">=3.10,!=3.13"
# After you modify the dependencies, and rebuild your Breeze CI image with ``breeze ci-image build``
dependencies = [
"apache-airflow>=2.11.0",
- "apache-airflow-providers-common-compat>=1.10.1",
+ "apache-airflow-providers-common-compat>=1.10.1", # use next version
'apache-beam>=2.60.0; python_version < "3.13"',
"pyarrow>=16.1.0; python_version < '3.13'",
"numpy>=1.22.4; python_version<'3.11'",
diff --git a/providers/apache/beam/src/airflow/providers/apache/beam/operators/beam.py b/providers/apache/beam/src/airflow/providers/apache/beam/operators/beam.py
index b9cd8044bfc..45ecdb7fe86 100644
--- a/providers/apache/beam/src/airflow/providers/apache/beam/operators/beam.py
+++ b/providers/apache/beam/src/airflow/providers/apache/beam/operators/beam.py
@@ -30,11 +30,10 @@ from contextlib import ExitStack
from functools import partial
from typing import TYPE_CHECKING, Any
-from airflow.configuration import conf
from airflow.exceptions import AirflowOptionalProviderFeatureException
from airflow.providers.apache.beam.hooks.beam import BeamHook, BeamRunnerType
from airflow.providers.apache.beam.triggers.beam import BeamJavaPipelineTrigger, BeamPythonPipelineTrigger
-from airflow.providers.common.compat.sdk import AirflowException, BaseOperator
+from airflow.providers.common.compat.sdk import AirflowException, BaseOperator, conf
from airflow.providers_manager import ProvidersManager
from airflow.utils.helpers import convert_camel_to_snake, exactly_one
from airflow.version import version
diff --git a/providers/apache/hdfs/pyproject.toml b/providers/apache/hdfs/pyproject.toml
index 9ca3f760d93..6940c313069 100644
--- a/providers/apache/hdfs/pyproject.toml
+++ b/providers/apache/hdfs/pyproject.toml
@@ -59,7 +59,7 @@ requires-python = ">=3.10"
# After you modify the dependencies, and rebuild your Breeze CI image with ``breeze ci-image build``
dependencies = [
"apache-airflow>=2.11.0",
- "apache-airflow-providers-common-compat>=1.10.1",
+ "apache-airflow-providers-common-compat>=1.10.1", # use next version
'hdfs[avro,dataframe,kerberos]>=2.5.4;python_version<"3.12"',
'hdfs[avro,dataframe,kerberos]>=2.7.3;python_version>="3.12"',
'pandas>=2.1.2; python_version <"3.13"',
diff --git a/providers/apache/hdfs/src/airflow/providers/apache/hdfs/hooks/webhdfs.py b/providers/apache/hdfs/src/airflow/providers/apache/hdfs/hooks/webhdfs.py
index e0ca522d778..fcf0c098e03 100644
--- a/providers/apache/hdfs/src/airflow/providers/apache/hdfs/hooks/webhdfs.py
+++ b/providers/apache/hdfs/src/airflow/providers/apache/hdfs/hooks/webhdfs.py
@@ -24,8 +24,7 @@ from typing import Any, cast
import requests
from hdfs import HdfsError, InsecureClient
-from airflow.configuration import conf
-from airflow.providers.common.compat.sdk import AirflowException, BaseHook
+from airflow.providers.common.compat.sdk import AirflowException, BaseHook, conf
log = logging.getLogger(__name__)
diff --git a/providers/apache/hdfs/src/airflow/providers/apache/hdfs/log/hdfs_task_handler.py b/providers/apache/hdfs/src/airflow/providers/apache/hdfs/log/hdfs_task_handler.py
index ed76365ac45..970420a1e1b 100644
--- a/providers/apache/hdfs/src/airflow/providers/apache/hdfs/log/hdfs_task_handler.py
+++ b/providers/apache/hdfs/src/airflow/providers/apache/hdfs/log/hdfs_task_handler.py
@@ -27,8 +27,8 @@ from urllib.parse import urlsplit
import attrs
-from airflow.configuration import conf
from airflow.providers.apache.hdfs.hooks.webhdfs import WebHDFSHook
+from airflow.providers.common.compat.sdk import conf
from airflow.utils.log.file_task_handler import FileTaskHandler
from airflow.utils.log.logging_mixin import LoggingMixin
diff --git a/providers/apache/hive/pyproject.toml b/providers/apache/hive/pyproject.toml
index a7108dff1e1..e1b34076510 100644
--- a/providers/apache/hive/pyproject.toml
+++ b/providers/apache/hive/pyproject.toml
@@ -59,7 +59,7 @@ requires-python = ">=3.10"
# After you modify the dependencies, and rebuild your Breeze CI image with ``breeze ci-image build``
dependencies = [
"apache-airflow>=2.11.0",
- "apache-airflow-providers-common-compat>=1.10.1",
+ "apache-airflow-providers-common-compat>=1.10.1", # use next version
"apache-airflow-providers-common-sql>=1.26.0",
"hmsclient>=0.1.0",
'pandas>=2.1.2; python_version <"3.13"',
diff --git a/providers/apache/hive/src/airflow/providers/apache/hive/hooks/hive.py b/providers/apache/hive/src/airflow/providers/apache/hive/hooks/hive.py
index e2ad488b448..97de5518368 100644
--- a/providers/apache/hive/src/airflow/providers/apache/hive/hooks/hive.py
+++ b/providers/apache/hive/src/airflow/providers/apache/hive/hooks/hive.py
@@ -32,12 +32,12 @@ from deprecated import deprecated
from sqlalchemy.engine import URL
from typing_extensions import overload
-from airflow.configuration import conf
from airflow.exceptions import AirflowProviderDeprecationWarning
from airflow.providers.common.compat.sdk import (
AIRFLOW_VAR_NAME_FORMAT_MAPPING,
AirflowException,
BaseHook,
+ conf,
)
from airflow.providers.common.sql.hooks.sql import DbApiHook
from airflow.security import utils
diff --git a/providers/apache/hive/src/airflow/providers/apache/hive/operators/hive.py b/providers/apache/hive/src/airflow/providers/apache/hive/operators/hive.py
index 0a2763a658c..78ac3e56dee 100644
--- a/providers/apache/hive/src/airflow/providers/apache/hive/operators/hive.py
+++ b/providers/apache/hive/src/airflow/providers/apache/hive/operators/hive.py
@@ -23,11 +23,11 @@ from collections.abc import Sequence
from functools import cached_property
from typing import TYPE_CHECKING, Any
-from airflow.configuration import conf
from airflow.providers.apache.hive.hooks.hive import HiveCliHook
from airflow.providers.common.compat.sdk import (
AIRFLOW_VAR_NAME_FORMAT_MAPPING,
BaseOperator,
+ conf,
context_to_airflow_vars,
)
diff --git a/providers/apache/hive/tests/unit/apache/hive/operators/test_hive.py b/providers/apache/hive/tests/unit/apache/hive/operators/test_hive.py
index b1fa45734be..b2f5c6193dd 100644
--- a/providers/apache/hive/tests/unit/apache/hive/operators/test_hive.py
+++ b/providers/apache/hive/tests/unit/apache/hive/operators/test_hive.py
@@ -22,9 +22,9 @@ from unittest import mock
import pytest
-from airflow.configuration import conf
from airflow.models import DagRun, TaskInstance
from airflow.providers.apache.hive.operators.hive import HiveOperator
+from airflow.providers.common.compat.sdk import conf
from airflow.utils import timezone
from unit.apache.hive import DEFAULT_DATE, MockSubProcess, TestHiveEnvironment
diff --git a/providers/apache/livy/pyproject.toml b/providers/apache/livy/pyproject.toml
index 9c1ecd67659..8ae5a28bd7e 100644
--- a/providers/apache/livy/pyproject.toml
+++ b/providers/apache/livy/pyproject.toml
@@ -60,7 +60,7 @@ requires-python = ">=3.10"
dependencies = [
"apache-airflow>=2.11.0",
"apache-airflow-providers-http>=5.1.0",
- "apache-airflow-providers-common-compat>=1.10.1",
+ "apache-airflow-providers-common-compat>=1.10.1", # use next version
"aiohttp>=3.9.2",
"asgiref>=2.3.0",
]
diff --git a/providers/apache/livy/src/airflow/providers/apache/livy/operators/livy.py b/providers/apache/livy/src/airflow/providers/apache/livy/operators/livy.py
index ecab22ff9d6..b42de329d33 100644
--- a/providers/apache/livy/src/airflow/providers/apache/livy/operators/livy.py
+++ b/providers/apache/livy/src/airflow/providers/apache/livy/operators/livy.py
@@ -21,14 +21,13 @@ from collections.abc import Sequence
from functools import cached_property
from typing import TYPE_CHECKING, Any, cast
-from airflow.configuration import conf
from airflow.providers.apache.livy.hooks.livy import BatchState, LivyHook
from airflow.providers.apache.livy.triggers.livy import LivyTrigger
from airflow.providers.common.compat.openlineage.utils.spark import (
inject_parent_job_information_into_spark_properties,
inject_transport_information_into_spark_properties,
)
-from airflow.providers.common.compat.sdk import AirflowException, BaseOperator
+from airflow.providers.common.compat.sdk import AirflowException, BaseOperator, conf
if TYPE_CHECKING:
from airflow.providers.common.compat.sdk import Context
diff --git a/providers/apache/spark/pyproject.toml b/providers/apache/spark/pyproject.toml
index 51bf98474b6..257c8d97989 100644
--- a/providers/apache/spark/pyproject.toml
+++ b/providers/apache/spark/pyproject.toml
@@ -59,7 +59,7 @@ requires-python = ">=3.10"
# After you modify the dependencies, and rebuild your Breeze CI image with ``breeze ci-image build``
dependencies = [
"apache-airflow>=2.11.0",
- "apache-airflow-providers-common-compat>=1.10.1",
+ "apache-airflow-providers-common-compat>=1.10.1", # use next version
"pyspark>=3.5.2",
"grpcio-status>=1.59.0",
]
diff --git a/providers/apache/spark/src/airflow/providers/apache/spark/hooks/spark_submit.py b/providers/apache/spark/src/airflow/providers/apache/spark/hooks/spark_submit.py
index c9d9b709653..1fe3173bc41 100644
--- a/providers/apache/spark/src/airflow/providers/apache/spark/hooks/spark_submit.py
+++ b/providers/apache/spark/src/airflow/providers/apache/spark/hooks/spark_submit.py
@@ -30,8 +30,7 @@ from collections.abc import Iterator
from pathlib import Path
from typing import Any
-from airflow.configuration import conf as airflow_conf
-from airflow.providers.common.compat.sdk import AirflowException, BaseHook
+from airflow.providers.common.compat.sdk import AirflowException, BaseHook, conf as airflow_conf
from airflow.security.kerberos import renew_from_kt
from airflow.utils.log.logging_mixin import LoggingMixin
diff --git a/providers/apache/spark/src/airflow/providers/apache/spark/operators/spark_submit.py b/providers/apache/spark/src/airflow/providers/apache/spark/operators/spark_submit.py
index 8754769d43e..7e26d5b2a6c 100644
--- a/providers/apache/spark/src/airflow/providers/apache/spark/operators/spark_submit.py
+++ b/providers/apache/spark/src/airflow/providers/apache/spark/operators/spark_submit.py
@@ -20,13 +20,12 @@ from __future__ import annotations
from collections.abc import Sequence
from typing import TYPE_CHECKING, Any
-from airflow.configuration import conf
from airflow.providers.apache.spark.hooks.spark_submit import SparkSubmitHook
from airflow.providers.common.compat.openlineage.utils.spark import (
inject_parent_job_information_into_spark_properties,
inject_transport_information_into_spark_properties,
)
-from airflow.providers.common.compat.sdk import BaseOperator
+from airflow.providers.common.compat.sdk import BaseOperator, conf
if TYPE_CHECKING:
from airflow.providers.common.compat.sdk import Context
diff --git a/providers/common/compat/src/airflow/providers/common/compat/sdk.py b/providers/common/compat/src/airflow/providers/common/compat/sdk.py
index 2d6edd2f43f..c9c4e55c555 100644
--- a/providers/common/compat/src/airflow/providers/common/compat/sdk.py
+++ b/providers/common/compat/src/airflow/providers/common/compat/sdk.py
@@ -76,6 +76,7 @@ if TYPE_CHECKING:
task_decorator_factory as task_decorator_factory,
)
from airflow.sdk.bases.sensor import poke_mode_only as poke_mode_only
+ from airflow.sdk.configuration import conf as conf
from airflow.sdk.definitions.context import context_merge as context_merge
from airflow.sdk.definitions.mappedoperator import MappedOperator as MappedOperator
from airflow.sdk.definitions.template import literal as literal
@@ -249,6 +250,10 @@ _IMPORT_MAP: dict[str, str | tuple[str, ...]] = {
"airflow.sdk.execution_time.secrets_masker",
"airflow.utils.log.secrets_masker",
),
+ # ============================================================================
+ # Configuration
+ # ============================================================================
+ "conf": ("airflow.sdk.configuration", "airflow.configuration"),
}
# Airflow 3-only exceptions (not available in Airflow 2)
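
The new _IMPORT_MAP entry above lists the Airflow 3 location first and the Airflow 2 location as the fallback. A minimal sketch of how such a map can be resolved lazily, assuming a PEP 562 module-level __getattr__ (the real sdk.py may differ in detail):

from importlib import import_module

_IMPORT_MAP = {
    # Airflow 3 location first, Airflow 2 fallback second, as in the hunk above.
    "conf": ("airflow.sdk.configuration", "airflow.configuration"),
}

def __getattr__(name):
    # Invoked only for names not already defined in this module (PEP 562).
    try:
        candidates = _IMPORT_MAP[name]
    except KeyError:
        raise AttributeError(name) from None
    if isinstance(candidates, str):
        candidates = (candidates,)
    for module_path in candidates:
        try:
            return getattr(import_module(module_path), name)
        except (ImportError, AttributeError):
            continue
    raise AttributeError(f"cannot resolve {name!r} from {candidates}")

With a resolver like this in place, `from airflow.providers.common.compat.sdk import conf` returns the SDK-backed conf on Airflow 3 and the legacy airflow.configuration conf on Airflow 2, which is what the provider hunks above rely on.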