This is an automated email from the ASF dual-hosted git repository.

potiuk pushed a commit to branch main
in repository https://gitbox.apache.org/repos/asf/airflow.git


The following commit(s) were added to refs/heads/main by this push:
     new 4983b2a0236 Extract shared "module_loading" distribution (#59139)
4983b2a0236 is described below

commit 4983b2a02365284d4f44546cecdc40fa475a5ec8
Author: Jarek Potiuk <[email protected]>
AuthorDate: Wed Dec 10 09:20:18 2025 +0100

    Extract shared "module_loading" distribution (#59139)
    
    This PR extracts a shared "module_loading" distribution that is going
    to be used in both airflow-core and task-sdk.
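    
    For reviewers, a minimal sketch (not part of the commit itself) of the
    import migration applied across airflow-core in the hunks below; the old
    and new module paths are taken from this diff:
    
        # Before: helpers were imported from airflow.utils.module_loading
        # (those imports are removed in the hunks below).
        # After: airflow-core imports them from the vendored shared distribution.
        from airflow._shared.module_loading import import_string, qualname
    
        # Resolve a dotted path to an object and recover its dotted name.
        conn_cls = import_string("airflow.models.connection.Connection")
        assert qualname(conn_cls) == "airflow.models.connection.Connection"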
---
 .../docs/authoring-and-scheduling/serializers.rst  |  2 +-
 airflow-core/pyproject.toml                        |  6 +-
 airflow-core/src/airflow/_shared/module_loading    |  1 +
 airflow-core/src/airflow/cli/cli_config.py         |  2 +-
 .../src/airflow/cli/commands/db_manager_command.py |  2 +-
 airflow-core/src/airflow/configuration.py          |  2 +-
 .../src/airflow/dag_processing/bundles/manager.py  |  2 +-
 airflow-core/src/airflow/exceptions.py             |  2 +-
 .../src/airflow/executors/executor_loader.py       |  2 +-
 .../src/airflow/jobs/triggerer_job_runner.py       |  2 +-
 airflow-core/src/airflow/logging_config.py         |  2 +-
 airflow-core/src/airflow/models/__init__.py        |  2 +-
 airflow-core/src/airflow/models/connection.py      |  2 +-
 airflow-core/src/airflow/plugins_manager.py        |  2 +-
 airflow-core/src/airflow/providers_manager.py      |  2 +-
 airflow-core/src/airflow/serialization/decoders.py |  2 +-
 airflow-core/src/airflow/serialization/encoders.py |  2 +-
 airflow-core/src/airflow/serialization/serde.py    |  2 +-
 .../airflow/serialization/serialized_objects.py    |  2 +-
 .../airflow/serialization/serializers/bignum.py    |  2 +-
 .../airflow/serialization/serializers/builtin.py   |  2 +-
 .../airflow/serialization/serializers/datetime.py  |  2 +-
 .../airflow/serialization/serializers/deltalake.py |  2 +-
 .../airflow/serialization/serializers/iceberg.py   |  2 +-
 .../serialization/serializers/kubernetes.py        |  2 +-
 .../src/airflow/serialization/serializers/numpy.py |  2 +-
 .../airflow/serialization/serializers/pandas.py    |  2 +-
 .../airflow/serialization/serializers/pydantic.py  |  2 +-
 .../airflow/serialization/serializers/timezone.py  |  2 +-
 airflow-core/src/airflow/task/priority_strategy.py |  2 +-
 airflow-core/src/airflow/triggers/callback.py      |  2 +-
 airflow-core/src/airflow/utils/db_manager.py       |  2 +-
 .../src/airflow/utils/serve_logs/log_server.py     |  2 +-
 .../tests/unit/always/test_project_structure.py    |  1 -
 .../core_api/routes/public/test_dag_run.py         |  2 +-
 .../tests/unit/core/test_example_dags_system.py    |  2 +-
 airflow-core/tests/unit/models/test_dag.py         |  2 +-
 .../tests/unit/plugins/test_plugins_manager.py     |  2 +-
 .../serialization/serializers/test_serializers.py  |  2 +-
 .../unit/serialization/test_dag_serialization.py   |  2 +-
 .../tests/unit/serialization/test_serde.py         |  2 +-
 .../src/tests_common/test_utils/providers.py       |  2 +-
 providers/apache/kafka/pyproject.toml              |  2 +-
 .../providers/apache/kafka/hooks/consume.py        |  2 +-
 .../providers/apache/kafka/operators/consume.py    |  2 +-
 .../providers/apache/kafka/operators/produce.py    |  2 +-
 .../apache/kafka/triggers/await_message.py         |  2 +-
 .../common/compat/module_loading/__init__.py       | 49 +++++++++++++++
 providers/common/sql/pyproject.toml                |  2 +-
 .../src/airflow/providers/common/sql/hooks/sql.py  |  4 +-
 providers/elasticsearch/pyproject.toml             |  2 +-
 .../providers/elasticsearch/log/es_task_handler.py |  2 +-
 providers/microsoft/azure/pyproject.toml           |  2 +-
 .../providers/microsoft/azure/triggers/msgraph.py  |  2 +-
 providers/openlineage/pyproject.toml               |  2 +-
 .../airflow/providers/openlineage/utils/utils.py   |  2 +-
 providers/opensearch/pyproject.toml                |  2 +-
 .../providers/opensearch/log/os_task_handler.py    |  2 +-
 pyproject.toml                                     |  3 +
 shared/configuration/pyproject.toml                |  1 +
 .../src/airflow_shared/configuration/parser.py     |  2 +-
 .../pyproject.toml                                 |  8 +--
 .../src/airflow_shared/module_loading/__init__.py  | 22 +++++++
 .../module_loading/tests/conftest.py               | 18 +-----
 .../tests/module_loading/__init__.py               | 18 ------
 .../tests/module_loading}/test_module_loading.py   | 20 +++++-
 task-sdk/pyproject.toml                            |  6 +-
 task-sdk/src/airflow/sdk/_shared/module_loading    |  1 +
 task-sdk/src/airflow/sdk/configuration.py          |  2 +-
 task-sdk/src/airflow/sdk/definitions/callback.py   |  2 +-
 task-sdk/src/airflow/sdk/definitions/connection.py |  2 +-
 task-sdk/src/airflow/sdk/definitions/dag.py        |  2 +-
 task-sdk/src/airflow/sdk/io/fs.py                  |  2 +-
 task-sdk/src/airflow/sdk/io/store.py               |  2 +-
 task-sdk/src/airflow/sdk/module_loading.py         | 71 ++--------------------
 .../tests/task_sdk/definitions/test_callback.py    |  2 +-
 .../tests/task_sdk/definitions/test_connection.py  |  2 +-
 .../task_sdk/definitions/test_module_loading.py    | 23 -------
 .../tests/task_sdk/execution_time/test_sentry.py   |  2 +-
 task-sdk/tests/task_sdk/io/test_path.py            |  2 +-
 80 files changed, 180 insertions(+), 200 deletions(-)

diff --git a/airflow-core/docs/authoring-and-scheduling/serializers.rst 
b/airflow-core/docs/authoring-and-scheduling/serializers.rst
index fb19a111d91..460d72d50d2 100644
--- a/airflow-core/docs/authoring-and-scheduling/serializers.rst
+++ b/airflow-core/docs/authoring-and-scheduling/serializers.rst
@@ -87,7 +87,7 @@ Registered
 
     from typing import TYPE_CHECKING
 
-    from airflow.utils.module_loading import qualname
+    from airflow.sdk.module_loading import qualname
 
     if TYPE_CHECKING:
         import decimal
diff --git a/airflow-core/pyproject.toml b/airflow-core/pyproject.toml
index 8a91dbf7f2d..6c46b152642 100644
--- a/airflow-core/pyproject.toml
+++ b/airflow-core/pyproject.toml
@@ -223,11 +223,12 @@ exclude = [
 
 [tool.hatch.build.targets.sdist.force-include]
 "../shared/configuration/src/airflow_shared/configuration" = 
"src/airflow/_shared/configuration"
+"../shared/module_loading/src/airflow_shared/module_loading" = 
"src/airflow/_shared/module_loading"
 "../shared/logging/src/airflow_shared/logging" = "src/airflow/_shared/logging"
+"../shared/observability/src/airflow_shared/observability" = 
"src/airflow/_shared/observability"
+"../shared/secrets_backend/src/airflow_shared/secrets_backend" = 
"src/airflow/_shared/secrets_backend"
 "../shared/secrets_masker/src/airflow_shared/secrets_masker" = 
"src/airflow/_shared/secrets_masker"
 "../shared/timezones/src/airflow_shared/timezones" = 
"src/airflow/_shared/timezones"
-"../shared/secrets_backend/src/airflow_shared/secrets_backend" = 
"src/airflow/_shared/secrets_backend"
-"../shared/observability/src/airflow_shared/observability" = 
"src/airflow/_shared/observability"
 
 [tool.hatch.build.targets.custom]
 path = "./hatch_build.py"
@@ -297,6 +298,7 @@ apache-airflow-devel-common = { workspace = true }
 shared_distributions = [
     "apache-airflow-shared-configuration",
     "apache-airflow-shared-logging",
+    "apache-airflow-shared-module-loading",
     "apache-airflow-shared-secrets-backend",
     "apache-airflow-shared-secrets-masker",
     "apache-airflow-shared-timezones",
diff --git a/airflow-core/src/airflow/_shared/module_loading 
b/airflow-core/src/airflow/_shared/module_loading
new file mode 120000
index 00000000000..f0d25e8a478
--- /dev/null
+++ b/airflow-core/src/airflow/_shared/module_loading
@@ -0,0 +1 @@
+../../../../shared/module_loading/src/airflow_shared/module_loading
\ No newline at end of file
diff --git a/airflow-core/src/airflow/cli/cli_config.py 
b/airflow-core/src/airflow/cli/cli_config.py
index 0e0bb1f4408..0a02680a136 100644
--- a/airflow-core/src/airflow/cli/cli_config.py
+++ b/airflow-core/src/airflow/cli/cli_config.py
@@ -29,12 +29,12 @@ from typing import NamedTuple
 
 import lazy_object_proxy
 
+from airflow._shared.module_loading import import_string
 from airflow._shared.timezones.timezone import parse as parsedate
 from airflow.cli.commands.legacy_commands import check_legacy_command
 from airflow.configuration import conf
 from airflow.jobs.job import JobState
 from airflow.utils.cli import ColorMode
-from airflow.utils.module_loading import import_string
 from airflow.utils.state import DagRunState
 
 BUILD_DOCS = "BUILDING_AIRFLOW_DOCS" in os.environ
diff --git a/airflow-core/src/airflow/cli/commands/db_manager_command.py 
b/airflow-core/src/airflow/cli/commands/db_manager_command.py
index 7961ea12d02..988c85fbae5 100644
--- a/airflow-core/src/airflow/cli/commands/db_manager_command.py
+++ b/airflow-core/src/airflow/cli/commands/db_manager_command.py
@@ -17,10 +17,10 @@
 from __future__ import annotations
 
 from airflow import settings
+from airflow._shared.module_loading import import_string
 from airflow.cli.commands.db_command import run_db_downgrade_command, 
run_db_migrate_command
 from airflow.configuration import conf
 from airflow.utils import cli as cli_utils
-from airflow.utils.module_loading import import_string
 from airflow.utils.providers_configuration_loader import 
providers_configuration_loaded
 
 
diff --git a/airflow-core/src/airflow/configuration.py 
b/airflow-core/src/airflow/configuration.py
index cd16658e929..793718669a6 100644
--- a/airflow-core/src/airflow/configuration.py
+++ b/airflow-core/src/airflow/configuration.py
@@ -43,11 +43,11 @@ from airflow._shared.configuration.parser import (
     AirflowConfigParser as _SharedAirflowConfigParser,
     ValueNotFound,
 )
+from airflow._shared.module_loading import import_string
 from airflow.exceptions import AirflowConfigException
 from airflow.secrets import DEFAULT_SECRETS_SEARCH_PATH
 from airflow.task.weight_rule import WeightRule
 from airflow.utils import yaml
-from airflow.utils.module_loading import import_string
 
 if TYPE_CHECKING:
     from airflow.api_fastapi.auth.managers.base_auth_manager import 
BaseAuthManager
diff --git a/airflow-core/src/airflow/dag_processing/bundles/manager.py 
b/airflow-core/src/airflow/dag_processing/bundles/manager.py
index 6173ae9adc5..738e0785e19 100644
--- a/airflow-core/src/airflow/dag_processing/bundles/manager.py
+++ b/airflow-core/src/airflow/dag_processing/bundles/manager.py
@@ -23,13 +23,13 @@ from itsdangerous import URLSafeSerializer
 from pydantic import BaseModel, ValidationError
 from sqlalchemy import delete, select
 
+from airflow._shared.module_loading import import_string
 from airflow.configuration import conf
 from airflow.dag_processing.bundles.base import BaseDagBundle  # noqa: TC001
 from airflow.exceptions import AirflowConfigException
 from airflow.models.dagbundle import DagBundleModel
 from airflow.models.team import Team
 from airflow.utils.log.logging_mixin import LoggingMixin
-from airflow.utils.module_loading import import_string
 from airflow.utils.session import NEW_SESSION, provide_session
 
 if TYPE_CHECKING:
diff --git a/airflow-core/src/airflow/exceptions.py 
b/airflow-core/src/airflow/exceptions.py
index ea45e8942ca..c229495ea05 100644
--- a/airflow-core/src/airflow/exceptions.py
+++ b/airflow-core/src/airflow/exceptions.py
@@ -328,7 +328,7 @@ def __getattr__(name: str):
         import warnings
 
         from airflow import DeprecatedImportWarning
-        from airflow.utils.module_loading import import_string
+        from airflow._shared.module_loading import import_string
 
         target_path = f"airflow.sdk.exceptions.{name}"
         warnings.warn(
diff --git a/airflow-core/src/airflow/executors/executor_loader.py 
b/airflow-core/src/airflow/executors/executor_loader.py
index 5c86c3020bc..5b1c8a8e845 100644
--- a/airflow-core/src/airflow/executors/executor_loader.py
+++ b/airflow-core/src/airflow/executors/executor_loader.py
@@ -24,6 +24,7 @@ from typing import TYPE_CHECKING
 
 import structlog
 
+from airflow._shared.module_loading import import_string
 from airflow.exceptions import AirflowConfigException, UnknownExecutorException
 from airflow.executors.executor_constants import (
     CELERY_EXECUTOR,
@@ -34,7 +35,6 @@ from airflow.executors.executor_constants import (
 )
 from airflow.executors.executor_utils import ExecutorName
 from airflow.models.team import Team
-from airflow.utils.module_loading import import_string
 
 log = structlog.get_logger(__name__)
 
diff --git a/airflow-core/src/airflow/jobs/triggerer_job_runner.py 
b/airflow-core/src/airflow/jobs/triggerer_job_runner.py
index e2ad39c22e5..13ec255832f 100644
--- a/airflow-core/src/airflow/jobs/triggerer_job_runner.py
+++ b/airflow-core/src/airflow/jobs/triggerer_job_runner.py
@@ -38,6 +38,7 @@ from pydantic import BaseModel, Field, TypeAdapter
 from sqlalchemy import func, select
 from structlog.contextvars import bind_contextvars as bind_log_contextvars
 
+from airflow._shared.module_loading import import_string
 from airflow._shared.timezones import timezone
 from airflow.configuration import conf
 from airflow.executors import workloads
@@ -78,7 +79,6 @@ from airflow.sdk.execution_time.supervisor import 
WatchedSubprocess, make_buffer
 from airflow.triggers import base as events
 from airflow.utils.helpers import log_filename_template_renderer
 from airflow.utils.log.logging_mixin import LoggingMixin
-from airflow.utils.module_loading import import_string
 from airflow.utils.session import provide_session
 
 if TYPE_CHECKING:
diff --git a/airflow-core/src/airflow/logging_config.py 
b/airflow-core/src/airflow/logging_config.py
index 94c06bac3e0..c942f4703c1 100644
--- a/airflow-core/src/airflow/logging_config.py
+++ b/airflow-core/src/airflow/logging_config.py
@@ -22,9 +22,9 @@ import warnings
 from importlib import import_module
 from typing import TYPE_CHECKING, Any
 
+from airflow._shared.module_loading import import_string
 from airflow.configuration import conf
 from airflow.exceptions import AirflowConfigException
-from airflow.utils.module_loading import import_string
 
 if TYPE_CHECKING:
     from airflow.logging.remote import RemoteLogIO
diff --git a/airflow-core/src/airflow/models/__init__.py 
b/airflow-core/src/airflow/models/__init__.py
index 231d0f019af..3123e796888 100644
--- a/airflow-core/src/airflow/models/__init__.py
+++ b/airflow-core/src/airflow/models/__init__.py
@@ -81,7 +81,7 @@ def __getattr__(name):
     if not path:
         raise AttributeError(f"module {__name__!r} has no attribute {name!r}")
 
-    from airflow.utils.module_loading import import_string
+    from airflow._shared.module_loading import import_string
 
     val = import_string(f"{path}.{name}")
 
diff --git a/airflow-core/src/airflow/models/connection.py 
b/airflow-core/src/airflow/models/connection.py
index 7e44c636c20..971b3553f96 100644
--- a/airflow-core/src/airflow/models/connection.py
+++ b/airflow-core/src/airflow/models/connection.py
@@ -30,6 +30,7 @@ from urllib.parse import parse_qsl, quote, unquote, 
urlencode, urlsplit
 from sqlalchemy import Boolean, ForeignKey, Integer, String, Text, select
 from sqlalchemy.orm import Mapped, declared_attr, reconstructor, synonym
 
+from airflow._shared.module_loading import import_string
 from airflow._shared.secrets_masker import mask_secret
 from airflow.configuration import ensure_secrets_loaded
 from airflow.exceptions import AirflowException, AirflowNotFoundException
@@ -38,7 +39,6 @@ from airflow.models.crypto import get_fernet
 from airflow.sdk import SecretCache
 from airflow.utils.helpers import prune_dict
 from airflow.utils.log.logging_mixin import LoggingMixin
-from airflow.utils.module_loading import import_string
 from airflow.utils.session import NEW_SESSION, provide_session
 from airflow.utils.sqlalchemy import mapped_column
 
diff --git a/airflow-core/src/airflow/plugins_manager.py 
b/airflow-core/src/airflow/plugins_manager.py
index 06f6fd3a282..c32bcae8ea5 100644
--- a/airflow-core/src/airflow/plugins_manager.py
+++ b/airflow-core/src/airflow/plugins_manager.py
@@ -31,6 +31,7 @@ from pathlib import Path
 from typing import TYPE_CHECKING, Any
 
 from airflow import settings
+from airflow._shared.module_loading import import_string, qualname
 from airflow.configuration import conf
 from airflow.task.priority_strategy import (
     PriorityWeightStrategy,
@@ -38,7 +39,6 @@ from airflow.task.priority_strategy import (
 )
 from airflow.utils.entry_points import entry_points_with_dist
 from airflow.utils.file import find_path_from_directory
-from airflow.utils.module_loading import import_string, qualname
 
 if TYPE_CHECKING:
     from airflow.lineage.hook import HookLineageReader
diff --git a/airflow-core/src/airflow/providers_manager.py 
b/airflow-core/src/airflow/providers_manager.py
index 20fc79bd7fa..b13ab7ccd6c 100644
--- a/airflow-core/src/airflow/providers_manager.py
+++ b/airflow-core/src/airflow/providers_manager.py
@@ -35,10 +35,10 @@ from typing import TYPE_CHECKING, Any, NamedTuple, 
ParamSpec, TypeVar
 
 from packaging.utils import canonicalize_name
 
+from airflow._shared.module_loading import import_string
 from airflow.exceptions import AirflowOptionalProviderFeatureException
 from airflow.utils.entry_points import entry_points_with_dist
 from airflow.utils.log.logging_mixin import LoggingMixin
-from airflow.utils.module_loading import import_string
 from airflow.utils.singleton import Singleton
 
 log = logging.getLogger(__name__)
diff --git a/airflow-core/src/airflow/serialization/decoders.py 
b/airflow-core/src/airflow/serialization/decoders.py
index afd6d297f3f..8cb73852bfc 100644
--- a/airflow-core/src/airflow/serialization/decoders.py
+++ b/airflow-core/src/airflow/serialization/decoders.py
@@ -23,6 +23,7 @@ from typing import TYPE_CHECKING, Any, TypeVar
 
 import dateutil.relativedelta
 
+from airflow._shared.module_loading import import_string
 from airflow.sdk import (  # TODO: Implement serialized assets.
     Asset,
     AssetAlias,
@@ -32,7 +33,6 @@ from airflow.sdk import (  # TODO: Implement serialized 
assets.
 from airflow.serialization.definitions.assets import SerializedAssetWatcher
 from airflow.serialization.enums import DagAttributeTypes as DAT, Encoding
 from airflow.serialization.helpers import find_registered_custom_timetable, 
is_core_timetable_import_path
-from airflow.utils.module_loading import import_string
 
 if TYPE_CHECKING:
     from airflow.sdk.definitions.asset import BaseAsset
diff --git a/airflow-core/src/airflow/serialization/encoders.py 
b/airflow-core/src/airflow/serialization/encoders.py
index 64a66dbe584..c113eb40b05 100644
--- a/airflow-core/src/airflow/serialization/encoders.py
+++ b/airflow-core/src/airflow/serialization/encoders.py
@@ -25,6 +25,7 @@ from typing import TYPE_CHECKING, Any, TypeVar, overload
 import attrs
 import pendulum
 
+from airflow._shared.module_loading import qualname
 from airflow.sdk import (
     Asset,
     AssetAlias,
@@ -46,7 +47,6 @@ from airflow.serialization.enums import DagAttributeTypes as 
DAT, Encoding
 from airflow.serialization.helpers import find_registered_custom_timetable, 
is_core_timetable_import_path
 from airflow.timetables.base import Timetable as CoreTimetable
 from airflow.utils.docs import get_docs_url
-from airflow.utils.module_loading import qualname
 
 if TYPE_CHECKING:
     from dateutil.relativedelta import relativedelta
diff --git a/airflow-core/src/airflow/serialization/serde.py 
b/airflow-core/src/airflow/serialization/serde.py
index 8aa90c5d9a1..fae6a76e670 100644
--- a/airflow-core/src/airflow/serialization/serde.py
+++ b/airflow-core/src/airflow/serialization/serde.py
@@ -31,10 +31,10 @@ from typing import TYPE_CHECKING, Any, TypeVar, cast
 import attr
 
 import airflow.serialization.serializers
+from airflow._shared.module_loading import import_string, iter_namespace, qualname
 from airflow.configuration import conf
 from airflow.observability.stats import Stats
 from airflow.serialization.typing import is_pydantic_model
-from airflow.utils.module_loading import import_string, iter_namespace, qualname
 
 if TYPE_CHECKING:
     from types import ModuleType
diff --git a/airflow-core/src/airflow/serialization/serialized_objects.py 
b/airflow-core/src/airflow/serialization/serialized_objects.py
index 505fe2cca2e..b4794ceb62d 100644
--- a/airflow-core/src/airflow/serialization/serialized_objects.py
+++ b/airflow-core/src/airflow/serialization/serialized_objects.py
@@ -56,6 +56,7 @@ from pendulum.tz.timezone import FixedTimezone, Timezone
 from sqlalchemy import func, or_, select, tuple_
 
 from airflow import macros
+from airflow._shared.module_loading import import_string, qualname
+from airflow._shared.timezones.timezone import coerce_datetime, from_timestamp, parse_timezone, utcnow
 from airflow.callbacks.callback_requests import DagCallbackRequest, TaskCallbackRequest
 from airflow.configuration import conf as airflow_conf
@@ -129,7 +130,6 @@ from airflow.utils.code_utils import get_python_source
 from airflow.utils.context import ConnectionAccessor, Context, VariableAccessor
 from airflow.utils.db import LazySelectSequence
 from airflow.utils.log.logging_mixin import LoggingMixin
-from airflow.utils.module_loading import import_string, qualname
 from airflow.utils.session import NEW_SESSION, create_session, provide_session
 from airflow.utils.state import DagRunState, TaskInstanceState
 from airflow.utils.types import DagRunTriggeredByType, DagRunType
diff --git a/airflow-core/src/airflow/serialization/serializers/bignum.py 
b/airflow-core/src/airflow/serialization/serializers/bignum.py
index 5bb89cb386c..4dfa7dd2397 100644
--- a/airflow-core/src/airflow/serialization/serializers/bignum.py
+++ b/airflow-core/src/airflow/serialization/serializers/bignum.py
@@ -19,7 +19,7 @@ from __future__ import annotations
 
 from typing import TYPE_CHECKING
 
-from airflow.utils.module_loading import qualname
+from airflow._shared.module_loading import qualname
 
 if TYPE_CHECKING:
     import decimal
diff --git a/airflow-core/src/airflow/serialization/serializers/builtin.py 
b/airflow-core/src/airflow/serialization/serializers/builtin.py
index 076831a05da..0edb56cb335 100644
--- a/airflow-core/src/airflow/serialization/serializers/builtin.py
+++ b/airflow-core/src/airflow/serialization/serializers/builtin.py
@@ -19,7 +19,7 @@ from __future__ import annotations
 
 from typing import TYPE_CHECKING, cast
 
-from airflow.utils.module_loading import qualname
+from airflow._shared.module_loading import qualname
 
 if TYPE_CHECKING:
     from airflow.serialization.serde import U
diff --git a/airflow-core/src/airflow/serialization/serializers/datetime.py 
b/airflow-core/src/airflow/serialization/serializers/datetime.py
index b5fe17a2e8b..9d455d010b2 100644
--- a/airflow-core/src/airflow/serialization/serializers/datetime.py
+++ b/airflow-core/src/airflow/serialization/serializers/datetime.py
@@ -19,12 +19,12 @@ from __future__ import annotations
 
 from typing import TYPE_CHECKING
 
+from airflow._shared.module_loading import qualname
 from airflow._shared.timezones.timezone import parse_timezone
 from airflow.serialization.serializers.timezone import (
     deserialize as deserialize_timezone,
     serialize as serialize_timezone,
 )
-from airflow.utils.module_loading import qualname
 
 if TYPE_CHECKING:
     import datetime
diff --git a/airflow-core/src/airflow/serialization/serializers/deltalake.py 
b/airflow-core/src/airflow/serialization/serializers/deltalake.py
index a79b2317881..86cdd9c4cf5 100644
--- a/airflow-core/src/airflow/serialization/serializers/deltalake.py
+++ b/airflow-core/src/airflow/serialization/serializers/deltalake.py
@@ -19,7 +19,7 @@ from __future__ import annotations
 
 from typing import TYPE_CHECKING
 
-from airflow.utils.module_loading import qualname
+from airflow._shared.module_loading import qualname
 
 serializers = ["deltalake.table.DeltaTable"]
 deserializers = serializers
diff --git a/airflow-core/src/airflow/serialization/serializers/iceberg.py 
b/airflow-core/src/airflow/serialization/serializers/iceberg.py
index 018732c29fe..d14d5a404b4 100644
--- a/airflow-core/src/airflow/serialization/serializers/iceberg.py
+++ b/airflow-core/src/airflow/serialization/serializers/iceberg.py
@@ -19,7 +19,7 @@ from __future__ import annotations
 
 from typing import TYPE_CHECKING
 
-from airflow.utils.module_loading import qualname
+from airflow._shared.module_loading import qualname
 
 serializers = ["pyiceberg.table.Table"]
 deserializers = serializers
diff --git a/airflow-core/src/airflow/serialization/serializers/kubernetes.py 
b/airflow-core/src/airflow/serialization/serializers/kubernetes.py
index faa2312ac7a..5a6c2bbeb30 100644
--- a/airflow-core/src/airflow/serialization/serializers/kubernetes.py
+++ b/airflow-core/src/airflow/serialization/serializers/kubernetes.py
@@ -20,7 +20,7 @@ from __future__ import annotations
 import logging
 from typing import TYPE_CHECKING
 
-from airflow.utils.module_loading import qualname
+from airflow._shared.module_loading import qualname
 
 # lazy loading for performance reasons
 serializers = [
diff --git a/airflow-core/src/airflow/serialization/serializers/numpy.py 
b/airflow-core/src/airflow/serialization/serializers/numpy.py
index ad5c3caa7d0..300f47f6677 100644
--- a/airflow-core/src/airflow/serialization/serializers/numpy.py
+++ b/airflow-core/src/airflow/serialization/serializers/numpy.py
@@ -19,7 +19,7 @@ from __future__ import annotations
 
 from typing import TYPE_CHECKING, Any
 
-from airflow.utils.module_loading import qualname
+from airflow._shared.module_loading import qualname
 
 # lazy loading for performance reasons
 serializers = [
diff --git a/airflow-core/src/airflow/serialization/serializers/pandas.py 
b/airflow-core/src/airflow/serialization/serializers/pandas.py
index 73f64ce86b4..422dcec497d 100644
--- a/airflow-core/src/airflow/serialization/serializers/pandas.py
+++ b/airflow-core/src/airflow/serialization/serializers/pandas.py
@@ -19,7 +19,7 @@ from __future__ import annotations
 
 from typing import TYPE_CHECKING
 
-from airflow.utils.module_loading import qualname
+from airflow._shared.module_loading import qualname
 
 # lazy loading for performance reasons
 serializers = [
diff --git a/airflow-core/src/airflow/serialization/serializers/pydantic.py 
b/airflow-core/src/airflow/serialization/serializers/pydantic.py
index b07cc634176..aa2d79485fe 100644
--- a/airflow-core/src/airflow/serialization/serializers/pydantic.py
+++ b/airflow-core/src/airflow/serialization/serializers/pydantic.py
@@ -19,8 +19,8 @@ from __future__ import annotations
 
 from typing import TYPE_CHECKING
 
+from airflow._shared.module_loading import qualname
 from airflow.serialization.typing import is_pydantic_model
-from airflow.utils.module_loading import qualname
 
 if TYPE_CHECKING:
     from airflow.serialization.serde import U
diff --git a/airflow-core/src/airflow/serialization/serializers/timezone.py 
b/airflow-core/src/airflow/serialization/serializers/timezone.py
index 3a67dd8a95e..8ed0aad27a6 100644
--- a/airflow-core/src/airflow/serialization/serializers/timezone.py
+++ b/airflow-core/src/airflow/serialization/serializers/timezone.py
@@ -20,7 +20,7 @@ from __future__ import annotations
 import datetime
 from typing import TYPE_CHECKING, Any, cast
 
-from airflow.utils.module_loading import qualname
+from airflow._shared.module_loading import qualname
 
 if TYPE_CHECKING:
     from airflow.serialization.serde import U
diff --git a/airflow-core/src/airflow/task/priority_strategy.py 
b/airflow-core/src/airflow/task/priority_strategy.py
index 90abf3aa0a3..a330ca9198b 100644
--- a/airflow-core/src/airflow/task/priority_strategy.py
+++ b/airflow-core/src/airflow/task/priority_strategy.py
@@ -139,8 +139,8 @@ def validate_and_load_priority_weight_strategy(
 
     :meta private:
     """
+    from airflow._shared.module_loading import qualname
     from airflow.serialization.serialized_objects import 
_get_registered_priority_weight_strategy
-    from airflow.utils.module_loading import qualname
 
     if priority_weight_strategy is None:
         return _AbsolutePriorityWeightStrategy()
diff --git a/airflow-core/src/airflow/triggers/callback.py 
b/airflow-core/src/airflow/triggers/callback.py
index c3f21d84faf..336aa232e96 100644
--- a/airflow-core/src/airflow/triggers/callback.py
+++ b/airflow-core/src/airflow/triggers/callback.py
@@ -22,9 +22,9 @@ import traceback
 from collections.abc import AsyncIterator
 from typing import Any
 
+from airflow._shared.module_loading import import_string, qualname
 from airflow.models.callback import CallbackState
 from airflow.triggers.base import BaseTrigger, TriggerEvent
-from airflow.utils.module_loading import import_string, qualname
 
 log = logging.getLogger(__name__)
 
diff --git a/airflow-core/src/airflow/utils/db_manager.py 
b/airflow-core/src/airflow/utils/db_manager.py
index 995cb81c803..ca3e181c1db 100644
--- a/airflow-core/src/airflow/utils/db_manager.py
+++ b/airflow-core/src/airflow/utils/db_manager.py
@@ -23,10 +23,10 @@ from alembic import command
 from sqlalchemy import inspect
 
 from airflow import settings
+from airflow._shared.module_loading import import_string
 from airflow.configuration import conf
 from airflow.exceptions import AirflowException
 from airflow.utils.log.logging_mixin import LoggingMixin
-from airflow.utils.module_loading import import_string
 
 if TYPE_CHECKING:
     from alembic.script import ScriptDirectory
diff --git a/airflow-core/src/airflow/utils/serve_logs/log_server.py 
b/airflow-core/src/airflow/utils/serve_logs/log_server.py
index 49f4df1ce5a..292d7bf5b16 100644
--- a/airflow-core/src/airflow/utils/serve_logs/log_server.py
+++ b/airflow-core/src/airflow/utils/serve_logs/log_server.py
@@ -33,10 +33,10 @@ from jwt.exceptions import (
     InvalidSignatureError,
 )
 
+from airflow._shared.module_loading import import_string
 from airflow.api_fastapi.auth.tokens import JWTValidator, get_signing_key
 from airflow.configuration import conf
 from airflow.utils.docs import get_docs_url
-from airflow.utils.module_loading import import_string
 
 logger = logging.getLogger(__name__)
 
diff --git a/airflow-core/tests/unit/always/test_project_structure.py 
b/airflow-core/tests/unit/always/test_project_structure.py
index 0d48def6941..b3e0236fd32 100644
--- a/airflow-core/tests/unit/always/test_project_structure.py
+++ b/airflow-core/tests/unit/always/test_project_structure.py
@@ -59,7 +59,6 @@ class TestProjectStructure:
         """
         # The test below had a but for quite a while and we missed a lot of 
modules to have tess
         # We should make sure that one goes to 0
-        # TODO(potiuk) - check if that test actually tests something
         OVERLOOKED_TESTS = [
             
"providers/amazon/tests/unit/amazon/aws/auth_manager/datamodels/test_login.py",
             
"providers/amazon/tests/unit/amazon/aws/auth_manager/security_manager/test_aws_security_manager_override.py",
diff --git 
a/airflow-core/tests/unit/api_fastapi/core_api/routes/public/test_dag_run.py 
b/airflow-core/tests/unit/api_fastapi/core_api/routes/public/test_dag_run.py
index 642fa1524c4..785904d3d81 100644
--- a/airflow-core/tests/unit/api_fastapi/core_api/routes/public/test_dag_run.py
+++ b/airflow-core/tests/unit/api_fastapi/core_api/routes/public/test_dag_run.py
@@ -77,7 +77,7 @@ class CustomTimetable(CronDataIntervalTimetable):
 def custom_timetable_plugin(monkeypatch):
     """Fixture to register CustomTimetable for serialization."""
     from airflow import plugins_manager
-    from airflow.utils.module_loading import qualname
+    from airflow._shared.module_loading import qualname
 
     timetable_class_name = qualname(CustomTimetable)
     existing_timetables = getattr(plugins_manager, "timetable_classes", None) 
or {}
diff --git a/airflow-core/tests/unit/core/test_example_dags_system.py 
b/airflow-core/tests/unit/core/test_example_dags_system.py
index 9794307b454..06e81be3c89 100644
--- a/airflow-core/tests/unit/core/test_example_dags_system.py
+++ b/airflow-core/tests/unit/core/test_example_dags_system.py
@@ -23,9 +23,9 @@ import pendulum
 import pytest
 from sqlalchemy import select
 
+from airflow._shared.module_loading import import_string
 from airflow.models import DagRun
 from airflow.task.trigger_rule import TriggerRule
-from airflow.utils.module_loading import import_string
 from airflow.utils.state import DagRunState
 
 from tests_common.test_utils.compat import PythonOperator
diff --git a/airflow-core/tests/unit/models/test_dag.py 
b/airflow-core/tests/unit/models/test_dag.py
index ece2b456419..33b35d954bb 100644
--- a/airflow-core/tests/unit/models/test_dag.py
+++ b/airflow-core/tests/unit/models/test_dag.py
@@ -36,6 +36,7 @@ import time_machine
 from sqlalchemy import inspect, select
 
 from airflow import settings
+from airflow._shared.module_loading import qualname
 from airflow._shared.timezones import timezone
 from airflow._shared.timezones.timezone import datetime as datetime_tz
 from airflow.configuration import conf
@@ -81,7 +82,6 @@ from airflow.timetables.simple import (
     OnceTimetable,
 )
 from airflow.utils.file import list_py_file_paths
-from airflow.utils.module_loading import qualname
 from airflow.utils.session import create_session
 from airflow.utils.state import DagRunState, State, TaskInstanceState
 from airflow.utils.types import DagRunTriggeredByType, DagRunType
diff --git a/airflow-core/tests/unit/plugins/test_plugins_manager.py 
b/airflow-core/tests/unit/plugins/test_plugins_manager.py
index 6017a140913..c463f6a6273 100644
--- a/airflow-core/tests/unit/plugins/test_plugins_manager.py
+++ b/airflow-core/tests/unit/plugins/test_plugins_manager.py
@@ -27,9 +27,9 @@ from unittest import mock
 
 import pytest
 
+from airflow._shared.module_loading import qualname
 from airflow.listeners.listener import get_listener_manager
 from airflow.plugins_manager import AirflowPlugin
-from airflow.utils.module_loading import qualname
 
 from tests_common.test_utils.config import conf_vars
 from tests_common.test_utils.markers import 
skip_if_force_lowest_dependencies_marker
diff --git 
a/airflow-core/tests/unit/serialization/serializers/test_serializers.py 
b/airflow-core/tests/unit/serialization/serializers/test_serializers.py
index 271e1af9fa6..9152b506588 100644
--- a/airflow-core/tests/unit/serialization/serializers/test_serializers.py
+++ b/airflow-core/tests/unit/serialization/serializers/test_serializers.py
@@ -36,10 +36,10 @@ from pendulum.tz.timezone import FixedTimezone, Timezone
 from pydantic import BaseModel, Field
 from pydantic.dataclasses import dataclass as pydantic_dataclass
 
+from airflow._shared.module_loading import qualname
 from airflow.sdk.definitions.param import Param, ParamsDict
 from airflow.serialization.serde import CLASSNAME, DATA, VERSION, _stringify, 
decode, deserialize, serialize
 from airflow.serialization.serializers import builtin
-from airflow.utils.module_loading import qualname
 
 from tests_common.test_utils.markers import 
skip_if_force_lowest_dependencies_marker
 
diff --git a/airflow-core/tests/unit/serialization/test_dag_serialization.py 
b/airflow-core/tests/unit/serialization/test_dag_serialization.py
index bdcf97b0690..cc92a15410b 100644
--- a/airflow-core/tests/unit/serialization/test_dag_serialization.py
+++ b/airflow-core/tests/unit/serialization/test_dag_serialization.py
@@ -46,6 +46,7 @@ from dateutil.relativedelta import FR, relativedelta
 from kubernetes.client import models as k8s
 
 import airflow
+from airflow._shared.module_loading import qualname
 from airflow._shared.timezones import timezone
 from airflow.dag_processing.dagbag import DagBag
 from airflow.exceptions import (
@@ -83,7 +84,6 @@ from airflow.task.priority_strategy import 
_AbsolutePriorityWeightStrategy, _Dow
 from airflow.ti_deps.deps.ready_to_reschedule import ReadyToRescheduleDep
 from airflow.timetables.simple import NullTimetable, OnceTimetable
 from airflow.triggers.base import StartTriggerArgs
-from airflow.utils.module_loading import qualname
 
 from tests_common.test_utils.config import conf_vars
 from tests_common.test_utils.markers import 
skip_if_force_lowest_dependencies_marker, skip_if_not_on_main
diff --git a/airflow-core/tests/unit/serialization/test_serde.py 
b/airflow-core/tests/unit/serialization/test_serde.py
index 9fb92523d0e..e79b83849df 100644
--- a/airflow-core/tests/unit/serialization/test_serde.py
+++ b/airflow-core/tests/unit/serialization/test_serde.py
@@ -29,6 +29,7 @@ import pytest
 from packaging import version
 from pydantic import BaseModel
 
+from airflow._shared.module_loading import import_string, iter_namespace, qualname
 from airflow.sdk.definitions.asset import Asset
 from airflow.serialization.serde import (
     CLASSNAME,
@@ -43,7 +44,6 @@ from airflow.serialization.serde import (
     deserialize,
     serialize,
 )
-from airflow.utils.module_loading import import_string, iter_namespace, qualname
 
 from tests_common.test_utils.config import conf_vars
 
diff --git a/devel-common/src/tests_common/test_utils/providers.py 
b/devel-common/src/tests_common/test_utils/providers.py
index 7d92d9dcc9a..bdd634c7275 100644
--- a/devel-common/src/tests_common/test_utils/providers.py
+++ b/devel-common/src/tests_common/test_utils/providers.py
@@ -22,7 +22,7 @@ import semver
 
 def object_exists(path: str):
     """Return true if importable python object is there."""
-    from airflow.utils.module_loading import import_string
+    from airflow_shared.module_loading import import_string
 
     try:
         import_string(path)
diff --git a/providers/apache/kafka/pyproject.toml 
b/providers/apache/kafka/pyproject.toml
index 7a59364728b..441d4e40419 100644
--- a/providers/apache/kafka/pyproject.toml
+++ b/providers/apache/kafka/pyproject.toml
@@ -58,7 +58,7 @@ requires-python = ">=3.10,!=3.13"
 # After you modify the dependencies, and rebuild your Breeze CI image with 
``breeze ci-image build``
 dependencies = [
     "apache-airflow>=2.11.0",
-    "apache-airflow-providers-common-compat>=1.10.1",
+    "apache-airflow-providers-common-compat>=1.10.1", # use next version
     "asgiref>=2.3.0",
     "confluent-kafka>=2.6.0",
 ]
diff --git 
a/providers/apache/kafka/src/airflow/providers/apache/kafka/hooks/consume.py 
b/providers/apache/kafka/src/airflow/providers/apache/kafka/hooks/consume.py
index 67d594ae7c8..fa2f5844927 100644
--- a/providers/apache/kafka/src/airflow/providers/apache/kafka/hooks/consume.py
+++ b/providers/apache/kafka/src/airflow/providers/apache/kafka/hooks/consume.py
@@ -21,7 +21,7 @@ from collections.abc import Sequence
 from confluent_kafka import Consumer, KafkaError
 
 from airflow.providers.apache.kafka.hooks.base import KafkaBaseHook
-from airflow.utils.module_loading import import_string
+from airflow.providers.common.compat.module_loading import import_string
 
 
 class KafkaAuthenticationError(Exception):
diff --git 
a/providers/apache/kafka/src/airflow/providers/apache/kafka/operators/consume.py
 
b/providers/apache/kafka/src/airflow/providers/apache/kafka/operators/consume.py
index 853ea10b46e..3456534cce0 100644
--- 
a/providers/apache/kafka/src/airflow/providers/apache/kafka/operators/consume.py
+++ 
b/providers/apache/kafka/src/airflow/providers/apache/kafka/operators/consume.py
@@ -21,8 +21,8 @@ from functools import cached_property, partial
 from typing import Any
 
 from airflow.providers.apache.kafka.hooks.consume import KafkaConsumerHook
+from airflow.providers.common.compat.module_loading import import_string
 from airflow.providers.common.compat.sdk import AirflowException, BaseOperator
-from airflow.utils.module_loading import import_string
 
 VALID_COMMIT_CADENCE = {"never", "end_of_batch", "end_of_operator"}
 
diff --git 
a/providers/apache/kafka/src/airflow/providers/apache/kafka/operators/produce.py
 
b/providers/apache/kafka/src/airflow/providers/apache/kafka/operators/produce.py
index 78d5e8db931..b8b28b929df 100644
--- 
a/providers/apache/kafka/src/airflow/providers/apache/kafka/operators/produce.py
+++ 
b/providers/apache/kafka/src/airflow/providers/apache/kafka/operators/produce.py
@@ -22,8 +22,8 @@ from functools import partial
 from typing import Any
 
 from airflow.providers.apache.kafka.hooks.produce import KafkaProducerHook
+from airflow.providers.common.compat.module_loading import import_string
 from airflow.providers.common.compat.sdk import AirflowException, BaseOperator
-from airflow.utils.module_loading import import_string
 
 local_logger = logging.getLogger("airflow")
 
diff --git 
a/providers/apache/kafka/src/airflow/providers/apache/kafka/triggers/await_message.py
 
b/providers/apache/kafka/src/airflow/providers/apache/kafka/triggers/await_message.py
index d42ffb90c01..0e431b82897 100644
--- 
a/providers/apache/kafka/src/airflow/providers/apache/kafka/triggers/await_message.py
+++ 
b/providers/apache/kafka/src/airflow/providers/apache/kafka/triggers/await_message.py
@@ -25,9 +25,9 @@ from asgiref.sync import sync_to_async
 
 from airflow.providers.apache.kafka.hooks.consume import KafkaConsumerHook
 from airflow.providers.apache.kafka.version_compat import AIRFLOW_V_3_0_PLUS
+from airflow.providers.common.compat.module_loading import import_string
 from airflow.providers.common.compat.sdk import AirflowException
 from airflow.triggers.base import TriggerEvent
-from airflow.utils.module_loading import import_string
 
 if AIRFLOW_V_3_0_PLUS:
     from airflow.triggers.base import BaseEventTrigger
diff --git 
a/providers/common/compat/src/airflow/providers/common/compat/module_loading/__init__.py
 
b/providers/common/compat/src/airflow/providers/common/compat/module_loading/__init__.py
new file mode 100644
index 00000000000..196b283c826
--- /dev/null
+++ 
b/providers/common/compat/src/airflow/providers/common/compat/module_loading/__init__.py
@@ -0,0 +1,49 @@
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+from __future__ import annotations
+
+try:
+    from airflow.utils.module_loading import (
+        import_string,
+        iter_namespace,
+        qualname,
+    )
+
+except ImportError:
+    from airflow.sdk.module_loading import import_string, iter_namespace, qualname
+
+try:
+    # This function was not available in Airflow 3.0/3.1 in module_loading, but it's good to keep it in the
+    # same shared module - good for reuse
+    from airflow.sdk._shared.module_loading import is_valid_dotpath
+
+except ImportError:
+    # TODO: Remove it when Airflow 3.2.0 is the minimum version
+    def is_valid_dotpath(path: str) -> bool:
+        import re
+
+        if not isinstance(path, str):
+            return False
+        pattern = r"^[a-zA-Z_][a-zA-Z0-9_]*(\.[a-zA-Z_][a-zA-Z0-9_]*)*$"
+        return bool(re.match(pattern, path))
+
+
+__all__ = [
+    "import_string",
+    "qualname",
+    "iter_namespace",
+]
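
A hedged usage sketch of the compat shim added above (assuming the common.compat
provider distribution that ships this new module is installed): provider code
imports from one place and gets the core implementation, the Task SDK one, or
the inlined fallback, whichever import succeeds.

    from airflow.providers.common.compat.module_loading import (
        import_string,
        is_valid_dotpath,
        qualname,
    )

    path = "airflow.providers.common.compat.module_loading.import_string"
    if is_valid_dotpath(path):      # dot-separated identifiers only
        func = import_string(path)  # resolve the dotted path to the callable
        assert qualname(func).endswith(".import_string")
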
diff --git a/providers/common/sql/pyproject.toml 
b/providers/common/sql/pyproject.toml
index e44c40d9bfd..38e8754ce85 100644
--- a/providers/common/sql/pyproject.toml
+++ b/providers/common/sql/pyproject.toml
@@ -59,7 +59,7 @@ requires-python = ">=3.10"
 # After you modify the dependencies, and rebuild your Breeze CI image with 
``breeze ci-image build``
 dependencies = [
     "apache-airflow>=2.11.0",
-    "apache-airflow-providers-common-compat>=1.10.1",
+    "apache-airflow-providers-common-compat>=1.10.1", # use next version
     "sqlparse>=0.5.1",
     "more-itertools>=9.0.0",
     # The methodtools dependency is necessary since the introduction of 
dialects:
diff --git a/providers/common/sql/src/airflow/providers/common/sql/hooks/sql.py 
b/providers/common/sql/src/airflow/providers/common/sql/hooks/sql.py
index 96ffe8350ad..b4553017246 100644
--- a/providers/common/sql/src/airflow/providers/common/sql/hooks/sql.py
+++ b/providers/common/sql/src/airflow/providers/common/sql/hooks/sql.py
@@ -35,10 +35,10 @@ from sqlalchemy.exc import ArgumentError, NoSuchModuleError
 
 from airflow.configuration import conf
 from airflow.exceptions import AirflowOptionalProviderFeatureException, 
AirflowProviderDeprecationWarning
+from airflow.providers.common.compat.module_loading import import_string
 from airflow.providers.common.compat.sdk import AirflowException, BaseHook
 from airflow.providers.common.sql.dialects.dialect import Dialect
 from airflow.providers.common.sql.hooks import handlers
-from airflow.utils.module_loading import import_string
 
 if TYPE_CHECKING:
     from pandas import DataFrame as PandasDataFrame
@@ -333,7 +333,7 @@ class DbApiHook(BaseHook):
 
     @cached_property
     def dialect(self) -> Dialect:
-        from airflow.utils.module_loading import import_string
+        from airflow.providers.common.compat.module_loading import import_string
 
         dialect_info = self._dialects.get(self.dialect_name)
 
diff --git a/providers/elasticsearch/pyproject.toml 
b/providers/elasticsearch/pyproject.toml
index 958fbce2326..166bb78ebed 100644
--- a/providers/elasticsearch/pyproject.toml
+++ b/providers/elasticsearch/pyproject.toml
@@ -59,7 +59,7 @@ requires-python = ">=3.10"
 # After you modify the dependencies, and rebuild your Breeze CI image with 
``breeze ci-image build``
 dependencies = [
     "apache-airflow>=2.11.0",
-    "apache-airflow-providers-common-compat>=1.10.1",
+    "apache-airflow-providers-common-compat>=1.10.1", # use next version
     "apache-airflow-providers-common-sql>=1.27.0",
     "elasticsearch>=8.10,<9",
 ]
diff --git 
a/providers/elasticsearch/src/airflow/providers/elasticsearch/log/es_task_handler.py
 
b/providers/elasticsearch/src/airflow/providers/elasticsearch/log/es_task_handler.py
index 0cbdf830490..d01bfa619e7 100644
--- 
a/providers/elasticsearch/src/airflow/providers/elasticsearch/log/es_task_handler.py
+++ 
b/providers/elasticsearch/src/airflow/providers/elasticsearch/log/es_task_handler.py
@@ -40,13 +40,13 @@ from elasticsearch.exceptions import NotFoundError
 
 from airflow.configuration import conf
 from airflow.models.dagrun import DagRun
+from airflow.providers.common.compat.module_loading import import_string
 from airflow.providers.common.compat.sdk import AirflowException, timezone
 from airflow.providers.elasticsearch.log.es_json_formatter import 
ElasticsearchJSONFormatter
 from airflow.providers.elasticsearch.log.es_response import 
ElasticSearchResponse, Hit
 from airflow.providers.elasticsearch.version_compat import AIRFLOW_V_3_0_PLUS
 from airflow.utils.log.file_task_handler import FileTaskHandler
 from airflow.utils.log.logging_mixin import ExternalLoggingMixin, LoggingMixin
-from airflow.utils.module_loading import import_string
 from airflow.utils.session import create_session
 
 if TYPE_CHECKING:
diff --git a/providers/microsoft/azure/pyproject.toml 
b/providers/microsoft/azure/pyproject.toml
index c6128775a3d..42345c84d62 100644
--- a/providers/microsoft/azure/pyproject.toml
+++ b/providers/microsoft/azure/pyproject.toml
@@ -59,7 +59,7 @@ requires-python = ">=3.10"
 # After you modify the dependencies, and rebuild your Breeze CI image with 
``breeze ci-image build``
 dependencies = [
     "apache-airflow>=2.11.0",
-    "apache-airflow-providers-common-compat>=1.10.1",
+    "apache-airflow-providers-common-compat>=1.10.1",  # use next version
     "adlfs>=2023.10.0",
     "azure-batch>=8.0.0",
     "azure-cosmos>=4.6.0",
diff --git 
a/providers/microsoft/azure/src/airflow/providers/microsoft/azure/triggers/msgraph.py
 
b/providers/microsoft/azure/src/airflow/providers/microsoft/azure/triggers/msgraph.py
index 4cb3ce76061..2ff0761b1a2 100644
--- 
a/providers/microsoft/azure/src/airflow/providers/microsoft/azure/triggers/msgraph.py
+++ 
b/providers/microsoft/azure/src/airflow/providers/microsoft/azure/triggers/msgraph.py
@@ -33,9 +33,9 @@ from uuid import UUID
 
 import pendulum
 
+from airflow.providers.common.compat.module_loading import import_string
 from airflow.providers.microsoft.azure.hooks.msgraph import 
KiotaRequestAdapterHook
 from airflow.triggers.base import BaseTrigger, TriggerEvent
-from airflow.utils.module_loading import import_string
 
 if TYPE_CHECKING:
     from io import BytesIO
diff --git a/providers/openlineage/pyproject.toml 
b/providers/openlineage/pyproject.toml
index da5bceca4be..46fa96ce5fc 100644
--- a/providers/openlineage/pyproject.toml
+++ b/providers/openlineage/pyproject.toml
@@ -60,7 +60,7 @@ requires-python = ">=3.10"
 dependencies = [
     "apache-airflow>=2.11.0",
     "apache-airflow-providers-common-sql>=1.20.0",
-    "apache-airflow-providers-common-compat>=1.10.1",
+    "apache-airflow-providers-common-compat>=1.10.1",  # use next version
     "attrs>=22.2",
     "openlineage-integration-common>=1.40.0",
     "openlineage-python>=1.40.0",
diff --git 
a/providers/openlineage/src/airflow/providers/openlineage/utils/utils.py 
b/providers/openlineage/src/airflow/providers/openlineage/utils/utils.py
index 2b1af47014e..6f70672516c 100644
--- a/providers/openlineage/src/airflow/providers/openlineage/utils/utils.py
+++ b/providers/openlineage/src/airflow/providers/openlineage/utils/utils.py
@@ -36,6 +36,7 @@ from airflow import __version__ as AIRFLOW_VERSION
 from airflow.models import DagRun, TaskInstance, TaskReschedule
 from airflow.models.mappedoperator import MappedOperator as 
SerializedMappedOperator
 from airflow.providers.common.compat.assets import Asset
+from airflow.providers.common.compat.module_loading import import_string
 from airflow.providers.common.compat.sdk import DAG, BaseOperator, 
BaseSensorOperator, MappedOperator
 from airflow.providers.openlineage import (
     __version__ as OPENLINEAGE_PROVIDER_VERSION,
@@ -57,7 +58,6 @@ from airflow.providers.openlineage.utils.selective_enable 
import (
 )
 from airflow.providers.openlineage.version_compat import AIRFLOW_V_3_0_PLUS, 
get_base_airflow_version_tuple
 from airflow.serialization.serialized_objects import SerializedBaseOperator, 
SerializedDAG
-from airflow.utils.module_loading import import_string
 
 if not AIRFLOW_V_3_0_PLUS:
     from airflow.utils.session import NEW_SESSION, provide_session
diff --git a/providers/opensearch/pyproject.toml 
b/providers/opensearch/pyproject.toml
index e4c1c7aff2b..29ef9684d67 100644
--- a/providers/opensearch/pyproject.toml
+++ b/providers/opensearch/pyproject.toml
@@ -59,7 +59,7 @@ requires-python = ">=3.10"
 # After you modify the dependencies, and rebuild your Breeze CI image with 
``breeze ci-image build``
 dependencies = [
     "apache-airflow>=2.11.0",
-    "apache-airflow-providers-common-compat>=1.10.1",
+    "apache-airflow-providers-common-compat>=1.10.1",  # use next version
     "opensearch-py>=2.2.0",
 ]
 
diff --git 
a/providers/opensearch/src/airflow/providers/opensearch/log/os_task_handler.py 
b/providers/opensearch/src/airflow/providers/opensearch/log/os_task_handler.py
index fae48f70ef1..61a5db72f83 100644
--- 
a/providers/opensearch/src/airflow/providers/opensearch/log/os_task_handler.py
+++ 
b/providers/opensearch/src/airflow/providers/opensearch/log/os_task_handler.py
@@ -34,6 +34,7 @@ from opensearchpy.exceptions import NotFoundError
 
 from airflow.configuration import conf
 from airflow.models import DagRun
+from airflow.providers.common.compat.module_loading import import_string
 from airflow.providers.common.compat.sdk import AirflowException
 from airflow.providers.opensearch.log.os_json_formatter import 
OpensearchJSONFormatter
 from airflow.providers.opensearch.log.os_response import Hit, 
OpensearchResponse
@@ -41,7 +42,6 @@ from airflow.providers.opensearch.version_compat import 
AIRFLOW_V_3_0_PLUS
 from airflow.utils import timezone
 from airflow.utils.log.file_task_handler import FileTaskHandler
 from airflow.utils.log.logging_mixin import ExternalLoggingMixin, LoggingMixin
-from airflow.utils.module_loading import import_string
 from airflow.utils.session import create_session
 
 if TYPE_CHECKING:
diff --git a/pyproject.toml b/pyproject.toml
index 7a0f81426e1..294846d93aa 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -1292,6 +1292,7 @@ dev = [
     "apache-airflow-ctl-tests",
     "apache-airflow-shared-configuration",
     "apache-airflow-shared-logging",
+    "apache-airflow-shared-module-loading",
     "apache-airflow-shared-secrets-backend",
     "apache-airflow-shared-secrets-masker",
     "apache-airflow-shared-timezones",
@@ -1346,6 +1347,7 @@ apache-airflow-providers = { workspace = true }
 apache-aurflow-docker-stack = { workspace = true }
 apache-airflow-shared-configuration = { workspace = true }
 apache-airflow-shared-logging = { workspace = true }
+apache-airflow-shared-module-loading = { workspace = true }
 apache-airflow-shared-secrets-backend = { workspace = true }
 apache-airflow-shared-secrets-masker = { workspace = true }
 apache-airflow-shared-timezones = { workspace = true }
@@ -1468,6 +1470,7 @@ members = [
     "providers-summary-docs",
     "docker-stack-docs",
     "shared/configuration",
+    "shared/module_loading",
     "shared/logging",
     "shared/secrets_backend",
     "shared/secrets_masker",
diff --git a/shared/configuration/pyproject.toml 
b/shared/configuration/pyproject.toml
index 839dbf69fb9..3e65bec3cbb 100644
--- a/shared/configuration/pyproject.toml
+++ b/shared/configuration/pyproject.toml
@@ -33,6 +33,7 @@ dependencies = [
 [dependency-groups]
 dev = [
     "apache-airflow-devel-common",
+    "apache-airflow-shared-module-loading",
 ]
 
 [build-system]
diff --git a/shared/configuration/src/airflow_shared/configuration/parser.py 
b/shared/configuration/src/airflow_shared/configuration/parser.py
index f704780acff..ed935b4c958 100644
--- a/shared/configuration/src/airflow_shared/configuration/parser.py
+++ b/shared/configuration/src/airflow_shared/configuration/parser.py
@@ -1150,7 +1150,7 @@ class AirflowConfigParser(ConfigParser):
 
         try:
             # Import here to avoid circular dependency
-            from airflow.utils.module_loading import import_string
+            from ..module_loading import import_string
 
             return import_string(full_qualified_path)
         except ImportError as e:
diff --git a/shared/configuration/pyproject.toml 
b/shared/module_loading/pyproject.toml
similarity index 85%
copy from shared/configuration/pyproject.toml
copy to shared/module_loading/pyproject.toml
index 839dbf69fb9..a063506355e 100644
--- a/shared/configuration/pyproject.toml
+++ b/shared/module_loading/pyproject.toml
@@ -16,18 +16,14 @@
 # under the License.
 
 [project]
-name = "apache-airflow-shared-configuration"
-description = "Shared configuration parser code for Airflow distributions"
+name = "apache-airflow-shared-module-loading"
+description = "Shared import utilities code for Airflow distributions"
 version = "0.0"
 classifiers = [
     "Private :: Do Not Upload",
 ]
 
 dependencies = [
-    "packaging>=25.0",
-    "typing-extensions>=4.14.1",
-    "pendulum>=3.1.0",
-    "methodtools>=0.4.7",
 ]
 
 [dependency-groups]
diff --git a/airflow-core/src/airflow/utils/module_loading.py 
b/shared/module_loading/src/airflow_shared/module_loading/__init__.py
similarity index 77%
rename from airflow-core/src/airflow/utils/module_loading.py
rename to shared/module_loading/src/airflow_shared/module_loading/__init__.py
index e0ec74bcb1f..dc732ab4002 100644
--- a/airflow-core/src/airflow/utils/module_loading.py
+++ b/shared/module_loading/src/airflow_shared/module_loading/__init__.py
@@ -67,3 +67,25 @@ def qualname(o: object | Callable) -> str:
 
 def iter_namespace(ns: ModuleType):
     return pkgutil.iter_modules(ns.__path__, ns.__name__ + ".")
+
+
+def is_valid_dotpath(path: str) -> bool:
+    """
+    Check if a string follows valid dotpath format (ie: 'package.subpackage.module').
+
+    :param path: String to check
+    """
+    import re
+
+    if not isinstance(path, str):
+        return False
+
+    # Pattern explanation:
+    # ^            - Start of string
+    # [a-zA-Z_]    - Must start with letter or underscore
+    # [a-zA-Z0-9_] - Following chars can be letters, numbers, or underscores
+    # (\.[a-zA-Z_][a-zA-Z0-9_]*)*  - Can be followed by dots and valid identifiers
+    # $            - End of string
+    pattern = r"^[a-zA-Z_][a-zA-Z0-9_]*(\.[a-zA-Z_][a-zA-Z0-9_]*)*$"
+
+    return bool(re.match(pattern, path))
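
The new helper accepts plain Python identifiers separated by single dots and rejects everything else, including non-string input. A quick sketch mirroring the parametrised cases in the relocated tests:

    from airflow_shared.module_loading import is_valid_dotpath

    assert is_valid_dotpath("valid_path")                 # no dots is still a valid path
    assert is_valid_dotpath("package.sub_package.module")
    assert not is_valid_dotpath("invalid..path")          # consecutive dots
    assert not is_valid_dotpath(".invalid.path")          # leading dot
    assert not is_valid_dotpath("1invalid.path")          # leading digit
    assert not is_valid_dotpath(42)                       # not a string
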
diff --git a/airflow-core/tests/unit/utils/test_module_loading.py b/shared/module_loading/tests/conftest.py
similarity index 59%
copy from airflow-core/tests/unit/utils/test_module_loading.py
copy to shared/module_loading/tests/conftest.py
index 1f92a004b8f..93aecf26184 100644
--- a/airflow-core/tests/unit/utils/test_module_loading.py
+++ b/shared/module_loading/tests/conftest.py
@@ -1,4 +1,3 @@
-#
 # Licensed to the Apache Software Foundation (ASF) under one
 # or more contributor license agreements.  See the NOTICE file
 # distributed with this work for additional information
@@ -17,19 +16,6 @@
 # under the License.
 from __future__ import annotations
 
-import pytest
-
-from airflow.utils.module_loading import import_string
-
-
-class TestModuleImport:
-    def test_import_string(self):
-        cls = import_string("airflow.utils.module_loading.import_string")
-        assert cls == import_string
+import os
 
-        # Test exceptions raised
-        with pytest.raises(ImportError):
-            import_string("no_dots_in_path")
-        msg = 'Module "airflow.utils" does not define a "nonexistent" 
attribute'
-        with pytest.raises(ImportError, match=msg):
-            import_string("airflow.utils.nonexistent")
+os.environ["_AIRFLOW__AS_LIBRARY"] = "true"
diff --git a/airflow-core/tests/unit/utils/test_module_loading.py b/shared/module_loading/tests/module_loading/__init__.py
similarity index 57%
rename from airflow-core/tests/unit/utils/test_module_loading.py
rename to shared/module_loading/tests/module_loading/__init__.py
index 1f92a004b8f..217e5db9607 100644
--- a/airflow-core/tests/unit/utils/test_module_loading.py
+++ b/shared/module_loading/tests/module_loading/__init__.py
@@ -15,21 +15,3 @@
 # KIND, either express or implied.  See the License for the
 # specific language governing permissions and limitations
 # under the License.
-from __future__ import annotations
-
-import pytest
-
-from airflow.utils.module_loading import import_string
-
-
-class TestModuleImport:
-    def test_import_string(self):
-        cls = import_string("airflow.utils.module_loading.import_string")
-        assert cls == import_string
-
-        # Test exceptions raised
-        with pytest.raises(ImportError):
-            import_string("no_dots_in_path")
-        msg = 'Module "airflow.utils" does not define a "nonexistent" 
attribute'
-        with pytest.raises(ImportError, match=msg):
-            import_string("airflow.utils.nonexistent")
diff --git a/task-sdk/tests/task_sdk/definitions/test_module_loading.py b/shared/module_loading/tests/module_loading/test_module_loading.py
similarity index 72%
copy from task-sdk/tests/task_sdk/definitions/test_module_loading.py
copy to shared/module_loading/tests/module_loading/test_module_loading.py
index 979d92b0665..38cd541f0ba 100644
--- a/task-sdk/tests/task_sdk/definitions/test_module_loading.py
+++ b/shared/module_loading/tests/module_loading/test_module_loading.py
@@ -1,3 +1,4 @@
+#
 # Licensed to the Apache Software Foundation (ASF) under one
 # or more contributor license agreements.  See the NOTICE file
 # distributed with this work for additional information
@@ -18,7 +19,24 @@ from __future__ import annotations
 
 import pytest
 
-from airflow.sdk.module_loading import is_valid_dotpath
+from airflow_shared.module_loading import import_string, is_valid_dotpath
+
+
+def _import_string():
+    pass
+
+
+class TestModuleImport:
+    def test_import_string(self):
+        cls = import_string("module_loading.test_module_loading._import_string")
+        assert cls == _import_string
+
+        # Test exceptions raised
+        with pytest.raises(ImportError):
+            import_string("no_dots_in_path")
+        msg = 'Module "module_loading.test_module_loading" does not define a 
"nonexistent" attribute'
+        with pytest.raises(ImportError, match=msg):
+            import_string("module_loading.test_module_loading.nonexistent")
 
 
 class TestModuleLoading:
diff --git a/task-sdk/pyproject.toml b/task-sdk/pyproject.toml
index 97e299fa164..98d91657634 100644
--- a/task-sdk/pyproject.toml
+++ b/task-sdk/pyproject.toml
@@ -116,11 +116,12 @@ path = "src/airflow/sdk/__init__.py"
 
 [tool.hatch.build.targets.sdist.force-include]
 "../shared/configuration/src/airflow_shared/configuration" = 
"src/airflow/sdk/_shared/configuration"
+"../shared/module_loading/src/airflow_shared/module_loading" = 
"src/airflow/sdk/_shared/module_loading"
 "../shared/logging/src/airflow_shared/logging" = 
"src/airflow/sdk/_shared/logging"
+"../shared/observability/src/airflow_shared/observability" = 
"src/airflow/_shared/observability"
+"../shared/secrets_backend/src/airflow_shared/secrets_backend" = 
"src/airflow/sdk/_shared/secrets_backend"
 "../shared/secrets_masker/src/airflow_shared/secrets_masker" = 
"src/airflow/sdk/_shared/secrets_masker"
 "../shared/timezones/src/airflow_shared/timezones" = 
"src/airflow/sdk/_shared/timezones"
-"../shared/secrets_backend/src/airflow_shared/secrets_backend" = 
"src/airflow/sdk/_shared/secrets_backend"
-"../shared/observability/src/airflow_shared/observability" = 
"src/airflow/_shared/observability"
 
 [tool.hatch.build.targets.wheel]
 packages = ["src/airflow"]
@@ -264,6 +265,7 @@ tmp_path_retention_policy = "failed"
 shared_distributions = [
     "apache-airflow-shared-configuration",
     "apache-airflow-shared-logging",
+    "apache-airflow-shared-module-loading",
     "apache-airflow-shared-secrets-backend",
     "apache-airflow-shared-secrets-masker",
     "apache-airflow-shared-timezones",
diff --git a/task-sdk/src/airflow/sdk/_shared/module_loading b/task-sdk/src/airflow/sdk/_shared/module_loading
new file mode 120000
index 00000000000..17709708d89
--- /dev/null
+++ b/task-sdk/src/airflow/sdk/_shared/module_loading
@@ -0,0 +1 @@
+../../../../../shared/module_loading/src/airflow_shared/module_loading
\ No newline at end of file
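
The symlink keeps the task-sdk source tree importable from a checkout, while the force-include entries above vendor the same files into built artifacts. A small sketch to confirm the recorded link target from a repository checkout (the working-directory assumption is mine, not part of the commit):

    from pathlib import Path

    # Assumes the current directory is the airflow repository root.
    link = Path("task-sdk/src/airflow/sdk/_shared/module_loading")
    assert link.is_symlink()
    # Resolves to shared/module_loading/src/airflow_shared/module_loading
    print(link.resolve())
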
diff --git a/task-sdk/src/airflow/sdk/configuration.py b/task-sdk/src/airflow/sdk/configuration.py
index e60df4e13b7..e514940fda7 100644
--- a/task-sdk/src/airflow/sdk/configuration.py
+++ b/task-sdk/src/airflow/sdk/configuration.py
@@ -195,7 +195,7 @@ def initialize_secrets_backends(
 
     Uses SDK's conf instead of Core's conf.
     """
-    from airflow.sdk.module_loading import import_string
+    from airflow.sdk._shared.module_loading import import_string
 
     backend_list = []
     worker_mode = False
diff --git a/task-sdk/src/airflow/sdk/definitions/callback.py b/task-sdk/src/airflow/sdk/definitions/callback.py
index d8cdd0f87ab..4280a43513d 100644
--- a/task-sdk/src/airflow/sdk/definitions/callback.py
+++ b/task-sdk/src/airflow/sdk/definitions/callback.py
@@ -23,7 +23,7 @@ from typing import Any
 
 import structlog
 
-from airflow.sdk.module_loading import import_string, is_valid_dotpath
+from airflow.sdk._shared.module_loading import import_string, is_valid_dotpath
 
 log = structlog.getLogger(__name__)
 
diff --git a/task-sdk/src/airflow/sdk/definitions/connection.py b/task-sdk/src/airflow/sdk/definitions/connection.py
index 7e0ca4f44dd..bcf7937f034 100644
--- a/task-sdk/src/airflow/sdk/definitions/connection.py
+++ b/task-sdk/src/airflow/sdk/definitions/connection.py
@@ -189,7 +189,7 @@ class Connection:
     def get_hook(self, *, hook_params=None):
         """Return hook based on conn_type."""
         from airflow.providers_manager import ProvidersManager
-        from airflow.sdk.module_loading import import_string
+        from airflow.sdk._shared.module_loading import import_string
 
         hook = ProvidersManager().hooks.get(self.conn_type, None)
 
diff --git a/task-sdk/src/airflow/sdk/definitions/dag.py b/task-sdk/src/airflow/sdk/definitions/dag.py
index 1c363ea9b1f..c0be97d7765 100644
--- a/task-sdk/src/airflow/sdk/definitions/dag.py
+++ b/task-sdk/src/airflow/sdk/definitions/dag.py
@@ -1394,7 +1394,7 @@ def _run_task(
     Bypasses a lot of extra steps used in `task.run` to keep our local running as fast as
     possible.  This function is only meant for the `dag.test` function as a helper function.
     """
-    from airflow.sdk.module_loading import import_string
+    from airflow.sdk._shared.module_loading import import_string
 
     taskrun_result: TaskRunResult | None
     log.info("[DAG TEST] starting task_id=%s map_index=%s", ti.task_id, 
ti.map_index)
diff --git a/task-sdk/src/airflow/sdk/io/fs.py b/task-sdk/src/airflow/sdk/io/fs.py
index c9184bc1967..a7d9075fb79 100644
--- a/task-sdk/src/airflow/sdk/io/fs.py
+++ b/task-sdk/src/airflow/sdk/io/fs.py
@@ -25,7 +25,7 @@ from typing import TYPE_CHECKING
 from fsspec.implementations.local import LocalFileSystem
 
 from airflow.providers_manager import ProvidersManager
-from airflow.sdk.module_loading import import_string
+from airflow.sdk._shared.module_loading import import_string
 from airflow.sdk.observability.stats import Stats
 
 if TYPE_CHECKING:
diff --git a/task-sdk/src/airflow/sdk/io/store.py b/task-sdk/src/airflow/sdk/io/store.py
index 30a606f3832..b7d7e379e65 100644
--- a/task-sdk/src/airflow/sdk/io/store.py
+++ b/task-sdk/src/airflow/sdk/io/store.py
@@ -78,7 +78,7 @@ class ObjectStore:
             return f"{self.fs.protocol}-{self.conn_id or 'env'}"
 
     def serialize(self):
-        from airflow.sdk.module_loading import qualname
+        from airflow.sdk._shared.module_loading import qualname
 
         return {
             "protocol": self.protocol,
diff --git a/task-sdk/src/airflow/sdk/module_loading.py b/task-sdk/src/airflow/sdk/module_loading.py
index 05756a79108..d441612d749 100644
--- a/task-sdk/src/airflow/sdk/module_loading.py
+++ b/task-sdk/src/airflow/sdk/module_loading.py
@@ -14,70 +14,11 @@
 # KIND, either express or implied.  See the License for the
 # specific language governing permissions and limitations
 # under the License.
-
 from __future__ import annotations
 
-from collections.abc import Callable
-from importlib import import_module
-
-
-def import_string(dotted_path: str):
-    """
-    Import a dotted module path and return the attribute/class designated by the last name in the path.
-
-    Note: Only supports top-level attributes or classes.
-
-    Raise ImportError if the import failed.
-    """
-    try:
-        module_path, class_name = dotted_path.rsplit(".", 1)
-    except ValueError:
-        raise ImportError(f"{dotted_path} doesn't look like a module path")
-
-    module = import_module(module_path)
-
-    try:
-        return getattr(module, class_name)
-    except AttributeError:
-        raise ImportError(f'Module "{module_path}" does not define a "{class_name}" attribute/class')
-
-
-def qualname(o: object | Callable) -> str:
-    """Convert an attribute/class/function to a string importable by 
``import_string``."""
-    if callable(o) and hasattr(o, "__module__") and hasattr(o, "__name__"):
-        return f"{o.__module__}.{o.__name__}"
-
-    cls = o
-
-    if not isinstance(cls, type):  # instance or class
-        cls = type(cls)
-
-    name = cls.__qualname__
-    module = cls.__module__
-
-    if module and module != "__builtin__":
-        return f"{module}.{name}"
-
-    return name
-
-
-def is_valid_dotpath(path: str) -> bool:
-    """
-    Check if a string follows valid dotpath format (ie: 'package.subpackage.module').
-
-    :param path: String to check
-    """
-    import re
-
-    if not isinstance(path, str):
-        return False
-
-    # Pattern explanation:
-    # ^            - Start of string
-    # [a-zA-Z_]    - Must start with letter or underscore
-    # [a-zA-Z0-9_] - Following chars can be letters, numbers, or underscores
-    # (\.[a-zA-Z_][a-zA-Z0-9_]*)*  - Can be followed by dots and valid identifiers
-    # $            - End of string
-    pattern = r"^[a-zA-Z_][a-zA-Z0-9_]*(\.[a-zA-Z_][a-zA-Z0-9_]*)*$"
-
-    return bool(re.match(pattern, path))
+from airflow.sdk._shared.module_loading import (  # noqa: F401
+    import_string,
+    is_valid_dotpath,
+    iter_namespace,
+    qualname,
+)
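
With the shim in place, the historic airflow.sdk.module_loading import path simply re-exports the shared implementation, so both spellings refer to the same objects. A minimal sanity check, assuming an environment with the task-sdk installed:

    from airflow.sdk._shared.module_loading import import_string as shared_import_string
    from airflow.sdk.module_loading import import_string as legacy_import_string

    # The legacy module is now only a re-export of the shared code.
    assert legacy_import_string is shared_import_string
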
diff --git a/task-sdk/tests/task_sdk/definitions/test_callback.py b/task-sdk/tests/task_sdk/definitions/test_callback.py
index be2a72d9c98..8b2bdb1bdc2 100644
--- a/task-sdk/tests/task_sdk/definitions/test_callback.py
+++ b/task-sdk/tests/task_sdk/definitions/test_callback.py
@@ -20,8 +20,8 @@ from typing import cast
 
 import pytest
 
+from airflow.sdk._shared.module_loading import qualname
 from airflow.sdk.definitions.callback import AsyncCallback, Callback, SyncCallback
-from airflow.sdk.module_loading import qualname
 from airflow.serialization.serde import deserialize, serialize
 
 
diff --git a/task-sdk/tests/task_sdk/definitions/test_connection.py b/task-sdk/tests/task_sdk/definitions/test_connection.py
index 7267eeace90..8fca258ec79 100644
--- a/task-sdk/tests/task_sdk/definitions/test_connection.py
+++ b/task-sdk/tests/task_sdk/definitions/test_connection.py
@@ -39,7 +39,7 @@ class TestConnections:
         with mock.patch("airflow.providers_manager.ProvidersManager") as 
mock_manager:
             yield mock_manager
 
-    @mock.patch("airflow.sdk.module_loading.import_string")
+    @mock.patch("airflow.sdk._shared.module_loading.import_string")
     def test_get_hook(self, mock_import_string, mock_providers_manager):
         """Test that get_hook returns the correct hook instance."""
 
diff --git a/task-sdk/tests/task_sdk/definitions/test_module_loading.py b/task-sdk/tests/task_sdk/definitions/test_module_loading.py
index 979d92b0665..21d298ede6e 100644
--- a/task-sdk/tests/task_sdk/definitions/test_module_loading.py
+++ b/task-sdk/tests/task_sdk/definitions/test_module_loading.py
@@ -15,26 +15,3 @@
 # specific language governing permissions and limitations
 # under the License.
 from __future__ import annotations
-
-import pytest
-
-from airflow.sdk.module_loading import is_valid_dotpath
-
-
-class TestModuleLoading:
-    @pytest.mark.parametrize(
-        ("path", "expected"),
-        [
-            pytest.param("valid_path", True, id="module_no_dots"),
-            pytest.param("valid.dot.path", True, id="standard_dotpath"),
-            pytest.param("package.sub_package.module", True, 
id="dotpath_with_underscores"),
-            pytest.param("MyPackage.MyClass", True, id="mixed_case_path"),
-            pytest.param("invalid..path", False, id="consecutive_dots_fails"),
-            pytest.param(".invalid.path", False, id="leading_dot_fails"),
-            pytest.param("invalid.path.", False, id="trailing_dot_fails"),
-            pytest.param("1invalid.path", False, id="leading_number_fails"),
-            pytest.param(42, False, id="not_a_string"),
-        ],
-    )
-    def test_is_valid_dotpath(self, path, expected):
-        assert is_valid_dotpath(path) == expected
diff --git a/task-sdk/tests/task_sdk/execution_time/test_sentry.py b/task-sdk/tests/task_sdk/execution_time/test_sentry.py
index 37d4e4f5f11..c7f2daccdc4 100644
--- a/task-sdk/tests/task_sdk/execution_time/test_sentry.py
+++ b/task-sdk/tests/task_sdk/execution_time/test_sentry.py
@@ -27,11 +27,11 @@ import pytest
 import uuid6
 
 from airflow.providers.standard.operators.python import PythonOperator
+from airflow.sdk._shared.module_loading import import_string
 from airflow.sdk._shared.timezones import timezone
 from airflow.sdk.api.datamodels._generated import DagRun, DagRunState, DagRunType, TaskInstanceState
 from airflow.sdk.execution_time.comms import GetTaskBreadcrumbs, TaskBreadcrumbsResult
 from airflow.sdk.execution_time.task_runner import RuntimeTaskInstance
-from airflow.sdk.module_loading import import_string
 
 from tests_common.test_utils.config import conf_vars
 
diff --git a/task-sdk/tests/task_sdk/io/test_path.py b/task-sdk/tests/task_sdk/io/test_path.py
index 519ca4046a6..dd338e09f9d 100644
--- a/task-sdk/tests/task_sdk/io/test_path.py
+++ b/task-sdk/tests/task_sdk/io/test_path.py
@@ -28,9 +28,9 @@ from fsspec.implementations.local import LocalFileSystem
 from fsspec.implementations.memory import MemoryFileSystem
 
 from airflow.sdk import Asset, ObjectStoragePath
+from airflow.sdk._shared.module_loading import qualname
 from airflow.sdk.io import attach
 from airflow.sdk.io.store import _STORE_CACHE, ObjectStore
-from airflow.sdk.module_loading import qualname
 
 
 def test_init():
