This is an automated email from the ASF dual-hosted git repository.
jscheffl pushed a commit to branch main
in repository https://gitbox.apache.org/repos/asf/airflow.git
The following commit(s) were added to refs/heads/main by this push:
new 4d3c5ead3b5 Consume ``AirflowOptionalProviderFeatureException`` from
compat sdk in providers (#60335)
4d3c5ead3b5 is described below
commit 4d3c5ead3b570f9c6a395d7aa63d856270fc5619
Author: Amogh Desai <[email protected]>
AuthorDate: Tue Jan 13 03:59:54 2026 +0530
Consume ``AirflowOptionalProviderFeatureException`` from compat sdk in
providers (#60335)
* Consume AirflowOptionalProviderFeatureException from compat sdk in
providers
* fixing static checks
* fixing static checks
---
.../amazon/aws/auth_manager/aws_auth_manager.py | 3 +-
.../amazon/aws/auth_manager/cli/avp_commands.py | 3 +-
.../providers/amazon/aws/auth_manager/user.py | 2 +-
.../providers/amazon/aws/hooks/athena_sql.py | 2 +-
.../providers/amazon/aws/hooks/redshift_sql.py | 3 +-
.../src/airflow/providers/amazon/aws/queues/sqs.py | 2 +-
.../amazon/aws/transfers/azure_blob_to_s3.py | 2 +-
.../providers/amazon/aws/transfers/sql_to_s3.py | 2 +-
.../unit/amazon/aws/hooks/test_hooks_signature.py | 2 +-
.../airflow/providers/apache/beam/hooks/beam.py | 2 +-
.../providers/apache/beam/operators/beam.py | 8 ++++--
.../airflow/providers/apache/beam/triggers/beam.py | 2 +-
.../airflow/providers/apache/hive/hooks/hive.py | 4 +--
providers/apache/impala/docs/index.rst | 28 ++++++++++---------
providers/apache/impala/pyproject.toml | 2 ++
.../providers/apache/impala/hooks/impala.py | 2 +-
providers/apache/kafka/pyproject.toml | 2 +-
.../airflow/providers/apache/kafka/hooks/base.py | 2 +-
providers/celery/pyproject.toml | 2 +-
.../airflow/providers/celery/cli/celery_command.py | 4 +--
.../airflow/providers/celery/executors/__init__.py | 2 +-
.../src/airflow/providers/common/compat/check.py | 2 +-
.../providers/common/compat/openlineage/check.py | 2 +-
.../src/airflow/providers/common/compat/sdk.py | 2 ++
.../unit/common/compat/openlineage/test_check.py | 2 +-
.../compat/tests/unit/common/compat/test_check.py | 2 +-
providers/common/io/docs/index.rst | 11 ++++----
providers/common/io/pyproject.toml | 1 +
.../airflow/providers/common/io/xcom/__init__.py | 2 +-
.../src/airflow/providers/common/sql/hooks/sql.py | 9 ++++--
.../providers/databricks/hooks/databricks_base.py | 3 +-
.../providers/databricks/hooks/databricks_sql.py | 3 +-
.../databricks/plugins/databricks_workflow.py | 3 +-
.../unit/databricks/hooks/test_databricks_sql.py | 3 +-
.../unit/databricks/utils/test_openlineage.py | 2 +-
.../tests/unit/dbt/cloud/utils/test_openlineage.py | 2 +-
providers/exasol/docs/index.rst | 32 ++++++++++++----------
providers/exasol/pyproject.toml | 2 ++
.../src/airflow/providers/exasol/hooks/exasol.py | 3 +-
.../providers/google/cloud/hooks/bigquery.py | 4 +--
.../google/cloud/operators/kubernetes_engine.py | 2 +-
.../google/cloud/transfers/adls_to_gcs.py | 2 +-
.../google/cloud/transfers/azure_blob_to_gcs.py | 2 +-
.../cloud/transfers/azure_fileshare_to_gcs.py | 2 +-
.../providers/google/leveldb/hooks/leveldb.py | 7 +++--
.../cloud/vertex_ai/example_vertex_ai_ray.py | 2 +-
.../tests/system/google/leveldb/example_leveldb.py | 2 +-
.../unit/google/leveldb/hooks/test_leveldb.py | 2 +-
.../unit/google/leveldb/operators/test_leveldb.py | 2 +-
providers/mysql/pyproject.toml | 2 +-
.../src/airflow/providers/mysql/hooks/mysql.py | 2 +-
.../src/airflow/providers/openlineage/utils/sql.py | 2 +-
.../airflow/providers/openlineage/utils/utils.py | 9 ++++--
.../airflow/providers/postgres/hooks/postgres.py | 12 +++++---
.../tests/unit/postgres/hooks/test_postgres.py | 3 +-
.../src/airflow/providers/presto/hooks/presto.py | 8 ++++--
.../airflow/providers/snowflake/hooks/snowflake.py | 8 ++++--
.../tests/unit/snowflake/hooks/test_snowflake.py | 2 +-
.../tests/unit/snowflake/utils/test_openlineage.py | 2 +-
.../airflow/providers/standard/operators/hitl.py | 2 +-
.../airflow/providers/standard/triggers/hitl.py | 2 +-
.../providers/standard/utils/openlineage.py | 2 +-
.../tests/unit/standard/utils/test_openlineage.py | 2 +-
providers/teradata/pyproject.toml | 2 +-
.../airflow/providers/teradata/hooks/teradata.py | 2 +-
.../teradata/transfers/azure_blob_to_teradata.py | 2 +-
.../providers/teradata/transfers/s3_to_teradata.py | 2 +-
.../src/airflow/providers/trino/hooks/trino.py | 8 ++++--
providers/vertica/docs/index.rst | 28 ++++++++++---------
providers/vertica/pyproject.toml | 2 ++
.../src/airflow/providers/vertica/hooks/vertica.py | 2 +-
71 files changed, 172 insertions(+), 128 deletions(-)
diff --git
a/providers/amazon/src/airflow/providers/amazon/aws/auth_manager/aws_auth_manager.py
b/providers/amazon/src/airflow/providers/amazon/aws/auth_manager/aws_auth_manager.py
index df658be1f0c..aa23b4f2634 100644
---
a/providers/amazon/src/airflow/providers/amazon/aws/auth_manager/aws_auth_manager.py
+++
b/providers/amazon/src/airflow/providers/amazon/aws/auth_manager/aws_auth_manager.py
@@ -27,7 +27,6 @@ from fastapi import FastAPI
from airflow.api_fastapi.app import AUTH_MANAGER_FASTAPI_APP_PREFIX
from airflow.api_fastapi.auth.managers.base_auth_manager import BaseAuthManager
from airflow.cli.cli_config import CLICommand
-from airflow.exceptions import AirflowOptionalProviderFeatureException
from airflow.providers.amazon.aws.auth_manager.avp.entities import AvpEntities
from airflow.providers.amazon.aws.auth_manager.avp.facade import (
AwsAuthManagerAmazonVerifiedPermissionsFacade,
@@ -35,7 +34,7 @@ from airflow.providers.amazon.aws.auth_manager.avp.facade
import (
)
from airflow.providers.amazon.aws.auth_manager.user import AwsAuthManagerUser
from airflow.providers.amazon.version_compat import AIRFLOW_V_3_0_PLUS
-from airflow.providers.common.compat.sdk import conf
+from airflow.providers.common.compat.sdk import
AirflowOptionalProviderFeatureException, conf
if TYPE_CHECKING:
from airflow.api_fastapi.auth.managers.base_auth_manager import
ResourceMethod
diff --git
a/providers/amazon/src/airflow/providers/amazon/aws/auth_manager/cli/avp_commands.py
b/providers/amazon/src/airflow/providers/amazon/aws/auth_manager/cli/avp_commands.py
index 27aa5e7ccb3..43e3a815d33 100644
---
a/providers/amazon/src/airflow/providers/amazon/aws/auth_manager/cli/avp_commands.py
+++
b/providers/amazon/src/airflow/providers/amazon/aws/auth_manager/cli/avp_commands.py
@@ -22,9 +22,8 @@ from typing import TYPE_CHECKING
import boto3
-from airflow.exceptions import AirflowOptionalProviderFeatureException
from airflow.providers.amazon.aws.auth_manager.constants import
CONF_REGION_NAME_KEY, CONF_SECTION_NAME
-from airflow.providers.common.compat.sdk import conf
+from airflow.providers.common.compat.sdk import
AirflowOptionalProviderFeatureException, conf
from airflow.utils import cli as cli_utils
try:
diff --git
a/providers/amazon/src/airflow/providers/amazon/aws/auth_manager/user.py
b/providers/amazon/src/airflow/providers/amazon/aws/auth_manager/user.py
index b4cc9845252..6cfc785aa99 100644
--- a/providers/amazon/src/airflow/providers/amazon/aws/auth_manager/user.py
+++ b/providers/amazon/src/airflow/providers/amazon/aws/auth_manager/user.py
@@ -16,7 +16,7 @@
# under the License.
from __future__ import annotations
-from airflow.exceptions import AirflowOptionalProviderFeatureException
+from airflow.providers.common.compat.sdk import
AirflowOptionalProviderFeatureException
try:
from airflow.api_fastapi.auth.managers.models.base_user import BaseUser
diff --git
a/providers/amazon/src/airflow/providers/amazon/aws/hooks/athena_sql.py
b/providers/amazon/src/airflow/providers/amazon/aws/hooks/athena_sql.py
index ad6c815223b..d9cda93a45d 100644
--- a/providers/amazon/src/airflow/providers/amazon/aws/hooks/athena_sql.py
+++ b/providers/amazon/src/airflow/providers/amazon/aws/hooks/athena_sql.py
@@ -156,7 +156,7 @@ class AthenaSQLHook(AwsBaseHook, DbApiHook):
def get_uri(self) -> str:
"""Overridden to use the Athena dialect as driver name."""
- from airflow.exceptions import AirflowOptionalProviderFeatureException
+ from airflow.providers.common.compat.sdk import
AirflowOptionalProviderFeatureException
if URL is None:
raise AirflowOptionalProviderFeatureException(
diff --git
a/providers/amazon/src/airflow/providers/amazon/aws/hooks/redshift_sql.py
b/providers/amazon/src/airflow/providers/amazon/aws/hooks/redshift_sql.py
index 4fc414cd281..eca7bd5afb3 100644
--- a/providers/amazon/src/airflow/providers/amazon/aws/hooks/redshift_sql.py
+++ b/providers/amazon/src/airflow/providers/amazon/aws/hooks/redshift_sql.py
@@ -29,9 +29,8 @@ try:
except ImportError:
URL = create_engine = None # type: ignore[assignment,misc]
-from airflow.exceptions import AirflowOptionalProviderFeatureException
from airflow.providers.amazon.aws.hooks.base_aws import AwsBaseHook
-from airflow.providers.common.compat.sdk import AirflowException
+from airflow.providers.common.compat.sdk import AirflowException,
AirflowOptionalProviderFeatureException
from airflow.providers.common.sql.hooks.sql import DbApiHook
if TYPE_CHECKING:
diff --git a/providers/amazon/src/airflow/providers/amazon/aws/queues/sqs.py
b/providers/amazon/src/airflow/providers/amazon/aws/queues/sqs.py
index e7c0086a0a7..6ae8a00b7c9 100644
--- a/providers/amazon/src/airflow/providers/amazon/aws/queues/sqs.py
+++ b/providers/amazon/src/airflow/providers/amazon/aws/queues/sqs.py
@@ -19,8 +19,8 @@ from __future__ import annotations
import re
from typing import TYPE_CHECKING
-from airflow.exceptions import AirflowOptionalProviderFeatureException
from airflow.providers.amazon.aws.triggers.sqs import SqsSensorTrigger
+from airflow.providers.common.compat.sdk import
AirflowOptionalProviderFeatureException
try:
from airflow.providers.common.messaging.providers.base_provider import
BaseMessageQueueProvider
diff --git
a/providers/amazon/src/airflow/providers/amazon/aws/transfers/azure_blob_to_s3.py
b/providers/amazon/src/airflow/providers/amazon/aws/transfers/azure_blob_to_s3.py
index c34d3469f2b..e30f96060be 100644
---
a/providers/amazon/src/airflow/providers/amazon/aws/transfers/azure_blob_to_s3.py
+++
b/providers/amazon/src/airflow/providers/amazon/aws/transfers/azure_blob_to_s3.py
@@ -28,7 +28,7 @@ from airflow.providers.common.compat.sdk import BaseOperator
try:
from airflow.providers.microsoft.azure.hooks.wasb import WasbHook
except ModuleNotFoundError as e:
- from airflow.exceptions import AirflowOptionalProviderFeatureException
+ from airflow.providers.common.compat.sdk import
AirflowOptionalProviderFeatureException
raise AirflowOptionalProviderFeatureException(e)
diff --git
a/providers/amazon/src/airflow/providers/amazon/aws/transfers/sql_to_s3.py
b/providers/amazon/src/airflow/providers/amazon/aws/transfers/sql_to_s3.py
index 2c3133229cf..98df15c4932 100644
--- a/providers/amazon/src/airflow/providers/amazon/aws/transfers/sql_to_s3.py
+++ b/providers/amazon/src/airflow/providers/amazon/aws/transfers/sql_to_s3.py
@@ -183,7 +183,7 @@ class SqlToS3Operator(BaseOperator):
import numpy as np
import pandas as pd
except ImportError as e:
- from airflow.exceptions import
AirflowOptionalProviderFeatureException
+ from airflow.providers.common.compat.sdk import
AirflowOptionalProviderFeatureException
raise AirflowOptionalProviderFeatureException(e)
diff --git
a/providers/amazon/tests/unit/amazon/aws/hooks/test_hooks_signature.py
b/providers/amazon/tests/unit/amazon/aws/hooks/test_hooks_signature.py
index 8ec01fb2dca..6031ac056a6 100644
--- a/providers/amazon/tests/unit/amazon/aws/hooks/test_hooks_signature.py
+++ b/providers/amazon/tests/unit/amazon/aws/hooks/test_hooks_signature.py
@@ -23,8 +23,8 @@ from pathlib import Path
import pytest
-from airflow.exceptions import AirflowOptionalProviderFeatureException
from airflow.providers.amazon.aws.hooks.base_aws import AwsGenericHook
+from airflow.providers.common.compat.sdk import
AirflowOptionalProviderFeatureException
BASE_AWS_HOOKS = ["AwsGenericHook", "AwsBaseHook"]
ALLOWED_THICK_HOOKS_PARAMETERS: dict[str, set[str]] = {
diff --git
a/providers/apache/beam/src/airflow/providers/apache/beam/hooks/beam.py
b/providers/apache/beam/src/airflow/providers/apache/beam/hooks/beam.py
index e306c7cc191..9dd36bd25a7 100644
--- a/providers/apache/beam/src/airflow/providers/apache/beam/hooks/beam.py
+++ b/providers/apache/beam/src/airflow/providers/apache/beam/hooks/beam.py
@@ -378,7 +378,7 @@ class BeamHook(BaseHook):
try:
from airflow.providers.google.go_module_utils import init_module,
install_dependencies
except ImportError:
- from airflow.exceptions import
AirflowOptionalProviderFeatureException
+ from airflow.providers.common.compat.sdk import
AirflowOptionalProviderFeatureException
raise AirflowOptionalProviderFeatureException(
"Failed to import apache-airflow-google-provider. To start a
go pipeline, please install the"
diff --git
a/providers/apache/beam/src/airflow/providers/apache/beam/operators/beam.py
b/providers/apache/beam/src/airflow/providers/apache/beam/operators/beam.py
index 45ecdb7fe86..45774f7daaa 100644
--- a/providers/apache/beam/src/airflow/providers/apache/beam/operators/beam.py
+++ b/providers/apache/beam/src/airflow/providers/apache/beam/operators/beam.py
@@ -30,10 +30,14 @@ from contextlib import ExitStack
from functools import partial
from typing import TYPE_CHECKING, Any
-from airflow.exceptions import AirflowOptionalProviderFeatureException
from airflow.providers.apache.beam.hooks.beam import BeamHook, BeamRunnerType
from airflow.providers.apache.beam.triggers.beam import
BeamJavaPipelineTrigger, BeamPythonPipelineTrigger
-from airflow.providers.common.compat.sdk import AirflowException,
BaseOperator, conf
+from airflow.providers.common.compat.sdk import (
+ AirflowException,
+ AirflowOptionalProviderFeatureException,
+ BaseOperator,
+ conf,
+)
from airflow.providers_manager import ProvidersManager
from airflow.utils.helpers import convert_camel_to_snake, exactly_one
from airflow.version import version
diff --git
a/providers/apache/beam/src/airflow/providers/apache/beam/triggers/beam.py
b/providers/apache/beam/src/airflow/providers/apache/beam/triggers/beam.py
index 6b2464eb37f..ca3ca2e8a29 100644
--- a/providers/apache/beam/src/airflow/providers/apache/beam/triggers/beam.py
+++ b/providers/apache/beam/src/airflow/providers/apache/beam/triggers/beam.py
@@ -41,7 +41,7 @@ class BeamPipelineBaseTrigger(BaseTrigger):
try:
from airflow.providers.google.cloud.hooks.gcs import GCSAsyncHook
except ImportError:
- from airflow.exceptions import
AirflowOptionalProviderFeatureException
+ from airflow.providers.common.compat.sdk import
AirflowOptionalProviderFeatureException
raise AirflowOptionalProviderFeatureException(
"Failed to import GCSAsyncHook. To use the GCSAsyncHook
functionality, please install the "
diff --git
a/providers/apache/hive/src/airflow/providers/apache/hive/hooks/hive.py
b/providers/apache/hive/src/airflow/providers/apache/hive/hooks/hive.py
index 97de5518368..7b01f79b057 100644
--- a/providers/apache/hive/src/airflow/providers/apache/hive/hooks/hive.py
+++ b/providers/apache/hive/src/airflow/providers/apache/hive/hooks/hive.py
@@ -1038,7 +1038,7 @@ class HiveServer2Hook(DbApiHook):
try:
import pandas as pd
except ImportError as e:
- from airflow.exceptions import
AirflowOptionalProviderFeatureException
+ from airflow.providers.common.compat.sdk import
AirflowOptionalProviderFeatureException
raise AirflowOptionalProviderFeatureException(e)
@@ -1057,7 +1057,7 @@ class HiveServer2Hook(DbApiHook):
try:
import polars as pl
except ImportError as e:
- from airflow.exceptions import
AirflowOptionalProviderFeatureException
+ from airflow.providers.common.compat.sdk import
AirflowOptionalProviderFeatureException
raise AirflowOptionalProviderFeatureException(e)
diff --git a/providers/apache/impala/docs/index.rst
b/providers/apache/impala/docs/index.rst
index edd1b81bad8..972145e587d 100644
--- a/providers/apache/impala/docs/index.rst
+++ b/providers/apache/impala/docs/index.rst
@@ -104,13 +104,14 @@ Requirements
The minimum Apache Airflow version supported by this provider distribution is
``2.11.0``.
-======================================= ==================
-PIP package Version required
-======================================= ==================
-``impyla`` ``>=0.22.0,<1.0``
-``apache-airflow-providers-common-sql`` ``>=1.26.0``
-``apache-airflow`` ``>=2.11.0``
-======================================= ==================
+========================================== ==================
+PIP package Version required
+========================================== ==================
+``impyla`` ``>=0.22.0,<1.0``
+``apache-airflow-providers-common-compat`` ``>=1.10.1``
+``apache-airflow-providers-common-sql`` ``>=1.26.0``
+``apache-airflow`` ``>=2.11.0``
+========================================== ==================
Cross provider package dependencies
-----------------------------------
@@ -122,14 +123,15 @@ You can install such cross-provider dependencies when
installing from PyPI. For
.. code-block:: bash
- pip install apache-airflow-providers-apache-impala[common.sql]
+ pip install apache-airflow-providers-apache-impala[common.compat]
-============================================================================================================
==============
-Dependent package
Extra
-============================================================================================================
==============
-`apache-airflow-providers-common-sql
<https://airflow.apache.org/docs/apache-airflow-providers-common-sql>`_
``common.sql``
-============================================================================================================
==============
+==================================================================================================================
=================
+Dependent package
Extra
+==================================================================================================================
=================
+`apache-airflow-providers-common-compat
<https://airflow.apache.org/docs/apache-airflow-providers-common-compat>`_
``common.compat``
+`apache-airflow-providers-common-sql
<https://airflow.apache.org/docs/apache-airflow-providers-common-sql>`_
``common.sql``
+==================================================================================================================
=================
Downloading official packages
-----------------------------
diff --git a/providers/apache/impala/pyproject.toml
b/providers/apache/impala/pyproject.toml
index 27ccc391aca..bd65bedc9a1 100644
--- a/providers/apache/impala/pyproject.toml
+++ b/providers/apache/impala/pyproject.toml
@@ -59,6 +59,7 @@ requires-python = ">=3.10"
# After you modify the dependencies, and rebuild your Breeze CI image with
``breeze ci-image build``
dependencies = [
"impyla>=0.22.0,<1.0",
+ "apache-airflow-providers-common-compat>=1.10.1", # use next version
"apache-airflow-providers-common-sql>=1.26.0",
"apache-airflow>=2.11.0",
]
@@ -78,6 +79,7 @@ dev = [
"apache-airflow",
"apache-airflow-task-sdk",
"apache-airflow-devel-common",
+ "apache-airflow-providers-common-compat",
"apache-airflow-providers-common-sql",
# Additional devel dependencies (do not remove this line and add extra
development dependencies)
"apache-airflow-providers-apache-impala[kerberos]",
diff --git
a/providers/apache/impala/src/airflow/providers/apache/impala/hooks/impala.py
b/providers/apache/impala/src/airflow/providers/apache/impala/hooks/impala.py
index 39b902ccbc8..07003c3af32 100644
---
a/providers/apache/impala/src/airflow/providers/apache/impala/hooks/impala.py
+++
b/providers/apache/impala/src/airflow/providers/apache/impala/hooks/impala.py
@@ -20,7 +20,7 @@ from typing import TYPE_CHECKING
from impala.dbapi import connect
-from airflow.exceptions import AirflowOptionalProviderFeatureException
+from airflow.providers.common.compat.sdk import
AirflowOptionalProviderFeatureException
from airflow.providers.common.sql.hooks.sql import DbApiHook
if TYPE_CHECKING:
diff --git a/providers/apache/kafka/pyproject.toml
b/providers/apache/kafka/pyproject.toml
index 8d5617bb4ae..ba5eef52d1e 100644
--- a/providers/apache/kafka/pyproject.toml
+++ b/providers/apache/kafka/pyproject.toml
@@ -58,7 +58,7 @@ requires-python = ">=3.10,!=3.13"
# After you modify the dependencies, and rebuild your Breeze CI image with
``breeze ci-image build``
dependencies = [
"apache-airflow>=2.11.0",
- "apache-airflow-providers-common-compat>=1.11.0",
+ "apache-airflow-providers-common-compat>=1.11.0", # use next version
"asgiref>=2.3.0",
"confluent-kafka>=2.6.0",
]
diff --git
a/providers/apache/kafka/src/airflow/providers/apache/kafka/hooks/base.py
b/providers/apache/kafka/src/airflow/providers/apache/kafka/hooks/base.py
index 907500b3ef9..4cc483d973e 100644
--- a/providers/apache/kafka/src/airflow/providers/apache/kafka/hooks/base.py
+++ b/providers/apache/kafka/src/airflow/providers/apache/kafka/hooks/base.py
@@ -72,7 +72,7 @@ class KafkaBaseHook(BaseHook):
try:
from airflow.providers.google.cloud.hooks.managed_kafka import
ManagedKafkaHook
except ImportError:
- from airflow.exceptions import
AirflowOptionalProviderFeatureException
+ from airflow.providers.common.compat.sdk import
AirflowOptionalProviderFeatureException
raise AirflowOptionalProviderFeatureException(
"Failed to import ManagedKafkaHook. For using this
functionality google provider version "
diff --git a/providers/celery/pyproject.toml b/providers/celery/pyproject.toml
index aa1d8c20de1..03fead2cc3f 100644
--- a/providers/celery/pyproject.toml
+++ b/providers/celery/pyproject.toml
@@ -59,7 +59,7 @@ requires-python = ">=3.10"
# After you modify the dependencies, and rebuild your Breeze CI image with
``breeze ci-image build``
dependencies = [
"apache-airflow>=2.11.0",
- "apache-airflow-providers-common-compat>=1.10.1",
+ "apache-airflow-providers-common-compat>=1.10.1", # use next version
# The Celery is known to introduce problems when upgraded to a MAJOR
version. Airflow Core
# Uses Celery for CeleryExecutor, and we also know that Kubernetes Python
client follows SemVer
#
(https://docs.celeryq.dev/en/stable/contributing.html?highlight=semver#versions).
diff --git
a/providers/celery/src/airflow/providers/celery/cli/celery_command.py
b/providers/celery/src/airflow/providers/celery/cli/celery_command.py
index 01c87635b8d..57ebd233349 100644
--- a/providers/celery/src/airflow/providers/celery/cli/celery_command.py
+++ b/providers/celery/src/airflow/providers/celery/cli/celery_command.py
@@ -51,7 +51,7 @@ def _run_command_with_daemon_option(*args, **kwargs):
run_command_with_daemon_option(*args, **kwargs)
except ImportError:
- from airflow.exceptions import AirflowOptionalProviderFeatureException
+ from airflow.providers.common.compat.sdk import
AirflowOptionalProviderFeatureException
raise AirflowOptionalProviderFeatureException(
"Failed to import run_command_with_daemon_option. This feature is
only available in Airflow versions >= 2.8.0"
@@ -65,7 +65,7 @@ def _providers_configuration_loaded(func):
providers_configuration_loaded(func)(*args, **kwargs)
except ImportError as e:
- from airflow.exceptions import
AirflowOptionalProviderFeatureException
+ from airflow.providers.common.compat.sdk import
AirflowOptionalProviderFeatureException
raise AirflowOptionalProviderFeatureException(
"Failed to import providers_configuration_loaded. This feature
is only available in Airflow versions >= 2.8.0"
diff --git
a/providers/celery/src/airflow/providers/celery/executors/__init__.py
b/providers/celery/src/airflow/providers/celery/executors/__init__.py
index 2503cb7e147..2a3c456f047 100644
--- a/providers/celery/src/airflow/providers/celery/executors/__init__.py
+++ b/providers/celery/src/airflow/providers/celery/executors/__init__.py
@@ -19,7 +19,7 @@ from __future__ import annotations
import packaging.version
from airflow import __version__ as airflow_version
-from airflow.exceptions import AirflowOptionalProviderFeatureException
+from airflow.providers.common.compat.sdk import
AirflowOptionalProviderFeatureException
base_version = packaging.version.parse(airflow_version).base_version
diff --git
a/providers/common/compat/src/airflow/providers/common/compat/check.py
b/providers/common/compat/src/airflow/providers/common/compat/check.py
index e11ce29be7c..b9d3c9ac9c0 100644
--- a/providers/common/compat/src/airflow/providers/common/compat/check.py
+++ b/providers/common/compat/src/airflow/providers/common/compat/check.py
@@ -23,7 +23,7 @@ from importlib import metadata
from packaging.version import Version
-from airflow.exceptions import AirflowOptionalProviderFeatureException
+from airflow.providers.common.compat.sdk import
AirflowOptionalProviderFeatureException
def require_provider_version(provider_name: str, provider_min_version: str):
diff --git
a/providers/common/compat/src/airflow/providers/common/compat/openlineage/check.py
b/providers/common/compat/src/airflow/providers/common/compat/openlineage/check.py
index 3598689ce47..a9ee740b582 100644
---
a/providers/common/compat/src/airflow/providers/common/compat/openlineage/check.py
+++
b/providers/common/compat/src/airflow/providers/common/compat/openlineage/check.py
@@ -24,7 +24,7 @@ from typing import Any
from packaging.version import Version
-from airflow.exceptions import AirflowOptionalProviderFeatureException
+from airflow.providers.common.compat.sdk import
AirflowOptionalProviderFeatureException
log = logging.getLogger(__name__)
diff --git a/providers/common/compat/src/airflow/providers/common/compat/sdk.py
b/providers/common/compat/src/airflow/providers/common/compat/sdk.py
index e29bfb1b42f..dc263c376d0 100644
--- a/providers/common/compat/src/airflow/providers/common/compat/sdk.py
+++ b/providers/common/compat/src/airflow/providers/common/compat/sdk.py
@@ -86,6 +86,7 @@ if TYPE_CHECKING:
AirflowException as AirflowException,
AirflowFailException as AirflowFailException,
AirflowNotFoundException as AirflowNotFoundException,
+ AirflowOptionalProviderFeatureException as
AirflowOptionalProviderFeatureException,
AirflowSensorTimeout as AirflowSensorTimeout,
AirflowSkipException as AirflowSkipException,
AirflowTaskTimeout as AirflowTaskTimeout,
@@ -239,6 +240,7 @@ _IMPORT_MAP: dict[str, str | tuple[str, ...]] = {
"AirflowException": ("airflow.sdk.exceptions", "airflow.exceptions"),
"AirflowFailException": ("airflow.sdk.exceptions", "airflow.exceptions"),
"AirflowNotFoundException": ("airflow.sdk.exceptions",
"airflow.exceptions"),
+ "AirflowOptionalProviderFeatureException": ("airflow.sdk.exceptions",
"airflow.exceptions"),
"AirflowSkipException": ("airflow.sdk.exceptions", "airflow.exceptions"),
"AirflowTaskTimeout": ("airflow.sdk.exceptions", "airflow.exceptions"),
"AirflowSensorTimeout": ("airflow.sdk.exceptions", "airflow.exceptions"),
diff --git
a/providers/common/compat/tests/unit/common/compat/openlineage/test_check.py
b/providers/common/compat/tests/unit/common/compat/openlineage/test_check.py
index 77f2a4a2eba..77125f23407 100644
--- a/providers/common/compat/tests/unit/common/compat/openlineage/test_check.py
+++ b/providers/common/compat/tests/unit/common/compat/openlineage/test_check.py
@@ -24,8 +24,8 @@ from unittest.mock import patch
import pytest
-from airflow.exceptions import AirflowOptionalProviderFeatureException
from airflow.providers.common.compat.openlineage.check import
require_openlineage_version
+from airflow.providers.common.compat.sdk import
AirflowOptionalProviderFeatureException
REQUIRE_OPENLINEAGE_VERSION = r"`require_openlineage_version` decorator must
be used with at least one
argument.*@require_openlineage_version\(provider_min_version=\"1\.0\.0\"\)"
diff --git a/providers/common/compat/tests/unit/common/compat/test_check.py
b/providers/common/compat/tests/unit/common/compat/test_check.py
index 1459e915d6d..18a6df1a812 100644
--- a/providers/common/compat/tests/unit/common/compat/test_check.py
+++ b/providers/common/compat/tests/unit/common/compat/test_check.py
@@ -22,8 +22,8 @@ from unittest.mock import patch
import pytest
-from airflow.exceptions import AirflowOptionalProviderFeatureException
from airflow.providers.common.compat.check import require_provider_version
+from airflow.providers.common.compat.sdk import
AirflowOptionalProviderFeatureException
def test_decorator_usage_without_parentheses():
diff --git a/providers/common/io/docs/index.rst
b/providers/common/io/docs/index.rst
index 4db775c3aaf..aa6e3af879e 100644
--- a/providers/common/io/docs/index.rst
+++ b/providers/common/io/docs/index.rst
@@ -99,11 +99,12 @@ Requirements
The minimum Apache Airflow version supported by this provider distribution is
``2.11.0``.
-================== ==================
-PIP package Version required
-================== ==================
-``apache-airflow`` ``>=2.11.0``
-================== ==================
+========================================== ==================
+PIP package Version required
+========================================== ==================
+``apache-airflow`` ``>=2.11.0``
+``apache-airflow-providers-common-compat`` ``>=1.10.1``
+========================================== ==================
Cross provider package dependencies
-----------------------------------
diff --git a/providers/common/io/pyproject.toml
b/providers/common/io/pyproject.toml
index d49a3499065..31b6a5ab516 100644
--- a/providers/common/io/pyproject.toml
+++ b/providers/common/io/pyproject.toml
@@ -59,6 +59,7 @@ requires-python = ">=3.10"
# After you modify the dependencies, and rebuild your Breeze CI image with
``breeze ci-image build``
dependencies = [
"apache-airflow>=2.11.0",
+ "apache-airflow-providers-common-compat>=1.10.1", # use next version
]
# The optional dependencies should be modified in place in the generated file
diff --git
a/providers/common/io/src/airflow/providers/common/io/xcom/__init__.py
b/providers/common/io/src/airflow/providers/common/io/xcom/__init__.py
index 6bdd6c458f8..49a04af63c3 100644
--- a/providers/common/io/src/airflow/providers/common/io/xcom/__init__.py
+++ b/providers/common/io/src/airflow/providers/common/io/xcom/__init__.py
@@ -19,7 +19,7 @@ from __future__ import annotations
import packaging.version
from airflow import __version__ as airflow_version
-from airflow.exceptions import AirflowOptionalProviderFeatureException
+from airflow.providers.common.compat.sdk import
AirflowOptionalProviderFeatureException
if
packaging.version.parse(packaging.version.parse(airflow_version).base_version)
< packaging.version.parse(
"2.9.0"
diff --git a/providers/common/sql/src/airflow/providers/common/sql/hooks/sql.py
b/providers/common/sql/src/airflow/providers/common/sql/hooks/sql.py
index 8ab5e887c4c..f8a29cf2329 100644
--- a/providers/common/sql/src/airflow/providers/common/sql/hooks/sql.py
+++ b/providers/common/sql/src/airflow/providers/common/sql/hooks/sql.py
@@ -42,9 +42,14 @@ except ImportError:
NoSuchModuleError = Exception # type: ignore[misc,assignment]
-from airflow.exceptions import AirflowOptionalProviderFeatureException,
AirflowProviderDeprecationWarning
+from airflow.exceptions import AirflowProviderDeprecationWarning
from airflow.providers.common.compat.module_loading import import_string
-from airflow.providers.common.compat.sdk import AirflowException, BaseHook,
conf
+from airflow.providers.common.compat.sdk import (
+ AirflowException,
+ AirflowOptionalProviderFeatureException,
+ BaseHook,
+ conf,
+)
from airflow.providers.common.sql.dialects.dialect import Dialect
from airflow.providers.common.sql.hooks import handlers
diff --git
a/providers/databricks/src/airflow/providers/databricks/hooks/databricks_base.py
b/providers/databricks/src/airflow/providers/databricks/hooks/databricks_base.py
index fda94704a66..5cf4049a43d 100644
---
a/providers/databricks/src/airflow/providers/databricks/hooks/databricks_base.py
+++
b/providers/databricks/src/airflow/providers/databricks/hooks/databricks_base.py
@@ -49,8 +49,7 @@ from tenacity import (
)
from airflow import __version__
-from airflow.exceptions import AirflowOptionalProviderFeatureException
-from airflow.providers.common.compat.sdk import AirflowException
+from airflow.providers.common.compat.sdk import AirflowException, AirflowOptionalProviderFeatureException
from airflow.providers_manager import ProvidersManager
try:
diff --git
a/providers/databricks/src/airflow/providers/databricks/hooks/databricks_sql.py
b/providers/databricks/src/airflow/providers/databricks/hooks/databricks_sql.py
index 2684b00ac28..127f6b71c70 100644
---
a/providers/databricks/src/airflow/providers/databricks/hooks/databricks_sql.py
+++
b/providers/databricks/src/airflow/providers/databricks/hooks/databricks_sql.py
@@ -33,8 +33,7 @@ from typing import (
from databricks import sql
from databricks.sql.types import Row
-from airflow.exceptions import AirflowOptionalProviderFeatureException
-from airflow.providers.common.compat.sdk import AirflowException
+from airflow.providers.common.compat.sdk import AirflowException, AirflowOptionalProviderFeatureException
from airflow.providers.common.sql.hooks.handlers import
return_single_query_results
from airflow.providers.common.sql.hooks.sql import DbApiHook
from airflow.providers.databricks.exceptions import
DatabricksSqlExecutionError, DatabricksSqlExecutionTimeout
diff --git
a/providers/databricks/src/airflow/providers/databricks/plugins/databricks_workflow.py
b/providers/databricks/src/airflow/providers/databricks/plugins/databricks_workflow.py
index 94fa56a863e..cb1b5c747e9 100644
---
a/providers/databricks/src/airflow/providers/databricks/plugins/databricks_workflow.py
+++
b/providers/databricks/src/airflow/providers/databricks/plugins/databricks_workflow.py
@@ -20,11 +20,12 @@ from __future__ import annotations
from typing import TYPE_CHECKING, Any
from urllib.parse import unquote
-from airflow.exceptions import AirflowOptionalProviderFeatureException, TaskInstanceNotFound
+from airflow.exceptions import TaskInstanceNotFound
from airflow.models.dagrun import DagRun
from airflow.models.taskinstance import TaskInstance, TaskInstanceKey,
clear_task_instances
from airflow.providers.common.compat.sdk import (
AirflowException,
+ AirflowOptionalProviderFeatureException,
AirflowPlugin,
BaseOperatorLink,
TaskGroup,
diff --git
a/providers/databricks/tests/unit/databricks/hooks/test_databricks_sql.py
b/providers/databricks/tests/unit/databricks/hooks/test_databricks_sql.py
index 49d874973f8..a25fc835d9f 100644
--- a/providers/databricks/tests/unit/databricks/hooks/test_databricks_sql.py
+++ b/providers/databricks/tests/unit/databricks/hooks/test_databricks_sql.py
@@ -30,9 +30,8 @@ import polars as pl
import pytest
from databricks.sql.types import Row
-from airflow.exceptions import AirflowOptionalProviderFeatureException
from airflow.models import Connection
-from airflow.providers.common.compat.sdk import AirflowException
+from airflow.providers.common.compat.sdk import AirflowException, AirflowOptionalProviderFeatureException
from airflow.providers.common.sql.hooks.handlers import fetch_all_handler
from airflow.providers.databricks.hooks.databricks_sql import
DatabricksSqlHook, create_timeout_thread
diff --git
a/providers/databricks/tests/unit/databricks/utils/test_openlineage.py
b/providers/databricks/tests/unit/databricks/utils/test_openlineage.py
index 2d127040d32..10efbb7fffc 100644
--- a/providers/databricks/tests/unit/databricks/utils/test_openlineage.py
+++ b/providers/databricks/tests/unit/databricks/utils/test_openlineage.py
@@ -24,12 +24,12 @@ import pytest
from openlineage.client.event_v2 import Job, Run, RunEvent, RunState
from openlineage.client.facet_v2 import job_type_job, parent_run
-from airflow.exceptions import AirflowOptionalProviderFeatureException
from airflow.providers.common.compat.openlineage.facet import (
ErrorMessageRunFacet,
ExternalQueryRunFacet,
SQLJobFacet,
)
+from airflow.providers.common.compat.sdk import AirflowOptionalProviderFeatureException
from airflow.providers.databricks.hooks.databricks import DatabricksHook
from airflow.providers.databricks.hooks.databricks_sql import DatabricksSqlHook
from airflow.providers.databricks.utils.openlineage import (
diff --git a/providers/dbt/cloud/tests/unit/dbt/cloud/utils/test_openlineage.py
b/providers/dbt/cloud/tests/unit/dbt/cloud/utils/test_openlineage.py
index 9455a669f89..32e7342c3e5 100644
--- a/providers/dbt/cloud/tests/unit/dbt/cloud/utils/test_openlineage.py
+++ b/providers/dbt/cloud/tests/unit/dbt/cloud/utils/test_openlineage.py
@@ -24,7 +24,7 @@ import pytest
from openlineage.client.constants import __version__
from packaging.version import parse
-from airflow.exceptions import AirflowOptionalProviderFeatureException
+from airflow.providers.common.compat.sdk import AirflowOptionalProviderFeatureException
from airflow.providers.dbt.cloud.hooks.dbt import DbtCloudHook
from airflow.providers.dbt.cloud.operators.dbt import DbtCloudRunJobOperator
from airflow.providers.dbt.cloud.utils.openlineage import (
diff --git a/providers/exasol/docs/index.rst b/providers/exasol/docs/index.rst
index c10293ec40a..18f31b0ce44 100644
--- a/providers/exasol/docs/index.rst
+++ b/providers/exasol/docs/index.rst
@@ -95,15 +95,16 @@ Requirements
The minimum Apache Airflow version supported by this provider distribution is
``2.11.0``.
-======================================= =====================================
-PIP package Version required
-======================================= =====================================
-``apache-airflow`` ``>=2.11.0``
-``apache-airflow-providers-common-sql`` ``>=1.26.0``
-``pyexasol`` ``>=0.26.0``
-``pandas`` ``>=2.1.2; python_version < "3.13"``
-``pandas`` ``>=2.2.3; python_version >= "3.13"``
-======================================= =====================================
+==========================================
=====================================
+PIP package Version required
+==========================================
=====================================
+``apache-airflow`` ``>=2.11.0``
+``apache-airflow-providers-common-compat`` ``>=1.10.1``
+``apache-airflow-providers-common-sql`` ``>=1.26.0``
+``pyexasol`` ``>=0.26.0``
+``pandas`` ``>=2.1.2; python_version <
"3.13"``
+``pandas`` ``>=2.2.3; python_version >=
"3.13"``
+==========================================
=====================================
Cross provider package dependencies
-----------------------------------
@@ -115,14 +116,15 @@ You can install such cross-provider dependencies when
installing from PyPI. For
.. code-block:: bash
- pip install apache-airflow-providers-exasol[common.sql]
+ pip install apache-airflow-providers-exasol[common.compat]
-============================================================================================================
==============
-Dependent package
Extra
-============================================================================================================
==============
-`apache-airflow-providers-common-sql
<https://airflow.apache.org/docs/apache-airflow-providers-common-sql>`_
``common.sql``
-============================================================================================================
==============
+==================================================================================================================
=================
+Dependent package
Extra
+==================================================================================================================
=================
+`apache-airflow-providers-common-compat
<https://airflow.apache.org/docs/apache-airflow-providers-common-compat>`_
``common.compat``
+`apache-airflow-providers-common-sql
<https://airflow.apache.org/docs/apache-airflow-providers-common-sql>`_
``common.sql``
+==================================================================================================================
=================
Downloading official packages
-----------------------------
diff --git a/providers/exasol/pyproject.toml b/providers/exasol/pyproject.toml
index 78478aa0f09..62d974bffee 100644
--- a/providers/exasol/pyproject.toml
+++ b/providers/exasol/pyproject.toml
@@ -59,6 +59,7 @@ requires-python = ">=3.10"
# After you modify the dependencies, and rebuild your Breeze CI image with
``breeze ci-image build``
dependencies = [
"apache-airflow>=2.11.0",
+ "apache-airflow-providers-common-compat>=1.10.1", # use next version
"apache-airflow-providers-common-sql>=1.26.0",
"pyexasol>=0.26.0",
'pandas>=2.1.2; python_version <"3.13"',
@@ -77,6 +78,7 @@ dev = [
"apache-airflow",
"apache-airflow-task-sdk",
"apache-airflow-devel-common",
+ "apache-airflow-providers-common-compat",
"apache-airflow-providers-common-sql",
# Additional devel dependencies (do not remove this line and add extra
development dependencies)
]
diff --git a/providers/exasol/src/airflow/providers/exasol/hooks/exasol.py
b/providers/exasol/src/airflow/providers/exasol/hooks/exasol.py
index 77ca0048809..9cda8136893 100644
--- a/providers/exasol/src/airflow/providers/exasol/hooks/exasol.py
+++ b/providers/exasol/src/airflow/providers/exasol/hooks/exasol.py
@@ -30,7 +30,8 @@ try:
except ImportError:
URL = None # type: ignore[assignment,misc]
-from airflow.exceptions import AirflowOptionalProviderFeatureException, AirflowProviderDeprecationWarning
+from airflow.exceptions import AirflowProviderDeprecationWarning
+from airflow.providers.common.compat.sdk import AirflowOptionalProviderFeatureException
from airflow.providers.common.sql.hooks.handlers import
return_single_query_results
from airflow.providers.common.sql.hooks.sql import DbApiHook
diff --git
a/providers/google/src/airflow/providers/google/cloud/hooks/bigquery.py
b/providers/google/src/airflow/providers/google/cloud/hooks/bigquery.py
index 5731baec73d..3c9c6c07077 100644
--- a/providers/google/src/airflow/providers/google/cloud/hooks/bigquery.py
+++ b/providers/google/src/airflow/providers/google/cloud/hooks/bigquery.py
@@ -59,9 +59,9 @@ from pandas_gbq import read_gbq
from pandas_gbq.gbq import GbqConnector # noqa: F401 used in
``airflow.contrib.hooks.bigquery``
from sqlalchemy import create_engine
-from airflow.exceptions import AirflowOptionalProviderFeatureException, AirflowProviderDeprecationWarning
+from airflow.exceptions import AirflowProviderDeprecationWarning
from airflow.providers.common.compat.lineage.hook import
get_hook_lineage_collector
-from airflow.providers.common.compat.sdk import AirflowException
+from airflow.providers.common.compat.sdk import AirflowException, AirflowOptionalProviderFeatureException
from airflow.providers.common.sql.hooks.sql import DbApiHook
from airflow.providers.google.cloud.utils.bigquery import bq_cast
from airflow.providers.google.cloud.utils.credentials_provider import
_get_scopes
diff --git
a/providers/google/src/airflow/providers/google/cloud/operators/kubernetes_engine.py
b/providers/google/src/airflow/providers/google/cloud/operators/kubernetes_engine.py
index cf50d56fc36..e04ecd3b65c 100644
---
a/providers/google/src/airflow/providers/google/cloud/operators/kubernetes_engine.py
+++
b/providers/google/src/airflow/providers/google/cloud/operators/kubernetes_engine.py
@@ -63,7 +63,7 @@ from airflow.providers_manager import ProvidersManager
try:
from airflow.providers.cncf.kubernetes.operators.job import
KubernetesDeleteJobOperator
except ImportError:
- from airflow.exceptions import AirflowOptionalProviderFeatureException
+ from airflow.providers.common.compat.sdk import AirflowOptionalProviderFeatureException
raise AirflowOptionalProviderFeatureException(
"Failed to import KubernetesDeleteJobOperator. This operator is only
available in cncf-kubernetes "
diff --git
a/providers/google/src/airflow/providers/google/cloud/transfers/adls_to_gcs.py
b/providers/google/src/airflow/providers/google/cloud/transfers/adls_to_gcs.py
index d4040c6802b..9bd75b14ac3 100644
---
a/providers/google/src/airflow/providers/google/cloud/transfers/adls_to_gcs.py
+++
b/providers/google/src/airflow/providers/google/cloud/transfers/adls_to_gcs.py
@@ -30,7 +30,7 @@ try:
from airflow.providers.microsoft.azure.hooks.data_lake import
AzureDataLakeHook
from airflow.providers.microsoft.azure.operators.adls import
ADLSListOperator
except ModuleNotFoundError as e:
- from airflow.exceptions import AirflowOptionalProviderFeatureException
+ from airflow.providers.common.compat.sdk import AirflowOptionalProviderFeatureException
raise AirflowOptionalProviderFeatureException(e)
diff --git
a/providers/google/src/airflow/providers/google/cloud/transfers/azure_blob_to_gcs.py
b/providers/google/src/airflow/providers/google/cloud/transfers/azure_blob_to_gcs.py
index 47248e5c4ab..1ace104c282 100644
---
a/providers/google/src/airflow/providers/google/cloud/transfers/azure_blob_to_gcs.py
+++
b/providers/google/src/airflow/providers/google/cloud/transfers/azure_blob_to_gcs.py
@@ -27,7 +27,7 @@ from airflow.providers.google.version_compat import
BaseOperator
try:
from airflow.providers.microsoft.azure.hooks.wasb import WasbHook
except ModuleNotFoundError as e:
- from airflow.exceptions import AirflowOptionalProviderFeatureException
+ from airflow.providers.common.compat.sdk import AirflowOptionalProviderFeatureException
raise AirflowOptionalProviderFeatureException(e)
diff --git
a/providers/google/src/airflow/providers/google/cloud/transfers/azure_fileshare_to_gcs.py
b/providers/google/src/airflow/providers/google/cloud/transfers/azure_fileshare_to_gcs.py
index bbfe57509d1..ad362c508ba 100644
---
a/providers/google/src/airflow/providers/google/cloud/transfers/azure_fileshare_to_gcs.py
+++
b/providers/google/src/airflow/providers/google/cloud/transfers/azure_fileshare_to_gcs.py
@@ -30,7 +30,7 @@ from airflow.providers.google.version_compat import
BaseOperator
try:
from airflow.providers.microsoft.azure.hooks.fileshare import
AzureFileShareHook
except ModuleNotFoundError as e:
- from airflow.exceptions import AirflowOptionalProviderFeatureException
+ from airflow.providers.common.compat.sdk import AirflowOptionalProviderFeatureException
raise AirflowOptionalProviderFeatureException(e)
diff --git
a/providers/google/src/airflow/providers/google/leveldb/hooks/leveldb.py
b/providers/google/src/airflow/providers/google/leveldb/hooks/leveldb.py
index 09a1dd1c067..b7822869daa 100644
--- a/providers/google/src/airflow/providers/google/leveldb/hooks/leveldb.py
+++ b/providers/google/src/airflow/providers/google/leveldb/hooks/leveldb.py
@@ -20,8 +20,11 @@ from __future__ import annotations
from typing import Any
-from airflow.exceptions import AirflowOptionalProviderFeatureException
-from airflow.providers.common.compat.sdk import AirflowException, BaseHook
+from airflow.providers.common.compat.sdk import (
+ AirflowException,
+ AirflowOptionalProviderFeatureException,
+ BaseHook,
+)
try:
import plyvel
diff --git
a/providers/google/tests/system/google/cloud/vertex_ai/example_vertex_ai_ray.py
b/providers/google/tests/system/google/cloud/vertex_ai/example_vertex_ai_ray.py
index 6db0a560c1d..566ef8d0e21 100644
---
a/providers/google/tests/system/google/cloud/vertex_ai/example_vertex_ai_ray.py
+++
b/providers/google/tests/system/google/cloud/vertex_ai/example_vertex_ai_ray.py
@@ -26,7 +26,7 @@ from __future__ import annotations
import os
from datetime import datetime
-from airflow.exceptions import AirflowOptionalProviderFeatureException
+from airflow.providers.common.compat.sdk import AirflowOptionalProviderFeatureException
try:
from google.cloud.aiplatform.vertex_ray.util import resources
diff --git a/providers/google/tests/system/google/leveldb/example_leveldb.py
b/providers/google/tests/system/google/leveldb/example_leveldb.py
index 6865f80d4de..bf552e49620 100644
--- a/providers/google/tests/system/google/leveldb/example_leveldb.py
+++ b/providers/google/tests/system/google/leveldb/example_leveldb.py
@@ -26,8 +26,8 @@ from datetime import datetime
import pytest
-from airflow.exceptions import AirflowOptionalProviderFeatureException
from airflow.models.dag import DAG
+from airflow.providers.common.compat.sdk import AirflowOptionalProviderFeatureException
try:
from airflow.providers.google.leveldb.operators.leveldb import
LevelDBOperator
diff --git a/providers/google/tests/unit/google/leveldb/hooks/test_leveldb.py
b/providers/google/tests/unit/google/leveldb/hooks/test_leveldb.py
index 1443cafe36a..36dbf13d533 100644
--- a/providers/google/tests/unit/google/leveldb/hooks/test_leveldb.py
+++ b/providers/google/tests/unit/google/leveldb/hooks/test_leveldb.py
@@ -21,7 +21,7 @@ from unittest import mock
import pytest
-from airflow.exceptions import AirflowOptionalProviderFeatureException
+from airflow.providers.common.compat.sdk import AirflowOptionalProviderFeatureException
try:
from airflow.providers.google.leveldb.hooks.leveldb import LevelDBHook,
LevelDBHookException
diff --git
a/providers/google/tests/unit/google/leveldb/operators/test_leveldb.py
b/providers/google/tests/unit/google/leveldb/operators/test_leveldb.py
index 4d48b576a9d..57a4956bfd5 100644
--- a/providers/google/tests/unit/google/leveldb/operators/test_leveldb.py
+++ b/providers/google/tests/unit/google/leveldb/operators/test_leveldb.py
@@ -37,7 +37,7 @@ from unittest import mock
import pytest
-from airflow.exceptions import AirflowOptionalProviderFeatureException
+from airflow.providers.common.compat.sdk import AirflowOptionalProviderFeatureException
try:
from airflow.providers.google.leveldb.hooks.leveldb import LevelDBHook
diff --git a/providers/mysql/pyproject.toml b/providers/mysql/pyproject.toml
index 01d144e57cc..7169d6e1202 100644
--- a/providers/mysql/pyproject.toml
+++ b/providers/mysql/pyproject.toml
@@ -59,7 +59,7 @@ requires-python = ">=3.10"
# After you modify the dependencies, and rebuild your Breeze CI image with
``breeze ci-image build``
dependencies = [
"apache-airflow>=2.11.0",
- "apache-airflow-providers-common-compat>=1.8.0",
+ "apache-airflow-providers-common-compat>=1.8.0", # use next version
"apache-airflow-providers-common-sql>=1.20.0",
# The mysqlclient package creates friction when installing on MacOS as it
needs pkg-config to
# Install and compile, and it's really only used by MySQL provider, so we
can skip it on MacOS
diff --git a/providers/mysql/src/airflow/providers/mysql/hooks/mysql.py
b/providers/mysql/src/airflow/providers/mysql/hooks/mysql.py
index b7500bb8b8c..99061d1521e 100644
--- a/providers/mysql/src/airflow/providers/mysql/hooks/mysql.py
+++ b/providers/mysql/src/airflow/providers/mysql/hooks/mysql.py
@@ -24,7 +24,7 @@ import logging
from typing import TYPE_CHECKING, Any, Union
from urllib.parse import quote_plus, urlencode
-from airflow.exceptions import AirflowOptionalProviderFeatureException
+from airflow.providers.common.compat.sdk import AirflowOptionalProviderFeatureException
from airflow.providers.common.sql.hooks.sql import DbApiHook
logger = logging.getLogger(__name__)
diff --git
a/providers/openlineage/src/airflow/providers/openlineage/utils/sql.py
b/providers/openlineage/src/airflow/providers/openlineage/utils/sql.py
index 903dc70a7c4..aa17ba3dc32 100644
--- a/providers/openlineage/src/airflow/providers/openlineage/utils/sql.py
+++ b/providers/openlineage/src/airflow/providers/openlineage/utils/sql.py
@@ -26,7 +26,7 @@ from attrs import define
from openlineage.client.event_v2 import Dataset
from openlineage.client.facet_v2 import schema_dataset
-from airflow.exceptions import AirflowOptionalProviderFeatureException
+from airflow.providers.common.compat.sdk import AirflowOptionalProviderFeatureException
if TYPE_CHECKING:
from sqlalchemy import Table
diff --git
a/providers/openlineage/src/airflow/providers/openlineage/utils/utils.py
b/providers/openlineage/src/airflow/providers/openlineage/utils/utils.py
index ba45eac13de..0ad0f0e88bc 100644
--- a/providers/openlineage/src/airflow/providers/openlineage/utils/utils.py
+++ b/providers/openlineage/src/airflow/providers/openlineage/utils/utils.py
@@ -32,11 +32,16 @@ from openlineage.client.utils import RedactMixin
from openlineage.client.uuid import generate_static_uuid
from airflow import __version__ as AIRFLOW_VERSION
-from airflow.exceptions import AirflowOptionalProviderFeatureException
from airflow.models import DagRun, TaskInstance, TaskReschedule
from airflow.providers.common.compat.assets import Asset
from airflow.providers.common.compat.module_loading import import_string
-from airflow.providers.common.compat.sdk import DAG, BaseOperator, BaseSensorOperator, MappedOperator
+from airflow.providers.common.compat.sdk import (
+ DAG,
+ AirflowOptionalProviderFeatureException,
+ BaseOperator,
+ BaseSensorOperator,
+ MappedOperator,
+)
from airflow.providers.openlineage import (
__version__ as OPENLINEAGE_PROVIDER_VERSION,
conf,
diff --git
a/providers/postgres/src/airflow/providers/postgres/hooks/postgres.py
b/providers/postgres/src/airflow/providers/postgres/hooks/postgres.py
index 518eabeb5a6..60d27a175f8 100644
--- a/providers/postgres/src/airflow/providers/postgres/hooks/postgres.py
+++ b/providers/postgres/src/airflow/providers/postgres/hooks/postgres.py
@@ -28,8 +28,12 @@ import psycopg2.extras
from more_itertools import chunked
from psycopg2.extras import DictCursor, NamedTupleCursor, RealDictCursor,
execute_batch
-from airflow.exceptions import AirflowOptionalProviderFeatureException
-from airflow.providers.common.compat.sdk import AirflowException, Connection, conf
+from airflow.providers.common.compat.sdk import (
+ AirflowException,
+ AirflowOptionalProviderFeatureException,
+ Connection,
+ conf,
+)
from airflow.providers.common.sql.hooks.sql import DbApiHook
from airflow.providers.postgres.dialects.postgres import PostgresDialect
@@ -460,7 +464,7 @@ class PostgresHook(DbApiHook):
try:
from airflow.providers.amazon.aws.hooks.base_aws import AwsBaseHook
except ImportError:
- from airflow.exceptions import AirflowOptionalProviderFeatureException
+ from airflow.providers.common.compat.sdk import AirflowOptionalProviderFeatureException
raise AirflowOptionalProviderFeatureException(
"apache-airflow-providers-amazon not installed, run: "
@@ -568,7 +572,7 @@ class PostgresHook(DbApiHook):
try:
from airflow.providers.amazon.aws.hooks.base_aws import AwsBaseHook
except ImportError:
- from airflow.exceptions import AirflowOptionalProviderFeatureException
+ from airflow.providers.common.compat.sdk import AirflowOptionalProviderFeatureException
raise AirflowOptionalProviderFeatureException(
"apache-airflow-providers-amazon not installed, run: "
diff --git a/providers/postgres/tests/unit/postgres/hooks/test_postgres.py
b/providers/postgres/tests/unit/postgres/hooks/test_postgres.py
index 7766f90e05e..40f613e73a2 100644
--- a/providers/postgres/tests/unit/postgres/hooks/test_postgres.py
+++ b/providers/postgres/tests/unit/postgres/hooks/test_postgres.py
@@ -27,9 +27,8 @@ import polars as pl
import pytest
import sqlalchemy
-from airflow.exceptions import AirflowOptionalProviderFeatureException
from airflow.models import Connection
-from airflow.providers.common.compat.sdk import AirflowException
+from airflow.providers.common.compat.sdk import AirflowException, AirflowOptionalProviderFeatureException
from airflow.providers.postgres.dialects.postgres import PostgresDialect
from airflow.providers.postgres.hooks.postgres import CompatConnection,
PostgresHook
diff --git a/providers/presto/src/airflow/providers/presto/hooks/presto.py
b/providers/presto/src/airflow/providers/presto/hooks/presto.py
index c0c3f75d5b9..5cfee1e3fc7 100644
--- a/providers/presto/src/airflow/providers/presto/hooks/presto.py
+++ b/providers/presto/src/airflow/providers/presto/hooks/presto.py
@@ -26,8 +26,12 @@ from deprecated import deprecated
from prestodb.exceptions import DatabaseError
from prestodb.transaction import IsolationLevel
-from airflow.exceptions import AirflowOptionalProviderFeatureException, AirflowProviderDeprecationWarning
-from airflow.providers.common.compat.sdk import AirflowException, conf
+from airflow.exceptions import AirflowProviderDeprecationWarning
+from airflow.providers.common.compat.sdk import (
+ AirflowException,
+ AirflowOptionalProviderFeatureException,
+ conf,
+)
from airflow.providers.common.sql.hooks.sql import DbApiHook
from airflow.providers.presto.version_compat import AIRFLOW_V_3_0_PLUS
diff --git
a/providers/snowflake/src/airflow/providers/snowflake/hooks/snowflake.py
b/providers/snowflake/src/airflow/providers/snowflake/hooks/snowflake.py
index df1b584ff9d..e5bf83558de 100644
--- a/providers/snowflake/src/airflow/providers/snowflake/hooks/snowflake.py
+++ b/providers/snowflake/src/airflow/providers/snowflake/hooks/snowflake.py
@@ -37,8 +37,12 @@ from snowflake.connector import DictCursor,
SnowflakeConnection, util_text
from snowflake.sqlalchemy import URL
from sqlalchemy import create_engine
-from airflow.exceptions import AirflowOptionalProviderFeatureException
-from airflow.providers.common.compat.sdk import AirflowException, Connection, conf
+from airflow.providers.common.compat.sdk import (
+ AirflowException,
+ AirflowOptionalProviderFeatureException,
+ Connection,
+ conf,
+)
from airflow.providers.common.sql.hooks.handlers import
return_single_query_results
from airflow.providers.common.sql.hooks.sql import DbApiHook
from airflow.providers.snowflake.utils.openlineage import
fix_snowflake_sqlalchemy_uri
diff --git a/providers/snowflake/tests/unit/snowflake/hooks/test_snowflake.py
b/providers/snowflake/tests/unit/snowflake/hooks/test_snowflake.py
index b44703c5469..c5eef78787d 100644
--- a/providers/snowflake/tests/unit/snowflake/hooks/test_snowflake.py
+++ b/providers/snowflake/tests/unit/snowflake/hooks/test_snowflake.py
@@ -31,8 +31,8 @@ from cryptography.hazmat.backends import default_backend
from cryptography.hazmat.primitives import serialization
from cryptography.hazmat.primitives.asymmetric import rsa
-from airflow.exceptions import AirflowOptionalProviderFeatureException
from airflow.models import Connection
+from airflow.providers.common.compat.sdk import AirflowOptionalProviderFeatureException
from airflow.providers.snowflake.hooks.snowflake import SnowflakeHook
from airflow.utils import timezone
diff --git a/providers/snowflake/tests/unit/snowflake/utils/test_openlineage.py
b/providers/snowflake/tests/unit/snowflake/utils/test_openlineage.py
index 0dcee3adc95..cc7eabb2f23 100644
--- a/providers/snowflake/tests/unit/snowflake/utils/test_openlineage.py
+++ b/providers/snowflake/tests/unit/snowflake/utils/test_openlineage.py
@@ -24,12 +24,12 @@ import pytest
from openlineage.client.event_v2 import Job, Run, RunEvent, RunState
from openlineage.client.facet_v2 import job_type_job, parent_run
-from airflow.exceptions import AirflowOptionalProviderFeatureException
from airflow.providers.common.compat.openlineage.facet import (
ErrorMessageRunFacet,
ExternalQueryRunFacet,
SQLJobFacet,
)
+from airflow.providers.common.compat.sdk import AirflowOptionalProviderFeatureException
from airflow.providers.openlineage.conf import namespace
from airflow.providers.snowflake.hooks.snowflake import SnowflakeHook
from airflow.providers.snowflake.hooks.snowflake_sql_api import
SnowflakeSqlApiHook
diff --git
a/providers/standard/src/airflow/providers/standard/operators/hitl.py
b/providers/standard/src/airflow/providers/standard/operators/hitl.py
index 37fcadfe014..842b448b08e 100644
--- a/providers/standard/src/airflow/providers/standard/operators/hitl.py
+++ b/providers/standard/src/airflow/providers/standard/operators/hitl.py
@@ -18,7 +18,7 @@ from __future__ import annotations
import logging
-from airflow.exceptions import AirflowOptionalProviderFeatureException
+from airflow.providers.common.compat.sdk import AirflowOptionalProviderFeatureException
from airflow.providers.standard.version_compat import AIRFLOW_V_3_1_3_PLUS,
AIRFLOW_V_3_1_PLUS
if not AIRFLOW_V_3_1_PLUS:
diff --git a/providers/standard/src/airflow/providers/standard/triggers/hitl.py
b/providers/standard/src/airflow/providers/standard/triggers/hitl.py
index 3579c3595de..d7aff2e389c 100644
--- a/providers/standard/src/airflow/providers/standard/triggers/hitl.py
+++ b/providers/standard/src/airflow/providers/standard/triggers/hitl.py
@@ -16,7 +16,7 @@
# under the License.
from __future__ import annotations
-from airflow.exceptions import AirflowOptionalProviderFeatureException
+from airflow.providers.common.compat.sdk import AirflowOptionalProviderFeatureException
from airflow.providers.standard.version_compat import AIRFLOW_V_3_1_PLUS
if not AIRFLOW_V_3_1_PLUS:
diff --git
a/providers/standard/src/airflow/providers/standard/utils/openlineage.py
b/providers/standard/src/airflow/providers/standard/utils/openlineage.py
index 3dd06b4eaec..95f38ab840a 100644
--- a/providers/standard/src/airflow/providers/standard/utils/openlineage.py
+++ b/providers/standard/src/airflow/providers/standard/utils/openlineage.py
@@ -19,8 +19,8 @@ from __future__ import annotations
import logging
from typing import TYPE_CHECKING
-from airflow.exceptions import AirflowOptionalProviderFeatureException
from airflow.providers.common.compat.openlineage.check import
require_openlineage_version
+from airflow.providers.common.compat.sdk import AirflowOptionalProviderFeatureException
if TYPE_CHECKING:
from airflow.models import TaskInstance
diff --git a/providers/standard/tests/unit/standard/utils/test_openlineage.py
b/providers/standard/tests/unit/standard/utils/test_openlineage.py
index d0e0ac5f43c..25b80fa2f65 100644
--- a/providers/standard/tests/unit/standard/utils/test_openlineage.py
+++ b/providers/standard/tests/unit/standard/utils/test_openlineage.py
@@ -21,7 +21,7 @@ from unittest.mock import MagicMock, patch
import pytest
-from airflow.exceptions import AirflowOptionalProviderFeatureException
+from airflow.providers.common.compat.sdk import AirflowOptionalProviderFeatureException
from airflow.providers.standard.utils.openlineage import (
OPENLINEAGE_PROVIDER_MIN_VERSION,
_get_openlineage_parent_info,
diff --git a/providers/teradata/pyproject.toml
b/providers/teradata/pyproject.toml
index 97db29ba1d2..22735f86539 100644
--- a/providers/teradata/pyproject.toml
+++ b/providers/teradata/pyproject.toml
@@ -59,7 +59,7 @@ requires-python = ">=3.10"
# After you modify the dependencies, and rebuild your Breeze CI image with
``breeze ci-image build``
dependencies = [
"apache-airflow>=2.11.0",
- "apache-airflow-providers-common-compat>=1.10.1",
+ "apache-airflow-providers-common-compat>=1.10.1", # use next version
"apache-airflow-providers-common-sql>=1.20.0",
"teradatasqlalchemy>=17.20.0.0",
"teradatasql>=17.20.0.28",
diff --git
a/providers/teradata/src/airflow/providers/teradata/hooks/teradata.py
b/providers/teradata/src/airflow/providers/teradata/hooks/teradata.py
index 6131c4c3c31..57abbfa8046 100644
--- a/providers/teradata/src/airflow/providers/teradata/hooks/teradata.py
+++ b/providers/teradata/src/airflow/providers/teradata/hooks/teradata.py
@@ -25,7 +25,7 @@ from typing import TYPE_CHECKING, Any
import teradatasql
from teradatasql import TeradataConnection
-from airflow.exceptions import AirflowOptionalProviderFeatureException
+from airflow.providers.common.compat.sdk import AirflowOptionalProviderFeatureException
from airflow.providers.common.sql.hooks.sql import DbApiHook
if TYPE_CHECKING:
diff --git
a/providers/teradata/src/airflow/providers/teradata/transfers/azure_blob_to_teradata.py
b/providers/teradata/src/airflow/providers/teradata/transfers/azure_blob_to_teradata.py
index 98a32777498..2393a883195 100644
---
a/providers/teradata/src/airflow/providers/teradata/transfers/azure_blob_to_teradata.py
+++
b/providers/teradata/src/airflow/providers/teradata/transfers/azure_blob_to_teradata.py
@@ -24,7 +24,7 @@ from typing import TYPE_CHECKING
try:
from airflow.providers.microsoft.azure.hooks.wasb import WasbHook
except ModuleNotFoundError as e:
- from airflow.exceptions import AirflowOptionalProviderFeatureException
+ from airflow.providers.common.compat.sdk import AirflowOptionalProviderFeatureException
raise AirflowOptionalProviderFeatureException(e)
diff --git
a/providers/teradata/src/airflow/providers/teradata/transfers/s3_to_teradata.py
b/providers/teradata/src/airflow/providers/teradata/transfers/s3_to_teradata.py
index d6c74d628ce..51f3643c528 100644
---
a/providers/teradata/src/airflow/providers/teradata/transfers/s3_to_teradata.py
+++
b/providers/teradata/src/airflow/providers/teradata/transfers/s3_to_teradata.py
@@ -24,7 +24,7 @@ from typing import TYPE_CHECKING
try:
from airflow.providers.amazon.aws.hooks.s3 import S3Hook
except ModuleNotFoundError as e:
- from airflow.exceptions import AirflowOptionalProviderFeatureException
+    from airflow.providers.common.compat.sdk import AirflowOptionalProviderFeatureException
raise AirflowOptionalProviderFeatureException(e)
from airflow.providers.common.compat.sdk import BaseOperator
diff --git a/providers/trino/src/airflow/providers/trino/hooks/trino.py b/providers/trino/src/airflow/providers/trino/hooks/trino.py
index 464c14999ab..943d638f8d9 100644
--- a/providers/trino/src/airflow/providers/trino/hooks/trino.py
+++ b/providers/trino/src/airflow/providers/trino/hooks/trino.py
@@ -29,8 +29,12 @@ from deprecated import deprecated
from trino.exceptions import DatabaseError
from trino.transaction import IsolationLevel
-from airflow.exceptions import AirflowOptionalProviderFeatureException, AirflowProviderDeprecationWarning
-from airflow.providers.common.compat.sdk import AirflowException, conf
+from airflow.exceptions import AirflowProviderDeprecationWarning
+from airflow.providers.common.compat.sdk import (
+ AirflowException,
+ AirflowOptionalProviderFeatureException,
+ conf,
+)
from airflow.providers.common.sql.hooks.sql import DbApiHook
from airflow.providers.trino.version_compat import AIRFLOW_V_3_0_PLUS
from airflow.utils.helpers import exactly_one
diff --git a/providers/vertica/docs/index.rst b/providers/vertica/docs/index.rst
index 7c41d446278..e6ebdb29fbb 100644
--- a/providers/vertica/docs/index.rst
+++ b/providers/vertica/docs/index.rst
@@ -98,13 +98,14 @@ Requirements
The minimum Apache Airflow version supported by this provider distribution is ``2.11.0``.
-======================================= ==================
-PIP package Version required
-======================================= ==================
-``apache-airflow`` ``>=2.11.0``
-``apache-airflow-providers-common-sql`` ``>=1.26.0``
-``vertica-python`` ``>=1.3.0``
-======================================= ==================
+========================================== ==================
+PIP package Version required
+========================================== ==================
+``apache-airflow`` ``>=2.11.0``
+``apache-airflow-providers-common-compat`` ``>=1.10.1``
+``apache-airflow-providers-common-sql`` ``>=1.26.0``
+``vertica-python`` ``>=1.3.0``
+========================================== ==================
Cross provider package dependencies
-----------------------------------
@@ -116,14 +117,15 @@ You can install such cross-provider dependencies when installing from PyPI. For
.. code-block:: bash
- pip install apache-airflow-providers-vertica[common.sql]
+ pip install apache-airflow-providers-vertica[common.compat]
-============================================================================================================
==============
-Dependent package
Extra
-============================================================================================================
==============
-`apache-airflow-providers-common-sql
<https://airflow.apache.org/docs/apache-airflow-providers-common-sql>`_
``common.sql``
-============================================================================================================
==============
+==================================================================================================================
=================
+Dependent package
Extra
+==================================================================================================================
=================
+`apache-airflow-providers-common-compat
<https://airflow.apache.org/docs/apache-airflow-providers-common-compat>`_
``common.compat``
+`apache-airflow-providers-common-sql
<https://airflow.apache.org/docs/apache-airflow-providers-common-sql>`_
``common.sql``
+==================================================================================================================
=================
Downloading official packages
-----------------------------
diff --git a/providers/vertica/pyproject.toml b/providers/vertica/pyproject.toml
index 57cb90be53b..5c51c6b8721 100644
--- a/providers/vertica/pyproject.toml
+++ b/providers/vertica/pyproject.toml
@@ -59,6 +59,7 @@ requires-python = ">=3.10"
# After you modify the dependencies, and rebuild your Breeze CI image with ``breeze ci-image build``
dependencies = [
"apache-airflow>=2.11.0",
+ "apache-airflow-providers-common-compat>=1.10.1", # use next version
"apache-airflow-providers-common-sql>=1.26.0",
"vertica-python>=1.3.0",
]
@@ -75,6 +76,7 @@ dev = [
"apache-airflow",
"apache-airflow-task-sdk",
"apache-airflow-devel-common",
+ "apache-airflow-providers-common-compat",
"apache-airflow-providers-common-sql",
# Additional devel dependencies (do not remove this line and add extra development dependencies)
"apache-airflow-providers-common-sql[pandas,polars]",
diff --git a/providers/vertica/src/airflow/providers/vertica/hooks/vertica.py
b/providers/vertica/src/airflow/providers/vertica/hooks/vertica.py
index b0e65bfa263..84634ff26bf 100644
--- a/providers/vertica/src/airflow/providers/vertica/hooks/vertica.py
+++ b/providers/vertica/src/airflow/providers/vertica/hooks/vertica.py
@@ -21,7 +21,7 @@ from typing import TYPE_CHECKING, Any, overload
from vertica_python import connect
-from airflow.exceptions import AirflowOptionalProviderFeatureException
+from airflow.providers.common.compat.sdk import AirflowOptionalProviderFeatureException
from airflow.providers.common.sql.hooks.handlers import fetch_all_handler
from airflow.providers.common.sql.hooks.sql import DbApiHook