This is an automated email from the ASF dual-hosted git repository.
potiuk pushed a commit to branch main
in repository https://gitbox.apache.org/repos/asf/airflow.git
The following commit(s) were added to refs/heads/main by this push:
new 9d8c77e447 Improve modules import in Airflow providers by moving some of them
into a type-checking block (#33754)
9d8c77e447 is described below
commit 9d8c77e447f5515b9a6aa85fa72511a86a128c28
Author: Hussein Awala <[email protected]>
AuthorDate: Sun Aug 27 19:10:12 2023 +0200
Improve modules import in Airflow providers by moving some of them into a
type-checking block (#33754)
---
airflow/providers/apache/beam/hooks/beam.py | 6 +++--
airflow/providers/apache/drill/hooks/drill.py | 6 +++--
.../apache/flink/operators/flink_kubernetes.py | 4 ++--
airflow/providers/apache/impala/hooks/impala.py | 6 ++++-
airflow/providers/apache/livy/hooks/livy.py | 6 +++--
airflow/providers/apache/pinot/hooks/pinot.py | 6 +++--
airflow/providers/apprise/notifications/apprise.py | 6 +++--
.../providers/celery/executors/celery_executor.py | 3 ++-
.../celery/executors/celery_executor_utils.py | 5 +++--
.../celery/executors/celery_kubernetes_executor.py | 4 ++--
.../providers/databricks/hooks/databricks_base.py | 6 +++--
.../providers/databricks/hooks/databricks_sql.py | 6 +++--
.../providers/databricks/operators/databricks.py | 3 ++-
.../databricks/operators/databricks_sql.py | 3 ++-
.../providers/elasticsearch/hooks/elasticsearch.py | 6 +++--
.../providers/elasticsearch/log/es_task_handler.py | 7 ++++--
airflow/providers/facebook/ads/hooks/ads.py | 6 +++--
airflow/providers/hashicorp/hooks/vault.py | 8 ++++---
airflow/providers/http/operators/http.py | 6 ++---
airflow/providers/http/triggers/http.py | 6 +++--
airflow/providers/imap/hooks/imap.py | 6 +++--
airflow/providers/influxdb/hooks/influxdb.py | 5 +++--
airflow/providers/jdbc/hooks/jdbc.py | 6 +++--
airflow/providers/microsoft/azure/hooks/adx.py | 6 +++--
airflow/providers/microsoft/azure/hooks/batch.py | 6 +++--
.../microsoft/azure/hooks/container_instance.py | 5 ++++-
.../microsoft/azure/hooks/data_factory.py | 26 ++++++++++++----------
airflow/providers/microsoft/azure/hooks/synapse.py | 6 +++--
airflow/providers/microsoft/azure/hooks/wasb.py | 6 +++--
.../microsoft/azure/log/wasb_task_handler.py | 4 +++-
airflow/providers/microsoft/azure/operators/adx.py | 4 ++--
airflow/providers/microsoft/azure/operators/asb.py | 3 ++-
.../providers/microsoft/azure/operators/synapse.py | 4 ++--
airflow/providers/microsoft/psrp/operators/psrp.py | 3 ++-
airflow/providers/mongo/hooks/mongo.py | 9 +++++---
airflow/providers/mysql/hooks/mysql.py | 3 ++-
airflow/providers/neo4j/hooks/neo4j.py | 6 +++--
airflow/providers/openlineage/extractors/base.py | 7 ++++--
airflow/providers/openlineage/plugins/adapter.py | 2 +-
airflow/providers/openlineage/sqlparser.py | 2 +-
.../providers/pagerduty/hooks/pagerduty_events.py | 6 +++--
airflow/providers/postgres/hooks/postgres.py | 5 +++--
airflow/providers/presto/hooks/presto.py | 6 +++--
airflow/providers/redis/log/redis_task_handler.py | 10 +++++----
airflow/providers/salesforce/hooks/salesforce.py | 2 +-
airflow/providers/salesforce/operators/bulk.py | 2 +-
airflow/providers/samba/hooks/samba.py | 5 ++++-
airflow/providers/sftp/hooks/sftp.py | 7 +++---
airflow/providers/slack/transfers/sql_to_slack.py | 2 +-
airflow/providers/smtp/hooks/smtp.py | 6 +++--
airflow/providers/smtp/operators/smtp.py | 6 +++--
.../snowflake/triggers/snowflake_trigger.py | 6 +++--
airflow/providers/tableau/hooks/tableau.py | 6 +++--
airflow/providers/trino/hooks/trino.py | 6 +++--
airflow/providers/zendesk/hooks/zendesk.py | 10 ++++++---
.../databricks/operators/test_databricks_sql.py | 3 ++-
.../azure/operators/test_azure_data_factory.py | 5 ++++-
.../providers/microsoft/azure/sensors/test_wasb.py | 5 ++++-
tests/providers/mongo/hooks/test_mongo.py | 5 ++++-
tests/providers/snowflake/hooks/test_snowflake.py | 6 +++--
.../snowflake/hooks/test_snowflake_sql_api.py | 6 +++--
61 files changed, 223 insertions(+), 120 deletions(-)
diff --git a/airflow/providers/apache/beam/hooks/beam.py
b/airflow/providers/apache/beam/hooks/beam.py
index 72dc224626..27957b5ca7 100644
--- a/airflow/providers/apache/beam/hooks/beam.py
+++ b/airflow/providers/apache/beam/hooks/beam.py
@@ -23,7 +23,6 @@ import contextlib
import copy
import functools
import json
-import logging
import os
import select
import shlex
@@ -31,7 +30,7 @@ import shutil
import subprocess
import tempfile
import textwrap
-from typing import Callable
+from typing import TYPE_CHECKING, Callable
from packaging.version import Version
@@ -40,6 +39,9 @@ from airflow.hooks.base import BaseHook
from airflow.providers.google.go_module_utils import init_module,
install_dependencies
from airflow.utils.python_virtualenv import prepare_virtualenv
+if TYPE_CHECKING:
+ import logging
+
class BeamRunnerType:
"""
diff --git a/airflow/providers/apache/drill/hooks/drill.py
b/airflow/providers/apache/drill/hooks/drill.py
index 962bd93aea..84043c4efb 100644
--- a/airflow/providers/apache/drill/hooks/drill.py
+++ b/airflow/providers/apache/drill/hooks/drill.py
@@ -17,13 +17,15 @@
# under the License.
from __future__ import annotations
-from typing import Any, Iterable
+from typing import TYPE_CHECKING, Any, Iterable
from sqlalchemy import create_engine
-from sqlalchemy.engine import Connection
from airflow.providers.common.sql.hooks.sql import DbApiHook
+if TYPE_CHECKING:
+ from sqlalchemy.engine import Connection
+
class DrillHook(DbApiHook):
"""
diff --git a/airflow/providers/apache/flink/operators/flink_kubernetes.py
b/airflow/providers/apache/flink/operators/flink_kubernetes.py
index 25f66d1a73..3a8744b085 100644
--- a/airflow/providers/apache/flink/operators/flink_kubernetes.py
+++ b/airflow/providers/apache/flink/operators/flink_kubernetes.py
@@ -20,12 +20,12 @@ from __future__ import annotations
from functools import cached_property
from typing import TYPE_CHECKING, Sequence
-from kubernetes.client import CoreV1Api
-
from airflow.models import BaseOperator
from airflow.providers.cncf.kubernetes.hooks.kubernetes import KubernetesHook
if TYPE_CHECKING:
+ from kubernetes.client import CoreV1Api
+
from airflow.utils.context import Context
diff --git a/airflow/providers/apache/impala/hooks/impala.py
b/airflow/providers/apache/impala/hooks/impala.py
index 0bfd9975d7..ab19865a9e 100644
--- a/airflow/providers/apache/impala/hooks/impala.py
+++ b/airflow/providers/apache/impala/hooks/impala.py
@@ -16,11 +16,15 @@
# under the License.
from __future__ import annotations
+from typing import TYPE_CHECKING
+
from impala.dbapi import connect
-from impala.interface import Connection
from airflow.providers.common.sql.hooks.sql import DbApiHook
+if TYPE_CHECKING:
+ from impala.interface import Connection
+
class ImpalaHook(DbApiHook):
"""Interact with Apache Impala through impyla."""
diff --git a/airflow/providers/apache/livy/hooks/livy.py
b/airflow/providers/apache/livy/hooks/livy.py
index ba2ff1bb13..96b1582d49 100644
--- a/airflow/providers/apache/livy/hooks/livy.py
+++ b/airflow/providers/apache/livy/hooks/livy.py
@@ -21,7 +21,7 @@ import asyncio
import json
import re
from enum import Enum
-from typing import Any, Sequence
+from typing import TYPE_CHECKING, Any, Sequence
import aiohttp
import requests
@@ -29,10 +29,12 @@ from aiohttp import ClientResponseError
from asgiref.sync import sync_to_async
from airflow.exceptions import AirflowException
-from airflow.models import Connection
from airflow.providers.http.hooks.http import HttpAsyncHook, HttpHook
from airflow.utils.log.logging_mixin import LoggingMixin
+if TYPE_CHECKING:
+ from airflow.models import Connection
+
class BatchState(Enum):
"""Batch session states."""
diff --git a/airflow/providers/apache/pinot/hooks/pinot.py
b/airflow/providers/apache/pinot/hooks/pinot.py
index 0fe11a04e4..3f89233fe6 100644
--- a/airflow/providers/apache/pinot/hooks/pinot.py
+++ b/airflow/providers/apache/pinot/hooks/pinot.py
@@ -19,15 +19,17 @@ from __future__ import annotations
import os
import subprocess
-from typing import Any, Iterable, Mapping
+from typing import TYPE_CHECKING, Any, Iterable, Mapping
from pinotdb import connect
from airflow.exceptions import AirflowException
from airflow.hooks.base import BaseHook
-from airflow.models import Connection
from airflow.providers.common.sql.hooks.sql import DbApiHook
+if TYPE_CHECKING:
+ from airflow.models import Connection
+
class PinotAdminHook(BaseHook):
"""
diff --git a/airflow/providers/apprise/notifications/apprise.py
b/airflow/providers/apprise/notifications/apprise.py
index cf78d68f37..3b4fb0c15f 100644
--- a/airflow/providers/apprise/notifications/apprise.py
+++ b/airflow/providers/apprise/notifications/apprise.py
@@ -18,7 +18,7 @@
from __future__ import annotations
from functools import cached_property
-from typing import Iterable
+from typing import TYPE_CHECKING, Iterable
from airflow.exceptions import AirflowOptionalProviderFeatureException
@@ -29,10 +29,12 @@ except ImportError:
"Failed to import BaseNotifier. This feature is only available in
Airflow versions >= 2.6.0"
)
-from apprise import AppriseConfig, NotifyFormat, NotifyType
from airflow.providers.apprise.hooks.apprise import AppriseHook
+if TYPE_CHECKING:
+ from apprise import AppriseConfig, NotifyFormat, NotifyType
+
class AppriseNotifier(BaseNotifier):
"""
diff --git a/airflow/providers/celery/executors/celery_executor.py
b/airflow/providers/celery/executors/celery_executor.py
index cc1b6e8122..a73e7dbb73 100644
--- a/airflow/providers/celery/executors/celery_executor.py
+++ b/airflow/providers/celery/executors/celery_executor.py
@@ -23,7 +23,6 @@
"""
from __future__ import annotations
-import argparse
import logging
import math
import operator
@@ -83,6 +82,8 @@ CELERY_SEND_ERR_MSG_HEADER = "Error sending Celery task"
if TYPE_CHECKING:
+ import argparse
+
from celery import Task
from airflow.executors.base_executor import CommandType, TaskTuple
diff --git a/airflow/providers/celery/executors/celery_executor_utils.py
b/airflow/providers/celery/executors/celery_executor_utils.py
index 5cd8ea7eb1..df261323dc 100644
--- a/airflow/providers/celery/executors/celery_executor_utils.py
+++ b/airflow/providers/celery/executors/celery_executor_utils.py
@@ -34,7 +34,6 @@ from typing import TYPE_CHECKING, Any, Mapping,
MutableMapping, Optional, Tuple
from celery import Celery, Task, states as celery_states
from celery.backends.base import BaseKeyValueStoreBackend
from celery.backends.database import DatabaseBackend, Task as TaskDb, retry,
session_cleanup
-from celery.result import AsyncResult
from celery.signals import import_modules as celery_import_modules
from setproctitle import setproctitle
from sqlalchemy import select
@@ -43,7 +42,6 @@ import airflow.settings as settings
from airflow.configuration import conf
from airflow.exceptions import AirflowException, RemovedInAirflow3Warning
from airflow.executors.base_executor import BaseExecutor
-from airflow.models.taskinstance import TaskInstanceKey
from airflow.providers.celery.executors.default_celery import
DEFAULT_CELERY_CONFIG
from airflow.stats import Stats
from airflow.utils.dag_parsing_context import _airflow_parsing_context_manager
@@ -54,7 +52,10 @@ from airflow.utils.timeout import timeout
log = logging.getLogger(__name__)
if TYPE_CHECKING:
+ from celery.result import AsyncResult
+
from airflow.executors.base_executor import CommandType,
EventBufferValueType
+ from airflow.models.taskinstance import TaskInstanceKey
TaskInstanceInCelery = Tuple[TaskInstanceKey, CommandType, Optional[str],
Task]
diff --git a/airflow/providers/celery/executors/celery_kubernetes_executor.py
b/airflow/providers/celery/executors/celery_kubernetes_executor.py
index 725e7fe5c4..0dff33e957 100644
--- a/airflow/providers/celery/executors/celery_kubernetes_executor.py
+++ b/airflow/providers/celery/executors/celery_kubernetes_executor.py
@@ -20,8 +20,6 @@ from __future__ import annotations
from functools import cached_property
from typing import TYPE_CHECKING, Sequence
-from airflow.callbacks.base_callback_sink import BaseCallbackSink
-from airflow.callbacks.callback_requests import CallbackRequest
from airflow.configuration import conf
from airflow.providers.celery.executors.celery_executor import CeleryExecutor
@@ -36,6 +34,8 @@ from airflow.utils.log.logging_mixin import LoggingMixin
from airflow.utils.providers_configuration_loader import
providers_configuration_loaded
if TYPE_CHECKING:
+ from airflow.callbacks.base_callback_sink import BaseCallbackSink
+ from airflow.callbacks.callback_requests import CallbackRequest
from airflow.executors.base_executor import CommandType,
EventBufferValueType, QueuedTaskInstanceType
from airflow.models.taskinstance import SimpleTaskInstance, TaskInstance
from airflow.models.taskinstancekey import TaskInstanceKey
diff --git a/airflow/providers/databricks/hooks/databricks_base.py
b/airflow/providers/databricks/hooks/databricks_base.py
index f77fe8bcda..b461d1a708 100644
--- a/airflow/providers/databricks/hooks/databricks_base.py
+++ b/airflow/providers/databricks/hooks/databricks_base.py
@@ -28,7 +28,7 @@ import copy
import platform
import time
from functools import cached_property
-from typing import Any
+from typing import TYPE_CHECKING, Any
from urllib.parse import urlsplit
import aiohttp
@@ -48,9 +48,11 @@ from tenacity import (
from airflow import __version__
from airflow.exceptions import AirflowException
from airflow.hooks.base import BaseHook
-from airflow.models import Connection
from airflow.providers_manager import ProvidersManager
+if TYPE_CHECKING:
+ from airflow.models import Connection
+
#
https://docs.microsoft.com/en-us/azure/databricks/dev-tools/api/latest/aad/service-prin-aad-token#--get-an-azure-active-directory-access-token
#
https://docs.microsoft.com/en-us/graph/deployments#app-registration-and-token-service-root-endpoints
AZURE_DEFAULT_AD_ENDPOINT = "https://login.microsoftonline.com"
diff --git a/airflow/providers/databricks/hooks/databricks_sql.py
b/airflow/providers/databricks/hooks/databricks_sql.py
index 362f1ab43b..172ce394f1 100644
--- a/airflow/providers/databricks/hooks/databricks_sql.py
+++ b/airflow/providers/databricks/hooks/databricks_sql.py
@@ -18,15 +18,17 @@ from __future__ import annotations
from contextlib import closing
from copy import copy
-from typing import Any, Callable, Iterable, Mapping, TypeVar, overload
+from typing import TYPE_CHECKING, Any, Callable, Iterable, Mapping, TypeVar,
overload
from databricks import sql # type: ignore[attr-defined]
-from databricks.sql.client import Connection # type: ignore[attr-defined]
from airflow.exceptions import AirflowException
from airflow.providers.common.sql.hooks.sql import DbApiHook,
return_single_query_results
from airflow.providers.databricks.hooks.databricks_base import
BaseDatabricksHook
+if TYPE_CHECKING:
+ from databricks.sql.client import Connection
+
LIST_SQL_ENDPOINTS_ENDPOINT = ("GET", "api/2.0/sql/endpoints")
diff --git a/airflow/providers/databricks/operators/databricks.py
b/airflow/providers/databricks/operators/databricks.py
index ec5d007255..3cc24f1b1b 100644
--- a/airflow/providers/databricks/operators/databricks.py
+++ b/airflow/providers/databricks/operators/databricks.py
@@ -21,7 +21,6 @@ from __future__ import annotations
import time
import warnings
from functools import cached_property
-from logging import Logger
from typing import TYPE_CHECKING, Any, Sequence
from airflow.configuration import conf
@@ -32,6 +31,8 @@ from airflow.providers.databricks.triggers.databricks import
DatabricksExecution
from airflow.providers.databricks.utils.databricks import
normalise_json_content, validate_trigger_event
if TYPE_CHECKING:
+ from logging import Logger
+
from airflow.models.taskinstancekey import TaskInstanceKey
from airflow.utils.context import Context
diff --git a/airflow/providers/databricks/operators/databricks_sql.py
b/airflow/providers/databricks/operators/databricks_sql.py
index a209ed7c18..3b86fc8f3a 100644
--- a/airflow/providers/databricks/operators/databricks_sql.py
+++ b/airflow/providers/databricks/operators/databricks_sql.py
@@ -22,7 +22,6 @@ import csv
import json
from typing import TYPE_CHECKING, Any, Sequence
-from databricks.sql.types import Row
from databricks.sql.utils import ParamEscaper
from airflow.exceptions import AirflowException
@@ -31,6 +30,8 @@ from airflow.providers.common.sql.operators.sql import
SQLExecuteQueryOperator
from airflow.providers.databricks.hooks.databricks_sql import DatabricksSqlHook
if TYPE_CHECKING:
+ from databricks.sql.types import Row
+
from airflow.utils.context import Context
diff --git a/airflow/providers/elasticsearch/hooks/elasticsearch.py
b/airflow/providers/elasticsearch/hooks/elasticsearch.py
index b504215966..24b8a0f8b0 100644
--- a/airflow/providers/elasticsearch/hooks/elasticsearch.py
+++ b/airflow/providers/elasticsearch/hooks/elasticsearch.py
@@ -19,16 +19,18 @@ from __future__ import annotations
import warnings
from functools import cached_property
-from typing import Any
+from typing import TYPE_CHECKING, Any
from urllib import parse
from elasticsearch import Elasticsearch
from airflow.exceptions import AirflowProviderDeprecationWarning
from airflow.hooks.base import BaseHook
-from airflow.models.connection import Connection as AirflowConnection
from airflow.providers.common.sql.hooks.sql import DbApiHook
+if TYPE_CHECKING:
+ from airflow.models.connection import Connection as AirflowConnection
+
def connect(
host: str = "localhost",
diff --git a/airflow/providers/elasticsearch/log/es_task_handler.py
b/airflow/providers/elasticsearch/log/es_task_handler.py
index 7f6c5c8873..bbfdb4c2b8 100644
--- a/airflow/providers/elasticsearch/log/es_task_handler.py
+++ b/airflow/providers/elasticsearch/log/es_task_handler.py
@@ -21,7 +21,6 @@ import logging
import sys
import warnings
from collections import defaultdict
-from datetime import datetime
from operator import attrgetter
from time import time
from typing import TYPE_CHECKING, Any, Callable, List, Tuple
@@ -35,7 +34,6 @@ from elasticsearch.exceptions import NotFoundError
from airflow.configuration import conf
from airflow.exceptions import AirflowProviderDeprecationWarning
from airflow.models.dagrun import DagRun
-from airflow.models.taskinstance import TaskInstance
from airflow.providers.elasticsearch.log.es_json_formatter import
ElasticsearchJSONFormatter
from airflow.providers.elasticsearch.log.es_response import
ElasticSearchResponse, Hit
from airflow.utils import timezone
@@ -43,6 +41,11 @@ from airflow.utils.log.file_task_handler import
FileTaskHandler
from airflow.utils.log.logging_mixin import ExternalLoggingMixin, LoggingMixin
from airflow.utils.session import create_session
+if TYPE_CHECKING:
+ from datetime import datetime
+
+ from airflow.models.taskinstance import TaskInstance
+
LOG_LINE_DEFAULTS = {"exc_text": "", "stack_info": ""}
# Elasticsearch hosted log type
EsLogMsgType = List[Tuple[str, str]]
diff --git a/airflow/providers/facebook/ads/hooks/ads.py
b/airflow/providers/facebook/ads/hooks/ads.py
index ccfec93e53..074dd1154e 100644
--- a/airflow/providers/facebook/ads/hooks/ads.py
+++ b/airflow/providers/facebook/ads/hooks/ads.py
@@ -21,16 +21,18 @@ from __future__ import annotations
import time
from enum import Enum
from functools import cached_property
-from typing import Any
+from typing import TYPE_CHECKING, Any
from facebook_business.adobjects.adaccount import AdAccount
from facebook_business.adobjects.adreportrun import AdReportRun
-from facebook_business.adobjects.adsinsights import AdsInsights
from facebook_business.api import FacebookAdsApi
from airflow.exceptions import AirflowException
from airflow.hooks.base import BaseHook
+if TYPE_CHECKING:
+ from facebook_business.adobjects.adsinsights import AdsInsights
+
class JobStatus(Enum):
"""Available options for facebook async task status."""
diff --git a/airflow/providers/hashicorp/hooks/vault.py
b/airflow/providers/hashicorp/hooks/vault.py
index 0fe8e21031..67594937f8 100644
--- a/airflow/providers/hashicorp/hooks/vault.py
+++ b/airflow/providers/hashicorp/hooks/vault.py
@@ -19,11 +19,9 @@ from __future__ import annotations
import json
import warnings
-from typing import Any
+from typing import TYPE_CHECKING, Any
-import hvac
from hvac.exceptions import VaultError
-from requests import Response
from airflow.exceptions import AirflowProviderDeprecationWarning
from airflow.hooks.base import BaseHook
@@ -34,6 +32,10 @@ from
airflow.providers.hashicorp._internal_client.vault_client import (
)
from airflow.utils.helpers import merge_dicts
+if TYPE_CHECKING:
+ import hvac
+ from requests import Response
+
class VaultHook(BaseHook):
"""
diff --git a/airflow/providers/http/operators/http.py
b/airflow/providers/http/operators/http.py
index cf97cf7669..f4010e22d0 100644
--- a/airflow/providers/http/operators/http.py
+++ b/airflow/providers/http/operators/http.py
@@ -21,9 +21,6 @@ import base64
import pickle
from typing import TYPE_CHECKING, Any, Callable, Sequence
-from requests import Response
-from requests.auth import AuthBase
-
from airflow.configuration import conf
from airflow.exceptions import AirflowException
from airflow.models import BaseOperator
@@ -31,6 +28,9 @@ from airflow.providers.http.hooks.http import HttpHook
from airflow.providers.http.triggers.http import HttpTrigger
if TYPE_CHECKING:
+ from requests import Response
+ from requests.auth import AuthBase
+
from airflow.utils.context import Context
diff --git a/airflow/providers/http/triggers/http.py
b/airflow/providers/http/triggers/http.py
index c615a632d0..aa2fdab045 100644
--- a/airflow/providers/http/triggers/http.py
+++ b/airflow/providers/http/triggers/http.py
@@ -18,16 +18,18 @@ from __future__ import annotations
import base64
import pickle
-from typing import Any, AsyncIterator
+from typing import TYPE_CHECKING, Any, AsyncIterator
import requests
-from aiohttp.client_reqrep import ClientResponse
from requests.cookies import RequestsCookieJar
from requests.structures import CaseInsensitiveDict
from airflow.providers.http.hooks.http import HttpAsyncHook
from airflow.triggers.base import BaseTrigger, TriggerEvent
+if TYPE_CHECKING:
+ from aiohttp.client_reqrep import ClientResponse
+
class HttpTrigger(BaseTrigger):
"""
diff --git a/airflow/providers/imap/hooks/imap.py
b/airflow/providers/imap/hooks/imap.py
index 3e214a5fea..1ff2c7154b 100644
--- a/airflow/providers/imap/hooks/imap.py
+++ b/airflow/providers/imap/hooks/imap.py
@@ -27,13 +27,15 @@ import imaplib
import os
import re
import ssl
-from typing import Any, Iterable
+from typing import TYPE_CHECKING, Any, Iterable
from airflow.exceptions import AirflowException
from airflow.hooks.base import BaseHook
-from airflow.models.connection import Connection
from airflow.utils.log.logging_mixin import LoggingMixin
+if TYPE_CHECKING:
+ from airflow.models.connection import Connection
+
class ImapHook(BaseHook):
"""
diff --git a/airflow/providers/influxdb/hooks/influxdb.py
b/airflow/providers/influxdb/hooks/influxdb.py
index 290dd37727..f5b1d4b692 100644
--- a/airflow/providers/influxdb/hooks/influxdb.py
+++ b/airflow/providers/influxdb/hooks/influxdb.py
@@ -27,15 +27,16 @@ from __future__ import annotations
from typing import TYPE_CHECKING
from influxdb_client import InfluxDBClient
-from influxdb_client.client.flux_table import FluxTable
from influxdb_client.client.write.point import Point
from influxdb_client.client.write_api import SYNCHRONOUS
from airflow.hooks.base import BaseHook
-from airflow.models import Connection
if TYPE_CHECKING:
import pandas as pd
+ from influxdb_client.client.flux_table import FluxTable
+
+ from airflow.models import Connection
class InfluxDBHook(BaseHook):
diff --git a/airflow/providers/jdbc/hooks/jdbc.py
b/airflow/providers/jdbc/hooks/jdbc.py
index 0a1656abd4..7ed9129792 100644
--- a/airflow/providers/jdbc/hooks/jdbc.py
+++ b/airflow/providers/jdbc/hooks/jdbc.py
@@ -17,13 +17,15 @@
# under the License.
from __future__ import annotations
-from typing import Any
+from typing import TYPE_CHECKING, Any
import jaydebeapi
-from airflow.models.connection import Connection
from airflow.providers.common.sql.hooks.sql import DbApiHook
+if TYPE_CHECKING:
+ from airflow.models.connection import Connection
+
class JdbcHook(DbApiHook):
"""General hook for JDBC access.
diff --git a/airflow/providers/microsoft/azure/hooks/adx.py
b/airflow/providers/microsoft/azure/hooks/adx.py
index 53da21e396..ae6376047e 100644
--- a/airflow/providers/microsoft/azure/hooks/adx.py
+++ b/airflow/providers/microsoft/azure/hooks/adx.py
@@ -27,16 +27,18 @@ from __future__ import annotations
import warnings
from functools import cached_property
-from typing import Any
+from typing import TYPE_CHECKING, Any
from azure.identity import DefaultAzureCredential
from azure.kusto.data import ClientRequestProperties, KustoClient,
KustoConnectionStringBuilder
from azure.kusto.data.exceptions import KustoServiceError
-from azure.kusto.data.response import KustoResponseDataSetV2
from airflow.exceptions import AirflowException,
AirflowProviderDeprecationWarning
from airflow.hooks.base import BaseHook
+if TYPE_CHECKING:
+ from azure.kusto.data.response import KustoResponseDataSetV2
+
class AzureDataExplorerHook(BaseHook):
"""
diff --git a/airflow/providers/microsoft/azure/hooks/batch.py
b/airflow/providers/microsoft/azure/hooks/batch.py
index 594725c0da..cfa99fcf94 100644
--- a/airflow/providers/microsoft/azure/hooks/batch.py
+++ b/airflow/providers/microsoft/azure/hooks/batch.py
@@ -20,16 +20,18 @@ from __future__ import annotations
import time
from datetime import timedelta
from functools import cached_property
-from typing import Any
+from typing import TYPE_CHECKING, Any
from azure.batch import BatchServiceClient, batch_auth, models as batch_models
-from azure.batch.models import JobAddParameter, PoolAddParameter,
TaskAddParameter
from airflow.exceptions import AirflowException
from airflow.hooks.base import BaseHook
from airflow.providers.microsoft.azure.utils import
AzureIdentityCredentialAdapter, get_field
from airflow.utils import timezone
+if TYPE_CHECKING:
+ from azure.batch.models import JobAddParameter, PoolAddParameter,
TaskAddParameter
+
class AzureBatchHook(BaseHook):
"""
diff --git a/airflow/providers/microsoft/azure/hooks/container_instance.py
b/airflow/providers/microsoft/azure/hooks/container_instance.py
index 8fc845bf13..77e24ab944 100644
--- a/airflow/providers/microsoft/azure/hooks/container_instance.py
+++ b/airflow/providers/microsoft/azure/hooks/container_instance.py
@@ -19,13 +19,16 @@ from __future__ import annotations
import warnings
from functools import cached_property
+from typing import TYPE_CHECKING
from azure.mgmt.containerinstance import ContainerInstanceManagementClient
-from azure.mgmt.containerinstance.models import ContainerGroup
from airflow.exceptions import AirflowProviderDeprecationWarning
from airflow.providers.microsoft.azure.hooks.base_azure import AzureBaseHook
+if TYPE_CHECKING:
+ from azure.mgmt.containerinstance.models import ContainerGroup
+
class AzureContainerInstanceHook(AzureBaseHook):
"""
diff --git a/airflow/providers/microsoft/azure/hooks/data_factory.py
b/airflow/providers/microsoft/azure/hooks/data_factory.py
index cd00b1b631..7301ace03e 100644
--- a/airflow/providers/microsoft/azure/hooks/data_factory.py
+++ b/airflow/providers/microsoft/azure/hooks/data_factory.py
@@ -34,11 +34,10 @@ import inspect
import time
import warnings
from functools import wraps
-from typing import Any, Callable, TypeVar, Union, cast
+from typing import TYPE_CHECKING, Any, Callable, TypeVar, Union, cast
from asgiref.sync import sync_to_async
from azure.core.exceptions import ServiceRequestError
-from azure.core.polling import LROPoller
from azure.identity import ClientSecretCredential, DefaultAzureCredential
from azure.identity.aio import (
ClientSecretCredential as AsyncClientSecretCredential,
@@ -46,21 +45,24 @@ from azure.identity.aio import (
)
from azure.mgmt.datafactory import DataFactoryManagementClient
from azure.mgmt.datafactory.aio import DataFactoryManagementClient as
AsyncDataFactoryManagementClient
-from azure.mgmt.datafactory.models import (
- CreateRunResponse,
- DataFlow,
- DatasetResource,
- Factory,
- LinkedServiceResource,
- PipelineResource,
- PipelineRun,
- TriggerResource,
-)
from airflow.exceptions import AirflowException,
AirflowProviderDeprecationWarning
from airflow.hooks.base import BaseHook
from airflow.typing_compat import TypedDict
+if TYPE_CHECKING:
+ from azure.core.polling import LROPoller
+ from azure.mgmt.datafactory.models import (
+ CreateRunResponse,
+ DataFlow,
+ DatasetResource,
+ Factory,
+ LinkedServiceResource,
+ PipelineResource,
+ PipelineRun,
+ TriggerResource,
+ )
+
Credentials = Union[ClientSecretCredential, DefaultAzureCredential]
AsyncCredentials = Union[AsyncClientSecretCredential,
AsyncDefaultAzureCredential]
diff --git a/airflow/providers/microsoft/azure/hooks/synapse.py
b/airflow/providers/microsoft/azure/hooks/synapse.py
index 84475a2f38..47e6eea1f9 100644
--- a/airflow/providers/microsoft/azure/hooks/synapse.py
+++ b/airflow/providers/microsoft/azure/hooks/synapse.py
@@ -17,16 +17,18 @@
from __future__ import annotations
import time
-from typing import Any, Union
+from typing import TYPE_CHECKING, Any, Union
from azure.identity import ClientSecretCredential, DefaultAzureCredential
from azure.synapse.spark import SparkClient
-from azure.synapse.spark.models import SparkBatchJobOptions
from airflow.exceptions import AirflowTaskTimeout
from airflow.hooks.base import BaseHook
from airflow.providers.microsoft.azure.utils import get_field
+if TYPE_CHECKING:
+ from azure.synapse.spark.models import SparkBatchJobOptions
+
Credentials = Union[ClientSecretCredential, DefaultAzureCredential]
diff --git a/airflow/providers/microsoft/azure/hooks/wasb.py
b/airflow/providers/microsoft/azure/hooks/wasb.py
index 55c1aba086..59f4b26de5 100644
--- a/airflow/providers/microsoft/azure/hooks/wasb.py
+++ b/airflow/providers/microsoft/azure/hooks/wasb.py
@@ -28,7 +28,7 @@ from __future__ import annotations
import logging
import os
from functools import cached_property
-from typing import Any, Union
+from typing import TYPE_CHECKING, Any, Union
from urllib.parse import urlparse
from asgiref.sync import sync_to_async
@@ -39,7 +39,6 @@ from azure.identity.aio import (
DefaultAzureCredential as AsyncDefaultAzureCredential,
)
from azure.storage.blob import BlobClient, BlobServiceClient, ContainerClient,
StorageStreamDownloader
-from azure.storage.blob._models import BlobProperties
from azure.storage.blob.aio import (
BlobClient as AsyncBlobClient,
BlobServiceClient as AsyncBlobServiceClient,
@@ -49,6 +48,9 @@ from azure.storage.blob.aio import (
from airflow.exceptions import AirflowException
from airflow.hooks.base import BaseHook
+if TYPE_CHECKING:
+ from azure.storage.blob._models import BlobProperties
+
AsyncCredentials = Union[AsyncClientSecretCredential,
AsyncDefaultAzureCredential]
diff --git a/airflow/providers/microsoft/azure/log/wasb_task_handler.py
b/airflow/providers/microsoft/azure/log/wasb_task_handler.py
index 21e96f1003..bee51108e4 100644
--- a/airflow/providers/microsoft/azure/log/wasb_task_handler.py
+++ b/airflow/providers/microsoft/azure/log/wasb_task_handler.py
@@ -17,7 +17,6 @@
# under the License.
from __future__ import annotations
-import logging
import os
import shutil
from functools import cached_property
@@ -31,6 +30,9 @@ from airflow.configuration import conf
from airflow.utils.log.file_task_handler import FileTaskHandler
from airflow.utils.log.logging_mixin import LoggingMixin
+if TYPE_CHECKING:
+ import logging
+
def get_default_delete_local_copy():
"""Load delete_local_logs conf if Airflow version > 2.6 and return False
if not.
diff --git a/airflow/providers/microsoft/azure/operators/adx.py
b/airflow/providers/microsoft/azure/operators/adx.py
index 49813b6a0b..babf38c038 100644
--- a/airflow/providers/microsoft/azure/operators/adx.py
+++ b/airflow/providers/microsoft/azure/operators/adx.py
@@ -20,13 +20,13 @@ from __future__ import annotations
from typing import TYPE_CHECKING, Sequence
-from azure.kusto.data._models import KustoResultTable
-
from airflow.configuration import conf
from airflow.models import BaseOperator
from airflow.providers.microsoft.azure.hooks.adx import AzureDataExplorerHook
if TYPE_CHECKING:
+ from azure.kusto.data._models import KustoResultTable
+
from airflow.utils.context import Context
diff --git a/airflow/providers/microsoft/azure/operators/asb.py
b/airflow/providers/microsoft/azure/operators/asb.py
index 39a6602ec2..de89b8731f 100644
--- a/airflow/providers/microsoft/azure/operators/asb.py
+++ b/airflow/providers/microsoft/azure/operators/asb.py
@@ -16,7 +16,6 @@
# under the License.
from __future__ import annotations
-import datetime
from typing import TYPE_CHECKING, Any, Sequence
from azure.core.exceptions import ResourceNotFoundError
@@ -25,6 +24,8 @@ from airflow.models import BaseOperator
from airflow.providers.microsoft.azure.hooks.asb import AdminClientHook, MessageHook
if TYPE_CHECKING:
+ import datetime
+
from azure.servicebus.management._models import AuthorizationRule
from airflow.utils.context import Context
diff --git a/airflow/providers/microsoft/azure/operators/synapse.py
b/airflow/providers/microsoft/azure/operators/synapse.py
index dd6dda5555..e7fde11528 100644
--- a/airflow/providers/microsoft/azure/operators/synapse.py
+++ b/airflow/providers/microsoft/azure/operators/synapse.py
@@ -19,12 +19,12 @@ from __future__ import annotations
from functools import cached_property
from typing import TYPE_CHECKING, Sequence
-from azure.synapse.spark.models import SparkBatchJobOptions
-
from airflow.models import BaseOperator
from airflow.providers.microsoft.azure.hooks.synapse import AzureSynapseHook, AzureSynapseSparkBatchRunStatus
if TYPE_CHECKING:
+ from azure.synapse.spark.models import SparkBatchJobOptions
+
from airflow.utils.context import Context
diff --git a/airflow/providers/microsoft/psrp/operators/psrp.py
b/airflow/providers/microsoft/psrp/operators/psrp.py
index 1f74225de3..36acda20bc 100644
--- a/airflow/providers/microsoft/psrp/operators/psrp.py
+++ b/airflow/providers/microsoft/psrp/operators/psrp.py
@@ -21,7 +21,6 @@ from logging import DEBUG
from typing import TYPE_CHECKING, Any, Sequence
from jinja2.nativetypes import NativeEnvironment
-from pypsrp.powershell import Command
from pypsrp.serializer import TaggedValue
from airflow.exceptions import AirflowException
@@ -31,6 +30,8 @@ from airflow.settings import json
from airflow.utils.helpers import exactly_one
if TYPE_CHECKING:
+ from pypsrp.powershell import Command
+
from airflow.utils.context import Context
diff --git a/airflow/providers/mongo/hooks/mongo.py
b/airflow/providers/mongo/hooks/mongo.py
index cfa34e0e75..b207c29c13 100644
--- a/airflow/providers/mongo/hooks/mongo.py
+++ b/airflow/providers/mongo/hooks/mongo.py
@@ -19,15 +19,18 @@
from __future__ import annotations
from ssl import CERT_NONE
-from types import TracebackType
-from typing import Any, overload
+from typing import TYPE_CHECKING, Any, overload
from urllib.parse import quote_plus, urlunsplit
import pymongo
from pymongo import MongoClient, ReplaceOne
from airflow.hooks.base import BaseHook
-from airflow.typing_compat import Literal
+
+if TYPE_CHECKING:
+ from types import TracebackType
+
+ from airflow.typing_compat import Literal
class MongoHook(BaseHook):
diff --git a/airflow/providers/mysql/hooks/mysql.py
b/airflow/providers/mysql/hooks/mysql.py
index 3fe86652cc..d7a1bfdd55 100644
--- a/airflow/providers/mysql/hooks/mysql.py
+++ b/airflow/providers/mysql/hooks/mysql.py
@@ -23,12 +23,13 @@ import logging
from typing import TYPE_CHECKING, Any, Union
from airflow.exceptions import AirflowOptionalProviderFeatureException
-from airflow.models import Connection
from airflow.providers.common.sql.hooks.sql import DbApiHook
logger = logging.getLogger(__name__)
if TYPE_CHECKING:
+ from airflow.models import Connection
+
try:
from mysql.connector.abstracts import MySQLConnectionAbstract
except ModuleNotFoundError:
diff --git a/airflow/providers/neo4j/hooks/neo4j.py
b/airflow/providers/neo4j/hooks/neo4j.py
index 137b1e00c0..c1e1b86393 100644
--- a/airflow/providers/neo4j/hooks/neo4j.py
+++ b/airflow/providers/neo4j/hooks/neo4j.py
@@ -18,13 +18,15 @@
"""This module allows to connect to a Neo4j database."""
from __future__ import annotations
-from typing import Any
+from typing import TYPE_CHECKING, Any
from urllib.parse import urlsplit
from neo4j import Driver, GraphDatabase
from airflow.hooks.base import BaseHook
-from airflow.models import Connection
+
+if TYPE_CHECKING:
+ from airflow.models import Connection
class Neo4jHook(BaseHook):
diff --git a/airflow/providers/openlineage/extractors/base.py
b/airflow/providers/openlineage/extractors/base.py
index 0926489c0d..6cd032fa25 100644
--- a/airflow/providers/openlineage/extractors/base.py
+++ b/airflow/providers/openlineage/extractors/base.py
@@ -18,14 +18,17 @@
from __future__ import annotations
from abc import ABC, abstractmethod
+from typing import TYPE_CHECKING
from attrs import Factory, define
-from openlineage.client.facet import BaseFacet
-from openlineage.client.run import Dataset
from airflow.utils.log.logging_mixin import LoggingMixin
from airflow.utils.state import TaskInstanceState
+if TYPE_CHECKING:
+ from openlineage.client.facet import BaseFacet
+ from openlineage.client.run import Dataset
+
@define
class OperatorLineage:
diff --git a/airflow/providers/openlineage/plugins/adapter.py
b/airflow/providers/openlineage/plugins/adapter.py
index f470631b07..f5037a8559 100644
--- a/airflow/providers/openlineage/plugins/adapter.py
+++ b/airflow/providers/openlineage/plugins/adapter.py
@@ -38,12 +38,12 @@ from openlineage.client.run import Job, Run, RunEvent, RunState
from airflow.configuration import conf
from airflow.providers.openlineage import __version__ as OPENLINEAGE_PROVIDER_VERSION
-from airflow.providers.openlineage.extractors import OperatorLineage
from airflow.providers.openlineage.utils.utils import OpenLineageRedactor
from airflow.utils.log.logging_mixin import LoggingMixin
if TYPE_CHECKING:
from airflow.models.dagrun import DagRun
+ from airflow.providers.openlineage.extractors import OperatorLineage
from airflow.utils.log.secrets_masker import SecretsMasker
_DAG_DEFAULT_NAMESPACE = "default"
diff --git a/airflow/providers/openlineage/sqlparser.py
b/airflow/providers/openlineage/sqlparser.py
index ff788afdf2..33bf2c2f39 100644
--- a/airflow/providers/openlineage/sqlparser.py
+++ b/airflow/providers/openlineage/sqlparser.py
@@ -21,7 +21,6 @@ from typing import TYPE_CHECKING, Callable
import sqlparse
from attrs import define
from openlineage.client.facet import BaseFacet, ExtractionError, ExtractionErrorRunFacet, SqlJobFacet
-from openlineage.client.run import Dataset
from openlineage.common.sql import DbTableMeta, SqlMeta, parse
from airflow.providers.openlineage.extractors.base import OperatorLineage
@@ -33,6 +32,7 @@ from airflow.providers.openlineage.utils.sql import (
from airflow.typing_compat import TypedDict
if TYPE_CHECKING:
+ from openlineage.client.run import Dataset
from sqlalchemy.engine import Engine
from airflow.hooks.base import BaseHook
diff --git a/airflow/providers/pagerduty/hooks/pagerduty_events.py
b/airflow/providers/pagerduty/hooks/pagerduty_events.py
index c1eca607c8..9ad34bd154 100644
--- a/airflow/providers/pagerduty/hooks/pagerduty_events.py
+++ b/airflow/providers/pagerduty/hooks/pagerduty_events.py
@@ -19,14 +19,16 @@
from __future__ import annotations
import warnings
-from datetime import datetime
-from typing import Any
+from typing import TYPE_CHECKING, Any
import pdpyras
from airflow.exceptions import AirflowException, AirflowProviderDeprecationWarning
from airflow.hooks.base import BaseHook
+if TYPE_CHECKING:
+ from datetime import datetime
+
class PagerdutyEventsHook(BaseHook):
"""
diff --git a/airflow/providers/postgres/hooks/postgres.py
b/airflow/providers/postgres/hooks/postgres.py
index 95e7be94cb..ecdd331f9e 100644
--- a/airflow/providers/postgres/hooks/postgres.py
+++ b/airflow/providers/postgres/hooks/postgres.py
@@ -26,14 +26,15 @@ from typing import TYPE_CHECKING, Any, Iterable, Union
import psycopg2
import psycopg2.extensions
import psycopg2.extras
-from psycopg2.extensions import connection
from psycopg2.extras import DictCursor, NamedTupleCursor, RealDictCursor
from airflow.exceptions import AirflowProviderDeprecationWarning
-from airflow.models.connection import Connection
from airflow.providers.common.sql.hooks.sql import DbApiHook
if TYPE_CHECKING:
+ from psycopg2.extensions import connection
+
+ from airflow.models.connection import Connection
from airflow.providers.openlineage.sqlparser import DatabaseInfo
CursorType = Union[DictCursor, RealDictCursor, NamedTupleCursor]
diff --git a/airflow/providers/presto/hooks/presto.py
b/airflow/providers/presto/hooks/presto.py
index 028deb48ed..26ef545be3 100644
--- a/airflow/providers/presto/hooks/presto.py
+++ b/airflow/providers/presto/hooks/presto.py
@@ -19,7 +19,7 @@ from __future__ import annotations
import json
import os
-from typing import Any, Iterable, Mapping, TypeVar
+from typing import TYPE_CHECKING, Any, Iterable, Mapping, TypeVar
import prestodb
from prestodb.exceptions import DatabaseError
@@ -27,10 +27,12 @@ from prestodb.transaction import IsolationLevel
from airflow import AirflowException
from airflow.configuration import conf
-from airflow.models import Connection
from airflow.providers.common.sql.hooks.sql import DbApiHook
from airflow.utils.operator_helpers import AIRFLOW_VAR_NAME_FORMAT_MAPPING, DEFAULT_FORMAT_PREFIX
+if TYPE_CHECKING:
+ from airflow.models import Connection
+
T = TypeVar("T")
diff --git a/airflow/providers/redis/log/redis_task_handler.py
b/airflow/providers/redis/log/redis_task_handler.py
index 03e2767418..2befdadbe7 100644
--- a/airflow/providers/redis/log/redis_task_handler.py
+++ b/airflow/providers/redis/log/redis_task_handler.py
@@ -19,16 +19,18 @@ from __future__ import annotations
import logging
from functools import cached_property
-from typing import Any
-
-from redis import Redis
+from typing import TYPE_CHECKING, Any
from airflow.configuration import conf
-from airflow.models import TaskInstance
from airflow.providers.redis.hooks.redis import RedisHook
from airflow.utils.log.file_task_handler import FileTaskHandler
from airflow.utils.log.logging_mixin import LoggingMixin
+if TYPE_CHECKING:
+ from redis import Redis
+
+ from airflow.models import TaskInstance
+
class RedisTaskHandler(FileTaskHandler, LoggingMixin):
"""
diff --git a/airflow/providers/salesforce/hooks/salesforce.py
b/airflow/providers/salesforce/hooks/salesforce.py
index 5b390877f2..bca3521226 100644
--- a/airflow/providers/salesforce/hooks/salesforce.py
+++ b/airflow/providers/salesforce/hooks/salesforce.py
@@ -28,13 +28,13 @@ import time
from functools import cached_property
from typing import TYPE_CHECKING, Any, Iterable
-from requests import Session
from simple_salesforce import Salesforce, api
from airflow.hooks.base import BaseHook
if TYPE_CHECKING:
import pandas as pd
+ from requests import Session
log = logging.getLogger(__name__)
diff --git a/airflow/providers/salesforce/operators/bulk.py
b/airflow/providers/salesforce/operators/bulk.py
index 2b22b1ff4a..554d96656b 100644
--- a/airflow/providers/salesforce/operators/bulk.py
+++ b/airflow/providers/salesforce/operators/bulk.py
@@ -20,9 +20,9 @@ from typing import TYPE_CHECKING
from airflow.models import BaseOperator
from airflow.providers.salesforce.hooks.salesforce import SalesforceHook
-from airflow.typing_compat import Literal
if TYPE_CHECKING:
+ from airflow.typing_compat import Literal
from airflow.utils.context import Context
diff --git a/airflow/providers/samba/hooks/samba.py
b/airflow/providers/samba/hooks/samba.py
index 761a4ea50a..5cce4c8d92 100644
--- a/airflow/providers/samba/hooks/samba.py
+++ b/airflow/providers/samba/hooks/samba.py
@@ -20,12 +20,15 @@ from __future__ import annotations
import posixpath
from functools import wraps
from shutil import copyfileobj
+from typing import TYPE_CHECKING
import smbclient
-import smbprotocol.connection
from airflow.hooks.base import BaseHook
+if TYPE_CHECKING:
+ import smbprotocol.connection
+
class SambaHook(BaseHook):
"""Allows for interaction with a Samba server.
diff --git a/airflow/providers/sftp/hooks/sftp.py
b/airflow/providers/sftp/hooks/sftp.py
index e02872b4b0..7f3b851c9f 100644
--- a/airflow/providers/sftp/hooks/sftp.py
+++ b/airflow/providers/sftp/hooks/sftp.py
@@ -23,13 +23,14 @@ import os
import stat
import warnings
from fnmatch import fnmatch
-from typing import Any, Callable
-
-import paramiko
+from typing import TYPE_CHECKING, Any, Callable
from airflow.exceptions import AirflowException, AirflowProviderDeprecationWarning
from airflow.providers.ssh.hooks.ssh import SSHHook
+if TYPE_CHECKING:
+ import paramiko
+
class SFTPHook(SSHHook):
"""Interact with SFTP.
diff --git a/airflow/providers/slack/transfers/sql_to_slack.py
b/airflow/providers/slack/transfers/sql_to_slack.py
index 8a1d4fa478..aa152d4dc0 100644
--- a/airflow/providers/slack/transfers/sql_to_slack.py
+++ b/airflow/providers/slack/transfers/sql_to_slack.py
@@ -24,7 +24,6 @@ from tabulate import tabulate
from airflow.exceptions import AirflowException
from airflow.hooks.base import BaseHook
from airflow.models import BaseOperator
-from airflow.providers.common.sql.hooks.sql import DbApiHook
from airflow.providers.slack.hooks.slack import SlackHook
from airflow.providers.slack.hooks.slack_webhook import SlackWebhookHook
from airflow.providers.slack.utils import parse_filename
@@ -32,6 +31,7 @@ from airflow.providers.slack.utils import parse_filename
if TYPE_CHECKING:
import pandas as pd
+ from airflow.providers.common.sql.hooks.sql import DbApiHook
from airflow.utils.context import Context
diff --git a/airflow/providers/smtp/hooks/smtp.py
b/airflow/providers/smtp/hooks/smtp.py
index e9a36076e8..0f2c689c37 100644
--- a/airflow/providers/smtp/hooks/smtp.py
+++ b/airflow/providers/smtp/hooks/smtp.py
@@ -31,11 +31,13 @@ from email.mime.application import MIMEApplication
from email.mime.multipart import MIMEMultipart
from email.mime.text import MIMEText
from email.utils import formatdate
-from typing import Any, Iterable
+from typing import TYPE_CHECKING, Any, Iterable
from airflow.exceptions import AirflowException, AirflowNotFoundException
from airflow.hooks.base import BaseHook
-from airflow.models.connection import Connection
+
+if TYPE_CHECKING:
+ from airflow.models.connection import Connection
class SmtpHook(BaseHook):
diff --git a/airflow/providers/smtp/operators/smtp.py
b/airflow/providers/smtp/operators/smtp.py
index 8a00a96f6f..8dd6889ab0 100644
--- a/airflow/providers/smtp/operators/smtp.py
+++ b/airflow/providers/smtp/operators/smtp.py
@@ -17,11 +17,13 @@
# under the License.
from __future__ import annotations
-from typing import Any, Sequence
+from typing import TYPE_CHECKING, Any, Sequence
from airflow.models import BaseOperator
from airflow.providers.smtp.hooks.smtp import SmtpHook
-from airflow.utils.context import Context
+
+if TYPE_CHECKING:
+ from airflow.utils.context import Context
class EmailOperator(BaseOperator):
diff --git a/airflow/providers/snowflake/triggers/snowflake_trigger.py
b/airflow/providers/snowflake/triggers/snowflake_trigger.py
index bb426fa3d0..32ed855325 100644
--- a/airflow/providers/snowflake/triggers/snowflake_trigger.py
+++ b/airflow/providers/snowflake/triggers/snowflake_trigger.py
@@ -17,12 +17,14 @@
from __future__ import annotations
import asyncio
-from datetime import timedelta
-from typing import Any, AsyncIterator
+from typing import TYPE_CHECKING, Any, AsyncIterator
from airflow.providers.snowflake.hooks.snowflake_sql_api import SnowflakeSqlApiHook
from airflow.triggers.base import BaseTrigger, TriggerEvent
+if TYPE_CHECKING:
+ from datetime import timedelta
+
class SnowflakeSqlApiTrigger(BaseTrigger):
"""
diff --git a/airflow/providers/tableau/hooks/tableau.py
b/airflow/providers/tableau/hooks/tableau.py
index 5127a3dc9a..182fe5f99f 100644
--- a/airflow/providers/tableau/hooks/tableau.py
+++ b/airflow/providers/tableau/hooks/tableau.py
@@ -19,14 +19,16 @@ from __future__ import annotations
import time
import warnings
from enum import Enum
-from typing import Any
+from typing import TYPE_CHECKING, Any
from tableauserverclient import Pager, PersonalAccessTokenAuth, Server, TableauAuth
-from tableauserverclient.server import Auth
from airflow.exceptions import AirflowException, AirflowProviderDeprecationWarning
from airflow.hooks.base import BaseHook
+if TYPE_CHECKING:
+ from tableauserverclient.server import Auth
+
def parse_boolean(val: str) -> str | bool:
"""Try to parse a string into boolean.
diff --git a/airflow/providers/trino/hooks/trino.py
b/airflow/providers/trino/hooks/trino.py
index e8a90951a7..22b433312d 100644
--- a/airflow/providers/trino/hooks/trino.py
+++ b/airflow/providers/trino/hooks/trino.py
@@ -19,7 +19,7 @@ from __future__ import annotations
import json
import os
-from typing import Any, Iterable, Mapping, TypeVar
+from typing import TYPE_CHECKING, Any, Iterable, Mapping, TypeVar
import trino
from trino.exceptions import DatabaseError
@@ -27,10 +27,12 @@ from trino.transaction import IsolationLevel
from airflow import AirflowException
from airflow.configuration import conf
-from airflow.models import Connection
from airflow.providers.common.sql.hooks.sql import DbApiHook
from airflow.utils.operator_helpers import AIRFLOW_VAR_NAME_FORMAT_MAPPING, DEFAULT_FORMAT_PREFIX
+if TYPE_CHECKING:
+ from airflow.models import Connection
+
T = TypeVar("T")
diff --git a/airflow/providers/zendesk/hooks/zendesk.py
b/airflow/providers/zendesk/hooks/zendesk.py
index 5a8ba2f910..02734a912f 100644
--- a/airflow/providers/zendesk/hooks/zendesk.py
+++ b/airflow/providers/zendesk/hooks/zendesk.py
@@ -17,13 +17,17 @@
# under the License.
from __future__ import annotations
+from typing import TYPE_CHECKING
+
from zenpy import Zenpy
-from zenpy.lib.api import BaseApi
-from zenpy.lib.api_objects import JobStatus, Ticket, TicketAudit
-from zenpy.lib.generator import SearchResultGenerator
from airflow.hooks.base import BaseHook
+if TYPE_CHECKING:
+ from zenpy.lib.api import BaseApi
+ from zenpy.lib.api_objects import JobStatus, Ticket, TicketAudit
+ from zenpy.lib.generator import SearchResultGenerator
+
class ZendeskHook(BaseHook):
"""
diff --git a/tests/providers/databricks/operators/test_databricks_sql.py
b/tests/providers/databricks/operators/test_databricks_sql.py
index 539727e787..e247e4deca 100644
--- a/tests/providers/databricks/operators/test_databricks_sql.py
+++ b/tests/providers/databricks/operators/test_databricks_sql.py
@@ -21,9 +21,10 @@ import os
from unittest.mock import patch
import pytest
+from databricks.sql.types import Row
from airflow.providers.common.sql.hooks.sql import fetch_all_handler
-from airflow.providers.databricks.operators.databricks_sql import DatabricksSqlOperator, Row
+from airflow.providers.databricks.operators.databricks_sql import DatabricksSqlOperator
from airflow.serialization.serde import serialize
DATE = "2017-04-20"
diff --git
a/tests/providers/microsoft/azure/operators/test_azure_data_factory.py
b/tests/providers/microsoft/azure/operators/test_azure_data_factory.py
index e4dd61c1d2..98ded34e1c 100644
--- a/tests/providers/microsoft/azure/operators/test_azure_data_factory.py
+++ b/tests/providers/microsoft/azure/operators/test_azure_data_factory.py
@@ -16,6 +16,7 @@
# under the License.
from __future__ import annotations
+from typing import TYPE_CHECKING
from unittest import mock
from unittest.mock import MagicMock, patch
@@ -24,7 +25,6 @@ import pytest
from airflow.exceptions import AirflowException, TaskDeferred
from airflow.models import DAG, Connection
-from airflow.models.baseoperator import BaseOperator
from airflow.models.dagrun import DagRun
from airflow.models.taskinstance import TaskInstance
from airflow.providers.microsoft.azure.hooks.data_factory import (
@@ -37,6 +37,9 @@ from airflow.providers.microsoft.azure.triggers.data_factory import AzureDataFac
from airflow.utils import timezone
from airflow.utils.types import DagRunType
+if TYPE_CHECKING:
+ from airflow.models.baseoperator import BaseOperator
+
DEFAULT_DATE = timezone.datetime(2021, 1, 1)
SUBSCRIPTION_ID = "my-subscription-id"
TASK_ID = "run_pipeline_op"
diff --git a/tests/providers/microsoft/azure/sensors/test_wasb.py
b/tests/providers/microsoft/azure/sensors/test_wasb.py
index 6067030694..a94fd0c07b 100644
--- a/tests/providers/microsoft/azure/sensors/test_wasb.py
+++ b/tests/providers/microsoft/azure/sensors/test_wasb.py
@@ -18,6 +18,7 @@
from __future__ import annotations
import datetime
+from typing import TYPE_CHECKING
from unittest import mock
import pendulum
@@ -25,7 +26,6 @@ import pytest
from airflow.exceptions import AirflowException, TaskDeferred
from airflow.models import Connection
-from airflow.models.baseoperator import BaseOperator
from airflow.models.dag import DAG
from airflow.models.dagrun import DagRun
from airflow.models.taskinstance import TaskInstance
@@ -37,6 +37,9 @@ from airflow.providers.microsoft.azure.triggers.wasb import WasbBlobSensorTrigge
from airflow.utils import timezone
from airflow.utils.types import DagRunType
+if TYPE_CHECKING:
+ from airflow.models.baseoperator import BaseOperator
+
TEST_DATA_STORAGE_BLOB_NAME = "test_blob_providers.txt"
TEST_DATA_STORAGE_CONTAINER_NAME = "test-container-providers"
TEST_DATA_STORAGE_BLOB_PREFIX = TEST_DATA_STORAGE_BLOB_NAME[:10]
diff --git a/tests/providers/mongo/hooks/test_mongo.py
b/tests/providers/mongo/hooks/test_mongo.py
index 292af259d6..4d46a613e7 100644
--- a/tests/providers/mongo/hooks/test_mongo.py
+++ b/tests/providers/mongo/hooks/test_mongo.py
@@ -18,7 +18,7 @@
from __future__ import annotations
import importlib
-from types import ModuleType
+from typing import TYPE_CHECKING
import pymongo
import pytest
@@ -27,6 +27,9 @@ from airflow.models import Connection
from airflow.providers.mongo.hooks.mongo import MongoHook
from airflow.utils import db
+if TYPE_CHECKING:
+ from types import ModuleType
+
mongomock: ModuleType | None
try:
diff --git a/tests/providers/snowflake/hooks/test_snowflake.py
b/tests/providers/snowflake/hooks/test_snowflake.py
index fef57bc8fc..9170edc3f0 100644
--- a/tests/providers/snowflake/hooks/test_snowflake.py
+++ b/tests/providers/snowflake/hooks/test_snowflake.py
@@ -19,8 +19,7 @@ from __future__ import annotations
import json
from copy import deepcopy
-from pathlib import Path
-from typing import Any
+from typing import TYPE_CHECKING, Any
from unittest import mock
import pytest
@@ -32,6 +31,9 @@ from airflow.models import Connection
from airflow.providers.snowflake.hooks.snowflake import SnowflakeHook
from tests.test_utils.providers import get_provider_min_airflow_version, object_exists
+if TYPE_CHECKING:
+ from pathlib import Path
+
_PASSWORD = "snowflake42"
BASE_CONNECTION_KWARGS: dict = {
diff --git a/tests/providers/snowflake/hooks/test_snowflake_sql_api.py
b/tests/providers/snowflake/hooks/test_snowflake_sql_api.py
index fd2da72c92..926a8248fb 100644
--- a/tests/providers/snowflake/hooks/test_snowflake_sql_api.py
+++ b/tests/providers/snowflake/hooks/test_snowflake_sql_api.py
@@ -18,8 +18,7 @@ from __future__ import annotations
import unittest
import uuid
-from pathlib import Path
-from typing import Any
+from typing import TYPE_CHECKING, Any
from unittest import mock
from unittest.mock import AsyncMock
@@ -35,6 +34,9 @@ from airflow.providers.snowflake.hooks.snowflake_sql_api import (
SnowflakeSqlApiHook,
)
+if TYPE_CHECKING:
+ from pathlib import Path
+
SQL_MULTIPLE_STMTS = (
"create or replace table user_test (i int); insert into user_test (i) "
"values (200); insert into user_test (i) values (300); select i from
user_test order by i;"