This is an automated email from the ASF dual-hosted git repository.
uranusjr pushed a commit to branch main
in repository https://gitbox.apache.org/repos/asf/airflow.git
The following commit(s) were added to refs/heads/main by this push:
new 6ae0a80cba Upgrade mypy to 0.991 (#28926)
6ae0a80cba is described below
commit 6ae0a80cbaf1d33343b763c7f82612b4522afc40
Author: Ash Berlin-Taylor <[email protected]>
AuthorDate: Fri Feb 10 18:25:39 2023 +0000
Upgrade mypy to 0.991 (#28926)
Co-authored-by: Tzu-ping Chung <[email protected]>
---
.pre-commit-config.yaml | 2 +-
airflow/dag_processing/manager.py | 3 +--
airflow/dag_processing/processor.py | 3 +--
airflow/decorators/base.py | 2 +-
airflow/example_dags/plugins/workday.py | 3 +++
airflow/executors/kubernetes_executor.py | 2 +-
airflow/hooks/base.py | 4 ++--
airflow/listeners/listener.py | 2 +-
airflow/models/baseoperator.py | 4 ++--
airflow/providers/google/cloud/hooks/cloud_sql.py | 2 --
airflow/providers/google/cloud/hooks/functions.py | 2 +-
airflow/providers/google/cloud/hooks/gcs.py | 18 ++++++++++++----
.../providers/google/cloud/hooks/life_sciences.py | 2 +-
.../google/cloud/utils/field_validator.py | 2 +-
.../providers/google/firebase/hooks/firestore.py | 2 +-
.../google/marketing_platform/hooks/search_ads.py | 2 +-
airflow/providers/microsoft/psrp/hooks/psrp.py | 3 +--
airflow/settings.py | 2 +-
airflow/stats.py | 7 +++---
airflow/timetables/simple.py | 3 +++
airflow/utils/mixins.py | 6 ++++++
airflow/utils/session.py | 2 +-
airflow/utils/state.py | 2 +-
airflow/www/extensions/init_appbuilder.py | 2 ++
airflow/www/fab_security/manager.py | 2 ++
dev/breeze/src/airflow_breeze/utils/click_utils.py | 2 +-
.../pre_commit_check_order_dockerfile_extras.py | 2 +-
.../ci/pre_commit/pre_commit_check_order_setup.py | 2 +-
...re_commit_check_system_tests_hidden_in_index.py | 3 ++-
.../pre_commit_update_common_sql_api_stubs.py | 25 +++++++++++++---------
.../in_container/run_provider_yaml_files_check.py | 2 +-
setup.cfg | 7 ++++++
setup.py | 2 +-
tests/listeners/dag_listener.py | 4 +++-
tests/listeners/full_listener.py | 8 ++++---
tests/listeners/lifecycle_listener.py | 6 ++++--
tests/listeners/partial_listener.py | 2 +-
tests/test_utils/timetables.py | 9 ++++++++
tests/www/views/test_views_connection.py | 12 ++++-------
39 files changed, 108 insertions(+), 62 deletions(-)
diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml
index 6499584074..9acd241f74 100644
--- a/.pre-commit-config.yaml
+++ b/.pre-commit-config.yaml
@@ -160,7 +160,7 @@ repos:
entry: ./scripts/ci/pre_commit/pre_commit_update_common_sql_api_stubs.py
language: python
files: ^scripts/ci/pre_commit/pre_commit_update_common_sql_api\.py|^airflow/providers/common/sql/.*\.pyi?$
- additional_dependencies: ['rich>=12.4.4', 'mypy==0.971', 'black==22.12.0', 'jinja2']
+ additional_dependencies: ['rich>=12.4.4', 'mypy==0.991', 'black==22.12.0', 'jinja2']
pass_filenames: false
require_serial: true
- id: update-black-version
diff --git a/airflow/dag_processing/manager.py b/airflow/dag_processing/manager.py
index 232d0d7e62..2246e7f3a7 100644
--- a/airflow/dag_processing/manager.py
+++ b/airflow/dag_processing/manager.py
@@ -141,8 +141,7 @@ class DagFileProcessorAgent(LoggingMixin, MultiprocessingStartMethodMixin):
def start(self) -> None:
"""Launch DagFileProcessorManager processor and start DAG parsing loop
in manager."""
- mp_start_method = self._get_multiprocessing_start_method()
- context = multiprocessing.get_context(mp_start_method)
+ context = self._get_multiprocessing_context()
self._last_parsing_stat_received_at = time.monotonic()
self._parent_signal_conn, child_signal_conn = context.Pipe()
diff --git a/airflow/dag_processing/processor.py b/airflow/dag_processing/processor.py
index bce6fb9148..62441c2b5b 100644
--- a/airflow/dag_processing/processor.py
+++ b/airflow/dag_processing/processor.py
@@ -187,8 +187,7 @@ class DagFileProcessorProcess(LoggingMixin, MultiprocessingStartMethodMixin):
def start(self) -> None:
"""Launch the process and start processing the DAG."""
- start_method = self._get_multiprocessing_start_method()
- context = multiprocessing.get_context(start_method)
+ context = self._get_multiprocessing_context()
_parent_channel, _child_channel = context.Pipe(duplex=False)
process = context.Process(
diff --git a/airflow/decorators/base.py b/airflow/decorators/base.py
index 7da74e5514..b15acba877 100644
--- a/airflow/decorators/base.py
+++ b/airflow/decorators/base.py
@@ -489,7 +489,7 @@ class DecoratedMappedOperator(MappedOperator):
return super()._get_unmap_kwargs(kwargs, strict=False)
-class Task(Generic[FParams, FReturn]):
+class Task(Protocol, Generic[FParams, FReturn]):
"""Declaration of a @task-decorated callable for type-checking.
An instance of this type inherits the call signature of the decorated
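The change above turns Task into a structural type; a minimal, self-contained sketch of the same Protocol-plus-Generic pattern follows. It is illustrative only (takes_task is invented, and Protocol/ParamSpec are pulled from typing_extensions for older Pythons):

    from __future__ import annotations

    from typing import Generic, TypeVar

    from typing_extensions import ParamSpec, Protocol  # typing on Python >= 3.10

    FParams = ParamSpec("FParams")
    FReturn = TypeVar("FReturn")

    class Task(Protocol, Generic[FParams, FReturn]):
        # A type-checking-only declaration: an instance is anything callable
        # with the decorated function's exact signature.
        def __call__(self, *args: FParams.args, **kwargs: FParams.kwargs) -> FReturn:
            ...

    def takes_task(task: Task[[int, str], bool]) -> bool:
        return task(1, "x")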
diff --git a/airflow/example_dags/plugins/workday.py b/airflow/example_dags/plugins/workday.py
index db68c29541..25f7a8c9cc 100644
--- a/airflow/example_dags/plugins/workday.py
+++ b/airflow/example_dags/plugins/workday.py
@@ -78,6 +78,9 @@ class AfterWorkdayTimetable(Timetable):
# [END howto_timetable_next_dagrun_info]
+ def validate(self):
+ pass
+
class WorkdayTimetablePlugin(AirflowPlugin):
name = "workday_timetable_plugin"
diff --git a/airflow/executors/kubernetes_executor.py b/airflow/executors/kubernetes_executor.py
index b388a3b386..962e64cd48 100644
--- a/airflow/executors/kubernetes_executor.py
+++ b/airflow/executors/kubernetes_executor.py
@@ -69,7 +69,7 @@ KubernetesWatchType = Tuple[str, str, Optional[str], Dict[str, str], str]
class ResourceVersion:
"""Singleton for tracking resourceVersion from Kubernetes."""
- _instance = None
+ _instance: ResourceVersion | None = None
resource_version: dict[str, str] = {}
def __new__(cls):
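The annotation above is what lets mypy 0.991 accept later instance assignments: a bare `_instance = None` is inferred as type None, and under `from __future__ import annotations` the class may name itself in its own body. A hedged sketch of the full singleton shape (the `__new__` body here is a generic reconstruction, not the file's exact code):

    from __future__ import annotations

    class ResourceVersion:
        """Singleton for tracking resourceVersion from Kubernetes."""

        # Annotated with a union so assigning an instance below type-checks.
        _instance: ResourceVersion | None = None
        resource_version: dict[str, str] = {}

        def __new__(cls) -> ResourceVersion:
            if cls._instance is None:
                cls._instance = super().__new__(cls)
            return cls._instance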
diff --git a/airflow/hooks/base.py b/airflow/hooks/base.py
index 9298a68688..c1c758a756 100644
--- a/airflow/hooks/base.py
+++ b/airflow/hooks/base.py
@@ -90,11 +90,11 @@ class BaseHook(LoggingMixin):
@classmethod
def get_connection_form_widgets(cls) -> dict[str, Any]:
- ...
+ return {}
@classmethod
def get_ui_field_behaviour(cls) -> dict[str, Any]:
- ...
+ return {}
class DiscoverableHook(Protocol):
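The two hunks above replace bare `...` bodies: the upgraded mypy flags an empty body whose annotation promises a non-None return value outside stub files (the empty-body check). A minimal sketch with a hypothetical hook class:

    from __future__ import annotations

    from typing import Any

    class MyHook:
        @classmethod
        def get_connection_form_widgets(cls) -> dict[str, Any]:
            # A `...` body would now be flagged; returning the empty default
            # satisfies both mypy and callers that merge the dict.
            return {}

    assert MyHook.get_connection_form_widgets() == {}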
diff --git a/airflow/listeners/listener.py b/airflow/listeners/listener.py
index 546d732513..dd5481cba1 100644
--- a/airflow/listeners/listener.py
+++ b/airflow/listeners/listener.py
@@ -30,7 +30,7 @@ if TYPE_CHECKING:
log = logging.getLogger(__name__)
-_listener_manager = None
+_listener_manager: ListenerManager | None = None
class ListenerManager:
diff --git a/airflow/models/baseoperator.py b/airflow/models/baseoperator.py
index 10c55d5b63..2b28975c5f 100644
--- a/airflow/models/baseoperator.py
+++ b/airflow/models/baseoperator.py
@@ -30,7 +30,7 @@ import warnings
from abc import ABCMeta, abstractmethod
from datetime import datetime, timedelta
from inspect import signature
-from types import FunctionType
+from types import ClassMethodDescriptorType, FunctionType
from typing import (
TYPE_CHECKING,
Any,
@@ -169,7 +169,7 @@ def get_merged_defaults(
class _PartialDescriptor:
"""A descriptor that guards against ``.partial`` being called on Task
objects."""
- class_method = None
+ class_method: ClassMethodDescriptorType | None = None
def __get__(
self, obj: BaseOperator, cls: type[BaseOperator] | None = None
diff --git a/airflow/providers/google/cloud/hooks/cloud_sql.py b/airflow/providers/google/cloud/hooks/cloud_sql.py
index 7f96d63156..15c8fed148 100644
--- a/airflow/providers/google/cloud/hooks/cloud_sql.py
+++ b/airflow/providers/google/cloud/hooks/cloud_sql.py
@@ -691,8 +691,6 @@ class CloudSQLDatabaseHook(BaseHook):
conn_type = "gcpcloudsqldb"
hook_name = "Google Cloud SQL Database"
- _conn = None
-
def __init__(
self,
gcp_cloudsql_conn_id: str = "google_cloud_sql_default",
diff --git a/airflow/providers/google/cloud/hooks/functions.py b/airflow/providers/google/cloud/hooks/functions.py
index 3ed62a75dd..084d6fb06a 100644
--- a/airflow/providers/google/cloud/hooks/functions.py
+++ b/airflow/providers/google/cloud/hooks/functions.py
@@ -40,7 +40,7 @@ class CloudFunctionsHook(GoogleBaseHook):
keyword arguments rather than positional.
"""
- _conn = None
+ _conn: build | None = None
def __init__(
self,
diff --git a/airflow/providers/google/cloud/hooks/gcs.py b/airflow/providers/google/cloud/hooks/gcs.py
index e7ef1210ec..ceaa202a00 100644
--- a/airflow/providers/google/cloud/hooks/gcs.py
+++ b/airflow/providers/google/cloud/hooks/gcs.py
@@ -51,8 +51,18 @@ from airflow.providers.google.common.hooks.base_google import GoogleBaseAsyncHoo
from airflow.utils import timezone
from airflow.version import version
+try:
+ # Airflow 2.3 doesn't have this yet
+ from airflow.typing_compat import ParamSpec
+except ImportError:
+ try:
+ from typing import ParamSpec # type: ignore[no-redef,attr-defined]
+ except ImportError:
+ from typing_extensions import ParamSpec
+
RT = TypeVar("RT")
T = TypeVar("T", bound=Callable)
+FParams = ParamSpec("FParams")
# GCSHook has a method named 'list' (to junior devs: please don't do this), so
# we need to create an alias to prevent Mypy being confused.
@@ -76,7 +86,7 @@ def _fallback_object_url_to_object_name_and_bucket_name(
:return: Decorator
"""
- def _wrapper(func: T):
+ def _wrapper(func: Callable[FParams, RT]) -> Callable[FParams, RT]:
@functools.wraps(func)
def _inner_wrapper(self: GCSHook, *args, **kwargs) -> RT:
if args:
@@ -117,11 +127,11 @@ def _fallback_object_url_to_object_name_and_bucket_name(
f"'{bucket_name_keyword_arg_name}'"
)
- return func(self, *args, **kwargs)
+ return func(self, *args, **kwargs) # type: ignore
- return cast(T, _inner_wrapper)
+ return cast(Callable[FParams, RT], _inner_wrapper)
- return _wrapper
+ return cast(Callable[[T], T], _wrapper)
# A fake bucket to use in functions decorated by _fallback_object_url_to_object_name_and_bucket_name.
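The import ladder added above degrades gracefully: airflow.typing_compat first (absent on Airflow 2.3), then typing (Python 3.10+), then typing_extensions. A self-contained sketch of the decorator typing it enables; `passthrough` is a hypothetical stand-in for the real argument-remapping wrapper in the hunks above:

    from __future__ import annotations

    import functools
    from typing import Callable, TypeVar

    try:
        from typing import ParamSpec  # Python >= 3.10
    except ImportError:
        from typing_extensions import ParamSpec

    RT = TypeVar("RT")
    FParams = ParamSpec("FParams")

    def passthrough(func: Callable[FParams, RT]) -> Callable[FParams, RT]:
        # Unlike TypeVar("T", bound=Callable), ParamSpec carries the wrapped
        # function's parameter list through to callers of the wrapper.
        @functools.wraps(func)
        def _inner(*args: FParams.args, **kwargs: FParams.kwargs) -> RT:
            return func(*args, **kwargs)

        return _inner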
diff --git a/airflow/providers/google/cloud/hooks/life_sciences.py b/airflow/providers/google/cloud/hooks/life_sciences.py
index 00ed37e411..24ad1792ac 100644
--- a/airflow/providers/google/cloud/hooks/life_sciences.py
+++ b/airflow/providers/google/cloud/hooks/life_sciences.py
@@ -54,7 +54,7 @@ class LifeSciencesHook(GoogleBaseHook):
account from the list granting this role to the originating account.
"""
- _conn = None
+ _conn: build | None = None
def __init__(
self,
diff --git a/airflow/providers/google/cloud/utils/field_validator.py b/airflow/providers/google/cloud/utils/field_validator.py
index 70ad1827af..499cb0f7e3 100644
--- a/airflow/providers/google/cloud/utils/field_validator.py
+++ b/airflow/providers/google/cloud/utils/field_validator.py
@@ -206,7 +206,7 @@ class GcpBodyFieldValidator(LoggingMixin):
full_field_path: str,
regexp: str,
allow_empty: bool,
- custom_validation: Callable,
+ custom_validation: Callable | None,
value,
) -> None:
if value is None and field_type != "union":
diff --git a/airflow/providers/google/firebase/hooks/firestore.py b/airflow/providers/google/firebase/hooks/firestore.py
index f55f13f264..2cf36eabc0 100644
--- a/airflow/providers/google/firebase/hooks/firestore.py
+++ b/airflow/providers/google/firebase/hooks/firestore.py
@@ -53,7 +53,7 @@ class CloudFirestoreHook(GoogleBaseHook):
account from the list granting this role to the originating account.
"""
- _conn = None
+ _conn: build | None = None
def __init__(
self,
diff --git a/airflow/providers/google/marketing_platform/hooks/search_ads.py b/airflow/providers/google/marketing_platform/hooks/search_ads.py
index 8115833b1a..859878c0f1 100644
--- a/airflow/providers/google/marketing_platform/hooks/search_ads.py
+++ b/airflow/providers/google/marketing_platform/hooks/search_ads.py
@@ -28,7 +28,7 @@ from airflow.providers.google.common.hooks.base_google import GoogleBaseHook
class GoogleSearchAdsHook(GoogleBaseHook):
"""Hook for Google Search Ads 360."""
- _conn = None
+ _conn: build | None = None
def __init__(
self,
diff --git a/airflow/providers/microsoft/psrp/hooks/psrp.py b/airflow/providers/microsoft/psrp/hooks/psrp.py
index 4e4518609f..c6cd54f45a 100644
--- a/airflow/providers/microsoft/psrp/hooks/psrp.py
+++ b/airflow/providers/microsoft/psrp/hooks/psrp.py
@@ -75,8 +75,7 @@ class PsrpHook(BaseHook):
or by setting this key as the extra fields of your connection.
"""
- _conn = None
- _configuration_name = None
+ _conn: RunspacePool | None = None
_wsman_ref: WeakKeyDictionary[RunspacePool, WSMan] = WeakKeyDictionary()
def __init__(
diff --git a/airflow/settings.py b/airflow/settings.py
index 64678b8018..436ba4b077 100644
--- a/airflow/settings.py
+++ b/airflow/settings.py
@@ -411,7 +411,7 @@ def dispose_orm():
global engine
global Session
- if Session:
+ if Session is not None:
Session.remove()
Session = None
if engine:
diff --git a/airflow/stats.py b/airflow/stats.py
index d2708f1e13..7b70ce345f 100644
--- a/airflow/stats.py
+++ b/airflow/stats.py
@@ -74,9 +74,10 @@ class StatsLogger(Protocol):
@classmethod
def timer(cls, *args, **kwargs) -> TimerProtocol:
"""Timer metric that can be cancelled."""
+ raise NotImplementedError()
-class Timer:
+class Timer(TimerProtocol):
"""
Timer that records duration, and optional sends to StatsD backend.
@@ -360,7 +361,7 @@ class SafeDogStatsdLogger:
class _Stats(type):
- factory = None
+ factory: Callable[[], StatsLogger]
instance: StatsLogger | None = None
def __getattr__(cls, name):
@@ -374,7 +375,7 @@ class _Stats(type):
def __init__(cls, *args, **kwargs):
super().__init__(cls)
- if cls.__class__.factory is None:
+ if not hasattr(cls.__class__, "factory"):
is_datadog_enabled_defined = conf.has_option("metrics", "statsd_datadog_enabled")
if is_datadog_enabled_defined and conf.getboolean("metrics", "statsd_datadog_enabled"):
cls.__class__.factory = cls.get_dogstatsd_logger
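Two idioms from the stats.py hunks, condensed: a Protocol method body that raises instead of ending at its docstring, and an annotated-but-unset class attribute probed with hasattr() in place of the old `is None` check. All names below are illustrative, not Airflow's:

    from __future__ import annotations

    from typing import Callable, Protocol

    class Logger(Protocol):
        @classmethod
        def timer(cls, *args, **kwargs) -> object:
            """Timer metric that can be cancelled."""
            raise NotImplementedError()

    class NoOpLogger:
        @classmethod
        def timer(cls, *args, **kwargs) -> object:
            return object()

    class Registry:
        # Declared without a value: a `factory = None` default would be
        # inferred as None and reject every real assignment.
        factory: Callable[[], Logger]

    # hasattr() replaces `Registry.factory is None`, since there is no
    # longer a None default to compare against.
    if not hasattr(Registry, "factory"):
        Registry.factory = NoOpLogger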
diff --git a/airflow/timetables/simple.py b/airflow/timetables/simple.py
index 3ddd31b691..05595f6343 100644
--- a/airflow/timetables/simple.py
+++ b/airflow/timetables/simple.py
@@ -55,6 +55,9 @@ class _TrivialTimetable(Timetable):
def infer_manual_data_interval(self, *, run_after: DateTime) -> DataInterval:
return DataInterval.exact(run_after)
+ def validate(self):
+ pass
+
class NullTimetable(_TrivialTimetable):
"""Timetable that never schedules anything.
diff --git a/airflow/utils/mixins.py b/airflow/utils/mixins.py
--- a/airflow/utils/mixins.py
+++ b/airflow/utils/mixins.py
@@ -25,6 +25,8 @@ from airflow.configuration import conf
from airflow.utils.context import Context
if typing.TYPE_CHECKING:
+ import multiprocessing.context
+
from airflow.models.operator import Operator
@@ -44,6 +46,10 @@ class MultiprocessingStartMethodMixin:
raise ValueError("Failed to determine start method")
return method
+ def _get_multiprocessing_context(self) -> multiprocessing.context.DefaultContext:
+ mp_start_method = self._get_multiprocessing_start_method()
+ return multiprocessing.get_context(mp_start_method) # type: ignore
+
class ResolveMixin:
"""A runtime-resolved value."""
diff --git a/airflow/utils/session.py b/airflow/utils/session.py
index f48eab9ed0..a11462b945 100644
--- a/airflow/utils/session.py
+++ b/airflow/utils/session.py
@@ -28,7 +28,7 @@ from airflow.typing_compat import ParamSpec
@contextlib.contextmanager
def create_session() -> Generator[settings.SASession, None, None]:
"""Contextmanager that will create and teardown a session."""
- if not settings.Session:
+ if not hasattr(settings, "Session") or settings.Session is None:
raise RuntimeError("Session must be set before!")
session = settings.Session()
try:
diff --git a/airflow/utils/state.py b/airflow/utils/state.py
index 6558af9f2a..780a3bbe60 100644
--- a/airflow/utils/state.py
+++ b/airflow/utils/state.py
@@ -85,7 +85,7 @@ class State:
FAILED = TaskInstanceState.FAILED
# These are TaskState only
- NONE = None
+ NONE: None = None
REMOVED = TaskInstanceState.REMOVED
SCHEDULED = TaskInstanceState.SCHEDULED
QUEUED = TaskInstanceState.QUEUED
diff --git a/airflow/www/extensions/init_appbuilder.py b/airflow/www/extensions/init_appbuilder.py
index acd12b319f..fe1ae0938d 100644
--- a/airflow/www/extensions/init_appbuilder.py
+++ b/airflow/www/extensions/init_appbuilder.py
@@ -14,6 +14,8 @@
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
+
+# mypy: disable-error-code=var-annotated
from __future__ import annotations
import logging
diff --git a/airflow/www/fab_security/manager.py b/airflow/www/fab_security/manager.py
index ea8918053c..a23315b814 100644
--- a/airflow/www/fab_security/manager.py
+++ b/airflow/www/fab_security/manager.py
@@ -14,6 +14,8 @@
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
+
+# mypy: disable-error-code=var-annotated
from __future__ import annotations
import base64
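The `# mypy: disable-error-code=var-annotated` pragma added to both www modules above is mypy's file-scoped suppression syntax; a minimal sketch of what it silences (the module contents here are invented for illustration):

    # mypy: disable-error-code=var-annotated
    from __future__ import annotations

    # Without the pragma, mypy 0.991 reports "Need type annotation" here,
    # because the empty literal gives it no item type to infer.
    registered_views = []
    registered_views.append("home")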
diff --git a/dev/breeze/src/airflow_breeze/utils/click_utils.py b/dev/breeze/src/airflow_breeze/utils/click_utils.py
index d6ce5f1255..46b7e8845f 100644
--- a/dev/breeze/src/airflow_breeze/utils/click_utils.py
+++ b/dev/breeze/src/airflow_breeze/utils/click_utils.py
@@ -19,4 +19,4 @@ from __future__ import annotations
try:
from rich_click import RichGroup as BreezeGroup
except ImportError:
- from click import Group as BreezeGroup # type: ignore[misc] # noqa
+ from click import Group as BreezeGroup # type: ignore[assignment] # noqa
diff --git a/scripts/ci/pre_commit/pre_commit_check_order_dockerfile_extras.py b/scripts/ci/pre_commit/pre_commit_check_order_dockerfile_extras.py
index b3a9683280..37b41fc148 100755
--- a/scripts/ci/pre_commit/pre_commit_check_order_dockerfile_extras.py
+++ b/scripts/ci/pre_commit/pre_commit_check_order_dockerfile_extras.py
@@ -28,7 +28,7 @@ from pathlib import Path
from rich import print
-errors = []
+errors: list[str] = []
MY_DIR_PATH = Path(__file__).parent.resolve()
diff --git a/scripts/ci/pre_commit/pre_commit_check_order_setup.py b/scripts/ci/pre_commit/pre_commit_check_order_setup.py
index ffcb0e0e80..05b8136e4b 100755
--- a/scripts/ci/pre_commit/pre_commit_check_order_setup.py
+++ b/scripts/ci/pre_commit/pre_commit_check_order_setup.py
@@ -30,7 +30,7 @@ from os.path import abspath, dirname
from rich import print
-errors = []
+errors: list[str] = []
MY_DIR_PATH = os.path.dirname(__file__)
SOURCE_DIR_PATH = os.path.abspath(os.path.join(MY_DIR_PATH, os.pardir, os.pardir, os.pardir))
diff --git a/scripts/ci/pre_commit/pre_commit_check_system_tests_hidden_in_index.py b/scripts/ci/pre_commit/pre_commit_check_system_tests_hidden_in_index.py
index c8adab41b4..5765b5a793 100755
--- a/scripts/ci/pre_commit/pre_commit_check_system_tests_hidden_in_index.py
+++ b/scripts/ci/pre_commit/pre_commit_check_system_tests_hidden_in_index.py
@@ -19,6 +19,7 @@ from __future__ import annotations
import sys
from pathlib import Path
+from typing import Any
from rich.console import Console
@@ -37,7 +38,7 @@ DOCS_ROOT = AIRFLOW_SOURCES_ROOT / "docs"
PREFIX = "apache-airflow-providers-"
-errors = []
+errors: list[Any] = []
def check_system_test_entry_hidden(provider_index: Path):
diff --git a/scripts/ci/pre_commit/pre_commit_update_common_sql_api_stubs.py b/scripts/ci/pre_commit/pre_commit_update_common_sql_api_stubs.py
index 80a7f081da..357ed6c856 100755
--- a/scripts/ci/pre_commit/pre_commit_update_common_sql_api_stubs.py
+++ b/scripts/ci/pre_commit/pre_commit_update_common_sql_api_stubs.py
@@ -36,11 +36,10 @@ if __name__ not in ("__main__", "__mp_main__"):
sys.path.insert(0, str(Path(__file__).parent.resolve())) # make sure common_precommit_utils is imported
-from common_precommit_utils import AIRFLOW_SOURCES_ROOT_PATH # isort: skip # noqa E402
-from common_precommit_black_utils import black_format # isort: skip # noqa E402
+from common_precommit_utils import AIRFLOW_SOURCES_ROOT_PATH # isort: skip # noqa: E402
+from common_precommit_black_utils import black_format # isort: skip # noqa: E402
-PROVIDERS_ROOT = AIRFLOW_SOURCES_ROOT_PATH / "airflow" / "providers"
-COMMON_SQL_ROOT = PROVIDERS_ROOT / "common" / "sql"
+COMMON_SQL_ROOT = AIRFLOW_SOURCES_ROOT_PATH.joinpath("airflow", "providers", "common", "sql")
OUT_DIR = AIRFLOW_SOURCES_ROOT_PATH / "out"
COMMON_SQL_PACKAGE_PREFIX = "airflow.providers.common.sql."
@@ -110,7 +109,7 @@ def post_process_line(stub_file_path: Path, line: str, new_lines: list[str]) ->
:param line: line to post-process
:param new_lines: new_lines - this is where we add post-processed lines
"""
- if stub_file_path.relative_to(OUT_DIR) == Path("common") / "sql" / "operators" / "sql.pyi":
+ if stub_file_path.parts[-4:] == ("common", "sql", "operators", "sql.pyi"):
stripped_line = line.strip()
if stripped_line.startswith("parse_boolean: Incomplete"):
# Handle Special case - historically we allow _parse_boolean to be part of the public API,
@@ -193,7 +192,7 @@ def read_pyi_file_content(
lines_no_comments = [
line
for line in pyi_file_path.read_text(encoding="utf-8").splitlines()
- if line.strip() and not line.strip().startswith("#")
+ if not line.strip().startswith("#")
]
remove_docstring = False
lines = []
@@ -206,6 +205,8 @@ def read_pyi_file_content(
if (pyi_file_path.name == "__init__.pyi") and lines == []:
console.print(f"[yellow]Skip {pyi_file_path} as it is an empty stub for __init__.py file")
return None
+ if lines and not lines[-1].endswith("\n"):
+ lines[-1] += "\n"
return post_process_generated_stub_file(
module_name, pyi_file_path, lines,
patch_generated_file=patch_generated_files
)
@@ -220,8 +221,8 @@ def compare_stub_files(generated_stub_path: Path, force_override: bool) -> tuple
"""
_removals, _additions = 0, 0
rel_path = generated_stub_path.relative_to(OUT_DIR)
- stub_file_target_path = PROVIDERS_ROOT / rel_path
- module_name = "airflow.providers." + os.fspath(rel_path.with_suffix("")).replace(os.path.sep, ".")
+ stub_file_target_path = AIRFLOW_SOURCES_ROOT_PATH / rel_path
+ module_name = os.fspath(rel_path.with_suffix("")).replace(os.path.sep, ".")
generated_pyi_content = read_pyi_file_content(
module_name, generated_stub_path, patch_generated_files=True
)
@@ -333,7 +334,11 @@ if __name__ == "__main__":
shutil.rmtree(OUT_DIR, ignore_errors=True)
subprocess.run(
- ["stubgen", *[os.fspath(path) for path in
COMMON_SQL_ROOT.rglob("**/*.py")]],
+ # Without --export-less, we end up with some _odd_ imports in the stub file that aren't used!
+ # +from airflow import AirflowException as AirflowException
+ # +from airflow.hooks.base import BaseHook as BaseHook
+ # We could avoid this if we had __all__ defined
+ ["stubgen", "--export-less", *[os.fspath(path) for path in
COMMON_SQL_ROOT.rglob("**/*.py")]],
cwd=AIRFLOW_SOURCES_ROOT_PATH,
)
total_removals, total_additions = 0, 0
@@ -348,7 +353,7 @@ if __name__ == "__main__":
total_removals += _new_removals
total_additions += _new_additions
for target_path in COMMON_SQL_ROOT.rglob("*.pyi"):
- generated_path = OUT_DIR / target_path.relative_to(PROVIDERS_ROOT)
+ generated_path = OUT_DIR / target_path.relative_to(AIRFLOW_SOURCES_ROOT_PATH)
if not generated_path.exists():
console.print(
f"[red]The {target_path} file is missing in generated files:. "
diff --git a/scripts/in_container/run_provider_yaml_files_check.py b/scripts/in_container/run_provider_yaml_files_check.py
index a629c75d75..28ec9e0f02 100755
--- a/scripts/in_container/run_provider_yaml_files_check.py
+++ b/scripts/in_container/run_provider_yaml_files_check.py
@@ -56,7 +56,7 @@ PROVIDER_ISSUE_TEMPLATE_PATH = ROOT_DIR.joinpath(
)
CORE_INTEGRATIONS = ["SQL", "Local"]
-errors = []
+errors: list[str] = []
console = Console(width=400, color_system="standard")
diff --git a/setup.cfg b/setup.cfg
index 657c2ded83..00fdc06aee 100644
--- a/setup.cfg
+++ b/setup.cfg
@@ -186,6 +186,13 @@ plugins =
dev.mypy.plugin.outputs
pretty = True
show_error_codes = True
+disable_error_code =
+ # "By default the bodies of untyped functions are not checked, consider
using --check-untyped-defs"
+ annotation-unchecked
+
+[mypy-airflow.config_templates.default_webserver_config]
+# This file gets written to user installs, so lets not litter it with type comments
+disable_error_code = var-annotated
[mypy-airflow.migrations.*]
ignore_errors = True
diff --git a/setup.py b/setup.py
index f9da876aec..84591c390c 100644
--- a/setup.py
+++ b/setup.py
@@ -338,7 +338,7 @@ mypy_dependencies = [
# TODO: upgrade to newer versions of MyPy continuously as they are released
# Make sure to upgrade the mypy version in update-common-sql-api-stubs in .pre-commit-config.yaml
# when you upgrade it here !!!!
- "mypy==0.971",
+ "mypy==0.991",
"types-boto",
"types-certifi",
"types-croniter",
diff --git a/tests/listeners/dag_listener.py b/tests/listeners/dag_listener.py
index 826316e6a6..07f0638dd1 100644
--- a/tests/listeners/dag_listener.py
+++ b/tests/listeners/dag_listener.py
@@ -26,7 +26,9 @@ if typing.TYPE_CHECKING:
from airflow.models.dagrun import DagRun
-running, success, failure = [], [], []
+running: list[DagRun] = []
+success: list[DagRun] = []
+failure: list[DagRun] = []
@hookimpl
diff --git a/tests/listeners/full_listener.py b/tests/listeners/full_listener.py
index 32281c876f..da4f723853 100644
--- a/tests/listeners/full_listener.py
+++ b/tests/listeners/full_listener.py
@@ -17,12 +17,14 @@
# under the License.
from __future__ import annotations
+from typing import Any
+
from airflow.listeners import hookimpl
from airflow.utils.state import State
-started_component = None
-stopped_component = None
-state = []
+started_component: Any = None
+stopped_component: Any = None
+state: list[Any] = []
@hookimpl
diff --git a/tests/listeners/lifecycle_listener.py b/tests/listeners/lifecycle_listener.py
index a626b6ae20..5fab1dc640 100644
--- a/tests/listeners/lifecycle_listener.py
+++ b/tests/listeners/lifecycle_listener.py
@@ -17,10 +17,12 @@
from __future__ import annotations
+from typing import Any
+
from airflow.listeners import hookimpl
-started_component = None
-stopped_component = None
+started_component: Any = None
+stopped_component: Any = None
@hookimpl
diff --git a/tests/listeners/partial_listener.py b/tests/listeners/partial_listener.py
index 2f539a3c3d..b4027e2875 100644
--- a/tests/listeners/partial_listener.py
+++ b/tests/listeners/partial_listener.py
@@ -20,7 +20,7 @@ from __future__ import annotations
from airflow.listeners import hookimpl
from airflow.utils.state import State
-state = []
+state: list[State] = []
@hookimpl
diff --git a/tests/test_utils/timetables.py b/tests/test_utils/timetables.py
index d246d76d3a..f5a274e5f4 100644
--- a/tests/test_utils/timetables.py
+++ b/tests/test_utils/timetables.py
@@ -49,3 +49,12 @@ class CustomSerializationTimetable(Timetable):
@property
def summary(self):
return f"{type(self).__name__}({self.value!r})"
+
+ def validate(self) -> None:
+ pass
+
+ def infer_manual_data_interval(self, *, run_after):
+ raise NotImplementedError()
+
+ def next_dagrun_info(self, *, last_automated_data_interval, restriction):
+ raise NotImplementedError()
diff --git a/tests/www/views/test_views_connection.py b/tests/www/views/test_views_connection.py
index 9235298022..714c87a8dc 100644
--- a/tests/www/views/test_views_connection.py
+++ b/tests/www/views/test_views_connection.py
@@ -18,6 +18,7 @@
from __future__ import annotations
import json
+from typing import Any
from unittest import mock
from unittest.mock import PropertyMock
@@ -30,7 +31,7 @@ from airflow.www.extensions import init_views
from airflow.www.views import ConnectionFormWidget, ConnectionModelView
from tests.test_utils.www import _check_last_log, _check_last_log_masked_connection, check_content_in_response
-CONNECTION = {
+CONNECTION: dict[str, Any] = {
"conn_id": "test_conn",
"conn_type": "http",
"description": "description",
@@ -41,13 +42,8 @@ CONNECTION = {
}
-def conn_with_extra():
- CONNECTION.update(
- {
- "extra": '{"x_secret": "testsecret","y_secret": "test"}',
- }
- )
- return CONNECTION
+def conn_with_extra() -> dict[str, Any]:
+ return {**CONNECTION, "extra": '{"x_secret": "testsecret","y_secret": "test"}'}
@pytest.fixture(autouse=True)
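The rewritten conn_with_extra above returns a merged copy instead of mutating the module-level CONNECTION dict, so the extra key no longer leaks into tests that run afterwards. A condensed sketch of the difference (the dict contents are trimmed for brevity):

    from __future__ import annotations

    from typing import Any

    CONNECTION: dict[str, Any] = {"conn_id": "test_conn", "conn_type": "http"}

    def conn_with_extra() -> dict[str, Any]:
        # The old version called CONNECTION.update(...), permanently adding
        # "extra"; the merge below builds a fresh dict on every call.
        return {**CONNECTION, "extra": '{"x_secret": "testsecret","y_secret": "test"}'}

    assert "extra" in conn_with_extra()
    assert "extra" not in CONNECTION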