This is an automated email from the ASF dual-hosted git repository.
bugraoz pushed a commit to branch main
in repository https://gitbox.apache.org/repos/asf/airflow.git
The following commit(s) were added to refs/heads/main by this push:
new a4e2586f5dd Remove timezone dependency and update Pydantic model serialization mode to json (#56071)
a4e2586f5dd is described below
commit a4e2586f5dd8b1f688835e552284054a94a6b846
Author: Bugra Ozturk <[email protected]>
AuthorDate: Thu Sep 25 18:59:41 2025 +0200
Remove timezone dependency and update Pydantic model serialization mode to json (#56071)
---
airflow-ctl/pyproject.toml | 11 -----
airflow-ctl/src/airflowctl/_shared/timezones | 1 -
airflow-ctl/src/airflowctl/api/operations.py | 52 +++++++---------------
airflow-ctl/src/airflowctl/ctl/cli_config.py | 4 +-
.../tests/airflow_ctl/ctl/test_cli_config.py | 2 +-
5 files changed, 19 insertions(+), 51 deletions(-)
diff --git a/airflow-ctl/pyproject.toml b/airflow-ctl/pyproject.toml
index d91596b69cd..611ca1cf09b 100644
--- a/airflow-ctl/pyproject.toml
+++ b/airflow-ctl/pyproject.toml
@@ -38,9 +38,6 @@ dependencies = [
"structlog>=25.2.0",
"uuid6>=2024.7.10",
"tabulate>=0.9.0",
- # Start of shared timezones dependencies
- "pendulum>=3.1.0",
- # End of shared timezones dependencies
]
classifiers = [
@@ -65,9 +62,6 @@ path = "src/airflowctl/__init__.py"
[tool.hatch.build.targets.wheel]
packages = ["src/airflowctl"]
-[tool.hatch.build.targets.sdist.force-include]
-"../shared/timezones/src/airflow_shared/timezones" = "src/airflow/sdk/_shared/timezones"
-
[tool.ruff]
extend = "../pyproject.toml"
src = ["src"]
@@ -197,8 +191,3 @@ required-version = ">=0.6.3"
[tool.uv.sources]
apache-airflow-devel-common = { workspace = true }
-
-[tool.airflow]
-shared_distributions = [
- "apache-airflow-shared-timezones",
-]
diff --git a/airflow-ctl/src/airflowctl/_shared/timezones b/airflow-ctl/src/airflowctl/_shared/timezones
deleted file mode 120000
index 8d7034fa713..00000000000
--- a/airflow-ctl/src/airflowctl/_shared/timezones
+++ /dev/null
@@ -1 +0,0 @@
-../../../../shared/timezones/src/airflow_shared/timezones
\ No newline at end of file
diff --git a/airflow-ctl/src/airflowctl/api/operations.py b/airflow-ctl/src/airflowctl/api/operations.py
index f84871eacd1..3475357bf41 100644
--- a/airflow-ctl/src/airflowctl/api/operations.py
+++ b/airflow-ctl/src/airflowctl/api/operations.py
@@ -128,20 +128,6 @@ def _check_flag_and_exit_if_server_response_error(func):
return wrapped
-def _date_safe_dict_from_pydantic(obj: T) -> dict:
- """
- Convert a Pydantic model to a dict to be sent to the API server.
-
- It meant to be used for any pydantic method which will be sent to API.
-
- :param obj: Pydantic model
- :return: dict
- """
- import json
-
- return json.loads(obj.model_dump_json())
-
-
class BaseOperations:
"""
Base class for operations.
@@ -204,7 +190,7 @@ class LoginOperations:
"""Login to the API server."""
try:
return LoginResponse.model_validate_json(
- self.client.post("/token/cli", json=login.model_dump()).content
+                self.client.post("/token/cli", json=login.model_dump(mode="json")).content
)
except ServerResponseError as e:
raise e
@@ -246,9 +232,7 @@ class AssetsOperations(BaseOperations):
# Ensure extra is initialised before sent to API
if asset_event_body.extra is None:
asset_event_body.extra = {}
- self.response = self.client.post(
- "assets/events",
json=_date_safe_dict_from_pydantic(asset_event_body)
- )
+ self.response = self.client.post("assets/events",
json=asset_event_body.model_dump(mode="json"))
return
AssetEventResponse.model_validate_json(self.response.content)
except ServerResponseError as e:
raise e
@@ -318,7 +302,7 @@ class BackfillOperations(BaseOperations):
def create(self, backfill: BackfillPostBody) -> BackfillResponse |
ServerResponseError:
"""Create a backfill."""
try:
- self.response = self.client.post("backfills",
data=_date_safe_dict_from_pydantic(backfill))
+ self.response = self.client.post("backfills",
data=backfill.model_dump(mode="json"))
return BackfillResponse.model_validate_json(self.response.content)
except ServerResponseError as e:
raise e
@@ -326,9 +310,7 @@ class BackfillOperations(BaseOperations):
def create_dry_run(self, backfill: BackfillPostBody) -> BackfillResponse |
ServerResponseError:
"""Create a dry run backfill."""
try:
- self.response = self.client.post(
- "backfills/dry_run",
data=_date_safe_dict_from_pydantic(backfill)
- )
+ self.response = self.client.post("backfills/dry_run",
data=backfill.model_dump(mode="json"))
return BackfillResponse.model_validate_json(self.response.content)
except ServerResponseError as e:
raise e
@@ -412,7 +394,7 @@ class ConnectionsOperations(BaseOperations):
) -> ConnectionResponse | ServerResponseError:
"""Create a connection."""
try:
- self.response = self.client.post("connections",
json=_date_safe_dict_from_pydantic(connection))
+ self.response = self.client.post("connections",
json=connection.model_dump(mode="json"))
return
ConnectionResponse.model_validate_json(self.response.content)
except ServerResponseError as e:
raise e
@@ -420,7 +402,7 @@ class ConnectionsOperations(BaseOperations):
def bulk(self, connections: BulkBodyConnectionBody) -> BulkResponse |
ServerResponseError:
"""CRUD multiple connections."""
try:
- self.response = self.client.patch("connections",
json=_date_safe_dict_from_pydantic(connections))
+ self.response = self.client.patch("connections",
json=connections.model_dump(mode="json"))
return BulkResponse.model_validate_json(self.response.content)
except ServerResponseError as e:
raise e
@@ -448,7 +430,7 @@ class ConnectionsOperations(BaseOperations):
"""Update a connection."""
try:
self.response = self.client.patch(
- f"connections/{connection.connection_id}",
json=_date_safe_dict_from_pydantic(connection)
+ f"connections/{connection.connection_id}",
json=connection.model_dump(mode="json")
)
return
ConnectionResponse.model_validate_json(self.response.content)
except ServerResponseError as e:
@@ -460,9 +442,7 @@ class ConnectionsOperations(BaseOperations):
) -> ConnectionTestResponse | ServerResponseError:
"""Test a connection."""
try:
- self.response = self.client.post(
- "connections/test",
json=_date_safe_dict_from_pydantic(connection)
- )
+ self.response = self.client.post("connections/test",
json=connection.model_dump(mode="json"))
return
ConnectionTestResponse.model_validate_json(self.response.content)
except ServerResponseError as e:
raise e
@@ -497,7 +477,7 @@ class DagsOperations(BaseOperations):
def patch(self, dag_id: str, dag_body: DAGPatchBody) -> DAGResponse |
ServerResponseError:
try:
- self.response = self.client.patch(f"dags/{dag_id}",
json=dag_body.model_dump())
+ self.response = self.client.patch(f"dags/{dag_id}",
json=dag_body.model_dump(mode="json"))
return DAGResponse.model_validate_json(self.response.content)
except ServerResponseError as e:
raise e
@@ -581,7 +561,7 @@ class DagRunOperations(BaseOperations):
trigger_dag_run.conf = {}
try:
self.response = self.client.post(
- f"dags/{dag_id}/dagRuns",
json=_date_safe_dict_from_pydantic(trigger_dag_run)
+ f"dags/{dag_id}/dagRuns",
json=trigger_dag_run.model_dump(mode="json")
)
return DAGRunResponse.model_validate_json(self.response.content)
except ServerResponseError as e:
@@ -617,7 +597,7 @@ class PoolsOperations(BaseOperations):
def create(self, pool: PoolBody) -> PoolResponse | ServerResponseError:
"""Create a pool."""
try:
- self.response = self.client.post("pools",
json=_date_safe_dict_from_pydantic(pool))
+ self.response = self.client.post("pools",
json=pool.model_dump(mode="json"))
return PoolResponse.model_validate_json(self.response.content)
except ServerResponseError as e:
raise e
@@ -625,7 +605,7 @@ class PoolsOperations(BaseOperations):
def bulk(self, pools: BulkBodyPoolBody) -> BulkResponse |
ServerResponseError:
"""CRUD multiple pools."""
try:
- self.response = self.client.patch("pools",
json=_date_safe_dict_from_pydantic(pools))
+ self.response = self.client.patch("pools",
json=pools.model_dump(mode="json"))
return BulkResponse.model_validate_json(self.response.content)
except ServerResponseError as e:
raise e
@@ -642,7 +622,7 @@ class PoolsOperations(BaseOperations):
"""Update a pool."""
try:
self.response = self.client.patch(
- f"pools/{pool_body.pool}",
json=_date_safe_dict_from_pydantic(pool_body)
+ f"pools/{pool_body.pool}",
json=pool_body.model_dump(mode="json")
)
return PoolResponse.model_validate_json(self.response.content)
except ServerResponseError as e:
@@ -675,7 +655,7 @@ class VariablesOperations(BaseOperations):
def create(self, variable: VariableBody) -> VariableResponse |
ServerResponseError:
"""Create a variable."""
try:
- self.response = self.client.post("variables",
json=_date_safe_dict_from_pydantic(variable))
+ self.response = self.client.post("variables",
json=variable.model_dump(mode="json"))
return VariableResponse.model_validate_json(self.response.content)
except ServerResponseError as e:
raise e
@@ -683,7 +663,7 @@ class VariablesOperations(BaseOperations):
def bulk(self, variables: BulkBodyVariableBody) -> BulkResponse |
ServerResponseError:
"""CRUD multiple variables."""
try:
- self.response = self.client.patch("variables",
json=_date_safe_dict_from_pydantic(variables))
+ self.response = self.client.patch("variables",
json=variables.model_dump(mode="json"))
return BulkResponse.model_validate_json(self.response.content)
except ServerResponseError as e:
raise e
@@ -700,7 +680,7 @@ class VariablesOperations(BaseOperations):
"""Update a variable."""
try:
self.response = self.client.patch(
- f"variables/{variable.key}",
json=_date_safe_dict_from_pydantic(variable)
+ f"variables/{variable.key}",
json=variable.model_dump(mode="json")
)
return VariableResponse.model_validate_json(self.response.content)
except ServerResponseError as e:
diff --git a/airflow-ctl/src/airflowctl/ctl/cli_config.py b/airflow-ctl/src/airflowctl/ctl/cli_config.py
index 304b4f76925..bfef454d134 100644
--- a/airflow-ctl/src/airflowctl/ctl/cli_config.py
+++ b/airflow-ctl/src/airflowctl/ctl/cli_config.py
@@ -22,6 +22,7 @@ from __future__ import annotations
import argparse
import ast
+import datetime
import getpass
import inspect
import os
@@ -36,7 +37,6 @@ import httpx
import rich
import airflowctl.api.datamodels.generated as generated_datamodels
-from airflowctl._shared.timezones.timezone import parse as parsedate
from airflowctl.api.client import NEW_API_CLIENT, Client, ClientKind, provide_api_client
from airflowctl.api.operations import BaseOperations, ServerResponseError
from airflowctl.ctl.console_formatting import AirflowConsole
@@ -467,7 +467,7 @@ class CommandFactory:
"dict": dict,
"tuple": tuple,
"set": set,
- "datetime.datetime": parsedate,
+ "datetime.datetime": datetime.datetime,
"dict[str, typing.Any]": dict,
}
# Default to ``str`` to preserve previous behaviour for any
unrecognised
diff --git a/airflow-ctl/tests/airflow_ctl/ctl/test_cli_config.py b/airflow-ctl/tests/airflow_ctl/ctl/test_cli_config.py
index f5694d5b4bc..c0ac022a256 100644
--- a/airflow-ctl/tests/airflow_ctl/ctl/test_cli_config.py
+++ b/airflow-ctl/tests/airflow_ctl/ctl/test_cli_config.py
@@ -225,7 +225,7 @@ class TestCommandFactory:
class BackfillsOperations(BaseOperations):
def create(self, backfill: BackfillPostBody) ->
BackfillResponse | ServerResponseError:
try:
- self.response = self.client.post("backfills",
json=_date_safe_dict_from_pydantic(backfill))
+ self.response = self.client.post("backfills",
json=backfill.model_dump(mode="json"))
return
BackfillResponse.model_validate_json(self.response.content)
except ServerResponseError as e:
raise e