This is an automated email from the ASF dual-hosted git repository.
potiuk pushed a commit to branch main
in repository https://gitbox.apache.org/repos/asf/airflow.git
The following commit(s) were added to refs/heads/main by this push:
new 75bb04bb6f Fix pydantic warning about `orm_mode` rename (#33220)
75bb04bb6f is described below
commit 75bb04bb6f37dab4083190e4a3d6436ace1cac31
Author: Ephraim Anierobi <[email protected]>
AuthorDate: Wed Aug 9 10:36:16 2023 +0100
Fix pydantic warning about `orm_mode` rename (#33220)
* Fix pydantic warning about `orm_mode` rename
Pydantic 2 renamed orm_mode to from_attributes. This was missed during the
upgrade to pydantic 2 and it gives excessive warnings about the rename.
This PR fixes it
* Also rename from_orm to model_validate and use model_dump instead of dict
* Fix Pydantic 1.x compatibility
---------
Co-authored-by: Tzu-ping Chung <[email protected]>
---
airflow/serialization/pydantic/dag_run.py | 3 ++-
airflow/serialization/pydantic/dataset.py | 12 ++++++++----
airflow/serialization/pydantic/job.py | 3 ++-
airflow/serialization/pydantic/taskinstance.py | 3 ++-
airflow/serialization/serialized_objects.py | 17 +++++++++++++----
5 files changed, 27 insertions(+), 11 deletions(-)
diff --git a/airflow/serialization/pydantic/dag_run.py b/airflow/serialization/pydantic/dag_run.py
index 1e1528eb35..834b6164ae 100644
--- a/airflow/serialization/pydantic/dag_run.py
+++ b/airflow/serialization/pydantic/dag_run.py
@@ -47,4 +47,5 @@ class DagRunPydantic(BaseModelPydantic):
class Config:
"""Make sure it deals automatically with SQLAlchemy ORM classes."""
- orm_mode = True
+ from_attributes = True
+ orm_mode = True # Pydantic 1.x compatibility.
diff --git a/airflow/serialization/pydantic/dataset.py b/airflow/serialization/pydantic/dataset.py
index 659e5a1899..096bda6ddd 100644
--- a/airflow/serialization/pydantic/dataset.py
+++ b/airflow/serialization/pydantic/dataset.py
@@ -31,7 +31,8 @@ class DagScheduleDatasetReferencePydantic(BaseModelPydantic):
class Config:
"""Make sure it deals automatically with SQLAlchemy ORM classes."""
- orm_mode = True
+ from_attributes = True
+ orm_mode = True # Pydantic 1.x compatibility.
class TaskOutletDatasetReferencePydantic(BaseModelPydantic):
@@ -46,7 +47,8 @@ class TaskOutletDatasetReferencePydantic(BaseModelPydantic):
class Config:
"""Make sure it deals automatically with SQLAlchemy ORM classes."""
- orm_mode = True
+ from_attributes = True
+ orm_mode = True # Pydantic 1.x compatibility.
class DatasetPydantic(BaseModelPydantic):
@@ -65,7 +67,8 @@ class DatasetPydantic(BaseModelPydantic):
class Config:
"""Make sure it deals automatically with SQLAlchemy ORM classes."""
- orm_mode = True
+ from_attributes = True
+ orm_mode = True # Pydantic 1.x compatibility.
class DatasetEventPydantic(BaseModelPydantic):
@@ -83,4 +86,5 @@ class DatasetEventPydantic(BaseModelPydantic):
class Config:
"""Make sure it deals automatically with SQLAlchemy ORM classes."""
- orm_mode = True
+ from_attributes = True
+ orm_mode = True # Pydantic 1.x compatibility.
diff --git a/airflow/serialization/pydantic/job.py b/airflow/serialization/pydantic/job.py
index b36a9826eb..27c8ad8ca7 100644
--- a/airflow/serialization/pydantic/job.py
+++ b/airflow/serialization/pydantic/job.py
@@ -49,4 +49,5 @@ class JobPydantic(BaseModelPydantic):
class Config:
"""Make sure it deals automatically with SQLAlchemy ORM classes."""
- orm_mode = True
+ from_attributes = True
+ orm_mode = True # Pydantic 1.x compatibility.
diff --git a/airflow/serialization/pydantic/taskinstance.py b/airflow/serialization/pydantic/taskinstance.py
index 236c42c260..71d8ba576f 100644
--- a/airflow/serialization/pydantic/taskinstance.py
+++ b/airflow/serialization/pydantic/taskinstance.py
@@ -60,7 +60,8 @@ class TaskInstancePydantic(BaseModelPydantic):
class Config:
"""Make sure it deals automatically with SQLAlchemy ORM classes."""
- orm_mode = True
+ from_attributes = True
+ orm_mode = True # Pydantic 1.x compatibility.
def xcom_pull(
self,
diff --git a/airflow/serialization/serialized_objects.py b/airflow/serialization/serialized_objects.py
index d89f2e22d4..7e403e835c 100644
--- a/airflow/serialization/serialized_objects.py
+++ b/airflow/serialization/serialized_objects.py
@@ -67,6 +67,8 @@ from airflow.utils.operator_resources import Resources
from airflow.utils.task_group import MappedTaskGroup, TaskGroup
if TYPE_CHECKING:
+ from pydantic import BaseModel
+
from airflow.ti_deps.deps.base_ti_dep import BaseTIDep
HAS_KUBERNETES: bool
@@ -479,14 +481,21 @@ class BaseSerialization:
type_=DAT.SIMPLE_TASK_INSTANCE,
)
elif use_pydantic_models and _ENABLE_AIP_44:
+
+ def _pydantic_model_dump(model_cls: type[BaseModel], var: Any) -> dict[str, Any]:
+ try:
+ return model_cls.model_validate(var).model_dump()  # type: ignore[attr-defined]
+ except AttributeError: # Pydantic 1.x compatibility.
+ return model_cls.from_orm(var).dict()  # type: ignore[attr-defined]
+
if isinstance(var, Job):
- return cls._encode(JobPydantic.from_orm(var).dict(), type_=DAT.BASE_JOB)
+ return cls._encode(_pydantic_model_dump(JobPydantic, var), type_=DAT.BASE_JOB)
elif isinstance(var, TaskInstance):
- return cls._encode(TaskInstancePydantic.from_orm(var).dict(), type_=DAT.TASK_INSTANCE)
+ return cls._encode(_pydantic_model_dump(TaskInstancePydantic, var), type_=DAT.TASK_INSTANCE)
elif isinstance(var, DagRun):
- return cls._encode(DagRunPydantic.from_orm(var).dict(), type_=DAT.DAG_RUN)
+ return cls._encode(_pydantic_model_dump(DagRunPydantic, var), type_=DAT.DAG_RUN)
elif isinstance(var, Dataset):
- return cls._encode(DatasetPydantic.from_orm(var).dict(), type_=DAT.DATA_SET)
+ return cls._encode(_pydantic_model_dump(DatasetPydantic, var), type_=DAT.DATA_SET)
else:
return cls.default_serialization(strict, var)
else: