ashb commented on a change in pull request #5701: [AIRFLOW-5088][AIP-24] Add 
DAG serialization using JSON
URL: https://github.com/apache/airflow/pull/5701#discussion_r313867143
 
 

 ##########
 File path: airflow/dag/serialization.py
 ##########
 @@ -0,0 +1,250 @@
+# -*- coding: utf-8 -*-
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+
+"""DAG serialization with JSON."""
+
+import json
+import logging
+
+import datetime
+import dateutil.parser
+import pendulum
+
+from airflow import models
+from airflow.www.utils import get_python_source
+
+
# JSON primitive types.
_primitive_types = (int, bool, float, str)

# Date/time types; encoded via isoformat() during serialization.
_datetime_types = (datetime.datetime, datetime.date, datetime.time)

# Object types that are always excluded.
# TODO(coufon): not needed if _dag_included_fields and _op_included_fields are customized.
_excluded_types = (logging.Logger, models.connection.Connection, type)

# Stringified DAGs and operators contain exactly these fields.
# NOTE(review): computed at import time from vars() of throwaway model instances.
# TODO(coufon): to customize included fields and keep only necessary fields.
_dag_included_fields = list(vars(models.DAG(dag_id='test')).keys())
_op_included_fields = list(vars(models.BaseOperator(task_id='test')).keys()) + [
    '_dag', 'ui_color', 'ui_fgcolor', 'template_fields']

# Encoding constants: keys of the wrapper dict produced by _encode().
TYPE = '__type'
CLASS = '__class'
VAR = '__var'

# Supported types. primitives and list are not encoded.
DAG = 'dag'
OP = 'operator'
DATETIME = 'datetime'
TIMEDELTA = 'timedelta'
TIMEZONE = 'timezone'
DICT = 'dict'
SET = 'set'
TUPLE = 'tuple'

# Constants.
BASE_OPERATOR_CLASS = 'BaseOperator'
# Serialization failure returns 'failed'.
FAILED = 'failed'
+
+
def _is_primitive(x):
    """Return True if x is None or a JSON primitive (int, bool, float, str)."""
    if x is None:
        return True
    return isinstance(x, _primitive_types)
+
+
def _is_excluded(x):
    """Return True for values that must be skipped during serialization.

    None values are dropped here and added back in deserialization.

    TODO(coufon): not needed if _dag_included_fields and _op_included_fields
    are customized.
    """
    return True if x is None else isinstance(x, _excluded_types)
+
+
def _serialize_object(x, visited_dags, included_fields):
    """Serialize the selected attributes of an object into a JSON dict."""
    serialized = {}
    for field in included_fields:
        # Missing attributes fall back to None; excluded values (including
        # None) are omitted and restored during deserialization.
        value = getattr(x, field, None)
        if _is_excluded(value):
            continue
        serialized[field] = _serialize(value, visited_dags)
    return serialized
+
+
def _serialize_dag(x, visited_dags):
    """Serialize a DAG; a DAG already in progress becomes an id reference."""
    dag_id = x.dag_id
    if dag_id in visited_dags:
        # Break the reference cycle caused by task.dag / dag.parent_dag:
        # refer to the DAG by its id instead of recursing into it again.
        return {TYPE: DAG, VAR: str(dag_id)}

    result = {TYPE: DAG}
    # Register the entry before recursing so that nested references to this
    # DAG resolve to it rather than recursing forever.
    visited_dags[dag_id] = result
    result[VAR] = _serialize_object(
        x, visited_dags, included_fields=_dag_included_fields)
    return result
+
+
def _serialize_operator(x, visited_dags):
    """Serialize an operator, recording its concrete class name for the UI."""
    fields = _serialize_object(
        x, visited_dags, included_fields=_op_included_fields)
    return _encode(fields, type_=OP, class_=x.__class__.__name__)
+
+
def _encode(x, type_, class_=None):
    """Wrap a serialized value in a JSON dict tagged with its type.

    The optional CLASS entry is added only for operators, which carry their
    original class name for display in the UI.
    """
    encoded = {VAR: x, TYPE: type_}
    if class_ is not None:
        encoded[CLASS] = class_
    return encoded
+
+
def _serialize(x, visited_dags):  # pylint: disable=too-many-return-statements
    """Depth-first-search helper for serialization.

    visited_dags stores DAGs that are being serialized or have already been
    serialized, in order to:
      (1) prevent infinite recursion through task.dag, task._dag, and
          dag.parent_dag;
      (2) replace the fields in (1) with their serialized counterparts.

    The serialization protocol is:
      (1) JSON-supported types (primitives, dict, list) are kept as-is;
      (2) any other type is encoded as {TYPE: 'foo', VAR: 'bar'}, and the
          deserialization step decodes VAR according to TYPE;
      (3) an Operator has a special field CLASS recording its original class
          name for displaying in the UI.
    """
    # NOTE: the order of the checks below is significant (e.g. DAG and
    # BaseOperator instances must be handled before the generic fallbacks).
    try:
        if _is_primitive(x):
            return x
        if isinstance(x, dict):
            return _encode(
                {k: _serialize(v, visited_dags) for k, v in x.items()},
                type_=DICT)
        if isinstance(x, list):
            return [_serialize(v, visited_dags) for v in x]
        if isinstance(x, models.DAG):
            return _serialize_dag(x, visited_dags)
        if isinstance(x, models.BaseOperator):
            return _serialize_operator(x, visited_dags)
        if isinstance(x, _datetime_types):
            return _encode(x.isoformat(), type_=DATETIME)
        if isinstance(x, datetime.timedelta):
            return _encode(x.total_seconds(), type_=TIMEDELTA)
        if isinstance(x, pendulum.tz.Timezone):
            return _encode(str(x.name), type_=TIMEZONE)
        if callable(x):
            # Callables (e.g. python_callable, on_failure_callback) are
            # stored as their source text.
            return str(get_python_source(x))
        if isinstance(x, set):
            return _encode(
                [_serialize(v, visited_dags) for v in x], type_=SET)
        if isinstance(x, tuple):
            return _encode(
                [_serialize(v, visited_dags) for v in x], type_=TUPLE)
        # Fall back to the string representation for unknown types.
        logging.debug('Cast type %s to str in serialization.', type(x))
        return str(x)
    except Exception:  # pylint: disable=broad-except
        logging.warning('Failed to stringify.', exc_info=True)
        return FAILED
 
 Review comment:
   Perhaps `airflow__serialization_failed` then?

----------------------------------------------------------------
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.
 
For queries about this service, please contact Infrastructure at:
us...@infra.apache.org


With regards,
Apache Git Services

Reply via email to