[GitHub] [airflow] ashb commented on a change in pull request #5701: [AIRFLOW-5088][AIP-24] Add DAG serialization using JSON

2019-08-14 Thread GitBox
ashb commented on a change in pull request #5701: [AIRFLOW-5088][AIP-24] Add 
DAG serialization using JSON
URL: https://github.com/apache/airflow/pull/5701#discussion_r313867143
 
 

 ##
 File path: airflow/dag/serialization.py
 ##
 @@ -0,0 +1,250 @@
+# -*- coding: utf-8 -*-
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+
+"""DAG serialization with JSON."""
+
+import json
+import logging
+
+import datetime
+import dateutil.parser
+import pendulum
+
+from airflow import models
+from airflow.www.utils import get_python_source
+
+
+# JSON primitive types.
+_primitive_types = (int, bool, float, str)
+
+_datetime_types = (datetime.datetime, datetime.date, datetime.time)
+
+# Object types that are always excluded.
+# TODO(coufon): not needed if _dag_included_fields and _op_included_fields are customized.
+_excluded_types = (logging.Logger, models.connection.Connection, type)
+
+# Stringified DAGs and operators contain exactly these fields.
+# TODO(coufon): customize included fields to keep only the necessary fields.
+_dag_included_fields = list(vars(models.DAG(dag_id='test')).keys())
+_op_included_fields = list(vars(models.BaseOperator(task_id='test')).keys()) + [
+    '_dag', 'ui_color', 'ui_fgcolor', 'template_fields']
+
+# Encoding constants.
+TYPE = '__type'
+CLASS = '__class'
+VAR = '__var'
+
+# Supported types; primitives and lists are not encoded.
+DAG = 'dag'
+OP = 'operator'
+DATETIME = 'datetime'
+TIMEDELTA = 'timedelta'
+TIMEZONE = 'timezone'
+DICT = 'dict'
+SET = 'set'
+TUPLE = 'tuple'
+
+# Constants.
+BASE_OPERATOR_CLASS = 'BaseOperator'
+# Serialization failure returns 'failed'.
+FAILED = 'failed'
+
+
+def _is_primitive(x):
+    """Returns True if x is None or a JSON primitive type."""
+    return x is None or isinstance(x, _primitive_types)
+
+
+def _is_excluded(x):
+    """Types excluded from serialization.
+
+    TODO(coufon): not needed if _dag_included_fields and _op_included_fields are customized.
+    """
+    return x is None or isinstance(x, _excluded_types)
+
+
+def _serialize_object(x, visited_dags, included_fields):
+    """Helper function to serialize an object as a JSON dict."""
+    new_x = {}
+    for k in included_fields:
+        # None is ignored in serialized form and is added back in deserialization.
+        v = getattr(x, k, None)
+        if not _is_excluded(v):
+            new_x[k] = _serialize(v, visited_dags)
+    return new_x
+
+
+def _serialize_dag(x, visited_dags):
+    """Serialize a DAG."""
+    if x.dag_id in visited_dags:
+        return {TYPE: DAG, VAR: str(x.dag_id)}
+
+    new_x = {TYPE: DAG}
+    visited_dags[x.dag_id] = new_x
+    new_x[VAR] = _serialize_object(
+        x, visited_dags, included_fields=_dag_included_fields)
+    return new_x
+
+
+def _serialize_operator(x, visited_dags):
+    """Serialize an operator."""
+    return _encode(
+        _serialize_object(
+            x, visited_dags, included_fields=_op_included_fields),
+        type_=OP,
+        class_=x.__class__.__name__
+    )
+
+
+def _encode(x, type_, class_=None):
+    """Encode data as a JSON dict."""
+    return ({VAR: x, TYPE: type_} if class_ is None
+            else {VAR: x, TYPE: type_, CLASS: class_})
+
+
+def _serialize(x, visited_dags):  # pylint: disable=too-many-return-statements
+    """Helper function of depth-first search for serialization.
+
+    visited_dags stores DAGs that are being serialized or have already been serialized,
+    for:
+      (1) preventing cycles caused by task.dag, task._dag, and dag.parent_dag;
+      (2) replacing the fields in (1) with serialized counterparts.
+
+    The serialization protocol is:
+      (1) keep JSON-supported types: primitives, dict, list;
+      (2) encode other types as {TYPE: 'foo', VAR: 'bar'}; the deserialization
+          step decodes VAR according to TYPE;
+      (3) an operator has a special field CLASS to record the original class
+          name for display in the UI.
+    """
+    try:
+        if _is_primitive(x):
+            return x
+        elif isinstance(x, dict):
+            return _encode({k: _serialize(v, visited_dags) for k, v in x.items()}, type_=DICT)
+        elif isinstance(x, list):
+            return [_serialize(v, visited_dags) for v in x]
+elif 

[GitHub] [airflow] ashb commented on a change in pull request #5701: [AIRFLOW-5088][AIP-24] Add DAG serialization using JSON

2019-08-14 Thread GitBox
ashb commented on a change in pull request #5701: [AIRFLOW-5088][AIP-24] Add 
DAG serialization using JSON
URL: https://github.com/apache/airflow/pull/5701#discussion_r313866659
 
 

 ##
 File path: airflow/dag/serialization.py
 ##
 @@ -0,0 +1,250 @@
+# -*- coding: utf-8 -*-
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+
+"""DAG serialization with JSON."""
+
+import json
+import logging
+
+import datetime
+import dateutil.parser
+import pendulum
+
+from airflow import models
+from airflow.www.utils import get_python_source
+
+
+# JSON primitive types.
+_primitive_types = (int, bool, float, str)
 
 Review comment:
   Oh yes, good point. I was thinking in terms of "python primitives" :)
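For context, the serialization helpers quoted earlier in this thread wrap non-primitive values in a small `{'__type': ..., '__var': ...}` dict while JSON primitives pass through unchanged. A minimal, self-contained sketch of that convention (not the PR's code; the ISO-string form for datetimes is an assumption, since the quoted hunk is cut off before that branch):

```python
import datetime

TYPE, VAR = '__type', '__var'
_primitive_types = (int, bool, float, str)


def encode_value(x):
    """Encode one value following the {TYPE, VAR} convention sketched above."""
    if x is None or isinstance(x, _primitive_types):
        return x  # JSON primitives are kept as-is
    if isinstance(x, datetime.datetime):
        # Assumed representation; the quoted hunk is truncated before this branch.
        return {TYPE: 'datetime', VAR: x.isoformat()}
    raise TypeError('unsupported in this sketch: %r' % type(x))


print(encode_value(3))                               # -> 3
print(encode_value(datetime.datetime(2019, 8, 14)))  # -> {'__type': 'datetime', '__var': '2019-08-14T00:00:00'}
```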


This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.
 
For queries about this service, please contact Infrastructure at:
us...@infra.apache.org


With regards,
Apache Git Services


[GitHub] [airflow] ashb commented on a change in pull request #5701: [AIRFLOW-5088][AIP-24] Add DAG serialization using JSON

2019-08-12 Thread GitBox
ashb commented on a change in pull request #5701: [AIRFLOW-5088][AIP-24] Add 
DAG serialization using JSON
URL: https://github.com/apache/airflow/pull/5701#discussion_r312822891
 
 

 ##
 File path: airflow/dag/serialization/json_schema.py
 ##
 @@ -0,0 +1,140 @@
+# -*- coding: utf-8 -*-
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+
+"""jsonschema for validating serialized DAG and operator."""
+
+from typing import Any, Dict, Optional
+
+from airflow.dag.serialization.enum import DagAttributeTypes as DAT, Encoding
+
+
+def make_object_schema(
+        var_schema: Optional[dict] = None, type_enum: Optional[list] = None) -> dict:
+    """jsonschema of an encoded object.
+
+    :param var_schema: JSON schema of the encoded variable
+    :param type_enum: used to restrict the type value to a fixed set of values
+    """
+    schema = {
+        'type': 'object',
+        'properties': {
+            Encoding.TYPE.value: {'type': 'string'}
+        },
+        'required': [
+            Encoding.TYPE.value,
+            Encoding.VAR.value
+        ]
+    }   # type: Dict[str, Any]
+    if var_schema is not None:
+        schema['properties'][Encoding.VAR.value] = var_schema
+
+    if type_enum is not None:
+        schema['properties'][Encoding.TYPE.value]['enum'] = type_enum
+
+    return schema
+
+
+def make_operator_schema() -> dict:
+    """jsonschema of a serialized operator."""
+    return make_object_schema(
+        var_schema={
+            'type': 'object',
+            'properties': {
+                'task_id': {'type': 'string'},
+                'owner': {'type': 'string'},
+                'start_date': make_object_schema(
+                    var_schema={'type': 'string'},
+                    type_enum=[DAT.DATETIME.value]),
+                'trigger_rule': {'type': 'string'},
+                'depends_on_past': {'type': 'boolean'},
+                'wait_for_downstream': {'type': 'boolean'},
+                'retries': {'type': 'number'},
+                'queue': {'type': 'string'},
+                'pool': {'type': 'string'},
+                'retry_delay': make_object_schema(
+                    var_schema={'type': 'number'},
+                    type_enum=[DAT.TIMEDELTA.value]),
+                'retry_exponential_backoff': {'type': 'boolean'},
+                'params': make_object_schema(
+                    var_schema={'type': 'object'},
+                    type_enum=[DAT.DICT.value]),
+                'priority_weight': {'type': 'number'},
+                'weight_rule': {'type': 'string'},
+                'executor_config': make_object_schema(
+                    var_schema={'type': 'object'},
+                    type_enum=[DAT.DICT.value]),
+                'do_xcom_push': {'type': 'boolean'},
+                # _dag field must be a dag_id.
+                '_dag': make_object_schema(
+                    var_schema={'type': 'string'},
+                    type_enum=[DAT.DAG.value]),
+                'ui_color': {'type': 'string'},
+                'ui_fgcolor': {'type': 'string'},
+                'template_fields': {
+                    'anyOf': [
+                        {
+                            'type': 'array',
+                            'items': {'type': 'string'}
+                        },
+                        make_object_schema(
+                            var_schema={'type': 'array'},
+                            type_enum=[DAT.TUPLE.value]),
+                    ]
+                }
+            },
+            'required': [
+                'task_id', 'owner', 'start_date', '_dag',
+                'ui_color', 'ui_fgcolor', 'template_fields']
+        },
+        type_enum=[DAT.OP.value])
+
+
+def make_dag_schema() -> dict:
+    """jsonschema of a serialized DAG."""
+    return make_object_schema(
+        var_schema={
+            'type': 'object',
+            'properties': {
+                'default_args': make_object_schema(var_schema={'type': 'object'}),
+                'params': make_object_schema(var_schema={'type': 'object'}),
+                '_dag_id': {'type': 'string'},
+                'task_dict': make_object_schema(
+                    var_schema={
+                        'type': 'object',
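For context, schemas of the shape built by `make_object_schema` are meant to be consumed by `jsonschema.validate`. A self-contained sketch of that usage (example values assumed, not taken from the PR):

```python
import jsonschema

# Hand-written schema in the same shape make_object_schema() produces
# for an encoded datetime field.
encoded_datetime_schema = {
    'type': 'object',
    'properties': {
        '__type': {'type': 'string', 'enum': ['datetime']},
        '__var': {'type': 'string'},
    },
    'required': ['__type', '__var'],
}

# Passes: a well-formed encoded datetime.
jsonschema.validate({'__type': 'datetime', '__var': '2019-08-12T00:00:00'},
                    encoded_datetime_schema)

# Raises jsonschema.ValidationError: wrong __type value.
try:
    jsonschema.validate({'__type': 'timedelta', '__var': 60},
                        encoded_datetime_schema)
except jsonschema.ValidationError as err:
    print(err.message)
```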
+ 

[GitHub] [airflow] ashb commented on a change in pull request #5701: [AIRFLOW-5088][AIP-24] Add DAG serialization using JSON

2019-08-12 Thread GitBox
ashb commented on a change in pull request #5701: [AIRFLOW-5088][AIP-24] Add 
DAG serialization using JSON
URL: https://github.com/apache/airflow/pull/5701#discussion_r312822137
 
 

 ##
 File path: airflow/dag/serialization/serialization.py
 ##
 @@ -0,0 +1,221 @@
+# -*- coding: utf-8 -*-
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+
+"""Utils for DAG serialization with JSON."""
+
+import datetime
+import json
+import logging
+from typing import Dict, Optional, TYPE_CHECKING, Union
+
+import dateutil.parser
+import jsonschema
 
 Review comment:
   This is a new direct dependency for Airflow, and we should add it to our setup.py
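A hedged sketch of the kind of setup.py change being asked for here (the real structure of Airflow's setup.py differs, and the version range is illustrative only, not from the PR):

```python
from setuptools import setup

setup(
    name='apache-airflow',
    install_requires=[
        # ... existing dependencies ...
        'jsonschema~=3.0',   # illustrative pin; pick whatever range the project agrees on
    ],
)
```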




[GitHub] [airflow] ashb commented on a change in pull request #5701: [AIRFLOW-5088][AIP-24] Add DAG serialization using JSON

2019-08-12 Thread GitBox
ashb commented on a change in pull request #5701: [AIRFLOW-5088][AIP-24] Add 
DAG serialization using JSON
URL: https://github.com/apache/airflow/pull/5701#discussion_r312808250
 
 

 ##
 File path: airflow/dag/serialization/serialized_baseoperator.py
 ##
 @@ -0,0 +1,76 @@
+# -*- coding: utf-8 -*-
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+
+"""Operator serialization with JSON."""
+
+from airflow.dag.serialization.enum import DagAttributeTypes as DAT
+from airflow.dag.serialization.json_schema import make_operator_schema
+from airflow.dag.serialization.serialization import Serialization
+from airflow.models import BaseOperator
+
+
+class SerializedBaseOperator(BaseOperator, Serialization):
+    """A JSON serializable representation of an operator.
+
+    All operators are cast to SerializedBaseOperator after deserialization.
+    Class-specific attributes used by the UI are moved to object attributes.
+    """
+    _included_fields = list(vars(BaseOperator(task_id='test')).keys()) + [
+        '_dag', '_task_type', 'ui_color', 'ui_fgcolor', 'template_fields']
+
+    _json_schema = make_operator_schema()
+
+    def __init__(self, *args, **kwargs):
+        BaseOperator.__init__(self, *args, **kwargs)
+        # task_type is used by the UI to display the correct class type, because the UI only
+        # receives BaseOperator from deserialized DAGs.
+        self._task_type = 'BaseOperator'
+        # Move class attributes into object attributes.
+        self.ui_color = BaseOperator.ui_color
+        self.ui_fgcolor = BaseOperator.ui_fgcolor
+        self.template_fields = BaseOperator.template_fields
+
+    @property
+    def task_type(self) -> str:
+        # Overrides task_type of BaseOperator to use _task_type instead of
+        # __class__.__name__.
+        return self._task_type
+
+    @task_type.setter
+    def task_type(self, task_type: str):
+        self._task_type = task_type
+
+    @classmethod
+    def serialize_operator(cls, op: BaseOperator, visited_dags: dict) -> dict:
+        """Serializes an operator into a JSON object."""
+        serialize_op = cls._serialize_object(
+            op, visited_dags, included_fields=SerializedBaseOperator._included_fields)
+        # Adds a new task_type field to record the original operator class.
+        serialize_op['_task_type'] = op.__class__.__name__
+        return cls._encode(serialize_op, type_=DAT.OP)
+
+    @classmethod
+    def deserialize_operator(cls, encoded_op: dict, visited_dags: dict) -> BaseOperator:
+        """Deserializes an operator from a JSON object."""
+        op = SerializedBaseOperator(task_id=encoded_op['task_id'])
+        cls._deserialize_object(
+            encoded_op, op, SerializedBaseOperator._included_fields, visited_dags)
 
 Review comment:
   ```suggestion
            encoded_op, op, cls._included_fields, visited_dags)
   ```
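A toy illustration (not Airflow code) of why this suggestion matters: `cls._included_fields` resolves against the concrete subclass, so a subclass that overrides the attribute is picked up automatically by the inherited classmethod, whereas naming `SerializedBaseOperator` explicitly would not be:

```python
class Base:
    _included_fields = ['task_id']

    @classmethod
    def fields(cls):
        return cls._included_fields  # resolves against the actual (sub)class


class Custom(Base):
    _included_fields = ['task_id', 'pool']


print(Base.fields())     # ['task_id']
print(Custom.fields())   # ['task_id', 'pool'] -- hardcoding Base._included_fields would miss this
```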




[GitHub] [airflow] ashb commented on a change in pull request #5701: [AIRFLOW-5088][AIP-24] Add DAG serialization using JSON

2019-08-12 Thread GitBox
ashb commented on a change in pull request #5701: [AIRFLOW-5088][AIP-24] Add 
DAG serialization using JSON
URL: https://github.com/apache/airflow/pull/5701#discussion_r312808057
 
 

 ##
 File path: airflow/dag/serialization/serialized_baseoperator.py
 ##
 @@ -0,0 +1,76 @@
+# -*- coding: utf-8 -*-
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+
+"""Operator serialization with JSON."""
+
+from airflow.dag.serialization.enum import DagAttributeTypes as DAT
+from airflow.dag.serialization.json_schema import make_operator_schema
+from airflow.dag.serialization.serialization import Serialization
+from airflow.models import BaseOperator
+
+
+class SerializedBaseOperator(BaseOperator, Serialization):
+    """A JSON serializable representation of an operator.
+
+    All operators are cast to SerializedBaseOperator after deserialization.
+    Class-specific attributes used by the UI are moved to object attributes.
+    """
+    _included_fields = list(vars(BaseOperator(task_id='test')).keys()) + [
+        '_dag', '_task_type', 'ui_color', 'ui_fgcolor', 'template_fields']
+
+    _json_schema = make_operator_schema()
+
+    def __init__(self, *args, **kwargs):
+        BaseOperator.__init__(self, *args, **kwargs)
+        # task_type is used by the UI to display the correct class type, because the UI only
+        # receives BaseOperator from deserialized DAGs.
+        self._task_type = 'BaseOperator'
+        # Move class attributes into object attributes.
+        self.ui_color = BaseOperator.ui_color
+        self.ui_fgcolor = BaseOperator.ui_fgcolor
+        self.template_fields = BaseOperator.template_fields
+
+    @property
+    def task_type(self) -> str:
+        # Overrides task_type of BaseOperator to use _task_type instead of
+        # __class__.__name__.
+        return self._task_type
+
+    @task_type.setter
+    def task_type(self, task_type: str):
+        self._task_type = task_type
+
+    @classmethod
+    def serialize_operator(cls, op: BaseOperator, visited_dags: dict) -> dict:
+        """Serializes an operator into a JSON object."""
+        serialize_op = cls._serialize_object(
+            op, visited_dags, included_fields=SerializedBaseOperator._included_fields)
 
 Review comment:
   ```suggestion
            op, visited_dags, included_fields=cls._included_fields)
   ```




[GitHub] [airflow] ashb commented on a change in pull request #5701: [AIRFLOW-5088][AIP-24] Add DAG serialization using JSON

2019-08-12 Thread GitBox
ashb commented on a change in pull request #5701: [AIRFLOW-5088][AIP-24] Add 
DAG serialization using JSON
URL: https://github.com/apache/airflow/pull/5701#discussion_r312807862
 
 

 ##
 File path: airflow/dag/serialization/serialized_baseoperator.py
 ##
 @@ -0,0 +1,76 @@
+# -*- coding: utf-8 -*-
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+
+"""Operator serialization with JSON."""
+
+from airflow.dag.serialization.enum import DagAttributeTypes as DAT
+from airflow.dag.serialization.json_schema import make_operator_schema
+from airflow.dag.serialization.serialization import Serialization
+from airflow.models import BaseOperator
+
+
+class SerializedBaseOperator(BaseOperator, Serialization):
+    """A JSON serializable representation of an operator.
+
+    All operators are cast to SerializedBaseOperator after deserialization.
+    Class-specific attributes used by the UI are moved to object attributes.
+    """
+    _included_fields = list(vars(BaseOperator(task_id='test')).keys()) + [
+        '_dag', '_task_type', 'ui_color', 'ui_fgcolor', 'template_fields']
+
+    _json_schema = make_operator_schema()
+
+    def __init__(self, *args, **kwargs):
+        BaseOperator.__init__(self, *args, **kwargs)
+        # task_type is used by the UI to display the correct class type, because the UI only
+        # receives BaseOperator from deserialized DAGs.
+        self._task_type = 'BaseOperator'
+        # Move class attributes into object attributes.
+        self.ui_color = BaseOperator.ui_color
+        self.ui_fgcolor = BaseOperator.ui_fgcolor
+        self.template_fields = BaseOperator.template_fields
+
+    @property
+    def task_type(self) -> str:
+        # Overrides task_type of BaseOperator to use _task_type instead of
+        # __class__.__name__.
+        return self._task_type
+
+    @task_type.setter
+    def task_type(self, task_type: str):
+        self._task_type = task_type
 
 Review comment:
   This getter+setter pair doesn't do anything special, so we should just use a normal property/attribute of `self.task_type` and delete lines 49-57
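A generic before/after sketch of the simplification being suggested (it does not model how `BaseOperator` itself declares `task_type`, which may affect whether a plain attribute is assignable in the real class):

```python
class WithTrivialProperty:
    """A property pair that only reads/writes a backing field adds nothing."""

    def __init__(self):
        self._task_type = 'BaseOperator'

    @property
    def task_type(self):
        return self._task_type

    @task_type.setter
    def task_type(self, value):
        self._task_type = value


class WithPlainAttribute:
    """Behaves identically for callers, with less code."""

    def __init__(self):
        self.task_type = 'BaseOperator'


assert WithTrivialProperty().task_type == WithPlainAttribute().task_type
```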




[GitHub] [airflow] ashb commented on a change in pull request #5701: [AIRFLOW-5088][AIP-24] Add DAG serialization using JSON

2019-08-12 Thread GitBox
ashb commented on a change in pull request #5701: [AIRFLOW-5088][AIP-24] Add 
DAG serialization using JSON
URL: https://github.com/apache/airflow/pull/5701#discussion_r312807397
 
 

 ##
 File path: airflow/dag/serialization/serialized_baseoperator.py
 ##
 @@ -0,0 +1,76 @@
+# -*- coding: utf-8 -*-
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+
+"""Operator serialization with JSON."""
+
+from airflow.dag.serialization.enum import DagAttributeTypes as DAT
+from airflow.dag.serialization.json_schema import make_operator_schema
+from airflow.dag.serialization.serialization import Serialization
+from airflow.models import BaseOperator
+
+
+class SerializedBaseOperator(BaseOperator, Serialization):
+    """A JSON serializable representation of an operator.
+
+    All operators are cast to SerializedBaseOperator after deserialization.
+    Class-specific attributes used by the UI are moved to object attributes.
+    """
+    _included_fields = list(vars(BaseOperator(task_id='test')).keys()) + [
+        '_dag', '_task_type', 'ui_color', 'ui_fgcolor', 'template_fields']
+
+    _json_schema = make_operator_schema()
+
+    def __init__(self, *args, **kwargs):
+        BaseOperator.__init__(self, *args, **kwargs)
 
 Review comment:
   ```suggestion
        super().__init__(*args, **kwargs)
   ```
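A toy demonstration (not Airflow code) of why `super().__init__()` is preferred over calling `BaseOperator.__init__` directly: with multiple bases, `super()` follows the MRO, so a cooperative mixin such as `Serialization` would also be initialized if it defines an `__init__` (whether it does is not shown in the quoted code):

```python
class Operator:
    def __init__(self, **kwargs):
        print('Operator.__init__')
        super().__init__(**kwargs)


class Mixin:
    def __init__(self, **kwargs):
        print('Mixin.__init__')
        super().__init__(**kwargs)


class Serialized(Operator, Mixin):
    def __init__(self, **kwargs):
        # super() walks the MRO: Operator, then Mixin, then object.
        super().__init__(**kwargs)


Serialized()   # prints Operator.__init__ then Mixin.__init__
```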

