http://git-wip-us.apache.org/repos/asf/incubator-atlas/blob/30711973/typesystem/src/main/scala/org/apache/atlas/typesystem/json/InstanceSerialization.scala ---------------------------------------------------------------------- diff --git a/typesystem/src/main/scala/org/apache/atlas/typesystem/json/InstanceSerialization.scala b/typesystem/src/main/scala/org/apache/atlas/typesystem/json/InstanceSerialization.scala new file mode 100755 index 0000000..f792858 --- /dev/null +++ b/typesystem/src/main/scala/org/apache/atlas/typesystem/json/InstanceSerialization.scala @@ -0,0 +1,320 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.atlas.typesystem.json + +import java.text.SimpleDateFormat + +import org.apache.atlas.typesystem._ +import org.apache.atlas.typesystem.persistence.Id +import org.apache.atlas.typesystem.types._ +import org.json4s._ +import org.json4s.native.Serialization._ + +import scala.collection.JavaConversions._ +import scala.collection.JavaConverters._ + +object InstanceSerialization { + + case class _Id(id : String, version : Int, typeName : String) + case class _Struct(typeName : String, values : Map[String, AnyRef]) + case class _Reference(id : _Id, + typeName : String, + values : Map[String, AnyRef], + traitNames : List[String], + traits : Map[String, _Struct]) + + def Try[B](x : => B) : Option[B] = { + try { Some(x) } catch { case _ : Throwable => None } + } + + /** + * Convert a Map into + * - a Reference or + * - a Struct or + * - a Id or + * - a Java Map whose values are recursively converted. + * @param jsonMap + * @param format + */ + class InstanceJavaConversion(jsonMap : Map[String, _], format : Formats) { + + /** + * For Id, Map must contain the [[_Id]] 'typeHint' + * @return + */ + def idClass: Option[String] = { + jsonMap.get(format.typeHintFieldName).flatMap(x => Try(x.asInstanceOf[String])). 
+ filter(s => s == classOf[_Id].getName) + } + + /** + * validate and extract 'id' attribute from Map + * @return + */ + def id: Option[String] = { + jsonMap.get("id").filter(_.isInstanceOf[String]).flatMap(v => Some(v.asInstanceOf[String])) + } + + /** + * validate and extract 'version' attribute from Map + * @return + */ + def version: Option[Int] = { + jsonMap.get("version").flatMap{ + case i : Int => Some(i) + case bI : BigInt => Some(bI.toInt) + case _ => None + } + } + + /** + * A Map is an Id if: + * - it has the correct [[format.typeHintFieldName]] + * - it has a 'typeName' + * - it has an 'id' + * - it has a 'version' + * @return + */ + def convertId : Option[_Id] = { + for { + refClass <- idClass; + typNm <- typeName; + i <- id; + v <- version + } yield _Id(i, v, typNm) + } + + /** + * validate and extract 'typeName' attribute from Map + * @return + */ + def typeName: Option[String] = { + jsonMap.get("typeName").flatMap(x => Try(x.asInstanceOf[String])) + } + + /** + * For Reference, Map must contain the [[_Reference]] 'typeHint' + * @return + */ + def referenceClass: Option[String] = { + jsonMap.get(format.typeHintFieldName).flatMap(x => Try(x.asInstanceOf[String])). + filter(s => s == classOf[_Reference].getName) + } + + /** + * For Reference, Map must contain the [[_Struct]] 'typeHint' + * @return + */ + def structureClass: Option[String] = { + jsonMap.get(format.typeHintFieldName).flatMap(x => Try(x.asInstanceOf[String])). + filter(s => s == classOf[_Struct].getName) + } + + /** + * validate and extract 'values' attribute from Map + * @return + */ + def valuesMap: Option[Map[String, AnyRef]] = { + jsonMap.get("values").flatMap(x => Try(x.asInstanceOf[Map[String, AnyRef]])) + } + + /** + * validate and extract 'traitNames' attribute from Map + * @return + */ + def traitNames: Option[Seq[String]] = { + jsonMap.get("traitNames").flatMap(x => Try(x.asInstanceOf[Seq[String]])) + } + + /** + * A Map is an Struct if: + * - it has the correct [[format.typeHintFieldName]] + * - it has a 'typeName' + * - it has a 'values' attribute + * @return + */ + def struct: Option[_Struct] = { + for { + refClass <- structureClass; + typNm <- typeName; + values <- valuesMap + } yield _Struct(typNm, values) + } + + def sequence[A](a : List[(String,Option[A])]) : Option[List[(String,A)]] = a match { + case Nil => Some(Nil) + case h :: t => { + h._2 flatMap {hh => sequence(t) map { (h._1,hh) :: _}} + } + } + + /** + * Extract and convert the traits in this Map. + * + * @return + */ + def traits: Option[Map[String, _Struct]] = { + + /** + * 1. validate and extract 'traitss' attribute from Map + * Must be a Map[String, _] + */ + val tEntry : Option[Map[String, _]] = jsonMap.get("traits").flatMap(x => Try(x.asInstanceOf[Map[String, _]])) + + + /** + * Try to convert each entry in traits Map into a _Struct + * - each entry itself must be of type Map[String, _] + * - recursively call InstanceJavaConversion on this Map to convert to a struct + */ + val x: Option[List[(String, Option[_Struct])]] = tEntry.map { tMap: Map[String, _] => + val y: Map[String, Option[_Struct]] = tMap.map { t => + val tObj: Option[_Struct] = Some(t._2).flatMap(x => Try(x.asInstanceOf[Map[String, _]])). 
+ flatMap { traitObj: Map[String, _] => + new InstanceJavaConversion(traitObj, format).struct + } + (t._1, tObj) + } + y.toList + } + + /** + * Convert a List of Optional successes into an Option of List + */ + x flatMap (sequence(_)) map (_.toMap) + + } + + def idObject : Option[_Id] = { + val idM = jsonMap.get("id").flatMap(x => Try(x.asInstanceOf[Map[String, _]])) + idM flatMap (m => new InstanceJavaConversion(m, format).convertId) + } + + /** + * A Map is an Reference if: + * - it has the correct [[format.typeHintFieldName]] + * - it has a 'typeName' + * - it has an _Id + * - it has a 'values' attribute + * - it has 'traitNames' attribute + * - it has 'traits' attribute + * @return + */ + def reference : Option[_Reference] = { + for { + refClass <- referenceClass; + typNm <- typeName; + i <- idObject; + values <- valuesMap; + traitNms <- traitNames; + ts <- traits + } yield _Reference(i, typNm, values, traitNms.toList, ts) + } + + /** + * A Map converted to Java: + * - if Map can be materialized as a _Reference, materialize and then recursively call asJava on it. + * - if Map can be materialized as a _Struct, materialize and then recursively call asJava on it. + * - if Map can be materialized as a _Id, materialize and then recursively call asJava on it. + * - otherwise convert each value with asJava and construct as new JavaMap. + * @return + */ + def convert : Any = { + reference.map(asJava(_)(format)).getOrElse { + struct.map(asJava(_)(format)).getOrElse { + convertId.map(asJava(_)(format)).getOrElse { + jsonMap.map { t => + (t._1 -> asJava(t._2)(format)) + }.toMap.asJava + } + } + } + } + } + + def asJava(v : Any)(implicit format: Formats) : Any = v match { + case i : _Id => new Id(i.id, i.version, i.typeName) + case s : _Struct => new Struct(s.typeName, asJava(s.values).asInstanceOf[java.util.Map[String, Object]]) + case r : _Reference => { + new Referenceable(r.id.asInstanceOf[_Id].id, + r.typeName, + asJava(r.values).asInstanceOf[java.util.Map[String, Object]], + asJava(r.traitNames).asInstanceOf[java.util.List[String]], + asJava(r.traits).asInstanceOf[java.util.Map[String, IStruct]]) + } + case l : List[_] => l.map(e => asJava(e)).toList.asJava + case m : Map[_, _] if Try{m.asInstanceOf[Map[String,_]]}.isDefined => + new InstanceJavaConversion(m.asInstanceOf[Map[String,_]], format).convert + case _ => v + } + + def asScala(v : Any) : Any = v match { + case i : Id => _Id(i._getId(), i.getVersion, i.getClassName) + case r : IReferenceableInstance => { + val traits = r.getTraits.map { tName => + val t = r.getTrait(tName).asInstanceOf[IStruct] + (tName -> _Struct(t.getTypeName, asScala(t.getValuesMap).asInstanceOf[Map[String, AnyRef]])) + }.toMap + _Reference(asScala(r.getId).asInstanceOf[_Id], + r.getTypeName, asScala(r.getValuesMap).asInstanceOf[Map[String, AnyRef]], + asScala(r.getTraits).asInstanceOf[List[String]], + traits.asInstanceOf[Map[String, _Struct]]) + } + case s : IStruct => _Struct(s.getTypeName, asScala(s.getValuesMap).asInstanceOf[Map[String, AnyRef]]) + case l : java.util.List[_] => l.asScala.map(e => asScala(e)).toList + case m : java.util.Map[_, _] => m.asScala.map(t => (asScala(t._1), asScala(t._2))).toMap + case _ => v + } + + val _formats = new DefaultFormats { + override val dateFormatter = TypeSystem.getInstance().getDateFormat.asInstanceOf[SimpleDateFormat] + override val typeHints = FullTypeHints(List(classOf[_Id], classOf[_Struct], classOf[_Reference])) + } + + def buildFormat(withBigDecimals : Boolean) = { + if (withBigDecimals) + _formats + new 
BigDecimalSerializer + new BigIntegerSerializer + else + _formats + } + + def _toJson(value: AnyRef, withBigDecimals : Boolean = false): String = { + implicit val formats = buildFormat(withBigDecimals) + + val _s : AnyRef = asScala(value).asInstanceOf[AnyRef] + writePretty(_s) + } + + def toJson(value: IStruct, withBigDecimals : Boolean = false): String = { + _toJson(value, withBigDecimals) + } + + def fromJsonStruct(jsonStr: String, withBigDecimals : Boolean = false): Struct = { + implicit val formats = buildFormat(withBigDecimals) + val _s = read[_Struct](jsonStr) + asJava(_s).asInstanceOf[Struct] + } + + //def toJsonReferenceable(value: Referenceable, withBigDecimals : Boolean = false): String = _toJson(value, withBigDecimals) + def fromJsonReferenceable(jsonStr: String, withBigDecimals : Boolean = false): Referenceable = { + implicit val formats = buildFormat(withBigDecimals) + val _s = read[_Reference](jsonStr) + asJava(_s).asInstanceOf[Referenceable] + } +}
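
The new InstanceSerialization object above converts untyped instances (Referenceable, Struct, Id) to and from JSON via the json4s case classes _Id, _Struct and _Reference. A minimal round-trip sketch, assuming a hypothetical "Person" class type with "name"/"age" attributes has already been registered with TypeSystem.getInstance():

    import org.apache.atlas.typesystem.Referenceable
    import org.apache.atlas.typesystem.json.InstanceSerialization

    // Build an untyped instance of the (hypothetical) "Person" class type.
    val person = new Referenceable("Person")
    person.set("name", "Jane")
    person.set("age", Integer.valueOf(32))

    // Serialize: writePretty with json4s FullTypeHints for _Id, _Struct and
    // _Reference, which is what InstanceJavaConversion keys on when reading back.
    val json: String = InstanceSerialization.toJson(person)

    // Deserialize into a Referenceable; traits and nested structs are rebuilt
    // recursively by InstanceJavaConversion / asJava.
    val personCopy: Referenceable = InstanceSerialization.fromJsonReferenceable(json)

fromJsonStruct works the same way for plain Struct values; passing withBigDecimals = true to either call routes java.math.BigDecimal/BigInteger values through the BigDecimalSerializer and BigIntegerSerializer defined in Serialization.scala below.
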
http://git-wip-us.apache.org/repos/asf/incubator-atlas/blob/30711973/typesystem/src/main/scala/org/apache/atlas/typesystem/json/Serialization.scala ---------------------------------------------------------------------- diff --git a/typesystem/src/main/scala/org/apache/atlas/typesystem/json/Serialization.scala b/typesystem/src/main/scala/org/apache/atlas/typesystem/json/Serialization.scala new file mode 100755 index 0000000..e38772d --- /dev/null +++ b/typesystem/src/main/scala/org/apache/atlas/typesystem/json/Serialization.scala @@ -0,0 +1,303 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.atlas.typesystem.json + +import org.apache.atlas.typesystem._ +import org.apache.atlas.typesystem.persistence.{Id, ReferenceableInstance, StructInstance} +import org.apache.atlas.typesystem.types.DataTypes.{ArrayType, MapType, TypeCategory} +import org.apache.atlas.typesystem.types._ +import org.json4s.JsonAST.JInt +import org.json4s._ +import org.json4s.native.Serialization._ + +import scala.collection.JavaConversions._ +import scala.collection.JavaConverters._ + +class BigDecimalSerializer extends CustomSerializer[java.math.BigDecimal](format => ( { + case JDecimal(e) => e.bigDecimal +}, { + case e: java.math.BigDecimal => JDecimal(new BigDecimal(e)) +} + )) + +class BigIntegerSerializer extends CustomSerializer[java.math.BigInteger](format => ( { + case JInt(e) => e.bigInteger +}, { + case e: java.math.BigInteger => JInt(new BigInt(e)) +} + )) + +class IdSerializer extends CustomSerializer[Id](format => ( { + case JObject(JField("id", JInt(id)) :: + JField(Serialization.STRUCT_TYPE_FIELD_NAME, JString(className)) :: + JField("version", JInt(version)) :: Nil) => new Id(id.toLong, version.toInt, className) + case JObject(JField(Serialization.STRUCT_TYPE_FIELD_NAME, JString(className)) :: + JField("id", JInt(id)) :: + JField("version", JInt(version)) :: Nil) => new Id(id.toLong, version.toInt, className) + case JObject(JField("id", JString(id)) :: + JField(Serialization.STRUCT_TYPE_FIELD_NAME, JString(className)) :: + JField("version", JString(version)) :: Nil) => new Id(id, version.toInt, className) + case JObject(JField(Serialization.STRUCT_TYPE_FIELD_NAME, JString(className)) :: + JField("id", JString(id)) :: + JField("version", JString(version)) :: Nil) => new Id(id, version.toInt, className) +}, { + case id: Id => JObject(JField("id", JString(id.id)), + JField(Serialization.STRUCT_TYPE_FIELD_NAME, JString(id.className)), + JField("version", JInt(id.version))) +} + )) + +class TypedStructSerializer() extends Serializer[ITypedStruct] { + + def deserialize(implicit format: Formats) = { + case (TypeInfo(clazz, ptype), json) if classOf[ITypedStruct].isAssignableFrom(clazz) => json match { + case JObject(fs) => + val (typ, 
fields) = fs.partition(f => f._1 == Serialization.STRUCT_TYPE_FIELD_NAME) + val typName = typ(0)._2.asInstanceOf[JString].s + val sT = typSystem.getDataType( + classOf[IConstructableType[IStruct, ITypedStruct]], typName).asInstanceOf[IConstructableType[IStruct, ITypedStruct]] + val s = sT.createInstance() + Serialization.deserializeFields(typSystem, sT, s, fields) + s + case x => throw new MappingException("Can't convert " + x + " to TypedStruct") + } + + } + + def typSystem = TypeSystem.getInstance() + + /** + * Implicit conversion from `java.math.BigInteger` to `scala.BigInt`. + * match the builtin conversion for BigDecimal. + * See https://groups.google.com/forum/#!topic/scala-language/AFUamvxu68Q + */ + //implicit def javaBigInteger2bigInt(x: java.math.BigInteger): BigInt = new BigInt(x) + + def serialize(implicit format: Formats) = { + case e: ITypedStruct => + val fields = Serialization.serializeFields(e) + JObject(JField(Serialization.STRUCT_TYPE_FIELD_NAME, JString(e.getTypeName)) :: fields) + } +} + +class TypedReferenceableInstanceSerializer() + extends Serializer[ITypedReferenceableInstance] { + + def deserialize(implicit format: Formats) = { + case (TypeInfo(clazz, ptype), json) if classOf[ITypedReferenceableInstance].isAssignableFrom(clazz) => json match { + case JObject(JField("id", JInt(id)) :: + JField(Serialization.STRUCT_TYPE_FIELD_NAME, JString(className)) :: + JField("version", JInt(version)) :: Nil) => new Id(id.toLong, version.toInt, className) + case JObject(JField("id", JString(id)) :: + JField(Serialization.STRUCT_TYPE_FIELD_NAME, JString(className)) :: + JField("version", JInt(version)) :: Nil) => new Id(id, version.toInt, className) + case JObject(fs) => + var typField: Option[JField] = None + var idField: Option[JField] = None + var traitsField: Option[JField] = None + var fields: List[JField] = Nil + + fs.foreach { f: JField => f._1 match { + case Serialization.STRUCT_TYPE_FIELD_NAME => typField = Some(f) + case Serialization.ID_TYPE_FIELD_NAME => idField = Some(f) + case Serialization.TRAIT_TYPE_FIELD_NAME => traitsField = Some(f) + case _ => fields = fields :+ f + } + } + + var traitNames: List[String] = Nil + + traitsField.map { t => + val tObj: JObject = t._2.asInstanceOf[JObject] + tObj.obj.foreach { oTrait => + val tName: String = oTrait._1 + traitNames = traitNames :+ tName + } + } + + val typName = typField.get._2.asInstanceOf[JString].s + val sT = typSystem.getDataType( + classOf[ClassType], typName).asInstanceOf[ClassType] + val id = Serialization.deserializeId(idField.get._2) + val s = sT.createInstance(id, traitNames: _*) + Serialization.deserializeFields(typSystem, sT, s, fields) + + traitsField.map { t => + val tObj: JObject = t._2.asInstanceOf[JObject] + tObj.obj.foreach { oTrait => + val tName: String = oTrait._1 + val traitJObj: JObject = oTrait._2.asInstanceOf[JObject] + val traitObj = s.getTrait(tName).asInstanceOf[ITypedStruct] + val tT = typSystem.getDataType( + classOf[TraitType], traitObj.getTypeName).asInstanceOf[TraitType] + val (tTyp, tFields) = traitJObj.obj.partition(f => f._1 == Serialization.STRUCT_TYPE_FIELD_NAME) + Serialization.deserializeFields(typSystem, tT, traitObj, tFields) + } + } + + s + case x => throw new MappingException("Can't convert " + x + " to TypedStruct") + } + + } + + def typSystem = TypeSystem.getInstance() + + def serialize(implicit format: Formats) = { + case id: Id => Serialization.serializeId(id) + case e: ITypedReferenceableInstance => + val idJ = JField(Serialization.ID_TYPE_FIELD_NAME, 
Serialization.serializeId(e.getId)) + var fields = Serialization.serializeFields(e) + val traitsJ: List[JField] = e.getTraits.map(tName => JField(tName, Extraction.decompose(e.getTrait(tName)))).toList + + fields = idJ :: fields + if (traitsJ.size > 0) { + fields = fields :+ JField(Serialization.TRAIT_TYPE_FIELD_NAME, JObject(traitsJ: _*)) + } + + JObject(JField(Serialization.STRUCT_TYPE_FIELD_NAME, JString(e.getTypeName)) :: fields) + } +} + + +object Serialization { + val STRUCT_TYPE_FIELD_NAME = "$typeName$" + val ID_TYPE_FIELD_NAME = "$id$" + val TRAIT_TYPE_FIELD_NAME = "$traits$" + + def extractList(lT: ArrayType, value: JArray)(implicit format: Formats): Any = { + val dT = lT.getElemType + value.arr.map(extract(dT, _)).asJava + } + + def extractMap(mT: MapType, value: JObject)(implicit format: Formats): Any = { + val kT = mT.getKeyType + val vT = mT.getValueType + value.obj.map { f: JField => f._1 -> extract(vT, f._2)}.toMap.asJava + } + + def extract(dT: IDataType[_], value: JValue)(implicit format: Formats): Any = value match { + case value: JBool => Extraction.extract[Boolean](value) + case value: JInt => Extraction.extract[Int](value) + case value: JDouble => Extraction.extract[Double](value) + case value: JDecimal => Extraction.extract[BigDecimal](value) + case value: JString => Extraction.extract[String](value) + case JNull => null + case value: JArray => extractList(dT.asInstanceOf[ArrayType], value.asInstanceOf[JArray]) + case value: JObject if dT.getTypeCategory eq TypeCategory.MAP => + extractMap(dT.asInstanceOf[MapType], value.asInstanceOf[JObject]) + case value: JObject if ((dT.getTypeCategory eq TypeCategory.STRUCT) || (dT.getTypeCategory eq TypeCategory.TRAIT)) => + Extraction.extract[ITypedStruct](value) + case value: JObject => + Extraction.extract[ITypedReferenceableInstance](value) + } + + def serializeId(id: Id) = JObject(JField("id", JString(id.id)), + JField(Serialization.STRUCT_TYPE_FIELD_NAME, JString(id.className)), + JField("version", JInt(id.version))) + + def serializeFields(e: ITypedInstance)(implicit format: Formats) = e.fieldMapping.fields.map { + case (fName, info) => { + var v = e.get(fName) + if (v != null && (info.dataType().getTypeCategory eq TypeCategory.MAP)) { + v = v.asInstanceOf[java.util.Map[_, _]].toMap + } + + if (v != null && (info.dataType().getTypeCategory eq TypeCategory.CLASS) && !info.isComposite) { + v = v.asInstanceOf[IReferenceableInstance].getId + } + + if (v != null && (info.dataType().getTypeCategory eq TypeCategory.ENUM)) { + v = v.asInstanceOf[EnumValue].value + } + + JField(fName, Extraction.decompose(v)) + } + }.toList.map(_.asInstanceOf[JField]) + + def deserializeFields[T <: ITypedInstance](typeSystem: TypeSystem, + sT: IConstructableType[_, T], + s: T, fields: List[JField])(implicit format: Formats) + = { + //MetadataService.setCurrentService(currentMdSvc) + fields.foreach { f => + val fName = f._1 + val fInfo = sT.fieldMapping.fields(fName) + if (fInfo != null) { + //println(fName) + var v = f._2 + if (fInfo.dataType().getTypeCategory == TypeCategory.TRAIT || + fInfo.dataType().getTypeCategory == TypeCategory.STRUCT) { + v = v match { + case JObject(sFields) => + JObject(JField(Serialization.STRUCT_TYPE_FIELD_NAME, JString(fInfo.dataType.getName)) :: sFields) + case x => x + } + } + s.set(fName, Serialization.extract(fInfo.dataType(), v)) + } + } + } + + def deserializeId(value: JValue)(implicit format: Formats) = value match { + case JObject(JField("id", JInt(id)) :: + JField(Serialization.STRUCT_TYPE_FIELD_NAME, 
JString(className)) :: + JField("version", JInt(version)) :: Nil) => new Id(id.toLong, version.toInt, className) + case JObject(JField("id", JString(id)) :: + JField(Serialization.STRUCT_TYPE_FIELD_NAME, JString(className)) :: + JField("version", JInt(version)) :: Nil) => new Id(id, version.toInt, className) + } + + def toJson(value: ITypedReferenceableInstance): String = { + implicit val formats = org.json4s.native.Serialization.formats(NoTypeHints) + new TypedStructSerializer + + new TypedReferenceableInstanceSerializer + new BigDecimalSerializer + new BigIntegerSerializer + + write(value) + } + + def toJson(value: ITypedInstance): String = { + implicit val formats = org.json4s.native.Serialization.formats(NoTypeHints) + new TypedStructSerializer + + new TypedReferenceableInstanceSerializer + new BigDecimalSerializer + new BigIntegerSerializer + + write(value) + } + + def toJsonPretty(value: ITypedReferenceableInstance): String = { + implicit val formats = org.json4s.native.Serialization.formats(NoTypeHints) + new TypedStructSerializer + + new TypedReferenceableInstanceSerializer + new BigDecimalSerializer + new BigIntegerSerializer + + writePretty(value) + } + + def fromJson(jsonStr: String): ITypedReferenceableInstance = { + implicit val formats = org.json4s.native.Serialization.formats(NoTypeHints) + new TypedStructSerializer + + new TypedReferenceableInstanceSerializer + new BigDecimalSerializer + new BigIntegerSerializer + + read[ReferenceableInstance](jsonStr) + } + + def traitFromJson(jsonStr: String): ITypedInstance = { + implicit val formats = org.json4s.native.Serialization.formats(NoTypeHints) + new TypedStructSerializer + + new TypedReferenceableInstanceSerializer + new BigDecimalSerializer + new BigIntegerSerializer + + read[StructInstance](jsonStr) + } +} + + http://git-wip-us.apache.org/repos/asf/incubator-atlas/blob/30711973/typesystem/src/main/scala/org/apache/atlas/typesystem/json/TypesSerialization.scala ---------------------------------------------------------------------- diff --git a/typesystem/src/main/scala/org/apache/atlas/typesystem/json/TypesSerialization.scala b/typesystem/src/main/scala/org/apache/atlas/typesystem/json/TypesSerialization.scala new file mode 100755 index 0000000..eab7806 --- /dev/null +++ b/typesystem/src/main/scala/org/apache/atlas/typesystem/json/TypesSerialization.scala @@ -0,0 +1,244 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.atlas.typesystem.json + +import java.text.SimpleDateFormat + +import com.google.common.collect.ImmutableList +import org.apache.atlas.MetadataException +import org.apache.atlas.typesystem.TypesDef +import org.apache.atlas.typesystem.types.DataTypes.{ArrayType, MapType, TypeCategory} +import org.apache.atlas.typesystem.types._ +import org.json4s.JsonAST.JString +import org.json4s._ +import org.json4s.native.Serialization._ + + +/** + * Module for serializing to/from Json. + * + * @example {{{ + * val j = TypesSerialization.toJson(typeSystem, "Employee", "Person", "Department", "SecurityClearance") + * + * val typesDef = TypesSerialization.fromJson(jsonStr) + * typesDef.enumTypes.foreach( typeSystem.defineEnumType(_)) + + typeSystem.defineTypes(ImmutableList.copyOf(typesDef.structTypes.toArray), + ImmutableList.copyOf(typesDef.traitTypes.toArray), + ImmutableList.copyOf(typesDef.classTypes.toArray) + ) + * }}} + * + * @todo doesn't traverse includes directives. Includes are parsed into + * [[org.apache.atlas.tools.thrift.IncludeDef IncludeDef]] structures + * but are not traversed. + * @todo mixing in [[scala.util.parsing.combinator.PackratParsers PackratParsers]] is a placeholder. Need to + * change specific grammar rules to `lazy val` and `Parser[Elem]` to `PackratParser[Elem]`. Will do based on + * performance analysis. + * @todo Error reporting + */ +object TypesSerialization { + + def toJsonValue(typ: IDataType[_])(implicit formats: Formats): JValue = { + typ.getTypeCategory match { + case TypeCategory.CLASS => { + Extraction.decompose(convertClassTypeToHierarchicalTypeDefintion(typ.asInstanceOf[ClassType])) + } + case TypeCategory.STRUCT => { + Extraction.decompose(convertStructTypeToStructDef(typ.asInstanceOf[StructType])) + } + case TypeCategory.TRAIT => { + Extraction.decompose(convertTraitTypeToHierarchicalTypeDefintion(typ.asInstanceOf[TraitType])) + } + case TypeCategory.ENUM => { + Extraction.decompose(convertEnumTypeToEnumTypeDef(typ.asInstanceOf[EnumType])) + } + case _ => JString(s"${typ.getName}") + } + } + + def toJson(ts: TypeSystem, typName: String): String = { + toJson(ts, List(typName): _*) + } + + def toJson(ts: TypeSystem, typNames: String*): String = { + toJson(ts, (typ: IDataType[_]) => typNames.contains(typ.getName)) + } + + import scala.collection.JavaConversions._ + + def toJson(ts: TypeSystem, typNames: java.util.List[String]): String = { + toJson(ts, typNames.toIndexedSeq: _*) + } + + val _formats = new DefaultFormats { + override val dateFormatter = TypeSystem.getInstance().getDateFormat.asInstanceOf[SimpleDateFormat] + override val typeHints = NoTypeHints + } + + def toJson(ts: TypeSystem, export: IDataType[_] => Boolean): String = { + implicit val formats = _formats + new MultiplicitySerializer + + val typsDef = convertToTypesDef(ts, export) + + writePretty(typsDef) + } + + def fromJson(jsonStr: String): TypesDef = { + implicit val formats = _formats + new MultiplicitySerializer + + read[TypesDef](jsonStr) + } + + def toJson(typesDef : TypesDef) : String = { + implicit val formats = _formats + new MultiplicitySerializer + writePretty(typesDef) + + } + + def toJson(enumTypeDefinition: EnumTypeDefinition) : String = { + toJson(new TypesDef(enumTypeDefinition)) + } + + def toJson(structTypeDefinition: StructTypeDefinition) : String = { + toJson(new TypesDef(structTypeDefinition)) + } + + def toJson(typDef: HierarchicalTypeDefinition[_], isTrait : Boolean) : String = { + toJson(new TypesDef(typDef, isTrait)) + } + + private def 
convertAttributeInfoToAttributeDef(aInfo: AttributeInfo) = { + new AttributeDefinition(aInfo.name, aInfo.dataType().getName, aInfo.multiplicity, + aInfo.isComposite, aInfo.isUnique, aInfo.isIndexable, aInfo.reverseAttributeName) + } + + private def convertEnumTypeToEnumTypeDef(et: EnumType) = { + val eVals: Seq[EnumValue] = et.valueMap.values().toSeq + new EnumTypeDefinition(et.name, eVals: _*) + } + + private def convertStructTypeToStructDef(st: StructType): StructTypeDefinition = { + + val aDefs: Iterable[AttributeDefinition] = + st.fieldMapping.fields.values().map(convertAttributeInfoToAttributeDef(_)) + new StructTypeDefinition(st.name, aDefs.toArray) + } + + private def convertTraitTypeToHierarchicalTypeDefintion(tt: TraitType): HierarchicalTypeDefinition[TraitType] = { + + val aDefs: Iterable[AttributeDefinition] = + tt.immediateAttrs.map(convertAttributeInfoToAttributeDef(_)) + new HierarchicalTypeDefinition[TraitType](classOf[TraitType], tt.name, tt.superTypes, aDefs.toArray) + } + + private def convertClassTypeToHierarchicalTypeDefintion(tt: ClassType): HierarchicalTypeDefinition[ClassType] = { + + val aDefs: Iterable[AttributeDefinition] = + tt.immediateAttrs.map(convertAttributeInfoToAttributeDef(_)) + new HierarchicalTypeDefinition[ClassType](classOf[ClassType], tt.name, tt.superTypes, aDefs.toArray) + } + + def convertToTypesDef(ts: TypeSystem, export: IDataType[_] => Boolean): TypesDef = { + + var enumTypes: Seq[EnumTypeDefinition] = Nil + var structTypes: Seq[StructTypeDefinition] = Nil + var traitTypes: Seq[HierarchicalTypeDefinition[TraitType]] = Nil + var classTypes: Seq[HierarchicalTypeDefinition[ClassType]] = Nil + + def toTyp(nm: String) = ts.getDataType(classOf[IDataType[_]], nm) + + val typs: Iterable[IDataType[_]] = ts.getTypeNames.map(toTyp(_)).filter { (typ: IDataType[_]) => + !(ts.getCoreTypes.contains(typ.getName)) && export(typ) + } + + typs.foreach { + case typ: ArrayType => () + case typ: MapType => () + case typ: EnumType => enumTypes = enumTypes :+ convertEnumTypeToEnumTypeDef(typ) + case typ: StructType => structTypes = structTypes :+ convertStructTypeToStructDef(typ) + case typ: TraitType => traitTypes = traitTypes :+ convertTraitTypeToHierarchicalTypeDefintion(typ) + case typ: ClassType => classTypes = classTypes :+ convertClassTypeToHierarchicalTypeDefintion(typ) + } + + TypesDef(enumTypes, structTypes, traitTypes, classTypes) + } + +} + +class MultiplicitySerializer extends CustomSerializer[Multiplicity](format => ( { + case JString(m) => m match { + case "optional" => Multiplicity.OPTIONAL + case "required" => Multiplicity.REQUIRED + case "collection" => Multiplicity.COLLECTION + case "set" => Multiplicity.SET + } +}, { + case m: Multiplicity => JString(m match { + case Multiplicity.OPTIONAL => "optional" + case Multiplicity.REQUIRED => "required" + case Multiplicity.COLLECTION => "collection" + case Multiplicity.SET => "set" + } + + ) +} + )) + +trait TypeHelpers { + def requiredAttr(name: String, dataType: IDataType[_]) = + new AttributeDefinition(name, dataType.getName, Multiplicity.REQUIRED, false, null) + + def optionalAttr(name: String, dataTypeName: String) = + new AttributeDefinition(name, dataTypeName, Multiplicity.OPTIONAL, false, null) + + + def optionalAttr(name: String, dataType: IDataType[_]) = + new AttributeDefinition(name, dataType.getName, Multiplicity.OPTIONAL, false, null) + + def structDef(name: String, attrs: AttributeDefinition*) = { + new StructTypeDefinition(name, attrs.toArray) + } + + def defineTraits(ts: TypeSystem, tDefs: 
HierarchicalTypeDefinition[TraitType]*) = { + ts.defineTraitTypes(tDefs: _*) + } + + def createTraitTypeDef(name: String, superTypes: Seq[String], attrDefs: AttributeDefinition*): + HierarchicalTypeDefinition[TraitType] = { + val sts = ImmutableList.copyOf(superTypes.toArray) + return new HierarchicalTypeDefinition[TraitType](classOf[TraitType], name, + sts, attrDefs.toArray) + } + + def createClassTypeDef(name: String, superTypes: Seq[String], attrDefs: AttributeDefinition*): + HierarchicalTypeDefinition[ClassType] = { + val sts = ImmutableList.copyOf(superTypes.toArray) + return new HierarchicalTypeDefinition[ClassType](classOf[ClassType], name, + sts, attrDefs.toArray) + } + + @throws(classOf[MetadataException]) + def defineClassType(ts: TypeSystem, classDef: HierarchicalTypeDefinition[ClassType]): ClassType = { + ts.defineTypes(ImmutableList.of[StructTypeDefinition], + ImmutableList.of[HierarchicalTypeDefinition[TraitType]], + ImmutableList.of[HierarchicalTypeDefinition[ClassType]](classDef)) + return ts.getDataType(classOf[ClassType], classDef.typeName) + } +} http://git-wip-us.apache.org/repos/asf/incubator-atlas/blob/30711973/typesystem/src/main/scala/org/apache/hadoop/metadata/typesystem/TypesDef.scala ---------------------------------------------------------------------- diff --git a/typesystem/src/main/scala/org/apache/hadoop/metadata/typesystem/TypesDef.scala b/typesystem/src/main/scala/org/apache/hadoop/metadata/typesystem/TypesDef.scala deleted file mode 100755 index f96bd31..0000000 --- a/typesystem/src/main/scala/org/apache/hadoop/metadata/typesystem/TypesDef.scala +++ /dev/null @@ -1,65 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package org.apache.hadoop.metadata.typesystem - -import com.google.common.collect.ImmutableList -import org.apache.hadoop.metadata.typesystem.types._ - -import scala.collection.JavaConversions - -case class TypesDef(enumTypes: Seq[EnumTypeDefinition], - structTypes: Seq[StructTypeDefinition], - traitTypes: Seq[HierarchicalTypeDefinition[TraitType]], - classTypes: Seq[HierarchicalTypeDefinition[ClassType]]) { - def this() = this(Seq(), Seq(), Seq(), Seq()) - def this(enumType : EnumTypeDefinition) = this(Seq(enumType), Seq(), Seq(), Seq()) - def this(structType: StructTypeDefinition) = this(Seq(), Seq(structType), Seq(), Seq()) - def this(typ: HierarchicalTypeDefinition[_], isTrait : Boolean) = this( - Seq(), - Seq(), - if ( isTrait ) - Seq(typ.asInstanceOf[HierarchicalTypeDefinition[TraitType]]) else Seq(), - if (!isTrait ) - Seq(typ.asInstanceOf[HierarchicalTypeDefinition[ClassType]]) else Seq() - ) - - def enumTypesAsJavaList() = { - import scala.collection.JavaConverters._ - enumTypes.asJava - } - - def structTypesAsJavaList() = { - import scala.collection.JavaConverters._ - structTypes.asJava - } - - def traitTypesAsJavaList() = { - import scala.collection.JavaConverters._ - traitTypes.asJava - } - - def classTypesAsJavaList() = { - import scala.collection.JavaConverters._ - classTypes.asJava - } - - def isEmpty() = { - enumTypes.isEmpty & structTypes.isEmpty & traitTypes.isEmpty & classTypes.isEmpty - } -} http://git-wip-us.apache.org/repos/asf/incubator-atlas/blob/30711973/typesystem/src/main/scala/org/apache/hadoop/metadata/typesystem/builders/InstanceBuilder.scala ---------------------------------------------------------------------- diff --git a/typesystem/src/main/scala/org/apache/hadoop/metadata/typesystem/builders/InstanceBuilder.scala b/typesystem/src/main/scala/org/apache/hadoop/metadata/typesystem/builders/InstanceBuilder.scala deleted file mode 100644 index 1367581..0000000 --- a/typesystem/src/main/scala/org/apache/hadoop/metadata/typesystem/builders/InstanceBuilder.scala +++ /dev/null @@ -1,218 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package org.apache.hadoop.metadata.typesystem.builders - - -import org.apache.hadoop.metadata.typesystem.{IReferenceableInstance, IStruct, Struct, Referenceable} -import scala.collection.JavaConversions._ -import scala.collection.JavaConverters._ -import scala.collection.mutable.ArrayBuffer -import scala.language.dynamics -import scala.language.implicitConversions -import scala.util.DynamicVariable - -class InstanceBuilder extends Dynamic { - - private val references : ArrayBuffer[Referenceable] = new ArrayBuffer[Referenceable]() - - val context = new DynamicVariable[DynamicStruct](null) - - def struct(typeName : String) : DynamicStruct = { - context.value = new DynamicStruct(this, new Struct(typeName)) - context.value - } - - def instance(typeName: String, traitNames: String*)(f : => Unit) : DynamicReference = { - val r = new Referenceable(typeName, traitNames:_*) - references.append(r) - val dr = new DynamicReference(this, r) - context.withValue(dr){f} - dr - } - - def create( f : => Unit ) : java.util.List[Referenceable] = { - f - references.asJava - } - - def applyDynamic(name : String)(value : Any) : Any = { - context.value.updateDynamic(name)(value) - } - - implicit def symbolToDynamicStruct(s : Symbol) : DynamicValue = - new DynamicValue(this, s.name, if (context.value == null) null else context.value.s) - -} - -object DynamicValue { - - private[builders] def transformOut(s: IStruct, attr : String, v : Any)(implicit ib : InstanceBuilder) : DynamicValue = - v match { - case r : Referenceable => new DynamicReference(ib, r) - case s : Struct => new DynamicStruct(ib, s) - case jL : java.util.List[_] => { - if ( s != null ) { - new DynamicCollection(ib, attr, s) - } else { - new DynamicValue(ib, attr, s, jL.map{ e => transformOut(null, null, e)}.toSeq) - } - } - case jM : java.util.Map[_,_] => { - if ( s != null ) { - new DynamicMap(ib, attr, s) - } else { - new DynamicValue(ib, attr, s, jM.map { - case (k, v) => k -> transformOut(null, null, v) - }.toMap) - } - } - case x => { - if ( s != null ) { - new DynamicValue(ib, attr, s) - } else { - new DynamicValue(ib, attr, s, x) - } - } - } - - private[builders] def transformIn(v : Any) : Any = v match { - case dr : DynamicReference => dr.r - case ds : DynamicStruct => ds.s - case dv : DynamicValue => dv.get - case l : Seq[_] => l.map{ e => transformIn(e)}.asJava - case m : Map[_,_] => m.map { - case (k,v) => k -> transformIn(v) - }.asJava - case x => x - } - -} - -class DynamicValue(val ib : InstanceBuilder, val attrName : String, val s: IStruct, var value : Any = null) extends Dynamic { - import DynamicValue._ - - implicit val iib : InstanceBuilder = ib - - def ~(v : Any): Unit = { - if ( s != null ) { - s.set(attrName, transformIn(v)) - } else { - value = v - } - } - - def get : Any = if ( s != null ) s.get(attrName) else value - - def selectDynamic(name: String) : DynamicValue = { - - throw new UnsupportedOperationException() - } - - def update(key : Any, value : Object): Unit = { - throw new UnsupportedOperationException() - } - - def apply(key : Any): DynamicValue = { - - if ( s != null && s.isInstanceOf[Referenceable] && key.isInstanceOf[String]) { - val r = s.asInstanceOf[Referenceable] - if ( r.getTraits contains attrName ) { - val traitAttr = key.asInstanceOf[String] - return new DynamicStruct(ib, r.getTrait(attrName)).selectDynamic(traitAttr) - } - } - throw new UnsupportedOperationException() - } -} - -class DynamicCollection(ib : InstanceBuilder, attrName : String, s: IStruct) extends DynamicValue(ib, attrName ,s) { - 
import DynamicValue._ - - override def update(key : Any, value : Object): Unit = { - var jL = s.get(attrName) - val idx = key.asInstanceOf[Int] - if (jL == null ) { - val l = new java.util.ArrayList[Object]() - l.ensureCapacity(idx) - jL = l - } - val nJL = new java.util.ArrayList[Object](jL.asInstanceOf[java.util.List[Object]]) - nJL.asInstanceOf[java.util.List[Object]].set(idx, transformIn(value).asInstanceOf[Object]) - s.set(attrName, nJL) - } - - override def apply(key : Any): DynamicValue = { - var jL = s.get(attrName) - val idx = key.asInstanceOf[Int] - if (jL == null ) { - null - } else { - transformOut(null, null, jL.asInstanceOf[java.util.List[Object]].get(idx)) - } - } -} - -class DynamicMap(ib : InstanceBuilder, attrName : String, s: IStruct) extends DynamicValue(ib, attrName ,s) { - import DynamicValue._ - override def update(key : Any, value : Object): Unit = { - var jM = s.get(attrName) - if (jM == null ) { - jM = new java.util.HashMap[Object, Object]() - } - jM.asInstanceOf[java.util.Map[Object, Object]].put(key.asInstanceOf[AnyRef], value) - } - - override def apply(key : Any): DynamicValue = { - var jM = s.get(attrName) - if (jM == null ) { - null - } else { - transformOut(null, null, jM.asInstanceOf[java.util.Map[Object, Object]].get(key)) - } - } -} - -class DynamicStruct(ib : InstanceBuilder, s: IStruct) extends DynamicValue(ib, null ,s) { - import DynamicValue._ - override def selectDynamic(name: String) : DynamicValue = { - transformOut(s, name, s.get(name)) - } - - def updateDynamic(name: String)(value: Any) { - s.set(name, transformIn(value)) - } - - override def ~(v : Any): Unit = { throw new UnsupportedOperationException()} - override def get : Any = s - -} - -class DynamicReference(ib : InstanceBuilder, val r : IReferenceableInstance) extends DynamicStruct(ib, r) { - - private def _trait(name : String) = new DynamicStruct(ib, r.getTrait(name)) - - override def selectDynamic(name: String) : DynamicValue = { - if ( r.getTraits contains name ) { - _trait(name) - } else { - super.selectDynamic(name) - } - } - -} http://git-wip-us.apache.org/repos/asf/incubator-atlas/blob/30711973/typesystem/src/main/scala/org/apache/hadoop/metadata/typesystem/builders/TypesBuilder.scala ---------------------------------------------------------------------- diff --git a/typesystem/src/main/scala/org/apache/hadoop/metadata/typesystem/builders/TypesBuilder.scala b/typesystem/src/main/scala/org/apache/hadoop/metadata/typesystem/builders/TypesBuilder.scala deleted file mode 100644 index 7ac908f..0000000 --- a/typesystem/src/main/scala/org/apache/hadoop/metadata/typesystem/builders/TypesBuilder.scala +++ /dev/null @@ -1,188 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package org.apache.hadoop.metadata.typesystem.builders - -import com.google.common.collect.ImmutableList -import org.apache.hadoop.metadata.typesystem.TypesDef -import org.apache.hadoop.metadata.typesystem.types._ -import org.apache.hadoop.metadata.typesystem.types.utils.TypesUtil - -import scala.collection.mutable.ArrayBuffer -import scala.language.{dynamics, implicitConversions, postfixOps} -import scala.util.DynamicVariable - -object TypesBuilder { - - case class Context(enums : ArrayBuffer[EnumTypeDefinition], - structs : ArrayBuffer[StructTypeDefinition], - classes : ArrayBuffer[HierarchicalTypeDefinition[ClassType]], - traits : ArrayBuffer[HierarchicalTypeDefinition[TraitType]], - currentTypeAttrs : ArrayBuffer[Attr] = null) - - class AttrOption() - class ReverseAttributeName(val rNm : String) extends AttrOption - class MultiplicityOption(val lower: Int, val upper: Int, val isUnique: Boolean) extends AttrOption - - val required = new AttrOption() - val optional = new AttrOption() - val collection = new AttrOption() - val set = new AttrOption() - val composite = new AttrOption() - val unique = new AttrOption() - val indexed = new AttrOption() - def reverseAttributeName(rNm : String) = new ReverseAttributeName(rNm) - def multiplicty(lower: Int, upper: Int, isUnique: Boolean) = new MultiplicityOption(lower, upper, isUnique) - - val boolean = DataTypes.BOOLEAN_TYPE.getName - val byte = DataTypes.BYTE_TYPE.getName - val short = DataTypes.SHORT_TYPE.getName - val int = DataTypes.INT_TYPE.getName - val long = DataTypes.LONG_TYPE.getName - val float = DataTypes.FLOAT_TYPE.getName - - val double = DataTypes.DOUBLE_TYPE.getName - val bigint = DataTypes.BIGINTEGER_TYPE.getName - val bigdecimal = DataTypes.BIGDECIMAL_TYPE.getName - val date = DataTypes.DATE_TYPE.getName - val string = DataTypes.STRING_TYPE.getName - - def array(t : String) : String = { - DataTypes.arrayTypeName(t) - } - - def map(kt : String, vt : String) : String = { - DataTypes.mapTypeName(kt, vt) - } - - class Attr(ctx : Context, val name : String) { - - private var dataTypeName : String = DataTypes.BOOLEAN_TYPE.getName - private var multiplicity: Multiplicity = Multiplicity.OPTIONAL - private var isComposite: Boolean = false - private var reverseAttributeName: String = null - private var isUnique: Boolean = false - private var isIndexable: Boolean = false - - ctx.currentTypeAttrs += this - - def getDef : AttributeDefinition = - new AttributeDefinition(name, dataTypeName, - multiplicity, isComposite, isUnique, isIndexable, reverseAttributeName) - - def `~`(dT : String, options : AttrOption*) : Attr = { - dataTypeName = dT - options.foreach { o => - o match { - case `required` => {multiplicity = Multiplicity.REQUIRED} - case `optional` => {multiplicity = Multiplicity.OPTIONAL} - case `collection` => {multiplicity = Multiplicity.COLLECTION} - case `set` => {multiplicity = Multiplicity.SET} - case `composite` => {isComposite = true} - case `unique` => {isUnique = true} - case `indexed` => {isIndexable = true} - case m : MultiplicityOption => {multiplicity = new Multiplicity(m.lower, m.upper, m.isUnique)} - case r : ReverseAttributeName => {reverseAttributeName = r.rNm} - case _ => () - } - } - this - } - - } - -} - -class TypesBuilder { - - import org.apache.hadoop.metadata.typesystem.builders.TypesBuilder.{Attr, Context} - - val required = TypesBuilder.required - val optional = TypesBuilder.optional - val collection = TypesBuilder.collection - val set = TypesBuilder.set - val composite = TypesBuilder.composite - val 
unique = TypesBuilder.unique - val indexed = TypesBuilder.indexed - def multiplicty = TypesBuilder.multiplicty _ - def reverseAttributeName = TypesBuilder.reverseAttributeName _ - - val boolean = TypesBuilder.boolean - val byte = TypesBuilder.byte - val short = TypesBuilder.short - val int = TypesBuilder.int - val long = TypesBuilder.long - val float = TypesBuilder.float - - val double = TypesBuilder.double - val bigint = TypesBuilder.bigint - val bigdecimal = TypesBuilder.bigdecimal - val date = TypesBuilder.date - val string = TypesBuilder.string - - def array = TypesBuilder.array _ - - def map = TypesBuilder.map _ - - val context = new DynamicVariable[Context](Context(new ArrayBuffer(), - new ArrayBuffer(), - new ArrayBuffer(), - new ArrayBuffer())) - - implicit def strToAttr(s : String) = new Attr(context.value, s) - - def types(f : => Unit ) : TypesDef = { - f - TypesDef(context.value.enums.toSeq, - context.value.structs.toSeq, - context.value.traits.toSeq, - context.value.classes.toSeq) - } - - def _class(name : String, superTypes : List[String] = List())(f : => Unit): Unit = { - val attrs = new ArrayBuffer[Attr]() - context.withValue(context.value.copy(currentTypeAttrs = attrs)){f} - context.value.classes += - TypesUtil.createClassTypeDef(name, ImmutableList.copyOf[String](superTypes.toArray), attrs.map(_.getDef):_*) - } - - def _trait(name : String, superTypes : List[String] = List())(f : => Unit): Unit = { - val attrs = new ArrayBuffer[Attr]() - context.withValue(context.value.copy(currentTypeAttrs = attrs)){f} - context.value.traits += - TypesUtil.createTraitTypeDef(name, ImmutableList.copyOf[String](superTypes.toArray), attrs.map(_.getDef):_*) - val v = context.value - v.traits.size - } - - def struct(name : String)(f : => Unit): Unit = { - val attrs = new ArrayBuffer[Attr]() - context.withValue(context.value.copy(currentTypeAttrs = attrs)){f} - context.value.structs += - new StructTypeDefinition(name, attrs.map(_.getDef).toArray) - } - - def enum(name : String, values : String*) : Unit = { - val enums = values.zipWithIndex.map{ case (v, i) => - new EnumValue(v,i) - } - context.value.enums += - TypesUtil.createEnumTypeDef(name, enums:_*) - } - -} http://git-wip-us.apache.org/repos/asf/incubator-atlas/blob/30711973/typesystem/src/main/scala/org/apache/hadoop/metadata/typesystem/json/InstanceSerialization.scala ---------------------------------------------------------------------- diff --git a/typesystem/src/main/scala/org/apache/hadoop/metadata/typesystem/json/InstanceSerialization.scala b/typesystem/src/main/scala/org/apache/hadoop/metadata/typesystem/json/InstanceSerialization.scala deleted file mode 100755 index 5909129..0000000 --- a/typesystem/src/main/scala/org/apache/hadoop/metadata/typesystem/json/InstanceSerialization.scala +++ /dev/null @@ -1,320 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
- * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.hadoop.metadata.typesystem.json - -import java.text.SimpleDateFormat - -import org.apache.hadoop.metadata.typesystem._ -import org.apache.hadoop.metadata.typesystem.persistence.Id -import org.apache.hadoop.metadata.typesystem.types._ -import org.json4s._ -import org.json4s.native.Serialization._ - -import scala.collection.JavaConversions._ -import scala.collection.JavaConverters._ - -object InstanceSerialization { - - case class _Id(id : String, version : Int, typeName : String) - case class _Struct(typeName : String, values : Map[String, AnyRef]) - case class _Reference(id : _Id, - typeName : String, - values : Map[String, AnyRef], - traitNames : List[String], - traits : Map[String, _Struct]) - - def Try[B](x : => B) : Option[B] = { - try { Some(x) } catch { case _ : Throwable => None } - } - - /** - * Convert a Map into - * - a Reference or - * - a Struct or - * - a Id or - * - a Java Map whose values are recursively converted. - * @param jsonMap - * @param format - */ - class InstanceJavaConversion(jsonMap : Map[String, _], format : Formats) { - - /** - * For Id, Map must contain the [[_Id]] 'typeHint' - * @return - */ - def idClass: Option[String] = { - jsonMap.get(format.typeHintFieldName).flatMap(x => Try(x.asInstanceOf[String])). - filter(s => s == classOf[_Id].getName) - } - - /** - * validate and extract 'id' attribute from Map - * @return - */ - def id: Option[String] = { - jsonMap.get("id").filter(_.isInstanceOf[String]).flatMap(v => Some(v.asInstanceOf[String])) - } - - /** - * validate and extract 'version' attribute from Map - * @return - */ - def version: Option[Int] = { - jsonMap.get("version").flatMap{ - case i : Int => Some(i) - case bI : BigInt => Some(bI.toInt) - case _ => None - } - } - - /** - * A Map is an Id if: - * - it has the correct [[format.typeHintFieldName]] - * - it has a 'typeName' - * - it has an 'id' - * - it has a 'version' - * @return - */ - def convertId : Option[_Id] = { - for { - refClass <- idClass; - typNm <- typeName; - i <- id; - v <- version - } yield _Id(i, v, typNm) - } - - /** - * validate and extract 'typeName' attribute from Map - * @return - */ - def typeName: Option[String] = { - jsonMap.get("typeName").flatMap(x => Try(x.asInstanceOf[String])) - } - - /** - * For Reference, Map must contain the [[_Reference]] 'typeHint' - * @return - */ - def referenceClass: Option[String] = { - jsonMap.get(format.typeHintFieldName).flatMap(x => Try(x.asInstanceOf[String])). - filter(s => s == classOf[_Reference].getName) - } - - /** - * For Reference, Map must contain the [[_Struct]] 'typeHint' - * @return - */ - def structureClass: Option[String] = { - jsonMap.get(format.typeHintFieldName).flatMap(x => Try(x.asInstanceOf[String])). 
- filter(s => s == classOf[_Struct].getName) - } - - /** - * validate and extract 'values' attribute from Map - * @return - */ - def valuesMap: Option[Map[String, AnyRef]] = { - jsonMap.get("values").flatMap(x => Try(x.asInstanceOf[Map[String, AnyRef]])) - } - - /** - * validate and extract 'traitNames' attribute from Map - * @return - */ - def traitNames: Option[Seq[String]] = { - jsonMap.get("traitNames").flatMap(x => Try(x.asInstanceOf[Seq[String]])) - } - - /** - * A Map is an Struct if: - * - it has the correct [[format.typeHintFieldName]] - * - it has a 'typeName' - * - it has a 'values' attribute - * @return - */ - def struct: Option[_Struct] = { - for { - refClass <- structureClass; - typNm <- typeName; - values <- valuesMap - } yield _Struct(typNm, values) - } - - def sequence[A](a : List[(String,Option[A])]) : Option[List[(String,A)]] = a match { - case Nil => Some(Nil) - case h :: t => { - h._2 flatMap {hh => sequence(t) map { (h._1,hh) :: _}} - } - } - - /** - * Extract and convert the traits in this Map. - * - * @return - */ - def traits: Option[Map[String, _Struct]] = { - - /** - * 1. validate and extract 'traitss' attribute from Map - * Must be a Map[String, _] - */ - val tEntry : Option[Map[String, _]] = jsonMap.get("traits").flatMap(x => Try(x.asInstanceOf[Map[String, _]])) - - - /** - * Try to convert each entry in traits Map into a _Struct - * - each entry itself must be of type Map[String, _] - * - recursively call InstanceJavaConversion on this Map to convert to a struct - */ - val x: Option[List[(String, Option[_Struct])]] = tEntry.map { tMap: Map[String, _] => - val y: Map[String, Option[_Struct]] = tMap.map { t => - val tObj: Option[_Struct] = Some(t._2).flatMap(x => Try(x.asInstanceOf[Map[String, _]])). - flatMap { traitObj: Map[String, _] => - new InstanceJavaConversion(traitObj, format).struct - } - (t._1, tObj) - } - y.toList - } - - /** - * Convert a List of Optional successes into an Option of List - */ - x flatMap (sequence(_)) map (_.toMap) - - } - - def idObject : Option[_Id] = { - val idM = jsonMap.get("id").flatMap(x => Try(x.asInstanceOf[Map[String, _]])) - idM flatMap (m => new InstanceJavaConversion(m, format).convertId) - } - - /** - * A Map is an Reference if: - * - it has the correct [[format.typeHintFieldName]] - * - it has a 'typeName' - * - it has an _Id - * - it has a 'values' attribute - * - it has 'traitNames' attribute - * - it has 'traits' attribute - * @return - */ - def reference : Option[_Reference] = { - for { - refClass <- referenceClass; - typNm <- typeName; - i <- idObject; - values <- valuesMap; - traitNms <- traitNames; - ts <- traits - } yield _Reference(i, typNm, values, traitNms.toList, ts) - } - - /** - * A Map converted to Java: - * - if Map can be materialized as a _Reference, materialize and then recursively call asJava on it. - * - if Map can be materialized as a _Struct, materialize and then recursively call asJava on it. - * - if Map can be materialized as a _Id, materialize and then recursively call asJava on it. - * - otherwise convert each value with asJava and construct as new JavaMap. 
- * @return - */ - def convert : Any = { - reference.map(asJava(_)(format)).getOrElse { - struct.map(asJava(_)(format)).getOrElse { - convertId.map(asJava(_)(format)).getOrElse { - jsonMap.map { t => - (t._1 -> asJava(t._2)(format)) - }.toMap.asJava - } - } - } - } - } - - def asJava(v : Any)(implicit format: Formats) : Any = v match { - case i : _Id => new Id(i.id, i.version, i.typeName) - case s : _Struct => new Struct(s.typeName, asJava(s.values).asInstanceOf[java.util.Map[String, Object]]) - case r : _Reference => { - new Referenceable(r.id.asInstanceOf[_Id].id, - r.typeName, - asJava(r.values).asInstanceOf[java.util.Map[String, Object]], - asJava(r.traitNames).asInstanceOf[java.util.List[String]], - asJava(r.traits).asInstanceOf[java.util.Map[String, IStruct]]) - } - case l : List[_] => l.map(e => asJava(e)).toList.asJava - case m : Map[_, _] if Try{m.asInstanceOf[Map[String,_]]}.isDefined => - new InstanceJavaConversion(m.asInstanceOf[Map[String,_]], format).convert - case _ => v - } - - def asScala(v : Any) : Any = v match { - case i : Id => _Id(i._getId(), i.getVersion, i.getClassName) - case r : IReferenceableInstance => { - val traits = r.getTraits.map { tName => - val t = r.getTrait(tName).asInstanceOf[IStruct] - (tName -> _Struct(t.getTypeName, asScala(t.getValuesMap).asInstanceOf[Map[String, AnyRef]])) - }.toMap - _Reference(asScala(r.getId).asInstanceOf[_Id], - r.getTypeName, asScala(r.getValuesMap).asInstanceOf[Map[String, AnyRef]], - asScala(r.getTraits).asInstanceOf[List[String]], - traits.asInstanceOf[Map[String, _Struct]]) - } - case s : IStruct => _Struct(s.getTypeName, asScala(s.getValuesMap).asInstanceOf[Map[String, AnyRef]]) - case l : java.util.List[_] => l.asScala.map(e => asScala(e)).toList - case m : java.util.Map[_, _] => m.asScala.map(t => (asScala(t._1), asScala(t._2))).toMap - case _ => v - } - - val _formats = new DefaultFormats { - override val dateFormatter = TypeSystem.getInstance().getDateFormat.asInstanceOf[SimpleDateFormat] - override val typeHints = FullTypeHints(List(classOf[_Id], classOf[_Struct], classOf[_Reference])) - } - - def buildFormat(withBigDecimals : Boolean) = { - if (withBigDecimals) - _formats + new BigDecimalSerializer + new BigIntegerSerializer - else - _formats - } - - def _toJson(value: AnyRef, withBigDecimals : Boolean = false): String = { - implicit val formats = buildFormat(withBigDecimals) - - val _s : AnyRef = asScala(value).asInstanceOf[AnyRef] - writePretty(_s) - } - - def toJson(value: IStruct, withBigDecimals : Boolean = false): String = { - _toJson(value, withBigDecimals) - } - - def fromJsonStruct(jsonStr: String, withBigDecimals : Boolean = false): Struct = { - implicit val formats = buildFormat(withBigDecimals) - val _s = read[_Struct](jsonStr) - asJava(_s).asInstanceOf[Struct] - } - - //def toJsonReferenceable(value: Referenceable, withBigDecimals : Boolean = false): String = _toJson(value, withBigDecimals) - def fromJsonReferenceable(jsonStr: String, withBigDecimals : Boolean = false): Referenceable = { - implicit val formats = buildFormat(withBigDecimals) - val _s = read[_Reference](jsonStr) - asJava(_s).asInstanceOf[Referenceable] - } -} http://git-wip-us.apache.org/repos/asf/incubator-atlas/blob/30711973/typesystem/src/main/scala/org/apache/hadoop/metadata/typesystem/json/Serialization.scala ---------------------------------------------------------------------- diff --git a/typesystem/src/main/scala/org/apache/hadoop/metadata/typesystem/json/Serialization.scala 

http://git-wip-us.apache.org/repos/asf/incubator-atlas/blob/30711973/typesystem/src/main/scala/org/apache/hadoop/metadata/typesystem/json/Serialization.scala
----------------------------------------------------------------------
diff --git a/typesystem/src/main/scala/org/apache/hadoop/metadata/typesystem/json/Serialization.scala b/typesystem/src/main/scala/org/apache/hadoop/metadata/typesystem/json/Serialization.scala
deleted file mode 100755
index 7e5a634..0000000
--- a/typesystem/src/main/scala/org/apache/hadoop/metadata/typesystem/json/Serialization.scala
+++ /dev/null
@@ -1,303 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.hadoop.metadata.typesystem.json
-
-import org.apache.hadoop.metadata.typesystem._
-import org.apache.hadoop.metadata.typesystem.persistence.{Id, ReferenceableInstance, StructInstance}
-import org.apache.hadoop.metadata.typesystem.types.DataTypes.{ArrayType, MapType, TypeCategory}
-import org.apache.hadoop.metadata.typesystem.types._
-import org.json4s.JsonAST.JInt
-import org.json4s._
-import org.json4s.native.Serialization._
-
-import scala.collection.JavaConversions._
-import scala.collection.JavaConverters._
-
-class BigDecimalSerializer extends CustomSerializer[java.math.BigDecimal](format => ( {
-  case JDecimal(e) => e.bigDecimal
-}, {
-  case e: java.math.BigDecimal => JDecimal(new BigDecimal(e))
-}
-  ))
-
-class BigIntegerSerializer extends CustomSerializer[java.math.BigInteger](format => ( {
-  case JInt(e) => e.bigInteger
-}, {
-  case e: java.math.BigInteger => JInt(new BigInt(e))
-}
-  ))
-
-class IdSerializer extends CustomSerializer[Id](format => ( {
-  case JObject(JField("id", JInt(id)) ::
-    JField(Serialization.STRUCT_TYPE_FIELD_NAME, JString(className)) ::
-    JField("version", JInt(version)) :: Nil) => new Id(id.toLong, version.toInt, className)
-  case JObject(JField(Serialization.STRUCT_TYPE_FIELD_NAME, JString(className)) ::
-    JField("id", JInt(id)) ::
-    JField("version", JInt(version)) :: Nil) => new Id(id.toLong, version.toInt, className)
-  case JObject(JField("id", JString(id)) ::
-    JField(Serialization.STRUCT_TYPE_FIELD_NAME, JString(className)) ::
-    JField("version", JString(version)) :: Nil) => new Id(id, version.toInt, className)
-  case JObject(JField(Serialization.STRUCT_TYPE_FIELD_NAME, JString(className)) ::
-    JField("id", JString(id)) ::
-    JField("version", JString(version)) :: Nil) => new Id(id, version.toInt, className)
-}, {
-  case id: Id => JObject(JField("id", JString(id.id)),
-    JField(Serialization.STRUCT_TYPE_FIELD_NAME, JString(id.className)),
-    JField("version", JInt(id.version)))
-}
-  ))
-
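
BigDecimalSerializer, BigIntegerSerializer and IdSerializer all follow the same json4s CustomSerializer shape: a function from Formats to a pair of partial functions, one mapping the JSON AST to the target type and one mapping the target type back to the AST. A minimal sketch of that pattern with a made-up Temperature type (none of these names come from the patch):

    import org.json4s._
    import org.json4s.native.Serialization
    import org.json4s.native.Serialization.{read, write}

    case class Temperature(celsius: Double)
    case class Reading(sensor: String, temp: Temperature)

    // First partial function: JValue => Temperature; second: Temperature => JValue.
    class TemperatureSerializer extends CustomSerializer[Temperature](format => ( {
      case JDouble(c) => Temperature(c)
      case JInt(c)    => Temperature(c.toDouble)
    }, {
      case t: Temperature => JDouble(t.celsius)
    }))

    object CustomSerializerSketch {
      implicit val formats: Formats = Serialization.formats(NoTypeHints) + new TemperatureSerializer

      def main(args: Array[String]): Unit = {
        val json = write(Reading("probe-1", Temperature(21.5)))
        println(json)                // {"sensor":"probe-1","temp":21.5}
        println(read[Reading](json)) // Reading(probe-1,Temperature(21.5))
      }
    }
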
-class TypedStructSerializer() extends Serializer[ITypedStruct] {
-
-  def deserialize(implicit format: Formats) = {
-    case (TypeInfo(clazz, ptype), json) if classOf[ITypedStruct].isAssignableFrom(clazz) => json match {
-      case JObject(fs) =>
-        val (typ, fields) = fs.partition(f => f._1 == Serialization.STRUCT_TYPE_FIELD_NAME)
-        val typName = typ(0)._2.asInstanceOf[JString].s
-        val sT = typSystem.getDataType(
-          classOf[IConstructableType[IStruct, ITypedStruct]], typName).asInstanceOf[IConstructableType[IStruct, ITypedStruct]]
-        val s = sT.createInstance()
-        Serialization.deserializeFields(typSystem, sT, s, fields)
-        s
-      case x => throw new MappingException("Can't convert " + x + " to TypedStruct")
-    }
-
-  }
-
-  def typSystem = TypeSystem.getInstance()
-
-  /**
-   * Implicit conversion from `java.math.BigInteger` to `scala.BigInt`.
-   * match the builtin conversion for BigDecimal.
-   * See https://groups.google.com/forum/#!topic/scala-language/AFUamvxu68Q
-   */
-  //implicit def javaBigInteger2bigInt(x: java.math.BigInteger): BigInt = new BigInt(x)
-
-  def serialize(implicit format: Formats) = {
-    case e: ITypedStruct =>
-      val fields = Serialization.serializeFields(e)
-      JObject(JField(Serialization.STRUCT_TYPE_FIELD_NAME, JString(e.getTypeName)) :: fields)
-  }
-}
-
-class TypedReferenceableInstanceSerializer()
-  extends Serializer[ITypedReferenceableInstance] {
-
-  def deserialize(implicit format: Formats) = {
-    case (TypeInfo(clazz, ptype), json) if classOf[ITypedReferenceableInstance].isAssignableFrom(clazz) => json match {
-      case JObject(JField("id", JInt(id)) ::
-        JField(Serialization.STRUCT_TYPE_FIELD_NAME, JString(className)) ::
-        JField("version", JInt(version)) :: Nil) => new Id(id.toLong, version.toInt, className)
-      case JObject(JField("id", JString(id)) ::
-        JField(Serialization.STRUCT_TYPE_FIELD_NAME, JString(className)) ::
-        JField("version", JInt(version)) :: Nil) => new Id(id, version.toInt, className)
-      case JObject(fs) =>
-        var typField: Option[JField] = None
-        var idField: Option[JField] = None
-        var traitsField: Option[JField] = None
-        var fields: List[JField] = Nil
-
-        fs.foreach { f: JField => f._1 match {
-          case Serialization.STRUCT_TYPE_FIELD_NAME => typField = Some(f)
-          case Serialization.ID_TYPE_FIELD_NAME => idField = Some(f)
-          case Serialization.TRAIT_TYPE_FIELD_NAME => traitsField = Some(f)
-          case _ => fields = fields :+ f
-        }
-        }
-
-        var traitNames: List[String] = Nil
-
-        traitsField.map { t =>
-          val tObj: JObject = t._2.asInstanceOf[JObject]
-          tObj.obj.foreach { oTrait =>
-            val tName: String = oTrait._1
-            traitNames = traitNames :+ tName
-          }
-        }
-
-        val typName = typField.get._2.asInstanceOf[JString].s
-        val sT = typSystem.getDataType(
-          classOf[ClassType], typName).asInstanceOf[ClassType]
-        val id = Serialization.deserializeId(idField.get._2)
-        val s = sT.createInstance(id, traitNames: _*)
-        Serialization.deserializeFields(typSystem, sT, s, fields)
-
-        traitsField.map { t =>
-          val tObj: JObject = t._2.asInstanceOf[JObject]
-          tObj.obj.foreach { oTrait =>
-            val tName: String = oTrait._1
-            val traitJObj: JObject = oTrait._2.asInstanceOf[JObject]
-            val traitObj = s.getTrait(tName).asInstanceOf[ITypedStruct]
-            val tT = typSystem.getDataType(
-              classOf[TraitType], traitObj.getTypeName).asInstanceOf[TraitType]
-            val (tTyp, tFields) = traitJObj.obj.partition(f => f._1 == Serialization.STRUCT_TYPE_FIELD_NAME)
-            Serialization.deserializeFields(typSystem, tT, traitObj, tFields)
-          }
-        }
-
-        s
-      case x => throw new MappingException("Can't convert " + x + " to TypedStruct")
-    }
-
-  }
-
-  def typSystem = TypeSystem.getInstance()
-
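
The JObject(fs) branch of TypedReferenceableInstanceSerializer.deserialize works by splitting the incoming fields into the reserved metadata fields ($typeName$, $id$, $traits$) and the ordinary attribute fields before the instance is created and populated. The same partitioning step in isolation, on a hand-built JObject (the field names follow the Serialization constants; the payload values are made up):

    import org.json4s._

    object FieldPartitionSketch {
      val StructType = "$typeName$"
      val IdField    = "$id$"
      val Traits     = "$traits$"

      def main(args: Array[String]): Unit = {
        // Shaped like the serializer's output: reserved fields plus plain attributes.
        val obj = JObject(
          JField(StructType, JString("Table")),
          JField(IdField, JObject(JField("id", JString("42")))),
          JField(Traits, JObject(JField("Dimension", JObject()))),
          JField("name", JString("sales_fact")),
          JField("retention", JInt(30))
        )

        val (reserved, attributes) =
          obj.obj.partition(f => Set(StructType, IdField, Traits).contains(f._1))

        println(reserved.map(_._1))   // List($typeName$, $id$, $traits$)
        println(attributes.map(_._1)) // List(name, retention)
      }
    }
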
-  def serialize(implicit format: Formats) = {
-    case id: Id => Serialization.serializeId(id)
-    case e: ITypedReferenceableInstance =>
-      val idJ = JField(Serialization.ID_TYPE_FIELD_NAME, Serialization.serializeId(e.getId))
-      var fields = Serialization.serializeFields(e)
-      val traitsJ: List[JField] = e.getTraits.map(tName => JField(tName, Extraction.decompose(e.getTrait(tName)))).toList
-
-      fields = idJ :: fields
-      if (traitsJ.size > 0) {
-        fields = fields :+ JField(Serialization.TRAIT_TYPE_FIELD_NAME, JObject(traitsJ: _*))
-      }
-
-      JObject(JField(Serialization.STRUCT_TYPE_FIELD_NAME, JString(e.getTypeName)) :: fields)
-  }
-}
-
-
-object Serialization {
-  val STRUCT_TYPE_FIELD_NAME = "$typeName$"
-  val ID_TYPE_FIELD_NAME = "$id$"
-  val TRAIT_TYPE_FIELD_NAME = "$traits$"
-
-  def extractList(lT: ArrayType, value: JArray)(implicit format: Formats): Any = {
-    val dT = lT.getElemType
-    value.arr.map(extract(dT, _)).asJava
-  }
-
-  def extractMap(mT: MapType, value: JObject)(implicit format: Formats): Any = {
-    val kT = mT.getKeyType
-    val vT = mT.getValueType
-    value.obj.map { f: JField => f._1 -> extract(vT, f._2)}.toMap.asJava
-  }
-
-  def extract(dT: IDataType[_], value: JValue)(implicit format: Formats): Any = value match {
-    case value: JBool => Extraction.extract[Boolean](value)
-    case value: JInt => Extraction.extract[Int](value)
-    case value: JDouble => Extraction.extract[Double](value)
-    case value: JDecimal => Extraction.extract[BigDecimal](value)
-    case value: JString => Extraction.extract[String](value)
-    case JNull => null
-    case value: JArray => extractList(dT.asInstanceOf[ArrayType], value.asInstanceOf[JArray])
-    case value: JObject if dT.getTypeCategory eq TypeCategory.MAP =>
-      extractMap(dT.asInstanceOf[MapType], value.asInstanceOf[JObject])
-    case value: JObject if ((dT.getTypeCategory eq TypeCategory.STRUCT) || (dT.getTypeCategory eq TypeCategory.TRAIT)) =>
-      Extraction.extract[ITypedStruct](value)
-    case value: JObject =>
-      Extraction.extract[ITypedReferenceableInstance](value)
-  }
-
-  def serializeId(id: Id) = JObject(JField("id", JString(id.id)),
-    JField(Serialization.STRUCT_TYPE_FIELD_NAME, JString(id.className)),
-    JField("version", JInt(id.version)))
-
-  def serializeFields(e: ITypedInstance)(implicit format: Formats) = e.fieldMapping.fields.map {
-    case (fName, info) => {
-      var v = e.get(fName)
-      if (v != null && (info.dataType().getTypeCategory eq TypeCategory.MAP)) {
-        v = v.asInstanceOf[java.util.Map[_, _]].toMap
-      }
-
-      if (v != null && (info.dataType().getTypeCategory eq TypeCategory.CLASS) && !info.isComposite) {
-        v = v.asInstanceOf[IReferenceableInstance].getId
-      }
-
-      if (v != null && (info.dataType().getTypeCategory eq TypeCategory.ENUM)) {
-        v = v.asInstanceOf[EnumValue].value
-      }
-
-      JField(fName, Extraction.decompose(v))
-    }
-  }.toList.map(_.asInstanceOf[JField])
-
-  def deserializeFields[T <: ITypedInstance](typeSystem: TypeSystem,
-    sT: IConstructableType[_, T],
-    s: T, fields: List[JField])(implicit format: Formats)
-  = {
-    //MetadataService.setCurrentService(currentMdSvc)
-    fields.foreach { f =>
-      val fName = f._1
-      val fInfo = sT.fieldMapping.fields(fName)
-      if (fInfo != null) {
-        //println(fName)
-        var v = f._2
-        if (fInfo.dataType().getTypeCategory == TypeCategory.TRAIT ||
-          fInfo.dataType().getTypeCategory == TypeCategory.STRUCT) {
-          v = v match {
-            case JObject(sFields) =>
-              JObject(JField(Serialization.STRUCT_TYPE_FIELD_NAME, JString(fInfo.dataType.getName)) :: sFields)
-            case x => x
-          }
-        }
-        s.set(fName, Serialization.extract(fInfo.dataType(), v))
-      }
-    }
-  }
-
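
serializeId always emits the same three-field object: the raw id, the class name under $typeName$, and the version; deserializeId below pattern-matches exactly that shape back into an Id. A sketch of the corresponding JSON, built and rendered with json4s (Id itself is an Atlas class, so the sketch sticks to a plain JObject with made-up values):

    import org.json4s._
    import org.json4s.native.JsonMethods.{compact, pretty, render}

    object IdJsonSketch {
      def main(args: Array[String]): Unit = {
        // The shape produced by serializeId: id, $typeName$ (the class name), version.
        val idJson = JObject(
          JField("id", JString("42")),
          JField("$typeName$", JString("Table")),
          JField("version", JInt(0))
        )

        println(compact(render(idJson))) // {"id":"42","$typeName$":"Table","version":0}
        println(pretty(render(idJson)))
      }
    }
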
-  def deserializeId(value: JValue)(implicit format: Formats) = value match {
-    case JObject(JField("id", JInt(id)) ::
-      JField(Serialization.STRUCT_TYPE_FIELD_NAME, JString(className)) ::
-      JField("version", JInt(version)) :: Nil) => new Id(id.toLong, version.toInt, className)
-    case JObject(JField("id", JString(id)) ::
-      JField(Serialization.STRUCT_TYPE_FIELD_NAME, JString(className)) ::
-      JField("version", JInt(version)) :: Nil) => new Id(id, version.toInt, className)
-  }
-
-  def toJson(value: ITypedReferenceableInstance): String = {
-    implicit val formats = org.json4s.native.Serialization.formats(NoTypeHints) + new TypedStructSerializer +
-      new TypedReferenceableInstanceSerializer + new BigDecimalSerializer + new BigIntegerSerializer
-
-    write(value)
-  }
-
-  def toJson(value: ITypedInstance): String = {
-    implicit val formats = org.json4s.native.Serialization.formats(NoTypeHints) + new TypedStructSerializer +
-      new TypedReferenceableInstanceSerializer + new BigDecimalSerializer + new BigIntegerSerializer
-
-    write(value)
-  }
-
-  def toJsonPretty(value: ITypedReferenceableInstance): String = {
-    implicit val formats = org.json4s.native.Serialization.formats(NoTypeHints) + new TypedStructSerializer +
-      new TypedReferenceableInstanceSerializer + new BigDecimalSerializer + new BigIntegerSerializer
-
-    writePretty(value)
-  }
-
-  def fromJson(jsonStr: String): ITypedReferenceableInstance = {
-    implicit val formats = org.json4s.native.Serialization.formats(NoTypeHints) + new TypedStructSerializer +
-      new TypedReferenceableInstanceSerializer + new BigDecimalSerializer + new BigIntegerSerializer
-
-    read[ReferenceableInstance](jsonStr)
-  }
-
-  def traitFromJson(jsonStr: String): ITypedInstance = {
-    implicit val formats = org.json4s.native.Serialization.formats(NoTypeHints) + new TypedStructSerializer +
-      new TypedReferenceableInstanceSerializer + new BigDecimalSerializer + new BigIntegerSerializer
-
-    read[StructInstance](jsonStr)
-  }
-}
-
-
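
Each public entry point above rebuilds the same implicit Formats (a NoTypeHints base plus the four custom serializers), so the only real difference between toJson and toJsonPretty is write versus writePretty. The composition and the two render modes, sketched with stand-in case classes instead of typed Atlas instances:

    import org.json4s._
    import org.json4s.native.Serialization
    import org.json4s.native.Serialization.{write, writePretty}

    // Stand-in payload; the real entry points operate on ITypedReferenceableInstance.
    case class Column(name: String, dataType: String)
    case class Table(name: String, columns: List[Column])

    object WriteVsPrettySketch {
      // Custom serializers would be appended here with `+ new ...Serializer`,
      // exactly as toJson/fromJson do above.
      implicit val formats: Formats = Serialization.formats(NoTypeHints)

      def main(args: Array[String]): Unit = {
        val t = Table("sales_fact", List(Column("time_id", "int"), Column("sales", "double")))
        println(write(t))       // single-line JSON, as produced by toJson
        println(writePretty(t)) // indented JSON, as produced by toJsonPretty
      }
    }
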
