http://git-wip-us.apache.org/repos/asf/incubator-atlas/blob/30711973/typesystem/src/test/java/org/apache/hadoop/metadata/typesystem/types/BaseTest.java ---------------------------------------------------------------------- diff --git a/typesystem/src/test/java/org/apache/hadoop/metadata/typesystem/types/BaseTest.java b/typesystem/src/test/java/org/apache/hadoop/metadata/typesystem/types/BaseTest.java deleted file mode 100755 index 3e39aac..0000000 --- a/typesystem/src/test/java/org/apache/hadoop/metadata/typesystem/types/BaseTest.java +++ /dev/null @@ -1,194 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package org.apache.hadoop.metadata.typesystem.types; - -import com.google.common.collect.ImmutableList; -import com.google.common.collect.Lists; -import com.google.common.collect.Maps; -import org.apache.hadoop.metadata.MetadataException; -import org.apache.hadoop.metadata.typesystem.ITypedReferenceableInstance; -import org.apache.hadoop.metadata.typesystem.Referenceable; -import org.apache.hadoop.metadata.typesystem.Struct; -import org.apache.hadoop.metadata.typesystem.types.utils.TypesUtil; -import org.junit.Before; - -import java.math.BigDecimal; -import java.math.BigInteger; -import java.util.Date; -import java.util.Map; - -public abstract class BaseTest { - - public static final String STRUCT_TYPE_1 = "t1"; - public static final String STRUCT_TYPE_2 = "t2"; - public static final String TEST_DATE = "2014-12-11T02:35:58.440Z"; - public static final long TEST_DATE_IN_LONG=1418265358440L; - - public static Struct createStruct() throws MetadataException { - StructType structType = TypeSystem.getInstance().getDataType( - StructType.class, STRUCT_TYPE_1); - Struct s = new Struct(structType.getName()); - s.set("a", 1); - s.set("b", true); - s.set("c", (byte) 1); - s.set("d", (short) 2); - s.set("e", 1); - s.set("f", 1); - s.set("g", 1L); - s.set("h", 1.0f); - s.set("i", 1.0); - s.set("j", BigInteger.valueOf(1L)); - s.set("k", new BigDecimal(1)); - s.set("l", new Date(1418265358440L)); - s.set("m", Lists.asList(1, new Integer[]{1})); - s.set("n", Lists.asList(BigDecimal.valueOf(1.1), - new BigDecimal[]{BigDecimal.valueOf(1.1)})); - Map<String, Double> hm = Maps.newHashMap(); - hm.put("a", 1.0); - hm.put("b", 2.0); - s.set("o", hm); - return s; - } - - protected final TypeSystem getTypeSystem() { - return TypeSystem.getInstance(); - } - - @Before - public void setup() throws Exception { - TypeSystem ts = TypeSystem.getInstance(); - ts.reset(); - - StructType structType = ts.defineStructType(STRUCT_TYPE_1, - true, - TypesUtil.createRequiredAttrDef("a", 
DataTypes.INT_TYPE), - TypesUtil.createOptionalAttrDef("b", DataTypes.BOOLEAN_TYPE), - TypesUtil.createOptionalAttrDef("c", DataTypes.BYTE_TYPE), - TypesUtil.createOptionalAttrDef("d", DataTypes.SHORT_TYPE), - TypesUtil.createOptionalAttrDef("e", DataTypes.INT_TYPE), - TypesUtil.createOptionalAttrDef("f", DataTypes.INT_TYPE), - TypesUtil.createOptionalAttrDef("g", DataTypes.LONG_TYPE), - TypesUtil.createOptionalAttrDef("h", DataTypes.FLOAT_TYPE), - TypesUtil.createOptionalAttrDef("i", DataTypes.DOUBLE_TYPE), - TypesUtil.createOptionalAttrDef("j", DataTypes.BIGINTEGER_TYPE), - TypesUtil.createOptionalAttrDef("k", DataTypes.BIGDECIMAL_TYPE), - TypesUtil.createOptionalAttrDef("l", DataTypes.DATE_TYPE), - TypesUtil.createOptionalAttrDef("m", ts.defineArrayType(DataTypes.INT_TYPE)), - TypesUtil.createOptionalAttrDef("n", ts.defineArrayType(DataTypes.BIGDECIMAL_TYPE)), - TypesUtil.createOptionalAttrDef("o", - ts.defineMapType(DataTypes.STRING_TYPE, DataTypes.DOUBLE_TYPE))); - System.out.println("defined structType = " + structType); - - StructType recursiveStructType = ts.defineStructType(STRUCT_TYPE_2, - true, - TypesUtil.createRequiredAttrDef("a", DataTypes.INT_TYPE), - TypesUtil.createOptionalAttrDef("s", STRUCT_TYPE_2)); - System.out.println("defined recursiveStructType = " + recursiveStructType); - } - - protected Map<String, IDataType> defineTraits(HierarchicalTypeDefinition... tDefs) - throws MetadataException { - - return getTypeSystem().defineTraitTypes(tDefs); - } - - protected Map<String, IDataType> defineClasses( - HierarchicalTypeDefinition<ClassType>... classDefs) throws MetadataException { - return getTypeSystem().defineClassTypes(classDefs); - } - - /* - * Class Hierarchy is: - * Department(name : String, employees : Array[Person]) - * Person(name : String, department : Department, manager : Manager) - * Manager(subordinates : Array[Person]) extends Person - * - * Persons can have SecurityClearance(level : Int) clearance. 
- */ - protected void defineDeptEmployeeTypes(TypeSystem ts) throws MetadataException { - - HierarchicalTypeDefinition<ClassType> deptTypeDef = TypesUtil - .createClassTypeDef("Department", - ImmutableList.<String>of(), - TypesUtil.createRequiredAttrDef("name", DataTypes.STRING_TYPE), - new AttributeDefinition("employees", - String.format("array<%s>", "Person"), Multiplicity.COLLECTION, true, - "department") - ); - HierarchicalTypeDefinition<ClassType> personTypeDef = TypesUtil.createClassTypeDef("Person", - ImmutableList.<String>of(), - TypesUtil.createRequiredAttrDef("name", DataTypes.STRING_TYPE), - new AttributeDefinition("department", - "Department", Multiplicity.REQUIRED, false, "employees"), - new AttributeDefinition("manager", - "Manager", Multiplicity.OPTIONAL, false, "subordinates") - ); - HierarchicalTypeDefinition<ClassType> managerTypeDef = - TypesUtil.createClassTypeDef("Manager", - ImmutableList.of("Person"), - new AttributeDefinition("subordinates", - String.format("array<%s>", "Person"), - Multiplicity.COLLECTION, false, "manager") - ); - - HierarchicalTypeDefinition<TraitType> securityClearanceTypeDef = - TypesUtil.createTraitTypeDef( - "SecurityClearance", - ImmutableList.<String>of(), - TypesUtil.createRequiredAttrDef("level", DataTypes.INT_TYPE) - ); - - ts.defineTypes(ImmutableList.<StructTypeDefinition>of(), - ImmutableList.of(securityClearanceTypeDef), - ImmutableList.of(deptTypeDef, personTypeDef, - managerTypeDef)); - - ImmutableList.of( - ts.getDataType(HierarchicalType.class, "SecurityClearance"), - ts.getDataType(ClassType.class, "Department"), - ts.getDataType(ClassType.class, "Person"), - ts.getDataType(ClassType.class, "Manager") - ); - } - - protected Referenceable createDeptEg1(TypeSystem ts) throws MetadataException { - Referenceable hrDept = new Referenceable("Department"); - Referenceable john = new Referenceable("Person"); - Referenceable jane = new Referenceable("Manager", "SecurityClearance"); - - hrDept.set("name", "hr"); - 
john.set("name", "John"); - john.set("department", hrDept); - jane.set("name", "Jane"); - jane.set("department", hrDept); - - john.set("manager", jane); - - hrDept.set("employees", ImmutableList.of(john, jane)); - - jane.set("subordinates", ImmutableList.of(john)); - - jane.getTrait("SecurityClearance").set("level", 1); - - ClassType deptType = ts.getDataType(ClassType.class, "Department"); - deptType.convert(hrDept, Multiplicity.REQUIRED); - - return hrDept; - } -}
http://git-wip-us.apache.org/repos/asf/incubator-atlas/blob/30711973/typesystem/src/test/java/org/apache/hadoop/metadata/typesystem/types/ClassTest.java ---------------------------------------------------------------------- diff --git a/typesystem/src/test/java/org/apache/hadoop/metadata/typesystem/types/ClassTest.java b/typesystem/src/test/java/org/apache/hadoop/metadata/typesystem/types/ClassTest.java deleted file mode 100755 index 0246b61..0000000 --- a/typesystem/src/test/java/org/apache/hadoop/metadata/typesystem/types/ClassTest.java +++ /dev/null @@ -1,72 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package org.apache.hadoop.metadata.typesystem.types; - -import org.apache.hadoop.metadata.MetadataException; -import org.apache.hadoop.metadata.typesystem.ITypedReferenceableInstance; -import org.apache.hadoop.metadata.typesystem.Referenceable; -import org.junit.Assert; -import org.junit.Before; -import org.junit.Test; - -public class ClassTest extends BaseTest { - - @Before - public void setup() throws Exception { - super.setup(); - } - - @Test - public void test1() throws MetadataException { - - TypeSystem ts = getTypeSystem(); - - defineDeptEmployeeTypes(ts); - Referenceable hrDept = createDeptEg1(ts); - ClassType deptType = ts.getDataType(ClassType.class, "Department"); - ITypedReferenceableInstance hrDept2 = deptType.convert(hrDept, Multiplicity.REQUIRED); - - - Assert.assertEquals(hrDept2.toString(), "{\n" + - "\tid : (type: Department, id: <unassigned>)\n" + - "\tname : \thr\n" + - "\temployees : \t[{\n" + - "\tid : (type: Person, id: <unassigned>)\n" + - "\tname : \tJohn\n" + - "\tdepartment : (type: Department, id: <unassigned>)\n" + - "\tmanager : (type: Manager, id: <unassigned>)\n" + - "}, {\n" + - "\tid : (type: Manager, id: <unassigned>)\n" + - "\tsubordinates : \t[{\n" + - "\tid : (type: Person, id: <unassigned>)\n" + - "\tname : \tJohn\n" + - "\tdepartment : (type: Department, id: <unassigned>)\n" + - "\tmanager : (type: Manager, id: <unassigned>)\n" + - "}]\n" + - "\tname : \tJane\n" + - "\tdepartment : (type: Department, id: <unassigned>)\n" + - "\tmanager : <null>\n" + - "\n" + - "\tSecurityClearance : \t{\n" + - "\t\tlevel : \t\t1\n" + - "\t}}]\n" + - "}"); - - } -} http://git-wip-us.apache.org/repos/asf/incubator-atlas/blob/30711973/typesystem/src/test/java/org/apache/hadoop/metadata/typesystem/types/EnumTest.java ---------------------------------------------------------------------- diff --git a/typesystem/src/test/java/org/apache/hadoop/metadata/typesystem/types/EnumTest.java 
b/typesystem/src/test/java/org/apache/hadoop/metadata/typesystem/types/EnumTest.java deleted file mode 100755 index a7185d8..0000000 --- a/typesystem/src/test/java/org/apache/hadoop/metadata/typesystem/types/EnumTest.java +++ /dev/null @@ -1,221 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package org.apache.hadoop.metadata.typesystem.types; - -import com.google.common.collect.ImmutableList; -import com.google.common.collect.Lists; -import com.google.common.collect.Maps; -import org.apache.hadoop.metadata.MetadataException; -import org.apache.hadoop.metadata.typesystem.IReferenceableInstance; -import org.apache.hadoop.metadata.typesystem.ITypedReferenceableInstance; -import org.apache.hadoop.metadata.typesystem.ITypedStruct; -import org.apache.hadoop.metadata.typesystem.Referenceable; -import org.apache.hadoop.metadata.typesystem.Struct; -import org.junit.Assert; -import org.junit.Before; -import org.junit.Test; - -import java.math.BigDecimal; -import java.math.BigInteger; -import java.util.Date; -import java.util.Map; - -import static org.apache.hadoop.metadata.typesystem.types.utils.TypesUtil.createClassTypeDef; -import static org.apache.hadoop.metadata.typesystem.types.utils.TypesUtil.createOptionalAttrDef; -import static org.apache.hadoop.metadata.typesystem.types.utils.TypesUtil.createRequiredAttrDef; - -public class EnumTest extends BaseTest { - - @Before - public void setup() throws Exception { - super.setup(); - } - - void defineEnums(TypeSystem ts) throws MetadataException { - ts.defineEnumType("HiveObjectType", - new EnumValue("GLOBAL", 1), - new EnumValue("DATABASE", 2), - new EnumValue("TABLE", 3), - new EnumValue("PARTITION", 4), - new EnumValue("COLUMN", 5)); - - ts.defineEnumType("PrincipalType", - new EnumValue("USER", 1), - new EnumValue("ROLE", 2), - new EnumValue("GROUP", 3)); - - ts.defineEnumType("TxnState", - new EnumValue("COMMITTED", 1), - new EnumValue("ABORTED", 2), - new EnumValue("OPEN", 3)); - - ts.defineEnumType("LockLevel", - new EnumValue("DB", 1), - new EnumValue("TABLE", 2), - new EnumValue("PARTITION", 3)); - - } - - protected void fillStruct(Struct s) throws MetadataException { - s.set("a", 1); - s.set("b", true); - s.set("c", (byte) 1); - s.set("d", (short) 2); - s.set("e", 1); - s.set("f", 1); - 
s.set("g", 1L); - s.set("h", 1.0f); - s.set("i", 1.0); - s.set("j", BigInteger.valueOf(1L)); - s.set("k", new BigDecimal(1)); - s.set("l", new Date(1418265358440L)); - s.set("m", Lists.asList(1, new Integer[]{1})); - s.set("n", - Lists.asList(BigDecimal.valueOf(1.1), new BigDecimal[]{BigDecimal.valueOf(1.1)})); - Map<String, Double> hm = Maps.newHashMap(); - hm.put("a", 1.0); - hm.put("b", 2.0); - s.set("o", hm); - s.set("enum1", "GLOBAL"); - s.set("enum2", 1); - s.set("enum3", "COMMITTED"); - s.set("enum4", 3); - } - - protected Struct createStructWithEnum(String typeName) throws MetadataException { - Struct s = new Struct(typeName); - fillStruct(s); - return s; - } - - protected Referenceable createInstanceWithEnum(String typeName) throws MetadataException { - Referenceable r = new Referenceable(typeName); - fillStruct(r); - return r; - } - - protected ClassType defineClassTypeWithEnum(TypeSystem ts) throws MetadataException { - return ts.defineClassType(createClassTypeDef("t4", - ImmutableList.<String>of(), - createRequiredAttrDef("a", DataTypes.INT_TYPE), - createOptionalAttrDef("b", DataTypes.BOOLEAN_TYPE), - createOptionalAttrDef("c", DataTypes.BYTE_TYPE), - createOptionalAttrDef("d", DataTypes.SHORT_TYPE), - createOptionalAttrDef("enum1", ts.getDataType(EnumType.class, "HiveObjectType")), - createOptionalAttrDef("e", DataTypes.INT_TYPE), - createOptionalAttrDef("f", DataTypes.INT_TYPE), - createOptionalAttrDef("g", DataTypes.LONG_TYPE), - createOptionalAttrDef("enum2", ts.getDataType(EnumType.class, "PrincipalType")), - createOptionalAttrDef("h", DataTypes.FLOAT_TYPE), - createOptionalAttrDef("i", DataTypes.DOUBLE_TYPE), - createOptionalAttrDef("j", DataTypes.BIGINTEGER_TYPE), - createOptionalAttrDef("k", DataTypes.BIGDECIMAL_TYPE), - createOptionalAttrDef("enum3", ts.getDataType(EnumType.class, "TxnState")), - createOptionalAttrDef("l", DataTypes.DATE_TYPE), - createOptionalAttrDef("m", ts.defineArrayType(DataTypes.INT_TYPE)), - createOptionalAttrDef("n", 
ts.defineArrayType(DataTypes.BIGDECIMAL_TYPE)), - createOptionalAttrDef("o", - ts.defineMapType(DataTypes.STRING_TYPE, DataTypes.DOUBLE_TYPE)), - createOptionalAttrDef("enum4", ts.getDataType(EnumType.class, "LockLevel")))); - } - - @Test - public void testStruct() throws MetadataException { - TypeSystem ts = getTypeSystem(); - defineEnums(ts); - StructType structType = ts.defineStructType("t3", - true, - createRequiredAttrDef("a", DataTypes.INT_TYPE), - createOptionalAttrDef("b", DataTypes.BOOLEAN_TYPE), - createOptionalAttrDef("c", DataTypes.BYTE_TYPE), - createOptionalAttrDef("d", DataTypes.SHORT_TYPE), - createOptionalAttrDef("enum1", ts.getDataType(EnumType.class, "HiveObjectType")), - createOptionalAttrDef("e", DataTypes.INT_TYPE), - createOptionalAttrDef("f", DataTypes.INT_TYPE), - createOptionalAttrDef("g", DataTypes.LONG_TYPE), - createOptionalAttrDef("enum2", ts.getDataType(EnumType.class, "PrincipalType")), - createOptionalAttrDef("h", DataTypes.FLOAT_TYPE), - createOptionalAttrDef("i", DataTypes.DOUBLE_TYPE), - createOptionalAttrDef("j", DataTypes.BIGINTEGER_TYPE), - createOptionalAttrDef("k", DataTypes.BIGDECIMAL_TYPE), - createOptionalAttrDef("enum3", ts.getDataType(EnumType.class, "TxnState")), - createOptionalAttrDef("l", DataTypes.DATE_TYPE), - createOptionalAttrDef("m", ts.defineArrayType(DataTypes.INT_TYPE)), - createOptionalAttrDef("n", ts.defineArrayType(DataTypes.BIGDECIMAL_TYPE)), - createOptionalAttrDef("o", - ts.defineMapType(DataTypes.STRING_TYPE, DataTypes.DOUBLE_TYPE)), - createOptionalAttrDef("enum4", ts.getDataType(EnumType.class, "LockLevel"))); - - Struct s = createStructWithEnum("t3"); - ITypedStruct typedS = structType.convert(s, Multiplicity.REQUIRED); - Assert.assertEquals(typedS.toString(), "{\n" + - "\ta : \t1\n" + - "\tb : \ttrue\n" + - "\tc : \t1\n" + - "\td : \t2\n" + - "\tenum1 : \tGLOBAL\n" + - "\te : \t1\n" + - "\tf : \t1\n" + - "\tg : \t1\n" + - "\tenum2 : \tUSER\n" + - "\th : \t1.0\n" + - "\ti : \t1.0\n" + - "\tj : 
\t1\n" + - "\tk : \t1\n" + - "\tenum3 : \tCOMMITTED\n" + - "\tl : \t" + TEST_DATE + "\n" + - "\tm : \t[1, 1]\n" + - "\tn : \t[1.1, 1.1]\n" + - "\to : \t{b=2.0, a=1.0}\n" + - "\tenum4 : \tPARTITION\n" + - "}"); - } - - @Test - public void testClass() throws MetadataException { - TypeSystem ts = getTypeSystem(); - defineEnums(ts); - ClassType clsType = defineClassTypeWithEnum(ts); - - IReferenceableInstance r = createInstanceWithEnum("t4"); - ITypedReferenceableInstance typedR = clsType.convert(r, Multiplicity.REQUIRED); - Assert.assertEquals(typedR.toString(), "{\n" + - "\tid : (type: t4, id: <unassigned>)\n" + - "\ta : \t1\n" + - "\tb : \ttrue\n" + - "\tc : \t1\n" + - "\td : \t2\n" + - "\tenum1 : \tGLOBAL\n" + - "\te : \t1\n" + - "\tf : \t1\n" + - "\tg : \t1\n" + - "\tenum2 : \tUSER\n" + - "\th : \t1.0\n" + - "\ti : \t1.0\n" + - "\tj : \t1\n" + - "\tk : \t1\n" + - "\tenum3 : \tCOMMITTED\n" + - "\tl : \t" + TEST_DATE + "\n" + - "\tm : \t[1, 1]\n" + - "\tn : \t[1.1, 1.1]\n" + - "\to : \t{b=2.0, a=1.0}\n" + - "\tenum4 : \tPARTITION\n" + - "}"); - } -} http://git-wip-us.apache.org/repos/asf/incubator-atlas/blob/30711973/typesystem/src/test/java/org/apache/hadoop/metadata/typesystem/types/StructTest.java ---------------------------------------------------------------------- diff --git a/typesystem/src/test/java/org/apache/hadoop/metadata/typesystem/types/StructTest.java b/typesystem/src/test/java/org/apache/hadoop/metadata/typesystem/types/StructTest.java deleted file mode 100755 index 0db0a67..0000000 --- a/typesystem/src/test/java/org/apache/hadoop/metadata/typesystem/types/StructTest.java +++ /dev/null @@ -1,82 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. 
The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.hadoop.metadata.typesystem.types; - -import org.apache.hadoop.metadata.MetadataException; -import org.apache.hadoop.metadata.typesystem.ITypedStruct; -import org.apache.hadoop.metadata.typesystem.Struct; -import org.junit.Assert; -import org.junit.Before; -import org.junit.Test; - -public class StructTest extends BaseTest { - - StructType structType; - StructType recursiveStructType; - - @Before - public void setup() throws Exception { - super.setup(); - structType = getTypeSystem().getDataType(StructType.class, STRUCT_TYPE_1); - recursiveStructType = getTypeSystem() - .getDataType(StructType.class, STRUCT_TYPE_2); - } - - @Test - public void test1() throws MetadataException { - Struct s = createStruct(); - ITypedStruct ts = structType.convert(s, Multiplicity.REQUIRED); - Assert.assertEquals(ts.toString(), "{\n" + - "\ta : \t1\n" + - "\tb : \ttrue\n" + - "\tc : \t1\n" + - "\td : \t2\n" + - "\te : \t1\n" + - "\tf : \t1\n" + - "\tg : \t1\n" + - "\th : \t1.0\n" + - "\ti : \t1.0\n" + - "\tj : \t1\n" + - "\tk : \t1\n" + - "\tl : \t" + TEST_DATE + "\n" + - "\tm : \t[1, 1]\n" + - "\tn : \t[1.1, 1.1]\n" + - "\to : \t{b=2.0, a=1.0}\n" + - "}"); - } - - @Test - public void testRecursive() throws MetadataException { - Struct s1 = new Struct(recursiveStructType.getName()); - s1.set("a", 1); - Struct s2 = new Struct(recursiveStructType.getName()); - s2.set("a", 1); - 
s2.set("s", s1); - ITypedStruct ts = recursiveStructType.convert(s2, Multiplicity.REQUIRED); - Assert.assertEquals(ts.toString(), "{\n" + - "\ta : \t1\n" + - "\ts : \t{\n" + - "\t\ta : \t\t1\n" + - "\t\ts : <null>\n" + - "\n" + - "\t}\n" + - "}"); - } - -} http://git-wip-us.apache.org/repos/asf/incubator-atlas/blob/30711973/typesystem/src/test/java/org/apache/hadoop/metadata/typesystem/types/TraitTest.java ---------------------------------------------------------------------- diff --git a/typesystem/src/test/java/org/apache/hadoop/metadata/typesystem/types/TraitTest.java b/typesystem/src/test/java/org/apache/hadoop/metadata/typesystem/types/TraitTest.java deleted file mode 100755 index e08ed05..0000000 --- a/typesystem/src/test/java/org/apache/hadoop/metadata/typesystem/types/TraitTest.java +++ /dev/null @@ -1,224 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package org.apache.hadoop.metadata.typesystem.types; - -import com.google.common.collect.ImmutableList; -import org.apache.hadoop.metadata.MetadataException; -import org.apache.hadoop.metadata.typesystem.IStruct; -import org.apache.hadoop.metadata.typesystem.ITypedStruct; -import org.apache.hadoop.metadata.typesystem.Struct; -import org.junit.Assert; -import org.junit.Before; -import org.junit.Test; - -import java.util.HashMap; -import java.util.Map; - -import static org.apache.hadoop.metadata.typesystem.types.utils.TypesUtil.createOptionalAttrDef; -import static org.apache.hadoop.metadata.typesystem.types.utils.TypesUtil.createRequiredAttrDef; -import static org.apache.hadoop.metadata.typesystem.types.utils.TypesUtil.createTraitTypeDef; - -public class TraitTest extends BaseTest { - - - @Before - public void setup() throws Exception { - super.setup(); - } - - /* - * Type Hierarchy is: - * A(a,b,c,d) - * B(b) extends A - * C(c) extends A - * D(d) extends B,C - * - * - There are a total of 11 fields in an instance of D - * - an attribute that is hidden by a SubType can referenced by prefixing it with the - * complete Path. - * For e.g. the 'b' attribute in A (that is a superType for B) is hidden the 'b' attribute - * in B. - * So it is availabel by the name 'A.B.D.b' - * - * - Another way to set attributes is to cast. Casting a 'D' instance of 'B' makes the 'A.B.D - * .b' attribute - * available as 'A.B.b'. Casting one more time to an 'A' makes the 'A.B.b' attribute - * available as 'b'. 
- */ - @Test - public void test1() throws MetadataException { - HierarchicalTypeDefinition A = createTraitTypeDef("A", null, - createRequiredAttrDef("a", DataTypes.INT_TYPE), - createOptionalAttrDef("b", DataTypes.BOOLEAN_TYPE), - createOptionalAttrDef("c", DataTypes.BYTE_TYPE), - createOptionalAttrDef("d", DataTypes.SHORT_TYPE)); - HierarchicalTypeDefinition B = createTraitTypeDef("B", ImmutableList.<String>of("A"), - createOptionalAttrDef("b", DataTypes.BOOLEAN_TYPE)); - HierarchicalTypeDefinition C = createTraitTypeDef("C", ImmutableList.<String>of("A"), - createOptionalAttrDef("c", DataTypes.BYTE_TYPE)); - HierarchicalTypeDefinition D = createTraitTypeDef("D", ImmutableList.<String>of("B", "C"), - createOptionalAttrDef("d", DataTypes.SHORT_TYPE)); - - defineTraits(A, B, C, D); - - TraitType DType = (TraitType) getTypeSystem().getDataType(TraitType.class, "D"); - -// for(String aName : DType.fieldMapping().fields.keySet()) { -// System.out.println(String.format("nameToQualifiedName.put(\"%s\", \"%s\");", aName, DType.getQualifiedName(aName))); -// } - - Map<String,String> nameToQualifiedName = new HashMap(); - { - nameToQualifiedName.put("d", "D.d"); - nameToQualifiedName.put("b", "B.b"); - nameToQualifiedName.put("c", "C.c"); - nameToQualifiedName.put("a", "A.a"); - nameToQualifiedName.put("A.B.D.b", "A.B.D.b"); - nameToQualifiedName.put("A.B.D.c", "A.B.D.c"); - nameToQualifiedName.put("A.B.D.d", "A.B.D.d"); - nameToQualifiedName.put("A.C.D.a", "A.C.D.a"); - nameToQualifiedName.put("A.C.D.b", "A.C.D.b"); - nameToQualifiedName.put("A.C.D.c", "A.C.D.c"); - nameToQualifiedName.put("A.C.D.d", "A.C.D.d"); - } - - Struct s1 = new Struct("D"); - s1.set("d", 1); - s1.set("c", 1); - s1.set("b", true); - s1.set("a", 1); - s1.set("A.B.D.b", true); - s1.set("A.B.D.c", 2); - s1.set("A.B.D.d", 2); - - s1.set("A.C.D.a", 3); - s1.set("A.C.D.b", false); - s1.set("A.C.D.c", 3); - s1.set("A.C.D.d", 3); - - - ITypedStruct ts = DType.convert(s1, Multiplicity.REQUIRED); - 
Assert.assertEquals(ts.toString(), "{\n" + - "\td : \t1\n" + - "\tb : \ttrue\n" + - "\tc : \t1\n" + - "\ta : \t1\n" + - "\tA.B.D.b : \ttrue\n" + - "\tA.B.D.c : \t2\n" + - "\tA.B.D.d : \t2\n" + - "\tA.C.D.a : \t3\n" + - "\tA.C.D.b : \tfalse\n" + - "\tA.C.D.c : \t3\n" + - "\tA.C.D.d : \t3\n" + - "}"); - - /* - * cast to B and set the 'b' attribute on A. - */ - TraitType BType = (TraitType) getTypeSystem().getDataType(TraitType.class, "B"); - IStruct s2 = DType.castAs(ts, "B"); - s2.set("A.B.b", false); - - Assert.assertEquals(ts.toString(), "{\n" + - "\td : \t1\n" + - "\tb : \ttrue\n" + - "\tc : \t1\n" + - "\ta : \t1\n" + - "\tA.B.D.b : \tfalse\n" + - "\tA.B.D.c : \t2\n" + - "\tA.B.D.d : \t2\n" + - "\tA.C.D.a : \t3\n" + - "\tA.C.D.b : \tfalse\n" + - "\tA.C.D.c : \t3\n" + - "\tA.C.D.d : \t3\n" + - "}"); - - /* - * cast again to A and set the 'b' attribute on A. - */ - TraitType AType = (TraitType) getTypeSystem().getDataType(TraitType.class, "A"); - IStruct s3 = BType.castAs(s2, "A"); - s3.set("b", true); - Assert.assertEquals(ts.toString(), "{\n" + - "\td : \t1\n" + - "\tb : \ttrue\n" + - "\tc : \t1\n" + - "\ta : \t1\n" + - "\tA.B.D.b : \ttrue\n" + - "\tA.B.D.c : \t2\n" + - "\tA.B.D.d : \t2\n" + - "\tA.C.D.a : \t3\n" + - "\tA.C.D.b : \tfalse\n" + - "\tA.C.D.c : \t3\n" + - "\tA.C.D.d : \t3\n" + - "}"); - } - - @Test - public void testRandomOrder() throws MetadataException { - HierarchicalTypeDefinition A = createTraitTypeDef("A", null, - createRequiredAttrDef("a", DataTypes.INT_TYPE), - createOptionalAttrDef("b", DataTypes.BOOLEAN_TYPE), - createOptionalAttrDef("c", DataTypes.BYTE_TYPE), - createOptionalAttrDef("d", DataTypes.SHORT_TYPE)); - HierarchicalTypeDefinition B = createTraitTypeDef("B", ImmutableList.<String>of("A"), - createOptionalAttrDef("b", DataTypes.BOOLEAN_TYPE)); - HierarchicalTypeDefinition C = createTraitTypeDef("C", ImmutableList.<String>of("A"), - createOptionalAttrDef("c", DataTypes.BYTE_TYPE)); - HierarchicalTypeDefinition D = 
createTraitTypeDef("D", ImmutableList.<String>of("B", "C"), - createOptionalAttrDef("d", DataTypes.SHORT_TYPE)); - - defineTraits(B, D, A, C); - - TraitType DType = (TraitType) getTypeSystem().getDataType(TraitType.class, "D"); - - Struct s1 = new Struct("D"); - s1.set("d", 1); - s1.set("c", 1); - s1.set("b", true); - s1.set("a", 1); - s1.set("A.B.D.b", true); - s1.set("A.B.D.c", 2); - s1.set("A.B.D.d", 2); - - s1.set("A.C.D.a", 3); - s1.set("A.C.D.b", false); - s1.set("A.C.D.c", 3); - s1.set("A.C.D.d", 3); - - - ITypedStruct ts = DType.convert(s1, Multiplicity.REQUIRED); - Assert.assertEquals(ts.toString(), "{\n" + - "\td : \t1\n" + - "\tb : \ttrue\n" + - "\tc : \t1\n" + - "\ta : \t1\n" + - "\tA.B.D.b : \ttrue\n" + - "\tA.B.D.c : \t2\n" + - "\tA.B.D.d : \t2\n" + - "\tA.C.D.a : \t3\n" + - "\tA.C.D.b : \tfalse\n" + - "\tA.C.D.c : \t3\n" + - "\tA.C.D.d : \t3\n" + - "}"); - - } - -} - http://git-wip-us.apache.org/repos/asf/incubator-atlas/blob/30711973/typesystem/src/test/java/org/apache/hadoop/metadata/typesystem/types/TypeInheritanceTest.java ---------------------------------------------------------------------- diff --git a/typesystem/src/test/java/org/apache/hadoop/metadata/typesystem/types/TypeInheritanceTest.java b/typesystem/src/test/java/org/apache/hadoop/metadata/typesystem/types/TypeInheritanceTest.java deleted file mode 100644 index 447e1be..0000000 --- a/typesystem/src/test/java/org/apache/hadoop/metadata/typesystem/types/TypeInheritanceTest.java +++ /dev/null @@ -1,256 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. 
You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.hadoop.metadata.typesystem.types; - -import com.google.common.collect.ImmutableList; -import org.apache.hadoop.metadata.MetadataException; -import org.apache.hadoop.metadata.typesystem.IStruct; -import org.apache.hadoop.metadata.typesystem.ITypedInstance; -import org.apache.hadoop.metadata.typesystem.ITypedStruct; -import org.apache.hadoop.metadata.typesystem.Struct; -import org.testng.Assert; -import org.testng.annotations.BeforeMethod; -import org.testng.annotations.Test; - -import static org.apache.hadoop.metadata.typesystem.types.utils.TypesUtil.createClassTypeDef; -import static org.apache.hadoop.metadata.typesystem.types.utils.TypesUtil.createOptionalAttrDef; -import static org.apache.hadoop.metadata.typesystem.types.utils.TypesUtil.createRequiredAttrDef; -import static org.apache.hadoop.metadata.typesystem.types.utils.TypesUtil.createTraitTypeDef; - -/** - * Unit tests for type inheritance. 
- */ -public class TypeInheritanceTest extends BaseTest { - - @BeforeMethod - public void setup() throws Exception { - TypeSystem.getInstance().reset(); - super.setup(); - } - - /* - * Type Hierarchy is: - * A(a) - * B(b) extends A - */ - @Test - public void testSimpleInheritance() throws MetadataException { - HierarchicalTypeDefinition A = createClassTypeDef("A", null, - createRequiredAttrDef("a", DataTypes.INT_TYPE)); - - HierarchicalTypeDefinition B = createClassTypeDef("B", ImmutableList.of("A"), - createOptionalAttrDef("b", DataTypes.BOOLEAN_TYPE)); - - defineClasses(A, B); - - ClassType BType = getTypeSystem().getDataType(ClassType.class, "B"); - - Struct s1 = new Struct("B"); - s1.set("b", true); - s1.set("a", 1); - - ITypedInstance ts = BType.convert(s1, Multiplicity.REQUIRED); - Assert.assertEquals(ts.toString(), "{\n" + - "\tid : (type: B, id: <unassigned>)\n" + - "\tb : \ttrue\n" + - "\ta : \t1\n" + - "}"); - } - - /* - * Type Hierarchy is: - * A(a, b) - * B(b) extends A - */ - @Test - public void testSimpleInheritanceWithOverrides() throws MetadataException { - HierarchicalTypeDefinition A = createClassTypeDef("A", null, - createRequiredAttrDef("a", DataTypes.INT_TYPE), - createRequiredAttrDef("b", DataTypes.BOOLEAN_TYPE)); - - HierarchicalTypeDefinition B = createClassTypeDef("B", ImmutableList.of("A"), - createOptionalAttrDef("b", DataTypes.BOOLEAN_TYPE)); - - defineClasses(A, B); - - ClassType BType = getTypeSystem().getDataType(ClassType.class, "B"); - - Struct s1 = new Struct("B"); - s1.set("b", true); - s1.set("a", 1); - s1.set("A.B.b", false); - - ITypedInstance ts = BType.convert(s1, Multiplicity.REQUIRED); - Assert.assertEquals(ts.toString(), "{\n" + - "\tid : (type: B, id: <unassigned>)\n" + - "\tb : \ttrue\n" + - "\ta : \t1\n" + - "\tA.B.b : \tfalse\n" + - "}"); - } - - /* - * Type Hierarchy is: - * A(a) - * B(b) extends A - * C(c) extends B - * D(d) extends C - */ - @Test - public void testMultiLevelInheritance() throws MetadataException { - 
HierarchicalTypeDefinition A = createClassTypeDef("A", null, - createRequiredAttrDef("a", DataTypes.INT_TYPE)); - - HierarchicalTypeDefinition B = createClassTypeDef("B", ImmutableList.of("A"), - createOptionalAttrDef("b", DataTypes.BOOLEAN_TYPE)); - - HierarchicalTypeDefinition C = createClassTypeDef("C", ImmutableList.of("B"), - createOptionalAttrDef("c", DataTypes.BYTE_TYPE)); - - HierarchicalTypeDefinition D = createClassTypeDef("D", ImmutableList.of("C"), - createOptionalAttrDef("d", DataTypes.SHORT_TYPE)); - - defineClasses(A, B, C, D); - - ClassType DType = getTypeSystem().getDataType(ClassType.class, "D"); - - Struct s1 = new Struct("D"); - s1.set("d", 1); - s1.set("c", 1); - s1.set("b", true); - s1.set("a", 1); - - ITypedInstance ts = DType.convert(s1, Multiplicity.REQUIRED); - Assert.assertEquals(ts.toString(), "{\n" + - "\tid : (type: D, id: <unassigned>)\n" + - "\td : \t1\n" + - "\tc : \t1\n" + - "\tb : \ttrue\n" + - "\ta : \t1\n" + - "}"); - } - - /* - * Type Hierarchy is: - * A(a,b,c,d) - * B(b) extends A - * C(c) extends A - * D(d) extends B,C - * - * - There are a total of 11 fields in an instance of D - * - an attribute that is hidden by a SubType can referenced by prefixing it with the - * complete Path. - * For e.g. the 'b' attribute in A (that is a superType for B) is hidden the 'b' attribute - * in B. - * So it is availabel by the name 'A.B.D.b' - * - * - Another way to set attributes is to cast. Casting a 'D' instance of 'B' makes the 'A.B.D - * .b' attribute - * available as 'A.B.b'. Casting one more time to an 'A' makes the 'A.B.b' attribute - * available as 'b'. 
- */ - @Test - public void testDiamondInheritance() throws MetadataException { - HierarchicalTypeDefinition A = createTraitTypeDef("A", null, - createRequiredAttrDef("a", DataTypes.INT_TYPE), - createOptionalAttrDef("b", DataTypes.BOOLEAN_TYPE), - createOptionalAttrDef("c", DataTypes.BYTE_TYPE), - createOptionalAttrDef("d", DataTypes.SHORT_TYPE)); - HierarchicalTypeDefinition B = createTraitTypeDef("B", ImmutableList.of("A"), - createOptionalAttrDef("b", DataTypes.BOOLEAN_TYPE)); - HierarchicalTypeDefinition C = createTraitTypeDef("C", ImmutableList.of("A"), - createOptionalAttrDef("c", DataTypes.BYTE_TYPE)); - HierarchicalTypeDefinition D = createTraitTypeDef("D", ImmutableList.of("B", "C"), - createOptionalAttrDef("d", DataTypes.SHORT_TYPE)); - - defineTraits(A, B, C, D); - - TraitType DType = getTypeSystem().getDataType(TraitType.class, "D"); - - Struct s1 = new Struct("D"); - s1.set("d", 1); - s1.set("c", 1); - s1.set("b", true); - s1.set("a", 1); - s1.set("A.B.D.b", true); - s1.set("A.B.D.c", 2); - s1.set("A.B.D.d", 2); - - s1.set("A.C.D.a", 3); - s1.set("A.C.D.b", false); - s1.set("A.C.D.c", 3); - s1.set("A.C.D.d", 3); - - - ITypedStruct ts = DType.convert(s1, Multiplicity.REQUIRED); - Assert.assertEquals(ts.toString(), "{\n" + - "\td : \t1\n" + - "\tb : \ttrue\n" + - "\tc : \t1\n" + - "\ta : \t1\n" + - "\tA.B.D.b : \ttrue\n" + - "\tA.B.D.c : \t2\n" + - "\tA.B.D.d : \t2\n" + - "\tA.C.D.a : \t3\n" + - "\tA.C.D.b : \tfalse\n" + - "\tA.C.D.c : \t3\n" + - "\tA.C.D.d : \t3\n" + - "}"); - - /* - * cast to B and set the 'b' attribute on A. 
- */ - TraitType BType = getTypeSystem().getDataType(TraitType.class, "B"); - IStruct s2 = DType.castAs(ts, "B"); - s2.set("A.B.b", false); - - Assert.assertEquals(ts.toString(), "{\n" + - "\td : \t1\n" + - "\tb : \ttrue\n" + - "\tc : \t1\n" + - "\ta : \t1\n" + - "\tA.B.D.b : \tfalse\n" + - "\tA.B.D.c : \t2\n" + - "\tA.B.D.d : \t2\n" + - "\tA.C.D.a : \t3\n" + - "\tA.C.D.b : \tfalse\n" + - "\tA.C.D.c : \t3\n" + - "\tA.C.D.d : \t3\n" + - "}"); - - /* - * cast again to A and set the 'b' attribute on A. - */ - IStruct s3 = BType.castAs(s2, "A"); - s3.set("b", true); - Assert.assertEquals(ts.toString(), "{\n" + - "\td : \t1\n" + - "\tb : \ttrue\n" + - "\tc : \t1\n" + - "\ta : \t1\n" + - "\tA.B.D.b : \ttrue\n" + - "\tA.B.D.c : \t2\n" + - "\tA.B.D.d : \t2\n" + - "\tA.C.D.a : \t3\n" + - "\tA.C.D.b : \tfalse\n" + - "\tA.C.D.c : \t3\n" + - "\tA.C.D.d : \t3\n" + - "}"); - } -} http://git-wip-us.apache.org/repos/asf/incubator-atlas/blob/30711973/typesystem/src/test/java/org/apache/hadoop/metadata/typesystem/types/TypeSystemTest.java ---------------------------------------------------------------------- diff --git a/typesystem/src/test/java/org/apache/hadoop/metadata/typesystem/types/TypeSystemTest.java b/typesystem/src/test/java/org/apache/hadoop/metadata/typesystem/types/TypeSystemTest.java deleted file mode 100755 index 3b9a4a5..0000000 --- a/typesystem/src/test/java/org/apache/hadoop/metadata/typesystem/types/TypeSystemTest.java +++ /dev/null @@ -1,139 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. 
You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.hadoop.metadata.typesystem.types; - -import com.google.common.collect.ImmutableList; -import org.apache.commons.lang3.RandomStringUtils; -import org.apache.hadoop.metadata.typesystem.types.utils.TypesUtil; -import org.testng.Assert; -import org.testng.annotations.AfterMethod; -import org.testng.annotations.BeforeClass; -import org.testng.annotations.Test; -import scala.actors.threadpool.Arrays; - -import java.util.Collections; -import java.util.List; - -import static org.apache.hadoop.metadata.typesystem.types.utils.TypesUtil.createClassTypeDef; -import static org.apache.hadoop.metadata.typesystem.types.utils.TypesUtil.createRequiredAttrDef; -import static org.apache.hadoop.metadata.typesystem.types.utils.TypesUtil.createStructTypeDef; -import static org.apache.hadoop.metadata.typesystem.types.utils.TypesUtil.createTraitTypeDef; - -public class TypeSystemTest extends BaseTest { - - @BeforeClass - public void setUp() throws Exception { - super.setup(); - } - - @AfterMethod - public void tearDown() throws Exception { - getTypeSystem().reset(); - } - - @Test - public void testGetTypeNames() throws Exception { - getTypeSystem().defineEnumType("enum_test", - new EnumValue("0", 0), - new EnumValue("1", 1), - new EnumValue("2", 2), - new EnumValue("3", 3)); - Assert.assertTrue(getTypeSystem().getTypeNames().contains("enum_test")); - } - - @Test - public void testIsRegistered() throws Exception { - getTypeSystem().defineEnumType("enum_test", - new EnumValue("0", 0), - new EnumValue("1", 1), - new EnumValue("2", 2), - new 
EnumValue("3", 3)); - Assert.assertTrue(getTypeSystem().isRegistered("enum_test")); - } - - @Test - public void testGetTraitsNames() throws Exception { - HierarchicalTypeDefinition<TraitType> classificationTraitDefinition = - TypesUtil.createTraitTypeDef("Classification", - ImmutableList.<String>of(), - TypesUtil.createRequiredAttrDef("tag", DataTypes.STRING_TYPE)); - HierarchicalTypeDefinition<TraitType> piiTrait = - TypesUtil.createTraitTypeDef("PII", ImmutableList.<String>of()); - HierarchicalTypeDefinition<TraitType> phiTrait = - TypesUtil.createTraitTypeDef("PHI", ImmutableList.<String>of()); - HierarchicalTypeDefinition<TraitType> pciTrait = - TypesUtil.createTraitTypeDef("PCI", ImmutableList.<String>of()); - HierarchicalTypeDefinition<TraitType> soxTrait = - TypesUtil.createTraitTypeDef("SOX", ImmutableList.<String>of()); - HierarchicalTypeDefinition<TraitType> secTrait = - TypesUtil.createTraitTypeDef("SEC", ImmutableList.<String>of()); - HierarchicalTypeDefinition<TraitType> financeTrait = - TypesUtil.createTraitTypeDef("Finance", ImmutableList.<String>of()); - - getTypeSystem().defineTypes( - ImmutableList.<StructTypeDefinition>of(), - ImmutableList.of(classificationTraitDefinition, piiTrait, phiTrait, pciTrait, - soxTrait, secTrait, financeTrait), - ImmutableList.<HierarchicalTypeDefinition<ClassType>>of()); - - final ImmutableList<String> traitsNames = getTypeSystem().getTypeNamesByCategory(DataTypes.TypeCategory.TRAIT); - Assert.assertEquals(traitsNames.size(), 7); - List traits = Arrays.asList(new String[]{ - "Classification", - "PII", - "PHI", - "PCI", - "SOX", - "SEC", - "Finance", - }); - - Assert.assertFalse(Collections.disjoint(traitsNames, traits)); - } - - private String random() { - return RandomStringUtils.random(10); - } - - @Test - public void testUTFNames() throws Exception { - TypeSystem ts = getTypeSystem(); - - String enumType = random(); - EnumTypeDefinition orgLevelEnum = - new EnumTypeDefinition(enumType, new EnumValue(random(), 1), 
new EnumValue(random(), 2)); - ts.defineEnumType(orgLevelEnum); - - String structName = random(); - String attrType = random(); - StructTypeDefinition structType = createStructTypeDef(structName, - createRequiredAttrDef(attrType, DataTypes.STRING_TYPE)); - - String className = random(); - HierarchicalTypeDefinition<ClassType> classType = - createClassTypeDef(className, ImmutableList.<String>of(), - createRequiredAttrDef(attrType, DataTypes.STRING_TYPE)); - - String traitName = random(); - HierarchicalTypeDefinition<TraitType> traitType = createTraitTypeDef(traitName, - ImmutableList.<String>of(), createRequiredAttrDef(attrType, DataTypes.INT_TYPE)); - - ts.defineTypes(ImmutableList.of(structType), ImmutableList.of(traitType), ImmutableList.of(classType)); - } -} http://git-wip-us.apache.org/repos/asf/incubator-atlas/blob/30711973/typesystem/src/test/java/org/apache/hadoop/metadata/typesystem/types/ValidationTest.java ---------------------------------------------------------------------- diff --git a/typesystem/src/test/java/org/apache/hadoop/metadata/typesystem/types/ValidationTest.java b/typesystem/src/test/java/org/apache/hadoop/metadata/typesystem/types/ValidationTest.java deleted file mode 100644 index 24a3950..0000000 --- a/typesystem/src/test/java/org/apache/hadoop/metadata/typesystem/types/ValidationTest.java +++ /dev/null @@ -1,102 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. 
You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.hadoop.metadata.typesystem.types; - -import com.google.common.collect.ImmutableList; -import org.apache.hadoop.metadata.typesystem.types.utils.TypesUtil; -import org.testng.annotations.DataProvider; -import org.testng.annotations.Test; - -public class ValidationTest { - @DataProvider(name = "attributeData") - private Object[][] createAttributeData() { - return new String[][]{ - {null, "type"}, {"", "type"}, {"name", null}, {"name", ""}}; - } - - @Test (dataProvider = "attributeData", expectedExceptions = {IllegalArgumentException.class}) - public void testAttributes(String name, String type) { - TypesUtil.createRequiredAttrDef(name, type); - } - - @DataProvider(name = "enumValueData") - private Object[][] createEnumValueData() { - return new String[][]{{null}, {""}}; - } - - @Test (dataProvider = "enumValueData", expectedExceptions = {IllegalArgumentException.class}) - public void testEnumValue(String name) { - new EnumValue(name, 1); - } - - @DataProvider(name = "enumTypeData") - private Object[][] createEnumTypeData() { - EnumValue value = new EnumValue("name", 1); - return new Object[][]{{null, value}, {"", value}, {"name"}}; - } - - @Test (dataProvider = "enumTypeData", expectedExceptions = {IllegalArgumentException.class}) - public void testEnumType(String name, EnumValue... 
values) { - new EnumTypeDefinition(name, values); - } - - @DataProvider(name = "structTypeData") - private Object[][] createStructTypeData() { - AttributeDefinition value = TypesUtil.createRequiredAttrDef("name", "type"); - return new Object[][]{{null, value}, {"", value}, {"name"}}; - } - - @Test (dataProvider = "structTypeData", expectedExceptions = {IllegalArgumentException.class}) - public void testStructType(String name, AttributeDefinition... values) { - new StructTypeDefinition(name, values); - } - - @DataProvider(name = "classTypeData") - private Object[][] createClassTypeData() { - return new Object[][]{{null}, {""}}; - } - - @Test (dataProvider = "classTypeData", expectedExceptions = {IllegalArgumentException.class}) - public void testClassType(String name) { - AttributeDefinition value = TypesUtil.createRequiredAttrDef("name", "type");; - TypesUtil.createClassTypeDef(name, ImmutableList.of("super"), value); - } - - @Test (dataProvider = "classTypeData", expectedExceptions = {IllegalArgumentException.class}) - public void testTraitType(String name) { - AttributeDefinition value = TypesUtil.createRequiredAttrDef("name", "type");; - TypesUtil.createTraitTypeDef(name, ImmutableList.of("super"), value); - } - - @Test - public void testValidTypes() { - AttributeDefinition attribute = TypesUtil.createRequiredAttrDef("name", "type"); - - //class with no attributes - TypesUtil.createClassTypeDef("name", ImmutableList.of("super")); - - //class with no super types - TypesUtil.createClassTypeDef("name", ImmutableList.<String>of(), attribute); - - //trait with no attributes - TypesUtil.createTraitTypeDef("name", ImmutableList.of("super")); - - //trait with no super types - TypesUtil.createTraitTypeDef("name", ImmutableList.<String>of(), attribute); - } -} http://git-wip-us.apache.org/repos/asf/incubator-atlas/blob/30711973/typesystem/src/test/scala/org/apache/atlas/typesystem/builders/BuilderTest.scala 
---------------------------------------------------------------------- diff --git a/typesystem/src/test/scala/org/apache/atlas/typesystem/builders/BuilderTest.scala b/typesystem/src/test/scala/org/apache/atlas/typesystem/builders/BuilderTest.scala new file mode 100644 index 0000000..b6e3318 --- /dev/null +++ b/typesystem/src/test/scala/org/apache/atlas/typesystem/builders/BuilderTest.scala @@ -0,0 +1,80 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.atlas.typesystem.builders + +import org.apache.atlas.typesystem.TypesDef +import org.apache.atlas.typesystem.types.TypeSystem +import org.scalatest.{BeforeAndAfter, FunSuite} + +abstract class BuilderTest extends FunSuite with BeforeAndAfter { + + var tDef : TypesDef = null + + before { + TypeSystem.getInstance().reset() + + val b = new TypesBuilder + import b._ + + tDef = types { + + _trait("Dimension") {} + _trait("PII") {} + _trait("Metric") {} + _trait("ETL") {} + _trait("JdbcAccess") {} + + _class("DB") { + "name" ~ (string, required, indexed, unique) + "owner" ~ (string) + "createTime" ~ (int) + } + + _class("StorageDesc") { + "inputFormat" ~ (string, required) + "outputFormat" ~ (string, required) + } + + _class("Column") { + "name" ~ (string, required) + "dataType" ~ (string, required) + "sd" ~ ("StorageDesc", required) + } + + _class("Table", List()) { + "name" ~ (string, required, indexed) + "db" ~ ("DB", required) + "sd" ~ ("StorageDesc", required) + } + + _class("LoadProcess") { + "name" ~ (string, required) + "inputTables" ~ (array("Table"), collection) + "outputTable" ~ ("Table", required) + + } + + _class("View") { + "name" ~ (string, required) + "inputTables" ~ (array("Table"), collection) + } + } + } + +} http://git-wip-us.apache.org/repos/asf/incubator-atlas/blob/30711973/typesystem/src/test/scala/org/apache/atlas/typesystem/builders/InstanceBuilderTest.scala ---------------------------------------------------------------------- diff --git a/typesystem/src/test/scala/org/apache/atlas/typesystem/builders/InstanceBuilderTest.scala b/typesystem/src/test/scala/org/apache/atlas/typesystem/builders/InstanceBuilderTest.scala new file mode 100644 index 0000000..83fb817 --- /dev/null +++ b/typesystem/src/test/scala/org/apache/atlas/typesystem/builders/InstanceBuilderTest.scala @@ -0,0 +1,105 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. 
See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.atlas.typesystem.builders + +import org.apache.atlas.typesystem.types.{ClassType, Multiplicity, TypeSystem} + + +class InstanceBuilderTest extends BuilderTest { + + test("test1") { + TypeSystem.getInstance().defineTypes(tDef) + + val b = new InstanceBuilder + import b._ + + val instances = b create { + + val salesDB = instance("DB") { // use instance to create Referenceables. use closure to + // set attributes of instance + 'name ~ "Sales" // use '~' to set attributes. Use a Symbol (names starting with ') for + // attribute names. + 'owner ~ "John ETL" + 'createTime ~ 1000 + } + + val salesFact = instance("Table") { + 'name ~ "sales_fact" + 'db ~ salesDB + val sd = instance("StorageDesc") { // any valid scala allowed in closure. + 'inputFormat ~ "TextIputFormat" + 'outputFormat ~ "TextOutputFormat" + } + 'sd ~ sd // use ~ to set references, collections and maps. 
+ val columns = Seq( + instance("Column") { + 'name ~ "time_id" + 'dataType ~ "int" + 'sd ~ sd + }, + instance("Column") { + 'name ~ "product_id" + 'dataType ~ "int" + 'sd ~ sd + }, + instance("Column") { + 'name ~ "customer_id" + 'dataType ~ "int" + 'sd ~ sd + }, + instance("Column", "Metric") { + 'name ~ "sales" + 'dataType ~ "int" + 'sd ~ sd + 'Metric("x") ~ 1 // use 'TraitName("attrName") to set values on traits. + } + ) + + 'columns ~ columns + + } + + salesFact.sd.inputFormat ~ "TextInputFormat" // use dot navigation to alter attributes in the object graph. + // here I am fixing the typo in "TextInputFormat" + // dot navigation also works for arrays. + // here I am fixing column(3). Metric trait has no attributes. + val c = salesFact.columns + c(3) = instance("Column", "Metric") { + 'name ~ "sales" + 'dataType ~ "int" + 'sd ~ salesFact.sd + } + + } + + val ts = TypeSystem.getInstance() + + import scala.collection.JavaConversions._ + val typedInstances = instances.map { i => + val iTyp = ts.getDataType(classOf[ClassType], i.getTypeName) + iTyp.convert(i, Multiplicity.REQUIRED) + } + + typedInstances.foreach { i => + println(i) + } + + } + +} http://git-wip-us.apache.org/repos/asf/incubator-atlas/blob/30711973/typesystem/src/test/scala/org/apache/atlas/typesystem/builders/MultiplicityTest.scala ---------------------------------------------------------------------- diff --git a/typesystem/src/test/scala/org/apache/atlas/typesystem/builders/MultiplicityTest.scala b/typesystem/src/test/scala/org/apache/atlas/typesystem/builders/MultiplicityTest.scala new file mode 100644 index 0000000..eea904a --- /dev/null +++ b/typesystem/src/test/scala/org/apache/atlas/typesystem/builders/MultiplicityTest.scala @@ -0,0 +1,125 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. 
The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.atlas.typesystem.builders + +import org.apache.atlas.MetadataException +import org.apache.atlas.typesystem.types.{ClassType, Multiplicity, TypeSystem} +import org.scalatest.{BeforeAndAfterAll, FunSuite} + + +class MultiplicityTest extends FunSuite with BeforeAndAfterAll { + + override def beforeAll() = { + TypeSystem.getInstance().reset() + + val b = new TypesBuilder + import b._ + + val tDef = types { + + _trait("Dimension") {} + _trait("PII") {} + _trait("Metric") {} + _trait("ETL") {} + _trait("JdbcAccess") {} + + _class("DB") { + "name" ~ (string, required, indexed, unique) + "owner" ~ (string) + "createTime" ~ (int) + } + + _class("StorageDesc") { + "inputFormat" ~ (string, required) + "outputFormat" ~ (string, required) + } + + _class("Column") { + "name" ~ (string, required) + "dataType" ~ (string, required) + "sd" ~ ("StorageDesc", required) + } + + _class("Table", List()) { + "name" ~ (string, required, indexed) + "db" ~ ("DB", required) + "sd" ~ ("StorageDesc", required) + } + + _class("LoadProcess") { + "name" ~ (string, required) + "inputTables" ~ (array("Table"), collection) + "outputTable" ~ ("Table", required) + + } + + _class("View") { + "name" ~ (string, required) + "inputTables" ~ (array("Table"), collection) + } + + _class("AT") { + "name" ~ (string, required) + "stringSet" ~ (array("string"), multiplicty(0, Int.MaxValue, true)) + } + } + + 
TypeSystem.getInstance().defineTypes(tDef) + } + + test("test1") { + + val b = new InstanceBuilder + import b._ + + val instances = b create { + val a = instance("AT") { // use instance to create Referenceables. use closure to + // set attributes of instance + 'name ~ "A1" // use '~' to set attributes. Use a Symbol (names starting with ') for + 'stringSet ~ Seq("a", "a") + } + } + + val ts = TypeSystem.getInstance() + import scala.collection.JavaConversions._ + val typedInstances = instances.map { i => + val iTyp = ts.getDataType(classOf[ClassType], i.getTypeName) + iTyp.convert(i, Multiplicity.REQUIRED) + } + + typedInstances.foreach { i => + println(i) + } + } + + test("WrongMultiplicity") { + val b = new TypesBuilder + import b._ + val tDef = types { + _class("Wrong") { + "name" ~ (string, required) + "stringSet" ~ (string, multiplicty(0, Int.MaxValue, true)) + } + } + val me = intercept[MetadataException] { + TypeSystem.getInstance().defineTypes(tDef) + } + assert("A multiplicty of more than one requires a collection type for attribute 'stringSet'" == me.getMessage) + } +} http://git-wip-us.apache.org/repos/asf/incubator-atlas/blob/30711973/typesystem/src/test/scala/org/apache/atlas/typesystem/builders/TypesBuilderTest.scala ---------------------------------------------------------------------- diff --git a/typesystem/src/test/scala/org/apache/atlas/typesystem/builders/TypesBuilderTest.scala b/typesystem/src/test/scala/org/apache/atlas/typesystem/builders/TypesBuilderTest.scala new file mode 100644 index 0000000..4eb8f3c --- /dev/null +++ b/typesystem/src/test/scala/org/apache/atlas/typesystem/builders/TypesBuilderTest.scala @@ -0,0 +1,35 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. 
The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.atlas.typesystem.builders + +import org.apache.atlas.typesystem.json.TypesSerialization +import org.apache.atlas.typesystem.types.TypeSystem +import org.junit.runner.RunWith +import org.scalatest.junit.JUnitRunner + +@RunWith(classOf[JUnitRunner]) +class TypesBuilderTest extends BuilderTest { + + + test("test1") { + TypeSystem.getInstance().defineTypes(tDef) + + println(TypesSerialization.toJson(TypeSystem.getInstance(), x => true)) + } +} http://git-wip-us.apache.org/repos/asf/incubator-atlas/blob/30711973/typesystem/src/test/scala/org/apache/atlas/typesystem/json/SerializationTest.scala ---------------------------------------------------------------------- diff --git a/typesystem/src/test/scala/org/apache/atlas/typesystem/json/SerializationTest.scala b/typesystem/src/test/scala/org/apache/atlas/typesystem/json/SerializationTest.scala new file mode 100755 index 0000000..5556d77 --- /dev/null +++ b/typesystem/src/test/scala/org/apache/atlas/typesystem/json/SerializationTest.scala @@ -0,0 +1,242 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. 
The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.atlas.typesystem.json

import com.google.common.collect.ImmutableList
import org.apache.atlas.typesystem.persistence.{Id, ReferenceableInstance, StructInstance}
import org.apache.atlas.typesystem.types._
import org.apache.atlas.typesystem.types.utils.TypesUtil
import org.apache.atlas.typesystem.{ITypedReferenceableInstance, ITypedStruct, Referenceable, Struct}
import org.json4s.native.JsonMethods._
import org.json4s.native.Serialization.{write => swrite, _}
import org.json4s.{NoTypeHints, _}
import org.junit.{Assert, Before, Test}

/**
 * Round-trip JSON serialization tests for the typesystem: typed structs,
 * traits with inherited/shadowed attributes, and referenceable class
 * instances. Exercises the json4s serializers defined in this package
 * (TypedStructSerializer, TypedReferenceableInstanceSerializer,
 * BigDecimalSerializer, BigIntegerSerializer) as well as
 * InstanceSerialization.
 */
class SerializationTest extends BaseTest {

    // Resolved in setup() from the types registered by BaseTest.
    private[atlas] var structType: StructType = null
    private[atlas] var recursiveStructType: StructType = null

    @Before
    override def setup {
        super.setup
        // Look up the struct types BaseTest.setup registers so each test can
        // convert untyped Structs into typed instances.
        structType = getTypeSystem.getDataType(classOf[StructType], BaseTest.STRUCT_TYPE_1).asInstanceOf[StructType]
        recursiveStructType = getTypeSystem.getDataType(classOf[StructType], BaseTest.STRUCT_TYPE_2).asInstanceOf[StructType]
    }

    /**
     * Struct -> typed instance -> JSON -> StructInstance round trip.
     * Verifies the typed toString, the serialized JSON, and the re-read
     * instance's toString.
     */
    @Test def test1 {
        val s: Struct = BaseTest.createStruct()
        val ts: ITypedStruct = structType.convert(s, Multiplicity.REQUIRED)

        Assert.assertEquals(ts.toString, "{\n\ta : \t1\n\tb : \ttrue\n\tc : \t1\n\td : \t2\n\te : \t1\n\tf : \t1\n\tg : \t1\n\th : \t1.0\n\ti : \t1.0\n\tj : \t1\n\tk : \t1\n\tl : \t" + BaseTest.TEST_DATE + "\n\tm : \t[1, 1]\n\tn : \t[1.1, 1.1]\n\to : \t{b=2.0, a=1.0}\n}")

        implicit val formats = org.json4s.native.Serialization.formats(NoTypeHints) + new TypedStructSerializer +
            new BigDecimalSerializer + new BigIntegerSerializer

        // Json representation
        val ser = swrite(ts)
        Assert.assertEquals(ser, "{\"$typeName$\":\"t1\",\"e\":1,\"n\":[1.1,1.1],\"h\":1.0,\"b\":true,\"k\":1,\"j\":1,\"d\":2,\"m\":[1,1],\"g\":1,\"a\":1,\"i\":1.0,\"c\":1,\"l\":\"" + BaseTest.TEST_DATE + "\",\"f\":1,\"o\":{\"b\":2.0,\"a\":1.0}}")

        // Typed Struct read back. Note the BigDecimal list "n" comes back with
        // the full binary-double expansion of 1.1 after the round trip.
        val ts1 = read[StructInstance](ser)
        Assert.assertEquals(ts1.toString, "{\n\ta : \t1\n\tb : \ttrue\n\tc : \t1\n\td : \t2\n\te : \t1\n\tf : \t1\n\tg : \t1\n\th : \t1.0\n\ti : \t1.0\n\tj : \t1\n\tk : \t1\n\tl : \t" + BaseTest.TEST_DATE + "\n\tm : \t[1, 1]\n\tn : \t[1.100000000000000088817841970012523233890533447265625, 1.100000000000000088817841970012523233890533447265625]\n\to : \t{b=2.0, a=1.0}\n}")
    }

    /**
     * Reads a StructInstance straight from a hand-written JSON string and
     * checks the resulting typed toString.
     */
    @Test def test2 {
        implicit val formats = org.json4s.native.Serialization.formats(NoTypeHints) + new TypedStructSerializer +
            new BigDecimalSerializer + new BigIntegerSerializer

        val ts1 = read[StructInstance](
            """
        {"$typeName$":"t1","e":1,"n":[1.1,1.1],"h":1.0,"b":true,"k":1,"j":1,"d":2,"m":[1,1],"g":1,"a":1,"i":1.0,
        "c":1,"l":"2014-12-03T19:38:55.053Z","f":1,"o":{"b":2.0,"a":1.0}}""")
        // Typed Struct read from string
        Assert.assertEquals(ts1.toString, "{\n\ta : \t1\n\tb : \ttrue\n\tc : \t1\n\td : \t2\n\te : \t1\n\tf : \t1\n\tg : \t1\n\th : \t1.0\n\ti : \t1.0\n\tj : \t1\n\tk : \t1\n\tl : \t2014-12-03T19:38:55.053Z\n\tm : \t[1, 1]\n\tn : \t[1.100000000000000088817841970012523233890533447265625, 1.100000000000000088817841970012523233890533447265625]\n\to : \t{b=2.0, a=1.0}\n}")
    }

    /**
     * Serialization of a trait hierarchy (D extends B, C; both extend A).
     * Shadowed attributes are addressed with qualified names like "A.B.D.b".
     */
    @Test def testTrait {
        val A: HierarchicalTypeDefinition[TraitType] = TypesUtil.createTraitTypeDef("A", null,
            TypesUtil.createRequiredAttrDef("a", DataTypes.INT_TYPE),
            TypesUtil.createOptionalAttrDef("b", DataTypes.BOOLEAN_TYPE),
            TypesUtil.createOptionalAttrDef("c", DataTypes.BYTE_TYPE),
            TypesUtil.createOptionalAttrDef("d", DataTypes.SHORT_TYPE))
        val B: HierarchicalTypeDefinition[TraitType] = TypesUtil.createTraitTypeDef(
            "B", ImmutableList.of[String]("A"),
            TypesUtil.createOptionalAttrDef("b", DataTypes.BOOLEAN_TYPE))
        val C: HierarchicalTypeDefinition[TraitType] = TypesUtil.createTraitTypeDef(
            "C", ImmutableList.of[String]("A"),
            TypesUtil.createOptionalAttrDef("c", DataTypes.BYTE_TYPE))
        val D: HierarchicalTypeDefinition[TraitType] = TypesUtil.createTraitTypeDef(
            "D", ImmutableList.of[String]("B", "C"),
            TypesUtil.createOptionalAttrDef("d", DataTypes.SHORT_TYPE))

        defineTraits(A, B, C, D)

        val DType: TraitType = getTypeSystem.getDataType(classOf[TraitType], "D").asInstanceOf[TraitType]
        val s1: Struct = new Struct("D")
        s1.set("d", 1)
        s1.set("c", 1)
        s1.set("b", true)
        s1.set("a", 1)
        // Qualified names address the attribute copies shadowed along each
        // inheritance path (A via B vs. A via C).
        s1.set("A.B.D.b", true)
        s1.set("A.B.D.c", 2)
        s1.set("A.B.D.d", 2)
        s1.set("A.C.D.a", 3)
        s1.set("A.C.D.b", false)
        s1.set("A.C.D.c", 3)
        s1.set("A.C.D.d", 3)

        val ts: ITypedStruct = DType.convert(s1, Multiplicity.REQUIRED)

        implicit val formats = org.json4s.native.Serialization.formats(NoTypeHints) + new TypedStructSerializer +
            new BigDecimalSerializer + new BigIntegerSerializer

        // Typed Struct :
        Assert.assertEquals(ts.toString, "{\n\td : \t1\n\tb : \ttrue\n\tc : \t1\n\ta : \t1\n\tA.B.D.b : \ttrue\n\tA.B.D.c : \t2\n\tA.B.D.d : \t2\n\tA.C.D.a : \t3\n\tA.C.D.b : \tfalse\n\tA.C.D.c : \t3\n\tA.C.D.d : \t3\n}")

        // Json representation :
        val ser = swrite(ts)
        Assert.assertEquals(ser, "{\"$typeName$\":\"D\",\"A.C.D.d\":3,\"A.B.D.c\":2,\"b\":true,\"A.C.D.c\":3,\"d\":1,\"A.B.D.b\":true,\"a\":1,\"A.C.D.b\":false,\"A.B.D.d\":2,\"c\":1,\"A.C.D.a\":3}")

        val ts1 = read[StructInstance](
            """
        {"$typeName$":"D","A.C.D.d":3,"A.B.D.c":2,"b":true,"A.C.D.c":3,"d":1,
        "A.B.D.b":true,"a":1,"A.C.D.b":false,"A.B.D.d":2,"c":1,"A.C.D.a":3}""")
        // Typed Struct read from string:
        Assert.assertEquals(ts1.toString, "{\n\td : \t1\n\tb : \ttrue\n\tc : \t1\n\ta : \t1\n\tA.B.D.b : \ttrue\n\tA.B.D.c : \t2\n\tA.B.D.d : \t2\n\tA.C.D.a : \t3\n\tA.C.D.b : \tfalse\n\tA.C.D.c : \t3\n\tA.C.D.d : \t3\n}")
    }

    /**
     * Registers the HR model: Department <-> Person (reverse "department"),
     * Manager extends Person (reverse "manager"/"subordinates"), plus the
     * SecurityClearance trait.
     */
    def defineHRTypes(ts: TypeSystem): Unit = {
        val deptTypeDef: HierarchicalTypeDefinition[ClassType] = TypesUtil.createClassTypeDef(
            "Department",
            ImmutableList.of[String],
            TypesUtil.createRequiredAttrDef("name", DataTypes.STRING_TYPE),
            new AttributeDefinition("employees", String.format("array<%s>", "Person"),
                Multiplicity.COLLECTION, true, "department"))
        val personTypeDef: HierarchicalTypeDefinition[ClassType] = TypesUtil.createClassTypeDef(
            "Person", ImmutableList.of[String],
            TypesUtil.createRequiredAttrDef("name", DataTypes.STRING_TYPE),
            new AttributeDefinition("department", "Department", Multiplicity.REQUIRED, false, "employees"),
            new AttributeDefinition("manager", "Manager", Multiplicity.OPTIONAL, false, "subordinates"))
        val managerTypeDef: HierarchicalTypeDefinition[ClassType] = TypesUtil.createClassTypeDef(
            "Manager", ImmutableList.of[String]("Person"),
            new AttributeDefinition("subordinates", String.format("array<%s>", "Person"),
                Multiplicity.COLLECTION, false, "manager"))
        val securityClearanceTypeDef: HierarchicalTypeDefinition[TraitType] =
            TypesUtil.createTraitTypeDef("SecurityClearance", ImmutableList.of[String],
                TypesUtil.createRequiredAttrDef("level", DataTypes.INT_TYPE))

        ts.defineTypes(ImmutableList.of[StructTypeDefinition],
            ImmutableList.of[HierarchicalTypeDefinition[TraitType]](securityClearanceTypeDef),
            ImmutableList.of[HierarchicalTypeDefinition[ClassType]](deptTypeDef, personTypeDef, managerTypeDef)
        )
    }

    /**
     * Builds an "hr" Department instance graph: John (Person) and Jane
     * (Manager, carrying the SecurityClearance trait), wired together via Ids.
     */
    def defineHRDept(): Referenceable = {
        val hrDept: Referenceable = new Referenceable("Department")
        val john: Referenceable = new Referenceable("Person")
        val jane: Referenceable = new Referenceable("Manager", "SecurityClearance")
        hrDept.set("name", "hr")
        john.set("name", "John")
        john.set("department", hrDept.getId)
        jane.set("name", "Jane")
        jane.set("department", hrDept.getId)
        john.set("manager", jane.getId)
        hrDept.set("employees", ImmutableList.of[Referenceable](john, jane))
        jane.set("subordinates", ImmutableList.of[Id](john.getId))
        jane.getTrait("SecurityClearance").set("level", 1)
        hrDept
    }

    /**
     * Serializes a full class-instance object graph with json4s and reads it
     * back as a ReferenceableInstance (output is printed, not asserted).
     */
    @Test def testClass {
        val ts: TypeSystem = getTypeSystem
        defineHRTypes(ts)
        val hrDept: Referenceable = defineHRDept()

        val deptType: ClassType = ts.getDataType(classOf[ClassType], "Department")
        val hrDept2: ITypedReferenceableInstance = deptType.convert(hrDept, Multiplicity.REQUIRED)

        println(s"HR Dept Object Graph:\n${hrDept2}\n")

        implicit val formats = org.json4s.native.Serialization.formats(NoTypeHints) + new TypedStructSerializer +
            new TypedReferenceableInstanceSerializer + new BigDecimalSerializer + new BigIntegerSerializer

        val ser = swrite(hrDept2)
        println(s"HR Dept JSON:\n${pretty(render(parse(ser)))}\n")

        println(s"HR Dept Object Graph read from JSON:${read[ReferenceableInstance](ser)}\n")
    }

    /**
     * Round-trips an untyped Referenceable through InstanceSerialization
     * before typing and re-serializing it with json4s.
     */
    @Test def testReference {
        val ts: TypeSystem = getTypeSystem
        defineHRTypes(ts)
        val hrDept: Referenceable = defineHRDept()

        val jsonStr = InstanceSerialization.toJson(hrDept)
        val hrDept2 = InstanceSerialization.fromJsonReferenceable(jsonStr)

        val deptType: ClassType = ts.getDataType(classOf[ClassType], "Department")
        val hrDept3: ITypedReferenceableInstance = deptType.convert(hrDept2, Multiplicity.REQUIRED)

        println(s"HR Dept Object Graph:\n${hrDept3}\n")

        implicit val formats = org.json4s.native.Serialization.formats(NoTypeHints) + new TypedStructSerializer +
            new TypedReferenceableInstanceSerializer + new BigDecimalSerializer + new BigIntegerSerializer

        val ser = swrite(hrDept3)
        println(s"HR Dept JSON:\n${pretty(render(parse(ser)))}\n")

        println(s"HR Dept Object Graph read from JSON:${read[ReferenceableInstance](ser)}\n")
    }

    /**
     * Round-trips a *typed* instance through InstanceSerialization, then
     * re-types the deserialized Referenceable and serializes it with json4s.
     */
    @Test def testReference2 {
        val ts: TypeSystem = getTypeSystem
        defineHRTypes(ts)
        val hrDept: Referenceable = defineHRDept()

        val deptType: ClassType = ts.getDataType(classOf[ClassType], "Department")
        val hrDept2: ITypedReferenceableInstance = deptType.convert(hrDept, Multiplicity.REQUIRED)

        val jsonStr = InstanceSerialization.toJson(hrDept2)
        val hrDept3 = InstanceSerialization.fromJsonReferenceable(jsonStr)

        // Fix: convert the instance read back from JSON (hrDept3), not hrDept2
        // again — previously hrDept3 was unused, so the deserialized object
        // was never exercised by this test.
        val hrDept4: ITypedReferenceableInstance = deptType.convert(hrDept3, Multiplicity.REQUIRED)

        println(s"HR Dept Object Graph:\n${hrDept4}\n")

        implicit val formats = org.json4s.native.Serialization.formats(NoTypeHints) + new TypedStructSerializer +
            new TypedReferenceableInstanceSerializer + new BigDecimalSerializer + new BigIntegerSerializer

        val ser = swrite(hrDept4)
        println(s"HR Dept JSON:\n${pretty(render(parse(ser)))}\n")

        println(s"HR Dept Object Graph read from JSON:${read[ReferenceableInstance](ser)}\n")
    }
}