http://git-wip-us.apache.org/repos/asf/incubator-atlas/blob/30711973/repository/src/test/java/org/apache/atlas/repository/typestore/GraphBackedTypeStoreTest.java
----------------------------------------------------------------------
diff --git a/repository/src/test/java/org/apache/atlas/repository/typestore/GraphBackedTypeStoreTest.java b/repository/src/test/java/org/apache/atlas/repository/typestore/GraphBackedTypeStoreTest.java
new file mode 100755
index 0000000..56df843
--- /dev/null
+++ b/repository/src/test/java/org/apache/atlas/repository/typestore/GraphBackedTypeStoreTest.java
@@ -0,0 +1,128 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.atlas.repository.typestore;
+
+import com.thinkaurelius.titan.core.TitanGraph;
+import com.tinkerpop.blueprints.Direction;
+import com.tinkerpop.blueprints.Edge;
+import com.tinkerpop.blueprints.Vertex;
+import junit.framework.Assert;
+import org.apache.atlas.GraphTransaction;
+import org.apache.atlas.MetadataException;
+import org.apache.atlas.RepositoryMetadataModule;
+import org.apache.atlas.TestUtils;
+import org.apache.atlas.repository.graph.GraphHelper;
+import org.apache.atlas.repository.graph.GraphProvider;
+import org.apache.atlas.typesystem.TypesDef;
+import org.apache.atlas.typesystem.types.AttributeDefinition;
+import org.apache.atlas.typesystem.types.ClassType;
+import org.apache.atlas.typesystem.types.DataTypes;
+import org.apache.atlas.typesystem.types.EnumTypeDefinition;
+import org.apache.atlas.typesystem.types.EnumValue;
+import org.apache.atlas.typesystem.types.HierarchicalTypeDefinition;
+import org.apache.atlas.typesystem.types.StructTypeDefinition;
+import org.apache.atlas.typesystem.types.TraitType;
+import org.apache.atlas.typesystem.types.TypeSystem;
+import org.testng.annotations.BeforeClass;
+import org.testng.annotations.Guice;
+import org.testng.annotations.Test;
+
+import javax.inject.Inject;
+import java.util.List;
+
+@Guice(modules = RepositoryMetadataModule.class)
+public class GraphBackedTypeStoreTest {
+    @Inject
+    private GraphProvider<TitanGraph> graphProvider;
+
+    @Inject
+    private ITypeStore typeStore;
+
+    private TypeSystem ts;
+
+    @BeforeClass
+    public void setUp() throws Exception {
+        ts = TypeSystem.getInstance();
+        ts.reset();
+        TestUtils.defineDeptEmployeeTypes(ts);
+    }
+
+    @Test
+    @GraphTransaction
+    public void testStore() throws MetadataException {
+        typeStore.store(ts);
+        dumpGraph();
+    }
+
+    private void dumpGraph() {
+        TitanGraph graph = graphProvider.get();
+        for (Vertex v : graph.getVertices()) {
+            System.out.println("****v = " + GraphHelper.vertexString(v));
+            for (Edge e : v.getEdges(Direction.OUT)) {
+                System.out.println("****e = " + GraphHelper.edgeString(e));
+            }
+        }
+    }
+
+    @Test (dependsOnMethods = "testStore")
+    @GraphTransaction
+    public void testRestore() throws Exception {
+        TypesDef types = typeStore.restore();
+
+        //validate enum
+        List<EnumTypeDefinition> enumTypes = types.enumTypesAsJavaList();
+        Assert.assertEquals(1, enumTypes.size());
+        EnumTypeDefinition orgLevel = enumTypes.get(0);
+        Assert.assertEquals(orgLevel.name, "OrgLevel");
+        Assert.assertEquals(orgLevel.enumValues.length, 2);
+        EnumValue enumValue = orgLevel.enumValues[0];
+        Assert.assertEquals(enumValue.value, "L1");
+        Assert.assertEquals(enumValue.ordinal, 1);
+
+        //validate class
+        List<StructTypeDefinition> structTypes = types.structTypesAsJavaList();
+        Assert.assertEquals(1, structTypes.size());
+
+        boolean clsTypeFound = false;
+        List<HierarchicalTypeDefinition<ClassType>> classTypes = types.classTypesAsJavaList();
+        for (HierarchicalTypeDefinition<ClassType> classType : classTypes) {
+            if (classType.typeName.equals("Manager")) {
+                ClassType expectedType = ts.getDataType(ClassType.class, classType.typeName);
+                Assert.assertEquals(expectedType.immediateAttrs.size(), classType.attributeDefinitions.length);
+                Assert.assertEquals(expectedType.superTypes.size(), classType.superTypes.size());
+                clsTypeFound = true;
+            }
+        }
+        Assert.assertTrue("Manager type not restored", clsTypeFound);
+
+        //validate trait
+        List<HierarchicalTypeDefinition<TraitType>> traitTypes = types.traitTypesAsJavaList();
+        Assert.assertEquals(1, traitTypes.size());
+        HierarchicalTypeDefinition<TraitType> trait = traitTypes.get(0);
+        Assert.assertEquals("SecurityClearance", trait.typeName);
+        Assert.assertEquals(1, trait.attributeDefinitions.length);
+        AttributeDefinition attribute = trait.attributeDefinitions[0];
+        Assert.assertEquals("level", attribute.name);
+        Assert.assertEquals(DataTypes.INT_TYPE.getName(), attribute.dataTypeName);
+
+        //validate the new types
+        ts.reset();
+        ts.defineTypes(types);
+    }
+}
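
For context: the test above drives ITypeStore's store/restore round trip against the Titan graph. A minimal sketch of the same flow outside TestNG, assuming a typeStore injected through RepositoryMetadataModule exactly as in the test:

    // Persist the in-memory TypeSystem to the graph, then read it back.
    TypeSystem ts = TypeSystem.getInstance();
    ts.reset();
    TestUtils.defineDeptEmployeeTypes(ts);    // same fixture the test uses

    typeStore.store(ts);                      // write all type definitions to graph vertices
    TypesDef restored = typeStore.restore();  // read them back as a TypesDef

    ts.reset();
    ts.defineTypes(restored);                 // re-registering cleanly shows the round trip is lossless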
http://git-wip-us.apache.org/repos/asf/incubator-atlas/blob/30711973/repository/src/test/java/org/apache/hadoop/metadata/RepositoryServiceLoadingTest.java
----------------------------------------------------------------------
diff --git a/repository/src/test/java/org/apache/hadoop/metadata/RepositoryServiceLoadingTest.java b/repository/src/test/java/org/apache/hadoop/metadata/RepositoryServiceLoadingTest.java
deleted file mode 100755
index e5130fb..0000000
--- a/repository/src/test/java/org/apache/hadoop/metadata/RepositoryServiceLoadingTest.java
+++ /dev/null
@@ -1,46 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.hadoop.metadata;
-
-import com.thinkaurelius.titan.core.TitanGraph;
-import org.apache.hadoop.metadata.repository.graph.GraphProvider;
-import org.testng.Assert;
-import org.testng.annotations.Guice;
-import org.testng.annotations.Test;
-
-import javax.inject.Inject;
-
-/**
- * Unit test for Guice injector service loading
- *
- * Uses TestNG's Guice annotation to load the necessary modules and inject the
- * objects from Guice
- */
-@Guice(modules = RepositoryMetadataModule.class)
-public class RepositoryServiceLoadingTest {
-
-    @Inject
-    private GraphProvider<TitanGraph> graphProvider;
-
-    @Test
-    public void testGetGraphService() throws Exception {
-        Assert.assertNotNull(graphProvider);
-        Assert.assertNotNull(graphProvider.get());
-    }
-}

http://git-wip-us.apache.org/repos/asf/incubator-atlas/blob/30711973/repository/src/test/java/org/apache/hadoop/metadata/TestUtils.java
----------------------------------------------------------------------
diff --git a/repository/src/test/java/org/apache/hadoop/metadata/TestUtils.java b/repository/src/test/java/org/apache/hadoop/metadata/TestUtils.java
deleted file mode 100755
index 8e2f82e..0000000
--- a/repository/src/test/java/org/apache/hadoop/metadata/TestUtils.java
+++ /dev/null
@@ -1,172 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.hadoop.metadata;
-
-import com.google.common.collect.ImmutableList;
-import com.thinkaurelius.titan.core.TitanGraph;
-import com.tinkerpop.blueprints.Edge;
-import com.tinkerpop.blueprints.Vertex;
-import com.tinkerpop.blueprints.util.io.graphson.GraphSONWriter;
-import org.apache.hadoop.metadata.repository.graph.GraphHelper;
-import org.apache.hadoop.metadata.typesystem.ITypedReferenceableInstance;
-import org.apache.hadoop.metadata.typesystem.Referenceable;
-import org.apache.hadoop.metadata.typesystem.types.AttributeDefinition;
-import org.apache.hadoop.metadata.typesystem.types.ClassType;
-import org.apache.hadoop.metadata.typesystem.types.DataTypes;
-import org.apache.hadoop.metadata.typesystem.types.EnumType;
-import org.apache.hadoop.metadata.typesystem.types.EnumTypeDefinition;
-import org.apache.hadoop.metadata.typesystem.types.EnumValue;
-import org.apache.hadoop.metadata.typesystem.types.HierarchicalTypeDefinition;
-import org.apache.hadoop.metadata.typesystem.types.Multiplicity;
-import org.apache.hadoop.metadata.typesystem.types.StructTypeDefinition;
-import org.apache.hadoop.metadata.typesystem.types.TraitType;
-import org.apache.hadoop.metadata.typesystem.types.TypeSystem;
-import org.testng.Assert;
-
-import java.io.File;
-
-import static org.apache.hadoop.metadata.typesystem.types.utils.TypesUtil.createClassTypeDef;
-import static org.apache.hadoop.metadata.typesystem.types.utils.TypesUtil.createOptionalAttrDef;
-import static org.apache.hadoop.metadata.typesystem.types.utils.TypesUtil.createRequiredAttrDef;
-import static org.apache.hadoop.metadata.typesystem.types.utils.TypesUtil.createStructTypeDef;
-import static org.apache.hadoop.metadata.typesystem.types.utils.TypesUtil.createTraitTypeDef;
-
-/**
- * Test utility class.
- */
-public final class TestUtils {
-
-    private TestUtils() {
-    }
-
-    /**
-     * Dumps the graph in GSON format in the path returned.
-     *
-     * @param titanGraph handle to graph
-     * @return path to the dump file
-     * @throws Exception
-     */
-    public static String dumpGraph(TitanGraph titanGraph) throws Exception {
-        File tempFile = File.createTempFile("graph", ".gson");
-        System.out.println("tempFile.getPath() = " + tempFile.getPath());
-        GraphSONWriter.outputGraph(titanGraph, tempFile.getPath());
-
-        System.out.println("Vertices:");
-        for (Vertex vertex : titanGraph.getVertices()) {
-            System.out.println(GraphHelper.vertexString(vertex));
-        }
-
-        System.out.println("Edges:");
-        for (Edge edge : titanGraph.getEdges()) {
-            System.out.println(GraphHelper.edgeString(edge));
-        }
-
-        return tempFile.getPath();
-    }
-
-    /**
-     * Class Hierarchy is:
-     * Department(name : String, employees : Array[Person])
-     * Person(name : String, department : Department, manager : Manager)
-     * Manager(subordinates : Array[Person]) extends Person
-     * <p/>
-     * Persons can have SecurityClearance(level : Int) clearance.
-     */
-    public static void defineDeptEmployeeTypes(TypeSystem ts) throws MetadataException {
-
-        EnumTypeDefinition orgLevelEnum =
-                new EnumTypeDefinition("OrgLevel", new EnumValue("L1", 1), new EnumValue("L2", 2));
-        ts.defineEnumType(orgLevelEnum);
-
-        StructTypeDefinition addressDetails = createStructTypeDef("Address",
-                createRequiredAttrDef("street", DataTypes.STRING_TYPE),
-                createRequiredAttrDef("city", DataTypes.STRING_TYPE));
-
-        HierarchicalTypeDefinition<ClassType> deptTypeDef =
-                createClassTypeDef("Department", ImmutableList.<String>of(),
-                        createRequiredAttrDef("name", DataTypes.STRING_TYPE),
-                        new AttributeDefinition("employees",
-                                String.format("array<%s>", "Person"), Multiplicity.COLLECTION, true,
-                                "department")
-                );
-
-        HierarchicalTypeDefinition<ClassType> personTypeDef = createClassTypeDef("Person",
-                ImmutableList.<String>of(),
-                createRequiredAttrDef("name", DataTypes.STRING_TYPE),
-                createOptionalAttrDef("orgLevel", ts.getDataType(EnumType.class, "OrgLevel")),
-                createOptionalAttrDef("address", "Address"),
-                new AttributeDefinition("department",
-                        "Department", Multiplicity.REQUIRED, false, "employees"),
-                new AttributeDefinition("manager",
-                        "Manager", Multiplicity.OPTIONAL, false, "subordinates")
-        );
-
-        HierarchicalTypeDefinition<ClassType> managerTypeDef = createClassTypeDef("Manager",
-                ImmutableList.of("Person"),
-                new AttributeDefinition("subordinates",
-                        String.format("array<%s>", "Person"), Multiplicity.COLLECTION, false,
-                        "manager")
-        );
-
-        HierarchicalTypeDefinition<TraitType> securityClearanceTypeDef = createTraitTypeDef(
-                "SecurityClearance",
-                ImmutableList.<String>of(),
-                createRequiredAttrDef("level", DataTypes.INT_TYPE)
-        );
-
-        ts.defineTypes(ImmutableList.of(addressDetails),
-                ImmutableList.of(securityClearanceTypeDef),
-                ImmutableList.of(deptTypeDef, personTypeDef, managerTypeDef));
-    }
-
-    public static Referenceable createDeptEg1(TypeSystem ts) throws MetadataException {
-        Referenceable hrDept = new Referenceable("Department");
-        Referenceable john = new Referenceable("Person");
-        Referenceable jane = new Referenceable("Manager", "SecurityClearance");
-        Referenceable johnAddr = new Referenceable("Address");
-        Referenceable janeAddr = new Referenceable("Address");
-
-        hrDept.set("name", "hr");
-        john.set("name", "John");
-        john.set("department", hrDept);
-        johnAddr.set("street", "Stewart Drive");
-        johnAddr.set("city", "Sunnyvale");
-        john.set("address", johnAddr);
-
-        jane.set("name", "Jane");
-        jane.set("department", hrDept);
-        janeAddr.set("street", "Great America Parkway");
-        janeAddr.set("city", "Santa Clara");
-        jane.set("address", janeAddr);
-
-        john.set("manager", jane);
-
-        hrDept.set("employees", ImmutableList.of(john, jane));
-
-        jane.set("subordinates", ImmutableList.of(john));
-
-        jane.getTrait("SecurityClearance").set("level", 1);
-
-        ClassType deptType = ts.getDataType(ClassType.class, "Department");
-        ITypedReferenceableInstance hrDept2 = deptType.convert(hrDept, Multiplicity.REQUIRED);
-        Assert.assertNotNull(hrDept2);
-
-        return hrDept;
-    }
-}
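
For context: the deleted TestUtils (re-created under the org.apache.atlas package, per the new test above) models a bidirectional Department/Person relationship. The pairing hinges on AttributeDefinition's reverseAttributeName argument; a reduced sketch of just that pairing, using the same helpers and types as the file above:

    // "employees" on Department and "department" on Person name each other as
    // reverse attributes, so the repository keeps both edges in sync.
    HierarchicalTypeDefinition<ClassType> deptTypeDef =
            createClassTypeDef("Department", ImmutableList.<String>of(),
                    createRequiredAttrDef("name", DataTypes.STRING_TYPE),
                    new AttributeDefinition("employees", "array<Person>",
                            Multiplicity.COLLECTION, true, "department"));

    HierarchicalTypeDefinition<ClassType> personTypeDef =
            createClassTypeDef("Person", ImmutableList.<String>of(),
                    createRequiredAttrDef("name", DataTypes.STRING_TYPE),
                    new AttributeDefinition("department", "Department",
                            Multiplicity.REQUIRED, false, "employees"));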
http://git-wip-us.apache.org/repos/asf/incubator-atlas/blob/30711973/repository/src/test/java/org/apache/hadoop/metadata/discovery/GraphBackedDiscoveryServiceTest.java
----------------------------------------------------------------------
diff --git a/repository/src/test/java/org/apache/hadoop/metadata/discovery/GraphBackedDiscoveryServiceTest.java b/repository/src/test/java/org/apache/hadoop/metadata/discovery/GraphBackedDiscoveryServiceTest.java
deleted file mode 100755
index 4d24c29..0000000
--- a/repository/src/test/java/org/apache/hadoop/metadata/discovery/GraphBackedDiscoveryServiceTest.java
+++ /dev/null
@@ -1,320 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.hadoop.metadata.discovery;
-
-import com.google.common.collect.ImmutableList;
-import com.thinkaurelius.titan.core.TitanGraph;
-import com.tinkerpop.blueprints.Edge;
-import com.tinkerpop.blueprints.Vertex;
-import org.apache.commons.io.FileUtils;
-import org.apache.hadoop.metadata.RepositoryMetadataModule;
-import org.apache.hadoop.metadata.TestUtils;
-import org.apache.hadoop.metadata.discovery.graph.GraphBackedDiscoveryService;
-import org.apache.hadoop.metadata.query.HiveTitanSample;
-import org.apache.hadoop.metadata.query.QueryTestsUtils;
-import org.apache.hadoop.metadata.repository.graph.GraphBackedMetadataRepository;
-import org.apache.hadoop.metadata.repository.graph.GraphHelper;
-import org.apache.hadoop.metadata.repository.graph.GraphProvider;
-import org.apache.hadoop.metadata.typesystem.ITypedReferenceableInstance;
-import org.apache.hadoop.metadata.typesystem.Referenceable;
-import org.apache.hadoop.metadata.typesystem.types.ClassType;
-import org.apache.hadoop.metadata.typesystem.types.DataTypes;
-import org.apache.hadoop.metadata.typesystem.types.HierarchicalTypeDefinition;
-import org.apache.hadoop.metadata.typesystem.types.Multiplicity;
-import org.apache.hadoop.metadata.typesystem.types.TypeSystem;
-import org.codehaus.jettison.json.JSONArray;
-import org.codehaus.jettison.json.JSONObject;
-import org.testng.Assert;
-import org.testng.annotations.AfterClass;
-import org.testng.annotations.BeforeClass;
-import org.testng.annotations.DataProvider;
-import org.testng.annotations.Guice;
-import org.testng.annotations.Test;
-
-import javax.inject.Inject;
-import javax.script.Bindings;
-import javax.script.ScriptEngine;
-import javax.script.ScriptEngineManager;
-import javax.script.ScriptException;
-import java.io.File;
-
-import static org.apache.hadoop.metadata.typesystem.types.utils.TypesUtil.createClassTypeDef;
-import static org.apache.hadoop.metadata.typesystem.types.utils.TypesUtil.createOptionalAttrDef;
-import static org.apache.hadoop.metadata.typesystem.types.utils.TypesUtil.createRequiredAttrDef;
-
-@Guice(modules = RepositoryMetadataModule.class)
-public class GraphBackedDiscoveryServiceTest {
-
-    @Inject
-    private GraphProvider<TitanGraph> graphProvider;
-
-    @Inject
-    private GraphBackedMetadataRepository repositoryService;
-
-    @Inject
-    private GraphBackedDiscoveryService discoveryService;
-
-    @BeforeClass
-    public void setUp() throws Exception {
-        TypeSystem typeSystem = TypeSystem.getInstance();
-        typeSystem.reset();
-
-        QueryTestsUtils.setupTypes();
-        setupSampleData();
-
-        TestUtils.defineDeptEmployeeTypes(typeSystem);
-
-        Referenceable hrDept = TestUtils.createDeptEg1(typeSystem);
-        ClassType deptType = typeSystem.getDataType(ClassType.class, "Department");
-        ITypedReferenceableInstance hrDept2 = deptType.convert(hrDept, Multiplicity.REQUIRED);
-
-        repositoryService.createEntity(hrDept2);
-    }
-
-    private void setupSampleData() throws ScriptException {
-        TitanGraph titanGraph = graphProvider.get();
-
-        ScriptEngineManager manager = new ScriptEngineManager();
-        ScriptEngine engine = manager.getEngineByName("gremlin-groovy");
-        Bindings bindings = engine.createBindings();
-        bindings.put("g", titanGraph);
-
-        String hiveGraphFile = FileUtils.getTempDirectory().getPath()
-                + File.separator + System.nanoTime() + ".gson";
-        System.out.println("hiveGraphFile = " + hiveGraphFile);
-        HiveTitanSample.writeGson(hiveGraphFile);
-        bindings.put("hiveGraphFile", hiveGraphFile);
-
-        engine.eval("g.loadGraphSON(hiveGraphFile)", bindings);
-        titanGraph.commit();
-
-        System.out.println("*******************Graph Dump****************************");
-        for (Vertex vertex : titanGraph.getVertices()) {
-            System.out.println(GraphHelper.vertexString(vertex));
-        }
-
-        for (Edge edge : titanGraph.getEdges()) {
-            System.out.println(GraphHelper.edgeString(edge));
-        }
-        System.out.println("*******************Graph Dump****************************");
-    }
-
-    @AfterClass
-    public void tearDown() throws Exception {
-        TypeSystem.getInstance().reset();
-    }
-
-    @Test
-    public void testSearchByDSL() throws Exception {
-        String dslQuery = "from Department";
-
-        String jsonResults = discoveryService.searchByDSL(dslQuery);
-        Assert.assertNotNull(jsonResults);
-
-        JSONObject results = new JSONObject(jsonResults);
-        Assert.assertEquals(results.length(), 3);
-        System.out.println("results = " + results);
-
-        Object query = results.get("query");
-        Assert.assertNotNull(query);
-
-        JSONObject dataType = results.getJSONObject("dataType");
-        Assert.assertNotNull(dataType);
-        String typeName = dataType.getString("typeName");
-        Assert.assertNotNull(typeName);
-        Assert.assertEquals(typeName, "Department");
-
-        JSONArray rows = results.getJSONArray("rows");
-        Assert.assertNotNull(rows);
-        Assert.assertEquals(rows.length(), 1);
-    }
-
-    @Test(expectedExceptions = Throwable.class)
-    public void testSearchByDSLBadQuery() throws Exception {
-        String dslQuery = "from blah";
-
-        discoveryService.searchByDSL(dslQuery);
-        Assert.fail();
-    }
-
-    @Test
-    public void testRawSearch1() throws Exception {
-        // Query for all Vertices in Graph
-        Object r = discoveryService.searchByGremlin("g.V.toList()");
-        System.out.println("search result = " + r);
-
-        // Query for all Vertices of a Type
-        r = discoveryService.searchByGremlin("g.V.filter{it.typeName == 'Department'}.toList()");
-        System.out.println("search result = " + r);
-
-        // Property Query: list all Person names
-        r = discoveryService
-                .searchByGremlin("g.V.filter{it.typeName == 'Person'}.'Person.name'.toList()");
-        System.out.println("search result = " + r);
-    }
-
-    @DataProvider(name = "dslQueriesProvider")
-    private Object[][] createDSLQueries() {
-        return new String[][] {
-                {"from DB"},
-                {"DB"},
-                {"DB where DB.name=\"Reporting\""},
-                {"DB DB.name = \"Reporting\""},
-                {"DB where DB.name=\"Reporting\" select name, owner"},
-                {"DB has name"},
-                {"DB, Table"},
-                {"DB is JdbcAccess"},
-                /*
-                {"DB, LoadProcess has name"},
-                {"DB as db1, Table where db1.name = \"Reporting\""},
-                {"DB where DB.name=\"Reporting\" and DB.createTime < " + System.currentTimeMillis()},
-                */
-                {"from Table"},
-                {"Table"},
-                {"Table is Dimension"},
-                {"Column where Column isa PII"},
-                {"View is Dimension"},
-                /*{"Column where Column isa PII select Column.name"},*/
-                {"Column select Column.name"},
-                {"Column select name"},
-                {"Column where Column.name=\"customer_id\""},
-                {"from Table select Table.name"},
-                {"DB where (name = \"Reporting\")"},
-                {"DB where (name = \"Reporting\") select name as _col_0, owner as _col_1"},
-                {"DB where DB is JdbcAccess"},
-                {"DB where DB has name"},
-                {"DB Table"},
-                {"DB where DB has name"},
-                {"DB as db1 Table where (db1.name = \"Reporting\")"},
-                {"DB where (name = \"Reporting\") select name as _col_0, (createTime + 1) as _col_1 "},
                /*
                todo: does not work
                {"DB where (name = \"Reporting\") and ((createTime + 1) > 0)"},
                {"DB as db1 Table as tab where ((db1.createTime + 1) > 0) and (db1.name = \"Reporting\") select db1.name as dbName, tab.name as tabName"},
                {"DB as db1 Table as tab where ((db1.createTime + 1) > 0) or (db1.name = \"Reporting\") select db1.name as dbName, tab.name as tabName"},
                {"DB as db1 Table as tab where ((db1.createTime + 1) > 0) and (db1.name = \"Reporting\") or db1 has owner select db1.name as dbName, tab.name as tabName"},
                {"DB as db1 Table as tab where ((db1.createTime + 1) > 0) and (db1.name = \"Reporting\") or db1 has owner select db1.name as dbName, tab.name as tabName"},
                */
-                // trait searches
-                {"Dimension"},
-                /*{"Fact"},
-                todo: does not work*/
-                {"JdbcAccess"},
-                {"ETL"},
-                {"Metric"},
-                {"PII"},
-                // Lineage
-                {"Table LoadProcess outputTable"},
-                {"Table loop (LoadProcess outputTable)"},
-                {"Table as _loop0 loop (LoadProcess outputTable) withPath"},
-                {"Table as src loop (LoadProcess outputTable) as dest select src.name as srcTable, dest.name as destTable withPath"},
-                {"Table as t, sd, Column as c where t.name=\"sales_fact\" select c.name as colName, c.dataType as colType"},
-                {"Table where name='sales_fact', db where name='Reporting'"}
-        };
-    }
-
-    @Test (dataProvider = "dslQueriesProvider")
-    public void testSearchByDSLQueries(String dslQuery) throws Exception {
-        System.out.println("Executing dslQuery = " + dslQuery);
-        String jsonResults = discoveryService.searchByDSL(dslQuery);
-        Assert.assertNotNull(jsonResults);
-
-        JSONObject results = new JSONObject(jsonResults);
-        Assert.assertEquals(results.length(), 3);
-        System.out.println("results = " + results);
-
-        Object query = results.get("query");
-        Assert.assertNotNull(query);
-
-        JSONObject dataType = results.getJSONObject("dataType");
-        Assert.assertNotNull(dataType);
-        String typeName = dataType.getString("typeName");
-        Assert.assertNotNull(typeName);
-
-        JSONArray rows = results.getJSONArray("rows");
-        Assert.assertNotNull(rows);
-        Assert.assertTrue(rows.length() >= 0); // some queries may not have any results
-        System.out.println("query [" + dslQuery + "] returned [" + rows.length() + "] rows");
-    }
-
-    @DataProvider(name = "invalidDslQueriesProvider")
-    private Object[][] createInvalidDSLQueries() {
-        return new String[][] {
-                {"from Unknown"},
-                {"Unknown"},
-                {"Unknown is Blah"},
-        };
-    }
-
-    @Test (dataProvider = "invalidDslQueriesProvider", expectedExceptions = DiscoveryException.class)
-    public void testSearchByDSLInvalidQueries(String dslQuery) throws Exception {
-        System.out.println("Executing dslQuery = " + dslQuery);
-        discoveryService.searchByDSL(dslQuery);
-        Assert.fail();
-    }
-
-    @Test
-    public void testSearchForTypeInheritance() throws Exception {
-        createTypesWithMultiLevelInheritance();
-        createInstances();
-
-        String dslQuery = "from D where a = 1";
-        String jsonResults = discoveryService.searchByDSL(dslQuery);
-        Assert.assertNotNull(jsonResults);
-
-        JSONObject results = new JSONObject(jsonResults);
-        System.out.println("results = " + results);
-    }
-
-    /*
-     * Type Hierarchy is:
-     * A(a)
-     * B(b) extends A
-     * C(c) extends B
-     * D(d) extends C
-     */
-    private void createTypesWithMultiLevelInheritance() throws Exception {
-        HierarchicalTypeDefinition A = createClassTypeDef("A", null,
-                createRequiredAttrDef("a", DataTypes.INT_TYPE));
-
-        HierarchicalTypeDefinition B = createClassTypeDef("B", ImmutableList.of("A"),
-                createOptionalAttrDef("b", DataTypes.BOOLEAN_TYPE));
-
-        HierarchicalTypeDefinition C = createClassTypeDef("C", ImmutableList.of("B"),
-                createOptionalAttrDef("c", DataTypes.BYTE_TYPE));
-
-        HierarchicalTypeDefinition D = createClassTypeDef("D", ImmutableList.of("C"),
-                createOptionalAttrDef("d", DataTypes.SHORT_TYPE));
-
-        TypeSystem.getInstance().defineClassTypes(A, B, C, D);
-    }
-
-    private void createInstances() throws Exception {
-        Referenceable instance = new Referenceable("D");
-        instance.set("d", 1);
-        instance.set("c", 1);
-        instance.set("b", true);
-        instance.set("a", 1);
-
-        ClassType deptType = TypeSystem.getInstance().getDataType(ClassType.class, "D");
-        ITypedReferenceableInstance typedInstance =
-                deptType.convert(instance, Multiplicity.REQUIRED);
-
-        repositoryService.createEntity(typedInstance);
-    }
-}
\ No newline at end of file
http://git-wip-us.apache.org/repos/asf/incubator-atlas/blob/30711973/repository/src/test/java/org/apache/hadoop/metadata/discovery/HiveLineageServiceTest.java
----------------------------------------------------------------------
diff --git a/repository/src/test/java/org/apache/hadoop/metadata/discovery/HiveLineageServiceTest.java b/repository/src/test/java/org/apache/hadoop/metadata/discovery/HiveLineageServiceTest.java
deleted file mode 100644
index 61526bb..0000000
--- a/repository/src/test/java/org/apache/hadoop/metadata/discovery/HiveLineageServiceTest.java
+++ /dev/null
@@ -1,590 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.hadoop.metadata.discovery;
-
-import com.google.common.base.Preconditions;
-import com.google.common.collect.ImmutableList;
-import org.apache.hadoop.metadata.RepositoryMetadataModule;
-import org.apache.hadoop.metadata.discovery.graph.GraphBackedDiscoveryService;
-import org.apache.hadoop.metadata.repository.EntityNotFoundException;
-import org.apache.hadoop.metadata.services.DefaultMetadataService;
-import org.apache.hadoop.metadata.typesystem.Referenceable;
-import org.apache.hadoop.metadata.typesystem.TypesDef;
-import org.apache.hadoop.metadata.typesystem.json.InstanceSerialization;
-import org.apache.hadoop.metadata.typesystem.json.TypesSerialization;
-import org.apache.hadoop.metadata.typesystem.persistence.Id;
-import org.apache.hadoop.metadata.typesystem.types.AttributeDefinition;
-import org.apache.hadoop.metadata.typesystem.types.ClassType;
-import org.apache.hadoop.metadata.typesystem.types.DataTypes;
-import org.apache.hadoop.metadata.typesystem.types.EnumTypeDefinition;
-import org.apache.hadoop.metadata.typesystem.types.HierarchicalTypeDefinition;
-import org.apache.hadoop.metadata.typesystem.types.IDataType;
-import org.apache.hadoop.metadata.typesystem.types.Multiplicity;
-import org.apache.hadoop.metadata.typesystem.types.StructTypeDefinition;
-import org.apache.hadoop.metadata.typesystem.types.TraitType;
-import org.apache.hadoop.metadata.typesystem.types.TypeUtils;
-import org.apache.hadoop.metadata.typesystem.types.utils.TypesUtil;
-import org.codehaus.jettison.json.JSONArray;
-import org.codehaus.jettison.json.JSONObject;
-import org.testng.Assert;
-import org.testng.annotations.BeforeClass;
-import org.testng.annotations.DataProvider;
-import org.testng.annotations.Guice;
-import org.testng.annotations.Test;
-
-import javax.inject.Inject;
-import java.util.List;
-
-/**
- * Unit tests for Hive LineageService.
- */
-@Guice(modules = RepositoryMetadataModule.class)
-public class HiveLineageServiceTest {
-
-    @Inject
-    private DefaultMetadataService metadataService;
-
-    @Inject
-    private GraphBackedDiscoveryService discoveryService;
-
-    @Inject
-    private HiveLineageService hiveLineageService;
-
-//    @Inject
-//    private GraphProvider<TitanGraph> graphProvider;
-
-    @BeforeClass
-    public void setUp() throws Exception {
-        setUpTypes();
-        setupInstances();
-
-        // TestUtils.dumpGraph(graphProvider.get());
-    }
-
-    @DataProvider(name = "dslQueriesProvider")
-    private Object[][] createDSLQueries() {
-        return new String[][] {
-                // joins
-                {"hive_table where name=\"sales_fact\", columns"},
-                {"hive_table where name=\"sales_fact\", columns select name, dataType, comment"},
-                {"hive_table where name=\"sales_fact\", columns as c select c.name, c.dataType, c.comment"},
-//                {"hive_db as db where (db.name=\"Reporting\"), hive_table as table select db.name, table.name"},
-                {"from hive_db"},
-                {"hive_db"},
-                {"hive_db where hive_db.name=\"Reporting\""},
-                {"hive_db hive_db.name = \"Reporting\""},
-                {"hive_db where hive_db.name=\"Reporting\" select name, owner"},
-                {"hive_db has name"},
-//                {"hive_db, hive_table"},
-//                {"hive_db, hive_process has name"},
-//                {"hive_db as db1, hive_table where db1.name = \"Reporting\""},
-//                {"hive_db where hive_db.name=\"Reporting\" and hive_db.createTime < " + System.currentTimeMillis()},
-                {"from hive_table"},
-                {"hive_table"},
-                {"hive_table is Dimension"},
-                {"hive_column where hive_column isa PII"},
-//                {"hive_column where hive_column isa PII select hive_column.name"},
-                {"hive_column select hive_column.name"},
-                {"hive_column select name"},
-                {"hive_column where hive_column.name=\"customer_id\""},
-                {"from hive_table select hive_table.name"},
-                {"hive_db where (name = \"Reporting\")"},
-                {"hive_db where (name = \"Reporting\") select name as _col_0, owner as _col_1"},
-                {"hive_db where hive_db has name"},
-//                {"hive_db hive_table"},
-                {"hive_db where hive_db has name"},
-//                {"hive_db as db1 hive_table where (db1.name = \"Reporting\")"},
-                {"hive_db where (name = \"Reporting\") select name as _col_0, (createTime + 1) as _col_1 "},
-//                {"hive_db where (name = \"Reporting\") and ((createTime + 1) > 0)"},
-//                {"hive_db as db1 hive_table as tab where ((db1.createTime + 1) > 0) and (db1.name = \"Reporting\") select db1.name as dbName, tab.name as tabName"},
-//                {"hive_db as db1 hive_table as tab where ((db1.createTime + 1) > 0) or (db1.name = \"Reporting\") select db1.name as dbName, tab.name as tabName"},
-//                {"hive_db as db1 hive_table as tab where ((db1.createTime + 1) > 0) and (db1.name = \"Reporting\") or db1 has owner select db1.name as dbName, tab.name as tabName"},
-//                {"hive_db as db1 hive_table as tab where ((db1.createTime + 1) > 0) and (db1.name = \"Reporting\") or db1 has owner select db1.name as dbName, tab.name as tabName"},
-                // trait searches
-                {"Dimension"},
-                {"Fact"},
-                {"ETL"},
-                {"Metric"},
-                {"PII"},
-        };
-    }
-
-    @Test (dataProvider = "dslQueriesProvider")
-    public void testSearchByDSLQueries(String dslQuery) throws Exception {
-        System.out.println("Executing dslQuery = " + dslQuery);
-        String jsonResults = discoveryService.searchByDSL(dslQuery);
-        Assert.assertNotNull(jsonResults);
-
-        JSONObject results = new JSONObject(jsonResults);
-        Assert.assertEquals(results.length(), 3);
-        System.out.println("results = " + results);
-
-        Object query = results.get("query");
-        Assert.assertNotNull(query);
-
-        JSONObject dataType = results.getJSONObject("dataType");
-        Assert.assertNotNull(dataType);
-        String typeName = dataType.getString("typeName");
-        Assert.assertNotNull(typeName);
-
-        JSONArray rows = results.getJSONArray("rows");
-        Assert.assertNotNull(rows);
-        Assert.assertTrue(rows.length() >= 0); // some queries may not have any results
-        System.out.println("query [" + dslQuery + "] returned [" + rows.length() + "] rows");
-    }
-
-    @Test
-    public void testGetInputs() throws Exception {
-        JSONObject results = new JSONObject(hiveLineageService.getInputs("sales_fact_monthly_mv"));
-        Assert.assertNotNull(results);
-        System.out.println("inputs = " + results);
-
-        JSONArray rows = results.getJSONArray("rows");
-        Assert.assertTrue(rows.length() > 0);
-
-        final JSONObject row = rows.getJSONObject(0);
-        JSONArray paths = row.getJSONArray("path");
-        Assert.assertTrue(paths.length() > 0);
-    }
-
-    @Test (expectedExceptions = IllegalArgumentException.class)
-    public void testGetInputsTableNameNull() throws Exception {
-        hiveLineageService.getInputs(null);
-        Assert.fail();
-    }
-
-    @Test (expectedExceptions = IllegalArgumentException.class)
-    public void testGetInputsTableNameEmpty() throws Exception {
-        hiveLineageService.getInputs("");
-        Assert.fail();
-    }
-
-    @Test (expectedExceptions = EntityNotFoundException.class)
-    public void testGetInputsBadTableName() throws Exception {
-        hiveLineageService.getInputs("blah");
-        Assert.fail();
-    }
-
-    @Test
-    public void testGetInputsGraph() throws Exception {
-        JSONObject results = new JSONObject(
-                hiveLineageService.getInputsGraph("sales_fact_monthly_mv"));
-        Assert.assertNotNull(results);
-        System.out.println("inputs graph = " + results);
-
-        JSONObject values = results.getJSONObject("values");
-        Assert.assertNotNull(values);
-
-        final JSONObject vertices = values.getJSONObject("vertices");
-        Assert.assertEquals(vertices.length(), 4);
-
-        final JSONObject edges = values.getJSONObject("edges");
-        Assert.assertEquals(edges.length(), 4);
-    }
-
-    @Test
-    public void testGetOutputs() throws Exception {
-        JSONObject results = new JSONObject(hiveLineageService.getOutputs("sales_fact"));
-        Assert.assertNotNull(results);
-        System.out.println("outputs = " + results);
-
-        JSONArray rows = results.getJSONArray("rows");
-        Assert.assertTrue(rows.length() > 0);
-
-        final JSONObject row = rows.getJSONObject(0);
-        JSONArray paths = row.getJSONArray("path");
-        Assert.assertTrue(paths.length() > 0);
-    }
-
-    @Test (expectedExceptions = IllegalArgumentException.class)
-    public void testGetOututsTableNameNull() throws Exception {
-        hiveLineageService.getOutputs(null);
-        Assert.fail();
-    }
-
-    @Test (expectedExceptions = IllegalArgumentException.class)
-    public void testGetOutputsTableNameEmpty() throws Exception {
-        hiveLineageService.getOutputs("");
-        Assert.fail();
-    }
-
-    @Test (expectedExceptions = EntityNotFoundException.class)
-    public void testGetOutputsBadTableName() throws Exception {
-        hiveLineageService.getOutputs("blah");
-        Assert.fail();
-    }
-
-    @Test
-    public void testGetOutputsGraph() throws Exception {
-        JSONObject results = new JSONObject(hiveLineageService.getOutputsGraph("sales_fact"));
-        Assert.assertNotNull(results);
-        System.out.println("outputs graph = " + results);
-
-        JSONObject values = results.getJSONObject("values");
-        Assert.assertNotNull(values);
-
-        final JSONObject vertices = values.getJSONObject("vertices");
-        Assert.assertEquals(vertices.length(), 3);
-
-        final JSONObject edges = values.getJSONObject("edges");
-        Assert.assertEquals(edges.length(), 4);
-    }
-
-    @DataProvider(name = "tableNamesProvider")
-    private Object[][] tableNames() {
-        return new String[][] {
-                {"sales_fact", "4"},
-                {"time_dim", "3"},
-                {"sales_fact_daily_mv", "4"},
-                {"sales_fact_monthly_mv", "4"}
-        };
-    }
-
-    @Test (dataProvider = "tableNamesProvider")
-    public void testGetSchema(String tableName, String expected) throws Exception {
-        JSONObject results = new JSONObject(hiveLineageService.getSchema(tableName));
-        Assert.assertNotNull(results);
-        System.out.println("columns = " + results);
-
-        JSONArray rows = results.getJSONArray("rows");
-        Assert.assertEquals(rows.length(), Integer.parseInt(expected));
-
-        for (int index = 0; index < rows.length(); index++) {
-            final JSONObject row = rows.getJSONObject(index);
-            Assert.assertNotNull(row.getString("name"));
-            Assert.assertNotNull(row.getString("comment"));
-            Assert.assertNotNull(row.getString("dataType"));
-            Assert.assertEquals(row.getString("$typeName$"), "hive_column");
-        }
-    }
-
-    @Test (expectedExceptions = IllegalArgumentException.class)
-    public void testGetSchemaTableNameNull() throws Exception {
-        hiveLineageService.getSchema(null);
-        Assert.fail();
-    }
-
-    @Test (expectedExceptions = IllegalArgumentException.class)
-    public void testGetSchemaTableNameEmpty() throws Exception {
-        hiveLineageService.getSchema("");
-        Assert.fail();
-    }
-
-    @Test (expectedExceptions = EntityNotFoundException.class)
-    public void testGetSchemaBadTableName() throws Exception {
-        hiveLineageService.getSchema("blah");
-        Assert.fail();
-    }
-
-    private void setUpTypes() throws Exception {
-        TypesDef typesDef = createTypeDefinitions();
-        String typesAsJSON = TypesSerialization.toJson(typesDef);
-        metadataService.createType(typesAsJSON);
-    }
-
-    private static final String DATABASE_TYPE = "hive_db";
-    private static final String HIVE_TABLE_TYPE = "hive_table";
-    private static final String COLUMN_TYPE = "hive_column";
-    private static final String HIVE_PROCESS_TYPE = "hive_process";
-    private static final String STORAGE_DESC_TYPE = "StorageDesc";
-    private static final String VIEW_TYPE = "View";
-
-    private TypesDef createTypeDefinitions() {
-        HierarchicalTypeDefinition<ClassType> dbClsDef
-                = TypesUtil.createClassTypeDef(DATABASE_TYPE, null,
-                attrDef("name", DataTypes.STRING_TYPE),
-                attrDef("description", DataTypes.STRING_TYPE),
-                attrDef("locationUri", DataTypes.STRING_TYPE),
-                attrDef("owner", DataTypes.STRING_TYPE),
-                attrDef("createTime", DataTypes.INT_TYPE)
-        );
-
-        HierarchicalTypeDefinition<ClassType> storageDescClsDef =
-                TypesUtil.createClassTypeDef(STORAGE_DESC_TYPE, null,
-                        attrDef("location", DataTypes.STRING_TYPE),
-                        attrDef("inputFormat", DataTypes.STRING_TYPE),
-                        attrDef("outputFormat", DataTypes.STRING_TYPE),
-                        attrDef("compressed", DataTypes.STRING_TYPE,
-                                Multiplicity.REQUIRED, false, null)
-                );
-
-        HierarchicalTypeDefinition<ClassType> columnClsDef =
-                TypesUtil.createClassTypeDef(COLUMN_TYPE, null,
-                        attrDef("name", DataTypes.STRING_TYPE),
-                        attrDef("dataType", DataTypes.STRING_TYPE),
-                        attrDef("comment", DataTypes.STRING_TYPE)
-                );
-
-        HierarchicalTypeDefinition<ClassType> tblClsDef =
-                TypesUtil.createClassTypeDef(HIVE_TABLE_TYPE, ImmutableList.of("DataSet"),
-                        attrDef("owner", DataTypes.STRING_TYPE),
-                        attrDef("createTime", DataTypes.INT_TYPE),
-                        attrDef("lastAccessTime", DataTypes.INT_TYPE),
-                        attrDef("tableType", DataTypes.STRING_TYPE),
-                        attrDef("temporary", DataTypes.BOOLEAN_TYPE),
-                        new AttributeDefinition("db", DATABASE_TYPE,
-                                Multiplicity.REQUIRED, false, null),
-                        new AttributeDefinition("sd", STORAGE_DESC_TYPE,
-                                Multiplicity.REQUIRED, true, null),
-                        new AttributeDefinition("columns",
-                                DataTypes.arrayTypeName(COLUMN_TYPE),
-                                Multiplicity.COLLECTION, true, null)
-                );
-
-        HierarchicalTypeDefinition<ClassType> loadProcessClsDef =
-                TypesUtil.createClassTypeDef(HIVE_PROCESS_TYPE, ImmutableList.of("Process"),
-                        attrDef("userName", DataTypes.STRING_TYPE),
-                        attrDef("startTime", DataTypes.INT_TYPE),
-                        attrDef("endTime", DataTypes.INT_TYPE),
-                        attrDef("queryText", DataTypes.STRING_TYPE, Multiplicity.REQUIRED),
-                        attrDef("queryPlan", DataTypes.STRING_TYPE, Multiplicity.REQUIRED),
-                        attrDef("queryId", DataTypes.STRING_TYPE, Multiplicity.REQUIRED),
-                        attrDef("queryGraph", DataTypes.STRING_TYPE, Multiplicity.REQUIRED)
-                );
-
-        HierarchicalTypeDefinition<ClassType> viewClsDef =
-                TypesUtil.createClassTypeDef(VIEW_TYPE, null,
-                        attrDef("name", DataTypes.STRING_TYPE),
-                        new AttributeDefinition("db", DATABASE_TYPE,
-                                Multiplicity.REQUIRED, false, null),
-                        new AttributeDefinition("inputTables",
-                                DataTypes.arrayTypeName(HIVE_TABLE_TYPE),
-                                Multiplicity.COLLECTION, false, null)
-                );
-
-        HierarchicalTypeDefinition<TraitType> dimTraitDef =
-                TypesUtil.createTraitTypeDef("Dimension", null);
-
-        HierarchicalTypeDefinition<TraitType> factTraitDef =
-                TypesUtil.createTraitTypeDef("Fact", null);
-
-        HierarchicalTypeDefinition<TraitType> metricTraitDef =
-                TypesUtil.createTraitTypeDef("Metric", null);
-
-        HierarchicalTypeDefinition<TraitType> etlTraitDef =
-                TypesUtil.createTraitTypeDef("ETL", null);
-
-        HierarchicalTypeDefinition<TraitType> piiTraitDef =
-                TypesUtil.createTraitTypeDef("PII", null);
-
-        HierarchicalTypeDefinition<TraitType> jdbcTraitDef =
-                TypesUtil.createTraitTypeDef("JdbcAccess", null);
-
-        return TypeUtils.getTypesDef(
-                ImmutableList.<EnumTypeDefinition>of(),
-                ImmutableList.<StructTypeDefinition>of(),
-                ImmutableList.of(dimTraitDef, factTraitDef,
-                        piiTraitDef, metricTraitDef, etlTraitDef, jdbcTraitDef),
-                ImmutableList.of(dbClsDef, storageDescClsDef, columnClsDef,
-                        tblClsDef, loadProcessClsDef, viewClsDef)
-        );
-    }
-
-    AttributeDefinition attrDef(String name, IDataType dT) {
-        return attrDef(name, dT, Multiplicity.OPTIONAL, false, null);
-    }
-
-    AttributeDefinition attrDef(String name, IDataType dT, Multiplicity m) {
-        return attrDef(name, dT, m, false, null);
-    }
-
-    AttributeDefinition attrDef(String name, IDataType dT,
-                                Multiplicity m, boolean isComposite, String reverseAttributeName) {
-        Preconditions.checkNotNull(name);
-        Preconditions.checkNotNull(dT);
-        return new AttributeDefinition(name, dT.getName(), m, isComposite, reverseAttributeName);
-    }
-
-    private void setupInstances() throws Exception {
-        Id salesDB = database(
-                "Sales", "Sales Database", "John ETL", "hdfs://host:8000/apps/warehouse/sales");
-
-        Referenceable sd = storageDescriptor("hdfs://host:8000/apps/warehouse/sales",
-                "TextInputFormat", "TextOutputFormat", true);
-
-        List<Referenceable> salesFactColumns = ImmutableList.of(
-                column("time_id", "int", "time id"),
-                column("product_id", "int", "product id"),
-                column("customer_id", "int", "customer id", "PII"),
-                column("sales", "double", "product id", "Metric")
-        );
-
-        Id salesFact = table("sales_fact", "sales fact table",
-                salesDB, sd, "Joe", "Managed", salesFactColumns, "Fact");
-
-        List<Referenceable> timeDimColumns = ImmutableList.of(
-                column("time_id", "int", "time id"),
-                column("dayOfYear", "int", "day Of Year"),
-                column("weekDay", "int", "week Day")
-        );
-
-        Id timeDim = table("time_dim", "time dimension table",
-                salesDB, sd, "John Doe", "External", timeDimColumns, "Dimension");
-
-        Id reportingDB = database("Reporting", "reporting database", "Jane BI",
-                "hdfs://host:8000/apps/warehouse/reporting");
-
-        Id salesFactDaily = table("sales_fact_daily_mv",
-                "sales fact daily materialized view",
-                reportingDB, sd, "Joe BI", "Managed", salesFactColumns, "Metric");
-
-        loadProcess("loadSalesDaily", "hive query for daily summary", "John ETL",
-                ImmutableList.of(salesFact, timeDim), ImmutableList.of(salesFactDaily),
-                "create table as select ", "plan", "id", "graph",
-                "ETL");
-
-        List<Referenceable> productDimColumns = ImmutableList.of(
-                column("product_id", "int", "product id"),
-                column("product_name", "string", "product name"),
-                column("brand_name", "int", "brand name")
-        );
-
-        Id productDim = table("product_dim", "product dimension table",
-                salesDB, sd, "John Doe", "Managed", productDimColumns, "Dimension");
-
-        view("product_dim_view", reportingDB,
-                ImmutableList.of(productDim), "Dimension", "JdbcAccess");
-
-        List<Referenceable> customerDimColumns = ImmutableList.of(
-                column("customer_id", "int", "customer id", "PII"),
-                column("name", "string", "customer name", "PII"),
-                column("address", "string", "customer address", "PII")
-        );
-
-        Id customerDim = table("customer_dim", "customer dimension table",
-                salesDB, sd, "fetl", "External", customerDimColumns, "Dimension");
-
-        view("customer_dim_view", reportingDB,
-                ImmutableList.of(customerDim), "Dimension", "JdbcAccess");
-
-        Id salesFactMonthly = table("sales_fact_monthly_mv",
-                "sales fact monthly materialized view",
-                reportingDB, sd, "Jane BI", "Managed", salesFactColumns, "Metric");
-
-        loadProcess("loadSalesMonthly", "hive query for monthly summary", "John ETL",
-                ImmutableList.of(salesFactDaily), ImmutableList.of(salesFactMonthly),
-                "create table as select ", "plan", "id", "graph",
-                "ETL");
-    }
-
-    Id database(String name, String description,
-                String owner, String locationUri,
-                String... traitNames) throws Exception {
-        Referenceable referenceable = new Referenceable(DATABASE_TYPE, traitNames);
-        referenceable.set("name", name);
-        referenceable.set("description", description);
-        referenceable.set("owner", owner);
-        referenceable.set("locationUri", locationUri);
-        referenceable.set("createTime", System.currentTimeMillis());
-
-        return createInstance(referenceable);
-    }
-
-    Referenceable storageDescriptor(String location, String inputFormat,
-                                    String outputFormat,
-                                    boolean compressed) throws Exception {
-        Referenceable referenceable = new Referenceable(STORAGE_DESC_TYPE);
-        referenceable.set("location", location);
-        referenceable.set("inputFormat", inputFormat);
-        referenceable.set("outputFormat", outputFormat);
-        referenceable.set("compressed", compressed);
-
-        return referenceable;
-    }
-
-    Referenceable column(String name, String dataType, String comment,
-                         String... traitNames) throws Exception {
-        Referenceable referenceable = new Referenceable(COLUMN_TYPE, traitNames);
-        referenceable.set("name", name);
-        referenceable.set("dataType", dataType);
-        referenceable.set("comment", comment);
-
-        return referenceable;
-    }
-
-    Id table(String name, String description,
-             Id dbId, Referenceable sd,
-             String owner, String tableType,
-             List<Referenceable> columns,
-             String... traitNames) throws Exception {
-        Referenceable referenceable = new Referenceable(HIVE_TABLE_TYPE, traitNames);
-        referenceable.set("name", name);
-        referenceable.set("description", description);
-        referenceable.set("owner", owner);
-        referenceable.set("tableType", tableType);
-        referenceable.set("createTime", System.currentTimeMillis());
-        referenceable.set("lastAccessTime", System.currentTimeMillis());
-        referenceable.set("retention", System.currentTimeMillis());
-
-        referenceable.set("db", dbId);
-        referenceable.set("sd", sd);
-        referenceable.set("columns", columns);
-
-        return createInstance(referenceable);
-    }
-
-    Id loadProcess(String name, String description, String user,
-                   List<Id> inputTables,
-                   List<Id> outputTables,
-                   String queryText, String queryPlan,
-                   String queryId, String queryGraph,
-                   String... traitNames) throws Exception {
-        Referenceable referenceable = new Referenceable(HIVE_PROCESS_TYPE, traitNames);
-        referenceable.set("name", name);
-        referenceable.set("description", description);
-        referenceable.set("user", user);
-        referenceable.set("startTime", System.currentTimeMillis());
-        referenceable.set("endTime", System.currentTimeMillis() + 10000);
-
-        referenceable.set("inputs", inputTables);
-        referenceable.set("outputs", outputTables);
-
-        referenceable.set("queryText", queryText);
-        referenceable.set("queryPlan", queryPlan);
-        referenceable.set("queryId", queryId);
-        referenceable.set("queryGraph", queryGraph);
-
-        return createInstance(referenceable);
-    }
-
-    Id view(String name, Id dbId,
-            List<Id> inputTables,
-            String... traitNames) throws Exception {
-        Referenceable referenceable = new Referenceable(VIEW_TYPE, traitNames);
-        referenceable.set("name", name);
-        referenceable.set("db", dbId);
-
-        referenceable.set("inputTables", inputTables);
-
-        return createInstance(referenceable);
-    }
-
-    private Id createInstance(Referenceable referenceable) throws Exception {
-        String typeName = referenceable.getTypeName();
-        System.out.println("creating instance of type " + typeName);
-
-        String entityJSON = InstanceSerialization.toJson(referenceable, true);
-        System.out.println("Submitting new entity= " + entityJSON);
-        String guid = metadataService.createEntity(entityJSON);
-        System.out.println("created instance for type " + typeName + ", guid: " + guid);
-
-        // return the reference to created instance with guid
-        return new Id(guid, 0, referenceable.getTypeName());
-    }
-}
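
For context: the deleted HiveLineageServiceTest probes HiveLineageService in both directions and as a graph. A condensed sketch of the call pattern it validates, assuming hiveLineageService is injected as in the test and using table names from its fixture data:

    // Upstream lineage: "rows" of "path" arrays leading into the materialized view.
    JSONObject inputs = new JSONObject(hiveLineageService.getInputs("sales_fact_monthly_mv"));
    JSONArray paths = inputs.getJSONArray("rows").getJSONObject(0).getJSONArray("path");

    // Graph form: a "values" object holding "vertices" and "edges" maps.
    JSONObject inputsGraph = new JSONObject(hiveLineageService.getInputsGraph("sales_fact_monthly_mv"));
    JSONObject vertices = inputsGraph.getJSONObject("values").getJSONObject("vertices");

    // Schema: one row per hive_column, each carrying name, comment and dataType.
    JSONObject schema = new JSONObject(hiveLineageService.getSchema("sales_fact"));
    JSONArray columns = schema.getJSONArray("rows");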
http://git-wip-us.apache.org/repos/asf/incubator-atlas/blob/30711973/repository/src/test/java/org/apache/hadoop/metadata/repository/BaseTest.java
----------------------------------------------------------------------
diff --git a/repository/src/test/java/org/apache/hadoop/metadata/repository/BaseTest.java b/repository/src/test/java/org/apache/hadoop/metadata/repository/BaseTest.java
deleted file mode 100755
index f4e9f18..0000000
--- a/repository/src/test/java/org/apache/hadoop/metadata/repository/BaseTest.java
+++ /dev/null
@@ -1,206 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.hadoop.metadata.repository;
-
-import com.google.common.collect.ImmutableList;
-import com.google.common.collect.Lists;
-import com.google.common.collect.Maps;
-import org.apache.hadoop.metadata.MetadataException;
-import org.apache.hadoop.metadata.repository.memory.MemRepository;
-import org.apache.hadoop.metadata.typesystem.ITypedReferenceableInstance;
-import org.apache.hadoop.metadata.typesystem.Referenceable;
-import org.apache.hadoop.metadata.typesystem.Struct;
-import org.apache.hadoop.metadata.typesystem.types.AttributeDefinition;
-import org.apache.hadoop.metadata.typesystem.types.ClassType;
-import org.apache.hadoop.metadata.typesystem.types.DataTypes;
-import org.apache.hadoop.metadata.typesystem.types.HierarchicalType;
-import org.apache.hadoop.metadata.typesystem.types.HierarchicalTypeDefinition;
-import org.apache.hadoop.metadata.typesystem.types.IDataType;
-import org.apache.hadoop.metadata.typesystem.types.Multiplicity;
-import org.apache.hadoop.metadata.typesystem.types.StructType;
-import org.apache.hadoop.metadata.typesystem.types.StructTypeDefinition;
-import org.apache.hadoop.metadata.typesystem.types.TraitType;
-import org.apache.hadoop.metadata.typesystem.types.TypeSystem;
-import org.apache.hadoop.metadata.typesystem.types.utils.TypesUtil;
-import org.junit.Before;
-
-import java.math.BigDecimal;
-import java.math.BigInteger;
-import java.util.Date;
-import java.util.Map;
-
-public abstract class BaseTest {
-
-    public static final String STRUCT_TYPE_1 = "t1";
-    public static final String STRUCT_TYPE_2 = "t2";
-    public static final String TEST_DATE = "2014-12-11T02:35:58.440Z";
-    public static final long TEST_DATE_IN_LONG=1418265358440L;
-    protected IRepository repo;
-
-    public static Struct createStruct() throws MetadataException {
-        StructType structType = (StructType) TypeSystem.getInstance()
-                .getDataType(StructType.class, STRUCT_TYPE_1);
-        Struct s = new Struct(structType.getName());
-        s.set("a", 1);
-        s.set("b", true);
-        s.set("c", (byte) 1);
-        s.set("d", (short) 2);
-        s.set("e", 1);
-        s.set("f", 1);
-        s.set("g", 1L);
-        s.set("h", 1.0f);
-        s.set("i", 1.0);
-        s.set("j", BigInteger.valueOf(1L));
-        s.set("k", new BigDecimal(1));
-        s.set("l", new Date(1418265358440L));
-        s.set("m", Lists.<Integer>asList(Integer.valueOf(1), new Integer[]{Integer.valueOf(1)}));
-        s.set("n", Lists.<BigDecimal>asList(BigDecimal.valueOf(1.1),
-                new BigDecimal[]{BigDecimal.valueOf(1.1)}));
-        Map<String, Double> hm = Maps.<String, Double>newHashMap();
-        hm.put("a", 1.0);
-        hm.put("b", 2.0);
-        s.set("o", hm);
-        return s;
-    }
-
-    protected final TypeSystem getTypeSystem() {
-        return TypeSystem.getInstance();
-    }
-
-    protected final IRepository getRepository() {
-        return repo;
-    }
-
-    @Before
-    public void setup() throws Exception {
-
-        TypeSystem ts = TypeSystem.getInstance();
-        ts.reset();
-        repo = new MemRepository(ts);
-
-        StructType structType = ts.defineStructType(STRUCT_TYPE_1,
-                true,
-                TypesUtil.createRequiredAttrDef("a", DataTypes.INT_TYPE),
-                TypesUtil.createOptionalAttrDef("b", DataTypes.BOOLEAN_TYPE),
-                TypesUtil.createOptionalAttrDef("c", DataTypes.BYTE_TYPE),
-                TypesUtil.createOptionalAttrDef("d", DataTypes.SHORT_TYPE),
-                TypesUtil.createOptionalAttrDef("e", DataTypes.INT_TYPE),
-                TypesUtil.createOptionalAttrDef("f", DataTypes.INT_TYPE),
-                TypesUtil.createOptionalAttrDef("g", DataTypes.LONG_TYPE),
-                TypesUtil.createOptionalAttrDef("h", DataTypes.FLOAT_TYPE),
-                TypesUtil.createOptionalAttrDef("i", DataTypes.DOUBLE_TYPE),
-                TypesUtil.createOptionalAttrDef("j", DataTypes.BIGINTEGER_TYPE),
-                TypesUtil.createOptionalAttrDef("k", DataTypes.BIGDECIMAL_TYPE),
-                TypesUtil.createOptionalAttrDef("l", DataTypes.DATE_TYPE),
-                TypesUtil.createOptionalAttrDef("m", ts.defineArrayType(DataTypes.INT_TYPE)),
-                TypesUtil.createOptionalAttrDef("n", ts.defineArrayType(DataTypes.BIGDECIMAL_TYPE)),
-                TypesUtil.createOptionalAttrDef("o",
-                        ts.defineMapType(DataTypes.STRING_TYPE, DataTypes.DOUBLE_TYPE)));
-
-        StructType recursiveStructType = ts.defineStructType(STRUCT_TYPE_2,
-                true,
-                TypesUtil.createRequiredAttrDef("a", DataTypes.INT_TYPE),
-                TypesUtil.createOptionalAttrDef("s", STRUCT_TYPE_2));
-    }
-
-    protected Map<String, IDataType> defineTraits(HierarchicalTypeDefinition... tDefs)
-            throws MetadataException {
-
-        return getTypeSystem().defineTraitTypes(tDefs);
-    }
-
-    /*
-     * Class Hierarchy is:
-     * Department(name : String, employees : Array[Person])
-     * Person(name : String, department : Department, manager : Manager)
-     * Manager(subordinates : Array[Person]) extends Person
-     *
-     * Persons can have SecurityClearance(level : Int) clearance.
-     */
-    protected void defineDeptEmployeeTypes(TypeSystem ts) throws MetadataException {
-
-        HierarchicalTypeDefinition<ClassType> deptTypeDef =
-                TypesUtil.createClassTypeDef("Department", ImmutableList.<String>of(),
-                        TypesUtil.createRequiredAttrDef("name", DataTypes.STRING_TYPE),
-                        new AttributeDefinition("employees", String.format("array<%s>", "Person"),
-                                Multiplicity.COLLECTION, true, "department")
-                );
-        HierarchicalTypeDefinition<ClassType> personTypeDef =
-                TypesUtil.createClassTypeDef("Person", ImmutableList.<String>of(),
-                        TypesUtil.createRequiredAttrDef("name", DataTypes.STRING_TYPE),
-                        new AttributeDefinition("department",
-                                "Department", Multiplicity.REQUIRED, false, "employees"),
-                        new AttributeDefinition("manager",
-                                "Manager", Multiplicity.OPTIONAL, false, "subordinates")
-                );
-        HierarchicalTypeDefinition<ClassType> managerTypeDef =
-                TypesUtil.createClassTypeDef("Manager",
-                        ImmutableList.<String>of("Person"),
-                        new AttributeDefinition("subordinates",
-                                String.format("array<%s>", "Person"),
-                                Multiplicity.COLLECTION, false, "manager")
-                );
-
-        HierarchicalTypeDefinition<TraitType> securityClearanceTypeDef =
-                TypesUtil.createTraitTypeDef("SecurityClearance",
-                        ImmutableList.<String>of(),
-                        TypesUtil.createRequiredAttrDef("level", DataTypes.INT_TYPE)
-                );
-
-        ts.defineTypes(ImmutableList.<StructTypeDefinition>of(),
-                ImmutableList.<HierarchicalTypeDefinition<TraitType>>of(securityClearanceTypeDef),
-                ImmutableList.<HierarchicalTypeDefinition<ClassType>>of(deptTypeDef, personTypeDef,
-                        managerTypeDef));
-
-        ImmutableList<HierarchicalType> types = ImmutableList.of(
-                ts.getDataType(HierarchicalType.class, "SecurityClearance"),
-                ts.getDataType(ClassType.class, "Department"),
-                ts.getDataType(ClassType.class, "Person"),
-                ts.getDataType(ClassType.class, "Manager")
-        );
-
-        repo.defineTypes(types);
-
-    }
-
-    protected Referenceable createDeptEg1(TypeSystem ts) throws MetadataException {
-        Referenceable hrDept = new Referenceable("Department");
-        Referenceable john = new Referenceable("Person");
-        Referenceable jane = new Referenceable("Manager", "SecurityClearance");
-
-        hrDept.set("name", "hr");
-        john.set("name", "John");
-        john.set("department", hrDept);
-        jane.set("name", "Jane");
-        jane.set("department", hrDept);
-
-        john.set("manager", jane);
-
-        hrDept.set("employees", ImmutableList.<Referenceable>of(john, jane));
-
-        jane.set("subordinates", ImmutableList.<Referenceable>of(john));
-
-        jane.getTrait("SecurityClearance").set("level", 1);
-
-        ClassType deptType = ts.getDataType(ClassType.class, "Department");
-        ITypedReferenceableInstance hrDept2 = deptType.convert(hrDept, Multiplicity.REQUIRED);
-
-        return hrDept;
-    }
-}
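
For context: the deleted BaseTest defines STRUCT_TYPE_1 with one attribute per primitive, array, and map type. A trimmed sketch of the define-then-populate pattern it exercises, using only the TypeSystem, TypesUtil, Struct, and Maps calls shown in the file above:

    // Define a struct type with a required int and an optional map attribute,
    // then populate an instance of it (a reduced createStruct()).
    TypeSystem ts = TypeSystem.getInstance();
    StructType structType = ts.defineStructType("t1", true,
            TypesUtil.createRequiredAttrDef("a", DataTypes.INT_TYPE),
            TypesUtil.createOptionalAttrDef("o",
                    ts.defineMapType(DataTypes.STRING_TYPE, DataTypes.DOUBLE_TYPE)));

    Struct s = new Struct(structType.getName());
    s.set("a", 1);
    Map<String, Double> hm = Maps.newHashMap();
    hm.put("a", 1.0);
    s.set("o", hm);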
