ATLAS-128 DSL - Add support for comparisions on list type (suma.shivaprasad via 
shwethags)


Project: http://git-wip-us.apache.org/repos/asf/incubator-atlas/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-atlas/commit/7660c9b2
Tree: http://git-wip-us.apache.org/repos/asf/incubator-atlas/tree/7660c9b2
Diff: http://git-wip-us.apache.org/repos/asf/incubator-atlas/diff/7660c9b2

Branch: refs/heads/master
Commit: 7660c9b2921df6baacf88791077424a374f4aae9
Parents: 539f243
Author: Shwetha GS <[email protected]>
Authored: Mon Oct 5 21:18:21 2015 +0530
Committer: Shwetha GS <[email protected]>
Committed: Mon Oct 5 21:18:21 2015 +0530

----------------------------------------------------------------------
 .../org/apache/atlas/hive/hook/HiveHookIT.java  |   12 +-
 release-log.txt                                 |    1 +
 .../graph/DefaultGraphPersistenceStrategy.java  |   46 +-
 .../atlas/repository/MetadataRepository.java    |    2 +-
 .../graph/GraphBackedMetadataRepository.java    |  131 +-
 .../repository/graph/TitanGraphProvider.java    |   16 +-
 .../org/apache/atlas/query/Expressions.scala    |   31 +-
 .../query/GraphPersistenceStrategies.scala      |   55 +-
 .../org/apache/atlas/query/GremlinQuery.scala   |   43 +-
 .../org/apache/atlas/query/QueryParser.scala    |    9 +-
 .../scala/org/apache/atlas/query/Resolver.scala |    2 +-
 .../apache/atlas/BaseHiveRepositoryTest.java    |  357 +++++
 .../GraphBackedDiscoveryServiceTest.java        |  161 +-
 .../atlas/discovery/HiveLineageServiceTest.java |  289 +---
 .../GraphBackedMetadataRepositoryTest.java      |   16 +-
 .../graph/GraphRepoMapperScaleTest.java         |    6 +-
 .../org/apache/atlas/query/GremlinTest.scala    | 1493 ++++++++++++------
 .../org/apache/atlas/query/GremlinTest2.scala   |   15 +-
 .../apache/atlas/query/HiveTitanSample.scala    |  101 +-
 .../apache/atlas/query/LineageQueryTest.scala   |   60 +-
 .../org/apache/atlas/query/ParserTest.scala     |   11 +-
 .../apache/atlas/query/QueryTestsUtils.scala    |   39 +-
 .../atlas/typesystem/types/DataTypes.java       |    4 +-
 23 files changed, 1861 insertions(+), 1039 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/incubator-atlas/blob/7660c9b2/addons/hive-bridge/src/test/java/org/apache/atlas/hive/hook/HiveHookIT.java
----------------------------------------------------------------------
diff --git 
a/addons/hive-bridge/src/test/java/org/apache/atlas/hive/hook/HiveHookIT.java 
b/addons/hive-bridge/src/test/java/org/apache/atlas/hive/hook/HiveHookIT.java
index 589ef16..f4b9350 100755
--- 
a/addons/hive-bridge/src/test/java/org/apache/atlas/hive/hook/HiveHookIT.java
+++ 
b/addons/hive-bridge/src/test/java/org/apache/atlas/hive/hook/HiveHookIT.java
@@ -329,12 +329,16 @@ public class HiveHookIT {
         String tableType = HiveDataTypes.HIVE_TABLE.getName();
 
         LOG.debug("Searching for partition of {}.{} with values {}", dbName, 
tableName, value);
-        //todo replace with DSL
-        String gremlinQuery = String.format("g.V.has('__typeName', 
'%s').has('%s.values', ['%s']).as('p')."
+       /*  gremlinQuery = String.format("g.V.has('__typeName', 
'%s').has('%s.values', ['%s']).as('p')."
                         + "out('__%s.table').has('%s.tableName', 
'%s').out('__%s.db').has('%s.name', '%s')"
                         + ".has('%s.clusterName', '%s').back('p').toList()", 
typeName, typeName, value, typeName,
-                tableType, tableName.toLowerCase(), tableType, dbType, 
dbName.toLowerCase(), dbType, CLUSTER_NAME);
-        assertEntityIsRegistered(gremlinQuery);
+                 tableType, tableName.toLowerCase(), tableType, dbType, 
dbName.toLowerCase(), dbType, CLUSTER_NAME);
+         */
+        String dslQuery = String.format("%s as p where values = ['%s'], table 
where tableName = '%s', "
+                               + "db where name = '%s' and clusterName = '%s' 
select p", typeName, value,
+                            tableName.toLowerCase(), dbName.toLowerCase(), 
CLUSTER_NAME);
+
+        assertEntityIsRegistered(dslQuery);
     }
 
     private String assertEntityIsRegistered(final String query) throws 
Exception {

http://git-wip-us.apache.org/repos/asf/incubator-atlas/blob/7660c9b2/release-log.txt
----------------------------------------------------------------------
diff --git a/release-log.txt b/release-log.txt
index ceaa493..503afd5 100644
--- a/release-log.txt
+++ b/release-log.txt
@@ -9,6 +9,7 @@ ATLAS-54 Rename configs in hive hook (shwethags)
 ATLAS-3 Mixed Index creation fails with Date types (suma.shivaprasad via 
shwethags)
 
 ALL CHANGES:
+ATLAS-128 DSL - Add support for comparisions on list type (suma.shivaprasad 
via shwethags)
 ATLAS-168 Atlas UI - Max column in hive 4 (darshankumar89 via shwethags)
 ATLAS-155 Images do not show up on the dashboard (darshankumar89 via shwethags)
 ATLAS-134 Some defects found when reviewing the source code (ltfxyz via 
shwethags)

http://git-wip-us.apache.org/repos/asf/incubator-atlas/blob/7660c9b2/repository/src/main/java/org/apache/atlas/discovery/graph/DefaultGraphPersistenceStrategy.java
----------------------------------------------------------------------
diff --git 
a/repository/src/main/java/org/apache/atlas/discovery/graph/DefaultGraphPersistenceStrategy.java
 
b/repository/src/main/java/org/apache/atlas/discovery/graph/DefaultGraphPersistenceStrategy.java
index c446d15..65a46a4 100755
--- 
a/repository/src/main/java/org/apache/atlas/discovery/graph/DefaultGraphPersistenceStrategy.java
+++ 
b/repository/src/main/java/org/apache/atlas/discovery/graph/DefaultGraphPersistenceStrategy.java
@@ -18,6 +18,8 @@
 
 package org.apache.atlas.discovery.graph;
 
+import com.google.common.collect.ImmutableCollection;
+import com.google.common.collect.ImmutableList;
 import com.thinkaurelius.titan.core.TitanVertex;
 import org.apache.atlas.AtlasException;
 import org.apache.atlas.query.Expressions;
@@ -32,6 +34,7 @@ import 
org.apache.atlas.typesystem.ITypedReferenceableInstance;
 import org.apache.atlas.typesystem.ITypedStruct;
 import org.apache.atlas.typesystem.persistence.Id;
 import org.apache.atlas.typesystem.types.AttributeInfo;
+import org.apache.atlas.typesystem.types.DataTypes;
 import org.apache.atlas.typesystem.types.IDataType;
 import org.apache.atlas.typesystem.types.Multiplicity;
 import org.apache.atlas.typesystem.types.StructType;
@@ -40,6 +43,7 @@ import org.apache.atlas.typesystem.types.TypeSystem;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
+import java.util.ArrayList;
 import java.util.List;
 
 /**
@@ -106,11 +110,18 @@ public class DefaultGraphPersistenceStrategy implements 
GraphPersistenceStrategi
             case PRIMITIVE:
             case ENUM:
                 return dataType.convert(value, Multiplicity.OPTIONAL);
-
             case ARRAY:
-                // todo
-                break;
-
+                DataTypes.ArrayType arrType = (DataTypes.ArrayType) dataType;
+                IDataType<?> elemType = arrType.getElemType();
+                ImmutableCollection.Builder result = ImmutableList.builder();
+                List list = (List) value;
+                for(Object listElement : list) {
+                    Object collectionEntry = 
constructCollectionEntry(elemType, listElement);
+                    if(collectionEntry != null) {
+                        result.add(collectionEntry);
+                    }
+                }
+                return (U)result.build();
             case MAP:
                 // todo
                 break;
@@ -128,7 +139,7 @@ public class DefaultGraphPersistenceStrategy implements 
GraphPersistenceStrategi
 
                 } else {
                     metadataRepository.getGraphToInstanceMapper()
-                            .mapVertexToInstance(structVertex, structInstance, 
structType.fieldMapping().fields);
+                        .mapVertexToInstance(structVertex, structInstance, 
structType.fieldMapping().fields);
                 }
                 return dataType.convert(structInstance, Multiplicity.OPTIONAL);
 
@@ -141,14 +152,14 @@ public class DefaultGraphPersistenceStrategy implements 
GraphPersistenceStrategi
                 // 
metadataRepository.getGraphToInstanceMapper().mapVertexToTraitInstance(
                 //        traitVertex, dataType.getName(), , traitType, 
traitInstance);
                 metadataRepository.getGraphToInstanceMapper()
-                        .mapVertexToInstance(traitVertex, traitInstance, 
traitType.fieldMapping().fields);
+                    .mapVertexToInstance(traitVertex, traitInstance, 
traitType.fieldMapping().fields);
                 break;
 
             case CLASS:
                 TitanVertex classVertex = (TitanVertex) value;
                 ITypedReferenceableInstance classInstance = 
metadataRepository.getGraphToInstanceMapper()
-                        
.mapGraphToTypedInstance(classVertex.<String>getProperty(Constants.GUID_PROPERTY_KEY),
-                                classVertex);
+                    
.mapGraphToTypedInstance(classVertex.<String>getProperty(Constants.GUID_PROPERTY_KEY),
+                        classVertex);
                 return dataType.convert(classInstance, Multiplicity.OPTIONAL);
 
             default:
@@ -161,6 +172,25 @@ public class DefaultGraphPersistenceStrategy implements 
GraphPersistenceStrategi
         return null;
     }
 
+    public <U> U constructCollectionEntry(IDataType<U> elementType, Object 
value) throws AtlasException {
+        switch (elementType.getTypeCategory()) {
+        case PRIMITIVE:
+        case ENUM:
+            return constructInstance(elementType, value);
+        //The array values in case of STRUCT, CLASS contain the edgeId if the 
outgoing edge which links to the STRUCT, CLASS vertex referenced
+        case STRUCT:
+        case CLASS:
+            String edgeId = (String) value;
+            return (U) 
metadataRepository.getGraphToInstanceMapper().getReferredEntity(edgeId, 
elementType);
+        case ARRAY:
+        case MAP:
+        case TRAIT:
+            return null;
+        default:
+            throw new UnsupportedOperationException("Load for type " + 
elementType + " in collections is not supported");
+        }
+    }
+
     @Override
     public String edgeLabel(TypeUtils.FieldInfo fInfo) {
         return fInfo.reverseDataType() == null ? edgeLabel(fInfo.dataType(), 
fInfo.attrInfo()) :

http://git-wip-us.apache.org/repos/asf/incubator-atlas/blob/7660c9b2/repository/src/main/java/org/apache/atlas/repository/MetadataRepository.java
----------------------------------------------------------------------
diff --git 
a/repository/src/main/java/org/apache/atlas/repository/MetadataRepository.java 
b/repository/src/main/java/org/apache/atlas/repository/MetadataRepository.java
index 97d1fe2..c11d9a0 100755
--- 
a/repository/src/main/java/org/apache/atlas/repository/MetadataRepository.java
+++ 
b/repository/src/main/java/org/apache/atlas/repository/MetadataRepository.java
@@ -152,7 +152,7 @@ public interface MetadataRepository {
     void deleteTrait(String guid, String traitNameToBeDeleted) throws 
RepositoryException;
 
     /**
-     * Adds the property to the entity that corresponds to the GUID
+     * Adds/Updates the property to/in the entity that corresponds to the GUID
      * @param guid entity id
      * @param property property name
      * @param value    property value

http://git-wip-us.apache.org/repos/asf/incubator-atlas/blob/7660c9b2/repository/src/main/java/org/apache/atlas/repository/graph/GraphBackedMetadataRepository.java
----------------------------------------------------------------------
diff --git 
a/repository/src/main/java/org/apache/atlas/repository/graph/GraphBackedMetadataRepository.java
 
b/repository/src/main/java/org/apache/atlas/repository/graph/GraphBackedMetadataRepository.java
index b44ce60..ae92b29 100755
--- 
a/repository/src/main/java/org/apache/atlas/repository/graph/GraphBackedMetadataRepository.java
+++ 
b/repository/src/main/java/org/apache/atlas/repository/graph/GraphBackedMetadataRepository.java
@@ -77,7 +77,6 @@ public class GraphBackedMetadataRepository implements 
MetadataRepository {
 
     private static final Logger LOG = 
LoggerFactory.getLogger(GraphBackedMetadataRepository.class);
     private static final String FULL_TEXT_DELIMITER = " ";
-
     private static final String EDGE_LABEL_PREFIX = "__";
 
     private final TypedInstanceToGraphMapper instanceToGraphMapper = new 
TypedInstanceToGraphMapper();
@@ -117,7 +116,7 @@ public class GraphBackedMetadataRepository implements 
MetadataRepository {
 
     @Override
     public String getTraitLabel(IDataType<?> dataType, String traitName) {
-        return dataType.getName() + "." + traitName;
+        return getTraitLabel(dataType.getName(), traitName);
     }
 
     @Override
@@ -137,6 +136,10 @@ public class GraphBackedMetadataRepository implements 
MetadataRepository {
         return EDGE_LABEL_PREFIX + typeName + "." + attrName;
     }
 
+    public String getTraitLabel(String typeName, String attrName) {
+        return typeName + "." + attrName;
+    }
+
     public String getEdgeLabel(ITypedInstance typedInstance, AttributeInfo 
aInfo) throws AtlasException {
         IDataType dataType = typeSystem.getDataType(IDataType.class, 
typedInstance.getTypeName());
         return getEdgeLabel(dataType, aInfo);
@@ -263,9 +266,10 @@ public class GraphBackedMetadataRepository implements 
MetadataRepository {
 
             // add the trait instance as a new vertex
             final String typeName = getTypeName(instanceVertex);
+
             instanceToGraphMapper
-                    .mapTraitInstanceToVertex(traitInstance, 
getIdFromVertex(typeName, instanceVertex), typeName,
-                            instanceVertex, Collections.<Id, 
Vertex>emptyMap());
+                    .mapTraitInstanceToVertex(traitInstance, 
getIdFromVertex(typeName, instanceVertex), 
typeSystem.getDataType(ClassType.class, typeName),
+                        instanceVertex, Collections.<Id, Vertex>emptyMap());
 
             // update the traits in entity once adding trait instance is 
successful
             GraphHelper.addProperty(instanceVertex, 
Constants.TRAIT_NAMES_PROPERTY_KEY, traitName);
@@ -298,7 +302,7 @@ public class GraphBackedMetadataRepository implements 
MetadataRepository {
             }
 
             final String entityTypeName = getTypeName(instanceVertex);
-            String relationshipLabel = getEdgeLabel(entityTypeName, 
traitNameToBeDeleted);
+            String relationshipLabel = getTraitLabel(entityTypeName, 
traitNameToBeDeleted);
             Iterator<Edge> results = instanceVertex.getEdges(Direction.OUT, 
relationshipLabel).iterator();
             if (results.hasNext()) { // there should only be one edge for this 
label
                 final Edge traitEdge = results.next();
@@ -384,7 +388,7 @@ public class GraphBackedMetadataRepository implements 
MetadataRepository {
         return getQualifiedName(dataType, attributeInfo.name);
     }
 
-    String getQualifiedName(IDataType dataType, String attributeName) throws 
AtlasException {
+    public static String getQualifiedName(IDataType dataType, String 
attributeName) throws AtlasException {
         return dataType.getTypeCategory() == DataTypes.TypeCategory.STRUCT ? 
dataType.getName() + "." + attributeName
                 // else class or trait
                 : ((HierarchicalType) 
dataType).getQualifiedName(attributeName);
@@ -683,7 +687,7 @@ public class GraphBackedMetadataRepository implements 
MetadataRepository {
                 ITypedStruct traitInstance = (ITypedStruct) 
typedInstance.getTrait(traitName);
 
                 // add the attributes for the trait instance
-                mapTraitInstanceToVertex(traitInstance, typedInstance, 
instanceVertex,
+                mapTraitInstanceToVertex(traitInstance, typedInstance.getId(), 
classType, instanceVertex,
                         entityProcessor.idToVertexMap);
             }
         }
@@ -870,15 +874,8 @@ public class GraphBackedMetadataRepository implements 
MetadataRepository {
             return structInstanceVertex;
         }
 
-        private void mapTraitInstanceToVertex(ITypedStruct traitInstance, 
ITypedReferenceableInstance typedInstance,
-                Vertex parentInstanceVertex, Map<Id, Vertex> idToVertexMap) 
throws AtlasException {
-            // add a new vertex for the struct or trait instance
-            mapTraitInstanceToVertex(traitInstance, typedInstance.getId(), 
typedInstance.getTypeName(),
-                    parentInstanceVertex, idToVertexMap);
-        }
-
         private void mapTraitInstanceToVertex(ITypedStruct traitInstance, Id 
typedInstanceId,
-                String typedInstanceTypeName, Vertex parentInstanceVertex, 
Map<Id, Vertex> idToVertexMap)
+            IDataType entityType, Vertex parentInstanceVertex, Map<Id, Vertex> 
idToVertexMap)
         throws AtlasException {
             // add a new vertex for the struct or trait instance
             final String traitName = traitInstance.getTypeName();
@@ -892,7 +889,7 @@ public class GraphBackedMetadataRepository implements 
MetadataRepository {
                     traitInstance.fieldMapping().fields, idToVertexMap, false);
 
             // add an edge to the newly created vertex from the parent
-            String relationshipLabel = getEdgeLabel(typedInstanceTypeName, 
traitName);
+            String relationshipLabel = getTraitLabel(entityType, traitName);
             GraphHelper.addEdge(titanGraph, parentInstanceVertex, 
traitInstanceVertex, relationshipLabel);
         }
 
@@ -937,7 +934,7 @@ public class GraphBackedMetadataRepository implements 
MetadataRepository {
     public final class GraphToTypedInstanceMapper {
 
         public ITypedReferenceableInstance mapGraphToTypedInstance(String 
guid, Vertex instanceVertex)
-        throws AtlasException {
+            throws AtlasException {
 
             LOG.debug("Mapping graph root vertex {} to typed instance for guid 
{}", instanceVertex, guid);
             String typeName = 
instanceVertex.getProperty(Constants.ENTITY_TYPE_PROPERTY_KEY);
@@ -948,7 +945,7 @@ public class GraphBackedMetadataRepository implements 
MetadataRepository {
 
             ClassType classType = typeSystem.getDataType(ClassType.class, 
typeName);
             ITypedReferenceableInstance typedInstance =
-                    classType.createInstance(id, traits.toArray(new 
String[traits.size()]));
+                classType.createInstance(id, traits.toArray(new 
String[traits.size()]));
 
             mapVertexToInstance(instanceVertex, typedInstance, 
classType.fieldMapping().fields);
             mapVertexToInstanceTraits(instanceVertex, typedInstance, traits);
@@ -957,7 +954,7 @@ public class GraphBackedMetadataRepository implements 
MetadataRepository {
         }
 
         private void mapVertexToInstanceTraits(Vertex instanceVertex, 
ITypedReferenceableInstance typedInstance,
-                List<String> traits) throws AtlasException {
+            List<String> traits) throws AtlasException {
             for (String traitName : traits) {
                 LOG.debug("mapping trait {} to instance", traitName);
                 TraitType traitType = typeSystem.getDataType(TraitType.class, 
traitName);
@@ -966,10 +963,10 @@ public class GraphBackedMetadataRepository implements 
MetadataRepository {
         }
 
         public void mapVertexToInstance(Vertex instanceVertex, ITypedInstance 
typedInstance,
-                Map<String, AttributeInfo> fields) throws AtlasException {
+            Map<String, AttributeInfo> fields) throws AtlasException {
 
             LOG.debug("Mapping vertex {} to instance {} for fields", 
instanceVertex, typedInstance.getTypeName(),
-                    fields);
+                fields);
             for (AttributeInfo attributeInfo : fields.values()) {
                 mapVertexToAttribute(instanceVertex, typedInstance, 
attributeInfo);
             }
@@ -977,7 +974,7 @@ public class GraphBackedMetadataRepository implements 
MetadataRepository {
 
 
         private void mapVertexToAttribute(Vertex instanceVertex, 
ITypedInstance typedInstance,
-                AttributeInfo attributeInfo) throws AtlasException {
+            AttributeInfo attributeInfo) throws AtlasException {
             LOG.debug("Mapping attributeInfo {}", attributeInfo.name);
             final IDataType dataType = attributeInfo.dataType();
             final String vertexPropertyName = getQualifiedName(typedInstance, 
attributeInfo);
@@ -993,8 +990,8 @@ public class GraphBackedMetadataRepository implements 
MetadataRepository {
                 }
 
                 typedInstance.set(attributeInfo.name,
-                        
dataType.convert(instanceVertex.<String>getProperty(vertexPropertyName),
-                                Multiplicity.REQUIRED));
+                    
dataType.convert(instanceVertex.<String>getProperty(vertexPropertyName),
+                        Multiplicity.REQUIRED));
                 break;
 
             case ARRAY:
@@ -1015,8 +1012,8 @@ public class GraphBackedMetadataRepository implements 
MetadataRepository {
 
             case CLASS:
                 String relationshipLabel = getEdgeLabel(typedInstance, 
attributeInfo);
-                Object idOrInstance = 
mapClassReferenceToVertex(instanceVertex, attributeInfo, relationshipLabel,
-                        attributeInfo.dataType());
+                Object idOrInstance = 
mapVertexToClassReference(instanceVertex, attributeInfo, relationshipLabel,
+                    attributeInfo.dataType());
                 typedInstance.set(attributeInfo.name, idOrInstance);
                 break;
 
@@ -1025,8 +1022,8 @@ public class GraphBackedMetadataRepository implements 
MetadataRepository {
             }
         }
 
-        private Object mapClassReferenceToVertex(Vertex instanceVertex, 
AttributeInfo attributeInfo,
-                String relationshipLabel, IDataType dataType) throws 
AtlasException {
+        private Object mapVertexToClassReference(Vertex instanceVertex, 
AttributeInfo attributeInfo,
+            String relationshipLabel, IDataType dataType) throws 
AtlasException {
             LOG.debug("Finding edge for {} -> label {} ", instanceVertex, 
relationshipLabel);
             Iterator<Edge> results = instanceVertex.getEdges(Direction.OUT, 
relationshipLabel).iterator();
             if (results.hasNext()) {
@@ -1040,8 +1037,8 @@ public class GraphBackedMetadataRepository implements 
MetadataRepository {
                         return mapGraphToTypedInstance(guid, referenceVertex);
                     } else {
                         Id referenceId =
-                                new Id(guid, 
referenceVertex.<Integer>getProperty(Constants.VERSION_PROPERTY_KEY),
-                                        dataType.getName());
+                            new Id(guid, 
referenceVertex.<Integer>getProperty(Constants.VERSION_PROPERTY_KEY),
+                                dataType.getName());
                         LOG.debug("Found non-composite, adding id {} ", 
referenceId);
                         return referenceId;
                     }
@@ -1053,7 +1050,7 @@ public class GraphBackedMetadataRepository implements 
MetadataRepository {
 
         @SuppressWarnings("unchecked")
         private void mapVertexToArrayInstance(Vertex instanceVertex, 
ITypedInstance typedInstance,
-                AttributeInfo attributeInfo, String propertyName) throws 
AtlasException {
+            AttributeInfo attributeInfo, String propertyName) throws 
AtlasException {
             LOG.debug("mapping vertex {} to array {}", instanceVertex, 
attributeInfo.name);
             List list = instanceVertex.getProperty(propertyName);
             if (list == null || list.size() == 0) {
@@ -1065,14 +1062,14 @@ public class GraphBackedMetadataRepository implements 
MetadataRepository {
             ArrayList values = new ArrayList();
             for (Object listElement : list) {
                 values.add(mapVertexToCollectionEntry(instanceVertex, 
attributeInfo, elementType, listElement,
-                        propertyName));
+                    propertyName));
             }
 
             typedInstance.set(attributeInfo.name, values);
         }
 
         private Object mapVertexToCollectionEntry(Vertex instanceVertex, 
AttributeInfo attributeInfo,
-                IDataType elementType, Object value, String propertyName) 
throws AtlasException {
+            IDataType elementType, Object value, String propertyName) throws 
AtlasException {
             String edgeLabel = EDGE_LABEL_PREFIX + propertyName;
             switch (elementType.getTypeCategory()) {
             case PRIMITIVE:
@@ -1087,10 +1084,10 @@ public class GraphBackedMetadataRepository implements 
MetadataRepository {
 
             case STRUCT:
                 return getStructInstanceFromVertex(instanceVertex, 
elementType, attributeInfo.name, edgeLabel,
-                        (String) value);
+                    (String) value);
 
             case CLASS:
-                return mapClassReferenceToVertex(instanceVertex, 
attributeInfo, edgeLabel, elementType, (String) value);
+                return mapVertexToClassReference(instanceVertex, 
attributeInfo, edgeLabel, elementType, (String) value);
 
             default:
                 break;
@@ -1101,7 +1098,7 @@ public class GraphBackedMetadataRepository implements 
MetadataRepository {
 
         @SuppressWarnings("unchecked")
         private void mapVertexToMapInstance(Vertex instanceVertex, 
ITypedInstance typedInstance,
-                AttributeInfo attributeInfo, final String propertyName) throws 
AtlasException {
+            AttributeInfo attributeInfo, final String propertyName) throws 
AtlasException {
             LOG.debug("mapping vertex {} to array {}", instanceVertex, 
attributeInfo.name);
             List<String> keys = instanceVertex.getProperty(propertyName);
             if (keys == null || keys.size() == 0) {
@@ -1115,14 +1112,14 @@ public class GraphBackedMetadataRepository implements 
MetadataRepository {
                 String keyPropertyName = propertyName + "." + key;
                 Object keyValue = instanceVertex.getProperty(keyPropertyName);
                 values.put(key,
-                        mapVertexToCollectionEntry(instanceVertex, 
attributeInfo, valueType, keyValue, propertyName));
+                    mapVertexToCollectionEntry(instanceVertex, attributeInfo, 
valueType, keyValue, propertyName));
             }
 
             typedInstance.set(attributeInfo.name, values);
         }
 
         private ITypedStruct getStructInstanceFromVertex(Vertex 
instanceVertex, IDataType elemType,
-                String attributeName, String relationshipLabel, String edgeId) 
throws AtlasException {
+            String attributeName, String relationshipLabel, String edgeId) 
throws AtlasException {
             LOG.debug("Finding edge for {} -> label {} ", instanceVertex, 
relationshipLabel);
             for (Edge edge : instanceVertex.getEdges(Direction.OUT, 
relationshipLabel)) {
                 if (edgeId.equals(String.valueOf(edge.getId()))) {
@@ -1131,10 +1128,9 @@ public class GraphBackedMetadataRepository implements 
MetadataRepository {
 
                     if (structInstanceVertex != null) {
                         LOG.debug("Found struct instance vertex {}, mapping to 
instance {} ", structInstanceVertex,
-                                elemType.getName());
+                            elemType.getName());
                         StructType structType = 
typeSystem.getDataType(StructType.class, elemType.getName());
                         ITypedStruct structInstance = 
structType.createInstance();
-
                         mapVertexToInstance(structInstanceVertex, 
structInstance, structType.fieldMapping().fields);
                         return structInstance;
                     }
@@ -1146,8 +1142,8 @@ public class GraphBackedMetadataRepository implements 
MetadataRepository {
             return null;
         }
 
-        private Object mapClassReferenceToVertex(Vertex instanceVertex, 
AttributeInfo attributeInfo,
-                String relationshipLabel, IDataType dataType, String edgeId) 
throws AtlasException {
+        private Object mapVertexToClassReference(Vertex instanceVertex, 
AttributeInfo attributeInfo,
+            String relationshipLabel, IDataType dataType, String edgeId) 
throws AtlasException {
             LOG.debug("Finding edge for {} -> label {} ", instanceVertex, 
relationshipLabel);
             for (Edge edge : instanceVertex.getEdges(Direction.OUT, 
relationshipLabel)) {
                 if (edgeId.equals(String.valueOf(edge.getId()))) {
@@ -1155,15 +1151,15 @@ public class GraphBackedMetadataRepository implements 
MetadataRepository {
                     if (referenceVertex != null) {
                         final String guid = 
referenceVertex.getProperty(Constants.GUID_PROPERTY_KEY);
                         LOG.debug("Found vertex {} for label {} with guid {}", 
referenceVertex, relationshipLabel,
-                                guid);
+                            guid);
                         if (attributeInfo.isComposite) {
                             //Also, when you retrieve a type's instance, you 
get the complete object graph of the composites
                             LOG.debug("Found composite, mapping vertex to 
instance");
                             return mapGraphToTypedInstance(guid, 
referenceVertex);
                         } else {
                             Id referenceId =
-                                    new Id(guid, 
referenceVertex.<Integer>getProperty(Constants.VERSION_PROPERTY_KEY),
-                                            dataType.getName());
+                                new Id(guid, 
referenceVertex.<Integer>getProperty(Constants.VERSION_PROPERTY_KEY),
+                                    dataType.getName());
                             LOG.debug("Found non-composite, adding id {} ", 
referenceId);
                             return referenceId;
                         }
@@ -1177,7 +1173,7 @@ public class GraphBackedMetadataRepository implements 
MetadataRepository {
         }
 
         private void mapVertexToStructInstance(Vertex instanceVertex, 
ITypedInstance typedInstance,
-                AttributeInfo attributeInfo) throws AtlasException {
+            AttributeInfo attributeInfo) throws AtlasException {
             LOG.debug("mapping vertex {} to struct {}", instanceVertex, 
attributeInfo.name);
             StructType structType = typeSystem.getDataType(StructType.class, 
attributeInfo.dataType().getName());
             ITypedStruct structInstance = structType.createInstance();
@@ -1189,7 +1185,7 @@ public class GraphBackedMetadataRepository implements 
MetadataRepository {
                 final Vertex structInstanceVertex = 
edge.getVertex(Direction.IN);
                 if (structInstanceVertex != null) {
                     LOG.debug("Found struct instance vertex {}, mapping to 
instance {} ", structInstanceVertex,
-                            structInstance.getTypeName());
+                        structInstance.getTypeName());
                     mapVertexToInstance(structInstanceVertex, structInstance, 
structType.fieldMapping().fields);
                     break;
                 }
@@ -1197,21 +1193,21 @@ public class GraphBackedMetadataRepository implements 
MetadataRepository {
         }
 
         private void mapVertexToTraitInstance(Vertex instanceVertex, 
ITypedReferenceableInstance typedInstance,
-                String traitName, TraitType traitType) throws AtlasException {
+            String traitName, TraitType traitType) throws AtlasException {
             ITypedStruct traitInstance = (ITypedStruct) 
typedInstance.getTrait(traitName);
 
             mapVertexToTraitInstance(instanceVertex, 
typedInstance.getTypeName(), traitName, traitType, traitInstance);
         }
 
         private void mapVertexToTraitInstance(Vertex instanceVertex, String 
typedInstanceTypeName, String traitName,
-                TraitType traitType, ITypedStruct traitInstance) throws 
AtlasException {
-            String relationshipLabel = getEdgeLabel(typedInstanceTypeName, 
traitName);
+            TraitType traitType, ITypedStruct traitInstance) throws 
AtlasException {
+            String relationshipLabel = getTraitLabel(typedInstanceTypeName, 
traitName);
             LOG.debug("Finding edge for {} -> label {} ", instanceVertex, 
relationshipLabel);
             for (Edge edge : instanceVertex.getEdges(Direction.OUT, 
relationshipLabel)) {
                 final Vertex traitInstanceVertex = 
edge.getVertex(Direction.IN);
                 if (traitInstanceVertex != null) {
                     LOG.debug("Found trait instance vertex {}, mapping to 
instance {} ", traitInstanceVertex,
-                            traitInstance.getTypeName());
+                        traitInstance.getTypeName());
                     mapVertexToInstance(traitInstanceVertex, traitInstance, 
traitType.fieldMapping().fields);
                     break;
                 }
@@ -1219,7 +1215,7 @@ public class GraphBackedMetadataRepository implements 
MetadataRepository {
         }
 
         private void mapVertexToPrimitive(Vertex instanceVertex, 
ITypedInstance typedInstance,
-                AttributeInfo attributeInfo) throws AtlasException {
+            AttributeInfo attributeInfo) throws AtlasException {
             LOG.debug("Adding primitive {} from vertex {}", attributeInfo, 
instanceVertex);
             final String vertexPropertyName = getQualifiedName(typedInstance, 
attributeInfo);
             if (instanceVertex.getProperty(vertexPropertyName) == null) {
@@ -1246,11 +1242,40 @@ public class GraphBackedMetadataRepository implements 
MetadataRepository {
                 typedInstance.setDouble(attributeInfo.name, 
instanceVertex.<Double>getProperty(vertexPropertyName));
             } else if (attributeInfo.dataType() == DataTypes.BIGDECIMAL_TYPE) {
                 typedInstance
-                        .setBigDecimal(attributeInfo.name, 
instanceVertex.<BigDecimal>getProperty(vertexPropertyName));
+                    .setBigDecimal(attributeInfo.name, 
instanceVertex.<BigDecimal>getProperty(vertexPropertyName));
             } else if (attributeInfo.dataType() == DataTypes.DATE_TYPE) {
                 final Long dateVal = 
instanceVertex.<Long>getProperty(vertexPropertyName);
                 typedInstance.setDate(attributeInfo.name, new Date(dateVal));
             }
         }
+
+        public ITypedInstance getReferredEntity(String edgeId, IDataType<?> 
referredType) throws AtlasException {
+            final Edge edge = titanGraph.getEdge(edgeId);
+            if(edge != null) {
+                final Vertex referredVertex = edge.getVertex(Direction.IN);
+                if (referredVertex != null) {
+                    switch (referredType.getTypeCategory()) {
+                    case STRUCT:
+                        LOG.debug("Found struct instance vertex {}, mapping to 
instance {} ", referredVertex,
+                            referredType.getName());
+                        StructType structType = (StructType)referredType;
+                        ITypedStruct instance = structType.createInstance();
+                        Map<String, AttributeInfo> fields = 
structType.fieldMapping().fields;
+                        mapVertexToInstance(referredVertex, instance, fields);
+                        return instance;
+                    case CLASS:
+                        //TODO isComposite handling for class loads
+                        final String guid = 
referredVertex.getProperty(Constants.GUID_PROPERTY_KEY);
+                        Id referenceId =
+                            new Id(guid, 
referredVertex.<Integer>getProperty(Constants.VERSION_PROPERTY_KEY),
+                                referredType.getName());
+                        return referenceId;
+                    default:
+                        throw new UnsupportedOperationException("Loading " + 
referredType.getTypeCategory() + " is not supported");
+                    }
+                }
+            }
+            return null;
+        }
     }
 }

http://git-wip-us.apache.org/repos/asf/incubator-atlas/blob/7660c9b2/repository/src/main/java/org/apache/atlas/repository/graph/TitanGraphProvider.java
----------------------------------------------------------------------
diff --git 
a/repository/src/main/java/org/apache/atlas/repository/graph/TitanGraphProvider.java
 
b/repository/src/main/java/org/apache/atlas/repository/graph/TitanGraphProvider.java
index 6605ae7..b634ace 100755
--- 
a/repository/src/main/java/org/apache/atlas/repository/graph/TitanGraphProvider.java
+++ 
b/repository/src/main/java/org/apache/atlas/repository/graph/TitanGraphProvider.java
@@ -48,9 +48,9 @@ public class TitanGraphProvider implements 
GraphProvider<TitanGraph> {
      */
     private static final String GRAPH_PREFIX = "atlas.graph";
 
-    private static TitanGraph graphInstance;
+    private static volatile TitanGraph graphInstance;
 
-    private static Configuration getConfiguration() throws AtlasException {
+    public static Configuration getConfiguration() throws AtlasException {
         Configuration configProperties = ApplicationProperties.get();
         return ApplicationProperties.getSubsetConfiguration(configProperties, 
GRAPH_PREFIX);
     }
@@ -84,10 +84,7 @@ public class TitanGraphProvider implements 
GraphProvider<TitanGraph> {
         }
     }
 
-    @Override
-    @Singleton
-    @Provides
-    public TitanGraph get() {
+    public static TitanGraph getGraphInstance() {
         if(graphInstance == null) {
             synchronized (TitanGraphProvider.class) {
                 if(graphInstance == null) {
@@ -104,4 +101,11 @@ public class TitanGraphProvider implements 
GraphProvider<TitanGraph> {
         }
         return graphInstance;
     }
+
+    @Override
+    @Singleton
+    @Provides
+    public TitanGraph get() {
+        return getGraphInstance();
+    }
 }

http://git-wip-us.apache.org/repos/asf/incubator-atlas/blob/7660c9b2/repository/src/main/scala/org/apache/atlas/query/Expressions.scala
----------------------------------------------------------------------
diff --git a/repository/src/main/scala/org/apache/atlas/query/Expressions.scala 
b/repository/src/main/scala/org/apache/atlas/query/Expressions.scala
index 24fa917..d591c20 100755
--- a/repository/src/main/scala/org/apache/atlas/query/Expressions.scala
+++ b/repository/src/main/scala/org/apache/atlas/query/Expressions.scala
@@ -18,7 +18,11 @@
 
 package org.apache.atlas.query
 
+import java.util
+
+import com.google.common.collect.ImmutableCollection
 import org.apache.atlas.AtlasException
+import org.apache.atlas.typesystem.ITypedInstance
 import org.apache.atlas.typesystem.types.DataTypes.{ArrayType, PrimitiveType, 
TypeCategory}
 import org.apache.atlas.typesystem.types._
 
@@ -468,6 +472,22 @@ object Expressions {
         }
     }
 
+    import scala.collection.JavaConversions._
+    case class ListLiteral[_](dataType: ArrayType, rawValue: 
List[Expressions.Literal[_]]) extends Expression with LeafNode {
+
+        val lc : java.util.List[Expressions.Literal[_]] = rawValue
+        val value = if (rawValue != null) dataType.convert(lc, 
Multiplicity.REQUIRED)
+
+        override def toString = value match {
+            case l: Seq[_]
+               => l.mkString("[",",","]")
+            case c: ImmutableCollection[_] =>
+                c.asList.mkString("[",",","]")
+            case x =>
+                x.toString
+        }
+    }
+
     def literal[T](typ: PrimitiveType[T], rawValue: Any) = new Literal[T](typ, 
rawValue)
 
     def boolean(rawValue: Any) = literal(DataTypes.BOOLEAN_TYPE, rawValue)
@@ -492,6 +512,12 @@ object Expressions {
 
     def date(rawValue: Any) = literal(DataTypes.DATE_TYPE, rawValue)
 
+    def list[_ <: PrimitiveType[_]](listElements: 
List[Expressions.Literal[_]]) =  {
+        
listLiteral(TypeSystem.getInstance().defineArrayType(listElements.head.dataType),
 listElements)
+    }
+
+    def listLiteral[_ <: PrimitiveType[_]](typ: ArrayType, rawValue: 
List[Expressions.Literal[_]]) = new ListLiteral(typ, rawValue)
+
     case class ArithmeticExpression(symbol: String,
                                     left: Expression,
                                     right: Expression)
@@ -601,7 +627,9 @@ object Expressions {
                     s"datatype. Can not resolve due to unresolved children")
             }
 
-            if(left.dataType == DataTypes.DATE_TYPE) {
+            if(left.dataType.getName.startsWith(DataTypes.ARRAY_TYPE_PREFIX)) {
+                left.dataType;
+            } else if(left.dataType == DataTypes.DATE_TYPE) {
                 DataTypes.DATE_TYPE
             } else if (left.dataType != DataTypes.STRING_TYPE || 
right.dataType != DataTypes.STRING_TYPE) {
                 TypeUtils.combinedType(left.dataType, right.dataType)
@@ -651,7 +679,6 @@ object Expressions {
     val GEN_COL_ALIAS_PREFIX = "_col"
 
     case class SelectExpression(child: Expression, selectList: 
List[Expression]) extends Expression {
-
         val children = List(child) ::: selectList
         lazy val selectListWithAlias = selectList.zipWithIndex map {
             case (s: AliasExpression, _) => s

http://git-wip-us.apache.org/repos/asf/incubator-atlas/blob/7660c9b2/repository/src/main/scala/org/apache/atlas/query/GraphPersistenceStrategies.scala
----------------------------------------------------------------------
diff --git 
a/repository/src/main/scala/org/apache/atlas/query/GraphPersistenceStrategies.scala
 
b/repository/src/main/scala/org/apache/atlas/query/GraphPersistenceStrategies.scala
index 2cec503..d857a66 100755
--- 
a/repository/src/main/scala/org/apache/atlas/query/GraphPersistenceStrategies.scala
+++ 
b/repository/src/main/scala/org/apache/atlas/query/GraphPersistenceStrategies.scala
@@ -18,18 +18,23 @@
 
 package org.apache.atlas.query
 
+import java.util
 import java.util.Date
 
 import com.thinkaurelius.titan.core.TitanVertex
-import com.tinkerpop.blueprints.Direction
+import com.tinkerpop.blueprints.{Vertex, Direction}
+import org.apache.atlas.AtlasException
 import org.apache.atlas.query.Expressions.{ComparisonExpression, 
ExpressionException}
 import org.apache.atlas.query.TypeUtils.FieldInfo
+import org.apache.atlas.repository.graph.GraphBackedMetadataRepository
 import org.apache.atlas.typesystem.persistence.Id
 import org.apache.atlas.typesystem.types.DataTypes._
 import org.apache.atlas.typesystem.types._
 import org.apache.atlas.typesystem.{ITypedInstance, 
ITypedReferenceableInstance}
 
 import scala.collection.JavaConversions._
+import scala.collection.mutable
+import scala.collection.mutable.ArrayBuffer
 
 /**
  * Represents the Bridge between the QueryProcessor and the Graph Persistence 
scheme used.
@@ -186,13 +191,15 @@ object GraphPersistenceStrategy1 extends 
GraphPersistenceStrategies {
     val superTypeAttributeName = "superTypeNames"
     val idAttributeName = "guid"
 
-    def edgeLabel(dataType: IDataType[_], aInfo: AttributeInfo) = 
s"${dataType.getName}.${aInfo.name}"
+    def edgeLabel(dataType: IDataType[_], aInfo: AttributeInfo) = 
s"__${dataType.getName}.${aInfo.name}"
+
+    def edgeLabel(propertyName: String) = s"__${propertyName}"
 
     val fieldPrefixInSelect = "it"
 
     def traitLabel(cls: IDataType[_], traitName: String) = 
s"${cls.getName}.$traitName"
 
-    def fieldNameInVertex(dataType: IDataType[_], aInfo: AttributeInfo) = 
aInfo.name
+    def fieldNameInVertex(dataType: IDataType[_], aInfo: AttributeInfo) = 
GraphBackedMetadataRepository.getQualifiedName(dataType, aInfo.name)
 
     def getIdFromVertex(dataTypeNm: String, v: TitanVertex): Id =
         new Id(v.getId.toString, 0, dataTypeNm)
@@ -209,6 +216,8 @@ object GraphPersistenceStrategy1 extends 
GraphPersistenceStrategies {
     def constructInstance[U](dataType: IDataType[U], v: AnyRef): U = {
         dataType.getTypeCategory match {
             case DataTypes.TypeCategory.PRIMITIVE => dataType.convert(v, 
Multiplicity.OPTIONAL)
+            case DataTypes.TypeCategory.ARRAY =>
+                dataType.convert(v, Multiplicity.OPTIONAL)
             case DataTypes.TypeCategory.STRUCT
               if dataType.getName == 
TypeSystem.getInstance().getIdType.getName => {
               val sType = dataType.asInstanceOf[StructType]
@@ -278,7 +287,7 @@ object GraphPersistenceStrategy1 extends 
GraphPersistenceStrategies {
             case DataTypes.TypeCategory.PRIMITIVE => 
loadPrimitiveAttribute(dataType, aInfo, i, v)
             case DataTypes.TypeCategory.ENUM => loadEnumAttribute(dataType, 
aInfo, i, v)
             case DataTypes.TypeCategory.ARRAY =>
-                throw new UnsupportedOperationException(s"load for 
${aInfo.dataType()} not supported")
+                loadArrayAttribute(dataType, aInfo, i, v)
             case DataTypes.TypeCategory.MAP =>
                 throw new UnsupportedOperationException(s"load for 
${aInfo.dataType()} not supported")
             case DataTypes.TypeCategory.STRUCT => 
loadStructAttribute(dataType, aInfo, i, v)
@@ -314,9 +323,26 @@ object GraphPersistenceStrategy1 extends 
GraphPersistenceStrategies {
         }
     }
 
-    private def loadStructAttribute(dataType: IDataType[_], aInfo: 
AttributeInfo,
+
+    private def loadArrayAttribute[T](dataType: IDataType[_], aInfo: 
AttributeInfo,
                                     i: ITypedInstance, v: TitanVertex): Unit = 
{
-        val eLabel = edgeLabel(FieldInfo(dataType, aInfo, null))
+        import scala.collection.JavaConversions._
+        val list: java.util.List[_] = v.getProperty(aInfo.name)
+        val arrayType: DataTypes.ArrayType = 
aInfo.dataType.asInstanceOf[ArrayType]
+
+        var values = new util.ArrayList[Any]
+        list.foreach( listElement =>
+            values += mapVertexToCollectionEntry(v, aInfo, 
arrayType.getElemType, i, listElement)
+        )
+        i.set(aInfo.name, values)
+    }
+
+    private def loadStructAttribute(dataType: IDataType[_], aInfo: 
AttributeInfo,
+                                    i: ITypedInstance, v: TitanVertex, 
edgeLbl: Option[String] = None): Unit = {
+        val eLabel = edgeLbl match {
+            case Some(x) => x
+            case None => edgeLabel(FieldInfo(dataType, aInfo, null))
+        }
         val edges = v.getEdges(Direction.OUT, eLabel)
         val sVertex = 
edges.iterator().next().getVertex(Direction.IN).asInstanceOf[TitanVertex]
         if (aInfo.dataType().getTypeCategory == DataTypes.TypeCategory.STRUCT) 
{
@@ -329,5 +355,22 @@ object GraphPersistenceStrategy1 extends 
GraphPersistenceStrategies {
             i.set(aInfo.name, cInstance)
         }
     }
+
+
+
+    private def mapVertexToCollectionEntry(instanceVertex: TitanVertex, 
attributeInfo: AttributeInfo, elementType: IDataType[_], i: ITypedInstance,  
value: Any): Any = {
+        elementType.getTypeCategory match {
+            case DataTypes.TypeCategory.PRIMITIVE => value
+            case DataTypes.TypeCategory.ENUM => value
+            case DataTypes.TypeCategory.STRUCT =>
+                throw new UnsupportedOperationException(s"load for 
${attributeInfo.dataType()} not supported")
+            case DataTypes.TypeCategory.TRAIT =>
+                throw new UnsupportedOperationException(s"load for 
${attributeInfo.dataType()} not supported")
+            case DataTypes.TypeCategory.CLASS => 
//loadStructAttribute(elementType, attributeInfo, i, v)
+                throw new UnsupportedOperationException(s"load for 
${attributeInfo.dataType()} not supported")
+            case _ =>
+                throw new UnsupportedOperationException(s"load for 
${attributeInfo.dataType()} not supported")
+        }
+    }
 }
 

http://git-wip-us.apache.org/repos/asf/incubator-atlas/blob/7660c9b2/repository/src/main/scala/org/apache/atlas/query/GremlinQuery.scala
----------------------------------------------------------------------
diff --git 
a/repository/src/main/scala/org/apache/atlas/query/GremlinQuery.scala 
b/repository/src/main/scala/org/apache/atlas/query/GremlinQuery.scala
index 75fa916..f1590a8 100755
--- a/repository/src/main/scala/org/apache/atlas/query/GremlinQuery.scala
+++ b/repository/src/main/scala/org/apache/atlas/query/GremlinQuery.scala
@@ -77,6 +77,7 @@ trait SelectExpressionHandling {
         val l = ArrayBuffer[String]()
         e.traverseUp {
             case BackReference(alias, _, _) => l += alias
+            case ClassExpression(clsName) => l += clsName
         }
         l.toSet.toList
     }
@@ -140,14 +141,19 @@ class GremlinTranslator(expr: Expression,
     }
 
     val validateComparisonForm: PartialFunction[Expression, Unit] = {
-        case c@ComparisonExpression(_, left, right) =>
+        case c@ComparisonExpression(op, left, right) =>
             if (!left.isInstanceOf[FieldExpression]) {
                 throw new GremlinTranslationException(c, s"lhs of comparison 
is not a field")
             }
-            if (!right.isInstanceOf[Literal[_]]) {
+            if (!right.isInstanceOf[Literal[_]] && 
!right.isInstanceOf[ListLiteral[_]]) {
                 throw new GremlinTranslationException(c,
                     s"rhs of comparison is not a literal")
             }
+
+            if(right.isInstanceOf[ListLiteral[_]] && (!op.equals("=") && 
!op.equals("!="))) {
+                throw new GremlinTranslationException(c,
+                    s"operation not supported with list literal")
+            }
             ()
     }
 
@@ -201,7 +207,8 @@ class GremlinTranslator(expr: Expression,
             typeTestExpression(clsName)
         case TraitExpression(clsName) =>
             typeTestExpression(clsName)
-        case fe@FieldExpression(fieldName, fInfo, child) if 
fe.dataType.getTypeCategory == TypeCategory.PRIMITIVE => {
+        case fe@FieldExpression(fieldName, fInfo, child)
+            if fe.dataType.getTypeCategory == TypeCategory.PRIMITIVE || 
fe.dataType.getTypeCategory == TypeCategory.ARRAY => {
             val fN = "\"" + 
gPersistenceBehavior.fieldNameInVertex(fInfo.dataType, fInfo.attrInfo) + "\""
             child match {
                 case Some(e) => s"${genQuery(e, inSelect)}.$fN"
@@ -218,15 +225,14 @@ class GremlinTranslator(expr: Expression,
                 case None => step
             }
         }
-        case fe@FieldExpression(fieldName, fInfo, child)
-          if fInfo.traitName != null => {
-          val direction = gPersistenceBehavior.instanceToTraitEdgeDirection
-          val edgeLbl = gPersistenceBehavior.edgeLabel(fInfo)
-          val step = s"""$direction("$edgeLbl")"""
-          child match {
-            case Some(e) => s"${genQuery(e, inSelect)}.$step"
-            case None => step
-          }
+        case fe@FieldExpression(fieldName, fInfo, child) if fInfo.traitName != 
null => {
+            val direction = gPersistenceBehavior.instanceToTraitEdgeDirection
+            val edgeLbl = gPersistenceBehavior.edgeLabel(fInfo)
+            val step = s"""$direction("$edgeLbl")"""
+            child match {
+              case Some(e) => s"${genQuery(e, inSelect)}.$step"
+              case None => step
+            }
         }
         case c@ComparisonExpression(symb, f@FieldExpression(fieldName, fInfo, 
ch), l) => {
           val QUOTE = "\"";
@@ -294,12 +300,20 @@ class GremlinTranslator(expr: Expression,
             s"""out("${gPersistenceBehavior.traitLabel(clsExp.dataType, 
traitName)}")"""
         case isTraitUnaryExpression(traitName, child) =>
             s"""out("${gPersistenceBehavior.traitLabel(child.dataType, 
traitName)}")"""
-        case hasFieldLeafExpression(fieldName, Some(clsExp)) =>
-            s"""has("$fieldName")"""
+        case hasFieldLeafExpression(fieldName, clsExp) => clsExp match {
+            case None => s"""has("$fieldName")"""
+            case Some(x) =>
+             x match {
+                 case c: ClassExpression =>
+                     
s"""has("${x.asInstanceOf[ClassExpression].clsName}.$fieldName")"""
+                 case default => s"""has("$fieldName")"""
+             }
+        }
         case hasFieldUnaryExpression(fieldName, child) =>
             s"""${genQuery(child, inSelect)}.has("$fieldName")"""
         case ArithmeticExpression(symb, left, right) => s"${genQuery(left, 
inSelect)} $symb ${genQuery(right, inSelect)}"
         case l: Literal[_] => l.toString
+        case list: ListLiteral[_] => list.toString
         case in@TraitInstanceExpression(child) => {
           val direction = gPersistenceBehavior.traitToInstanceEdgeDirection
           s"${genQuery(child, inSelect)}.$direction()"
@@ -356,6 +370,7 @@ class GremlinTranslator(expr: Expression,
     }
 
     /*
+     * TODO
      * Translation Issues:
      * 1. back references in filters. For e.g. testBackreference: 'DB as db 
Table where (db.name = "Reporting")'
      *    this is translated to:

http://git-wip-us.apache.org/repos/asf/incubator-atlas/blob/7660c9b2/repository/src/main/scala/org/apache/atlas/query/QueryParser.scala
----------------------------------------------------------------------
diff --git a/repository/src/main/scala/org/apache/atlas/query/QueryParser.scala 
b/repository/src/main/scala/org/apache/atlas/query/QueryParser.scala
index 5b129fb..d33a8e9 100755
--- a/repository/src/main/scala/org/apache/atlas/query/QueryParser.scala
+++ b/repository/src/main/scala/org/apache/atlas/query/QueryParser.scala
@@ -34,6 +34,8 @@ trait QueryKeywords {
 
     protected implicit def asParser(k: Keyword): Parser[String] = k.str
 
+    protected val LIST_LPAREN = Keyword("[")
+    protected val LIST_RPAREN = Keyword("]")
     protected val LPAREN = Keyword("(")
     protected val RPAREN = Keyword(")")
     protected val EQ = Keyword("=")
@@ -222,8 +224,11 @@ class QueryParser extends StandardTokenParsers with 
QueryKeywords with Expressio
 
     def multiERight = (STAR | DIV) ~ atomE ^^ { case op ~ r => (op, r)}
 
+    def atomE = literal | identifier | LPAREN ~> expr <~ RPAREN | listLiteral
 
-    def atomE = literal | identifier | LPAREN ~> expr <~ RPAREN
+    def listLiteral = LIST_LPAREN ~ rep1sep(literal, COMMA) ~ LIST_RPAREN ^^ {
+        case lp ~ le ~ rp => list(le)
+    }
 
     def identifier = rep1sep(ident, DOT) ^^ { l => l match {
         case h :: Nil => id(h)
@@ -237,7 +242,7 @@ class QueryParser extends StandardTokenParsers with 
QueryKeywords with Expressio
 
     def literal = booleanConstant ^^ {
         boolean(_)
-    } |
+        } |
         intConstant ^^ {
             int(_)
         } |

http://git-wip-us.apache.org/repos/asf/incubator-atlas/blob/7660c9b2/repository/src/main/scala/org/apache/atlas/query/Resolver.scala
----------------------------------------------------------------------
diff --git a/repository/src/main/scala/org/apache/atlas/query/Resolver.scala 
b/repository/src/main/scala/org/apache/atlas/query/Resolver.scala
index 142ba8d..c7e1e81 100755
--- a/repository/src/main/scala/org/apache/atlas/query/Resolver.scala
+++ b/repository/src/main/scala/org/apache/atlas/query/Resolver.scala
@@ -112,7 +112,7 @@ object FieldValidator extends PartialFunction[Expression, 
Expression] {
         case fe@FieldExpression(fNm, fInfo, Some(child)) if isSrc(child) =>
             throw new ExpressionException(fe, s"srcType of field doesn't match 
input type")
         case hasFieldUnaryExpression(fNm, child) if child.dataType == 
srcDataType =>
-            hasFieldLeafExpression(fNm)
+            hasFieldLeafExpression(fNm, Some(child))
         case hF@hasFieldUnaryExpression(fNm, child) if isSrc(child) =>
             throw new ExpressionException(hF, s"srcType of field doesn't match 
input type")
         case isTraitUnaryExpression(fNm, child) if child.dataType == 
srcDataType =>

http://git-wip-us.apache.org/repos/asf/incubator-atlas/blob/7660c9b2/repository/src/test/java/org/apache/atlas/BaseHiveRepositoryTest.java
----------------------------------------------------------------------
diff --git 
a/repository/src/test/java/org/apache/atlas/BaseHiveRepositoryTest.java 
b/repository/src/test/java/org/apache/atlas/BaseHiveRepositoryTest.java
new file mode 100644
index 0000000..6b715ac
--- /dev/null
+++ b/repository/src/test/java/org/apache/atlas/BaseHiveRepositoryTest.java
@@ -0,0 +1,357 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.atlas;
+
+import com.google.common.base.Preconditions;
+import com.google.common.collect.ImmutableList;
+import com.thinkaurelius.titan.core.TitanGraph;
+import com.thinkaurelius.titan.core.util.TitanCleanup;
+import org.apache.atlas.repository.graph.GraphBackedMetadataRepository;
+import org.apache.atlas.repository.graph.GraphBackedSearchIndexer;
+import org.apache.atlas.repository.graph.GraphProvider;
+import org.apache.atlas.services.DefaultMetadataService;
+import org.apache.atlas.typesystem.ITypedReferenceableInstance;
+import org.apache.atlas.typesystem.Referenceable;
+import org.apache.atlas.typesystem.TypesDef;
+import org.apache.atlas.typesystem.json.InstanceSerialization;
+import org.apache.atlas.typesystem.json.TypesSerialization;
+import org.apache.atlas.typesystem.persistence.Id;
+import org.apache.atlas.typesystem.types.AttributeDefinition;
+import org.apache.atlas.typesystem.types.ClassType;
+import org.apache.atlas.typesystem.types.DataTypes;
+import org.apache.atlas.typesystem.types.EnumTypeDefinition;
+import org.apache.atlas.typesystem.types.HierarchicalTypeDefinition;
+import org.apache.atlas.typesystem.types.IDataType;
+import org.apache.atlas.typesystem.types.Multiplicity;
+import org.apache.atlas.typesystem.types.StructTypeDefinition;
+import org.apache.atlas.typesystem.types.TraitType;
+import org.apache.atlas.typesystem.types.TypeSystem;
+import org.apache.atlas.typesystem.types.TypeUtils;
+import org.apache.atlas.typesystem.types.utils.TypesUtil;
+import org.testng.annotations.AfterClass;
+import org.testng.annotations.BeforeClass;
+import org.testng.annotations.Guice;
+
+import javax.inject.Inject;
+import java.util.ArrayList;
+import java.util.Date;
+import java.util.List;
+
+/**
+ *  Base Class to set up hive types and instances for tests
+ */
+@Guice(modules = RepositoryMetadataModule.class)
+public class BaseHiveRepositoryTest {
+
+    @Inject
+    protected DefaultMetadataService metadataService;
+
+    @Inject
+    protected GraphBackedMetadataRepository repository;
+
+    @Inject
+    protected GraphProvider<TitanGraph> graphProvider;
+
+    protected void setUp() throws Exception {
+        setUpTypes();
+        new GraphBackedSearchIndexer(graphProvider);
+        setupInstances();
+        // TestUtils.dumpGraph(graphProvider.get());
+    }
+
+    protected void tearDown() throws Exception {
+        TypeSystem.getInstance().reset();
+        try {
+            graphProvider.get().shutdown();
+        } catch (Exception e) {
+            e.printStackTrace();
+        }
+        try {
+            TitanCleanup.clear(graphProvider.get());
+        } catch (Exception e) {
+            e.printStackTrace();
+        }
+    }
+
+    private void setUpTypes() throws Exception {
+        TypesDef typesDef = createTypeDefinitions();
+        String typesAsJSON = TypesSerialization.toJson(typesDef);
+        metadataService.createType(typesAsJSON);
+    }
+
+    private static final String DATABASE_TYPE = "hive_db";
+    private static final String HIVE_TABLE_TYPE = "hive_table";
+    private static final String COLUMN_TYPE = "hive_column";
+    private static final String HIVE_PROCESS_TYPE = "hive_process";
+    private static final String STORAGE_DESC_TYPE = "StorageDesc";
+    private static final String VIEW_TYPE = "View";
+    private static final String PARTITION_TYPE = "hive_partition";
+
+    TypesDef createTypeDefinitions() {
+        HierarchicalTypeDefinition<ClassType> dbClsDef = TypesUtil
+            .createClassTypeDef(DATABASE_TYPE, null, attrDef("name", 
DataTypes.STRING_TYPE),
+                attrDef("description", DataTypes.STRING_TYPE), 
attrDef("locationUri", DataTypes.STRING_TYPE),
+                attrDef("owner", DataTypes.STRING_TYPE), attrDef("createTime", 
DataTypes.LONG_TYPE));
+
+        HierarchicalTypeDefinition<ClassType> columnClsDef = TypesUtil
+            .createClassTypeDef(COLUMN_TYPE, null, attrDef("name", 
DataTypes.STRING_TYPE),
+                attrDef("dataType", DataTypes.STRING_TYPE), attrDef("comment", 
DataTypes.STRING_TYPE));
+
+        HierarchicalTypeDefinition<ClassType> storageDescClsDef = TypesUtil
+            .createClassTypeDef(STORAGE_DESC_TYPE, null,
+                new AttributeDefinition("cols", String.format("array<%s>", 
COLUMN_TYPE),
+                    Multiplicity.COLLECTION, false, null),
+                attrDef("location", DataTypes.STRING_TYPE),
+                attrDef("inputFormat", DataTypes.STRING_TYPE), 
attrDef("outputFormat", DataTypes.STRING_TYPE),
+                attrDef("compressed", DataTypes.STRING_TYPE, 
Multiplicity.REQUIRED, false, null));
+
+
+        HierarchicalTypeDefinition<ClassType> tblClsDef = TypesUtil
+            .createClassTypeDef(HIVE_TABLE_TYPE, ImmutableList.of("DataSet"),
+                attrDef("owner", DataTypes.STRING_TYPE),
+                attrDef("createTime", DataTypes.DATE_TYPE),
+                attrDef("lastAccessTime", DataTypes.LONG_TYPE), 
attrDef("tableType", DataTypes.STRING_TYPE),
+                attrDef("temporary", DataTypes.BOOLEAN_TYPE),
+                new AttributeDefinition("db", DATABASE_TYPE, 
Multiplicity.REQUIRED, false, null),
+                // todo - uncomment this, something is broken
+                new AttributeDefinition("sd", STORAGE_DESC_TYPE,
+                                                Multiplicity.REQUIRED, true, 
null),
+                new AttributeDefinition("columns", 
DataTypes.arrayTypeName(COLUMN_TYPE),
+                    Multiplicity.COLLECTION, true, null));
+
+        HierarchicalTypeDefinition<ClassType> loadProcessClsDef = TypesUtil
+            .createClassTypeDef(HIVE_PROCESS_TYPE, ImmutableList.of("Process"),
+                attrDef("userName", DataTypes.STRING_TYPE), 
attrDef("startTime", DataTypes.LONG_TYPE),
+                attrDef("endTime", DataTypes.LONG_TYPE),
+                attrDef("queryText", DataTypes.STRING_TYPE, 
Multiplicity.REQUIRED),
+                attrDef("queryPlan", DataTypes.STRING_TYPE, 
Multiplicity.REQUIRED),
+                attrDef("queryId", DataTypes.STRING_TYPE, 
Multiplicity.REQUIRED),
+                attrDef("queryGraph", DataTypes.STRING_TYPE, 
Multiplicity.REQUIRED));
+
+        HierarchicalTypeDefinition<ClassType> viewClsDef = TypesUtil
+            .createClassTypeDef(VIEW_TYPE, null, attrDef("name", 
DataTypes.STRING_TYPE),
+                new AttributeDefinition("db", DATABASE_TYPE, 
Multiplicity.REQUIRED, false, null),
+                new AttributeDefinition("inputTables", 
DataTypes.arrayTypeName(HIVE_TABLE_TYPE),
+                    Multiplicity.COLLECTION, false, null));
+
+        AttributeDefinition[] attributeDefinitions = new AttributeDefinition[]{
+            new AttributeDefinition("values", 
DataTypes.arrayTypeName(DataTypes.STRING_TYPE.getName()),
+                Multiplicity.OPTIONAL, false, null),
+            new AttributeDefinition("table", HIVE_TABLE_TYPE, 
Multiplicity.REQUIRED, false, null),
+            };
+        HierarchicalTypeDefinition<ClassType> partClsDef =
+            new HierarchicalTypeDefinition<>(ClassType.class, PARTITION_TYPE, 
null,
+                attributeDefinitions);
+
+        HierarchicalTypeDefinition<TraitType> dimTraitDef = 
TypesUtil.createTraitTypeDef("Dimension", null);
+
+        HierarchicalTypeDefinition<TraitType> factTraitDef = 
TypesUtil.createTraitTypeDef("Fact", null);
+
+        HierarchicalTypeDefinition<TraitType> metricTraitDef = 
TypesUtil.createTraitTypeDef("Metric", null);
+
+        HierarchicalTypeDefinition<TraitType> etlTraitDef = 
TypesUtil.createTraitTypeDef("ETL", null);
+
+        HierarchicalTypeDefinition<TraitType> piiTraitDef = 
TypesUtil.createTraitTypeDef("PII", null);
+
+        HierarchicalTypeDefinition<TraitType> jdbcTraitDef = 
TypesUtil.createTraitTypeDef("JdbcAccess", null);
+
+        return TypeUtils.getTypesDef(ImmutableList.<EnumTypeDefinition>of(), 
ImmutableList.<StructTypeDefinition>of(),
+            ImmutableList.of(dimTraitDef, factTraitDef, piiTraitDef, 
metricTraitDef, etlTraitDef, jdbcTraitDef),
+            ImmutableList.of(dbClsDef, storageDescClsDef, columnClsDef, 
tblClsDef, loadProcessClsDef, viewClsDef, partClsDef));
+    }
+
+    AttributeDefinition attrDef(String name, IDataType dT) {
+        return attrDef(name, dT, Multiplicity.OPTIONAL, false, null);
+    }
+
+    AttributeDefinition attrDef(String name, IDataType dT, Multiplicity m) {
+        return attrDef(name, dT, m, false, null);
+    }
+
+    AttributeDefinition attrDef(String name, IDataType dT, Multiplicity m, 
boolean isComposite,
+        String reverseAttributeName) {
+        Preconditions.checkNotNull(name);
+        Preconditions.checkNotNull(dT);
+        return new AttributeDefinition(name, dT.getName(), m, isComposite, 
reverseAttributeName);
+    }
+
+    private void setupInstances() throws Exception {
+        Id salesDB = database("Sales", "Sales Database", "John ETL", 
"hdfs://host:8000/apps/warehouse/sales");
+
+        Referenceable sd =
+            storageDescriptor("hdfs://host:8000/apps/warehouse/sales", 
"TextInputFormat", "TextOutputFormat", true, ImmutableList.of(column("time_id", 
"int", "time id")));
+
+        List<Referenceable> salesFactColumns = ImmutableList
+            .of(column("time_id", "int", "time id"), column("product_id", 
"int", "product id"),
+                column("customer_id", "int", "customer id", "PII"),
+                column("sales", "double", "product id", "Metric"));
+
+        Id salesFact = table("sales_fact", "sales fact table", salesDB, sd, 
"Joe", "Managed", salesFactColumns, "Fact");
+
+        List<Referenceable> timeDimColumns = ImmutableList
+            .of(column("time_id", "int", "time id"), column("dayOfYear", 
"int", "day Of Year"),
+                column("weekDay", "int", "week Day"));
+
+        Id timeDim = table("time_dim", "time dimension table", salesDB, sd, 
"John Doe", "External", timeDimColumns,
+            "Dimension");
+
+        Id reportingDB =
+            database("Reporting", "reporting database", "Jane BI", 
"hdfs://host:8000/apps/warehouse/reporting");
+
+        Id salesFactDaily =
+            table("sales_fact_daily_mv", "sales fact daily materialized view", 
reportingDB, sd, "Joe BI", "Managed",
+                salesFactColumns, "Metric");
+
+        loadProcess("loadSalesDaily", "hive query for daily summary", "John 
ETL", ImmutableList.of(salesFact, timeDim),
+            ImmutableList.of(salesFactDaily), "create table as select ", 
"plan", "id", "graph", "ETL");
+
+        List<Referenceable> productDimColumns = ImmutableList
+            .of(column("product_id", "int", "product id"), 
column("product_name", "string", "product name"),
+                column("brand_name", "int", "brand name"));
+
+        Id productDim =
+            table("product_dim", "product dimension table", salesDB, sd, "John 
Doe", "Managed", productDimColumns,
+                "Dimension");
+
+        view("product_dim_view", reportingDB, ImmutableList.of(productDim), 
"Dimension", "JdbcAccess");
+
+        List<Referenceable> customerDimColumns = 
ImmutableList.of(column("customer_id", "int", "customer id", "PII"),
+            column("name", "string", "customer name", "PII"),
+            column("address", "string", "customer address", "PII"));
+
+        Id customerDim =
+            table("customer_dim", "customer dimension table", salesDB, sd, 
"fetl", "External", customerDimColumns,
+                "Dimension");
+
+        view("customer_dim_view", reportingDB, ImmutableList.of(customerDim), 
"Dimension", "JdbcAccess");
+
+        Id salesFactMonthly =
+            table("sales_fact_monthly_mv", "sales fact monthly materialized 
view", reportingDB, sd, "Jane BI",
+                "Managed", salesFactColumns, "Metric");
+
+        loadProcess("loadSalesMonthly", "hive query for monthly summary", 
"John ETL", ImmutableList.of(salesFactDaily),
+            ImmutableList.of(salesFactMonthly), "create table as select ", 
"plan", "id", "graph", "ETL");
+
+        partition(new ArrayList() {{ add("2015-01-01"); }}, salesFactDaily);
+    }
+
+    Id database(String name, String description, String owner, String 
locationUri, String... traitNames)
+        throws Exception {
+        Referenceable referenceable = new Referenceable(DATABASE_TYPE, 
traitNames);
+        referenceable.set("name", name);
+        referenceable.set("description", description);
+        referenceable.set("owner", owner);
+        referenceable.set("locationUri", locationUri);
+        referenceable.set("createTime", System.currentTimeMillis());
+
+        ClassType clsType = 
TypeSystem.getInstance().getDataType(ClassType.class, DATABASE_TYPE);
+        return createInstance(referenceable, clsType);
+    }
+
+    Referenceable storageDescriptor(String location, String inputFormat, 
String outputFormat, boolean compressed, List<Referenceable> columns)
+        throws Exception {
+        Referenceable referenceable = new Referenceable(STORAGE_DESC_TYPE);
+        referenceable.set("location", location);
+        referenceable.set("inputFormat", inputFormat);
+        referenceable.set("outputFormat", outputFormat);
+        referenceable.set("compressed", compressed);
+        referenceable.set("cols", columns);
+
+        return referenceable;
+    }
+
+    Referenceable column(String name, String dataType, String comment, 
String... traitNames) throws Exception {
+        Referenceable referenceable = new Referenceable(COLUMN_TYPE, 
traitNames);
+        referenceable.set("name", name);
+        referenceable.set("dataType", dataType);
+        referenceable.set("comment", comment);
+
+        return referenceable;
+    }
+
+    Id table(String name, String description, Id dbId, Referenceable sd, 
String owner, String tableType,
+        List<Referenceable> columns, String... traitNames) throws Exception {
+        Referenceable referenceable = new Referenceable(HIVE_TABLE_TYPE, 
traitNames);
+        referenceable.set("name", name);
+        referenceable.set("description", description);
+        referenceable.set("owner", owner);
+        referenceable.set("tableType", tableType);
+        referenceable.set("temporary", false);
+        referenceable.set("createTime", new Date(System.currentTimeMillis()));
+        referenceable.set("lastAccessTime", System.currentTimeMillis());
+        referenceable.set("retention", System.currentTimeMillis());
+
+        referenceable.set("db", dbId);
+        // todo - uncomment this, something is broken
+        referenceable.set("sd", sd);
+        referenceable.set("columns", columns);
+
+        ClassType clsType = 
TypeSystem.getInstance().getDataType(ClassType.class, HIVE_TABLE_TYPE);
+        return createInstance(referenceable, clsType);
+    }
+
+    Id loadProcess(String name, String description, String user, List<Id> 
inputTables, List<Id> outputTables,
+        String queryText, String queryPlan, String queryId, String queryGraph, 
String... traitNames)
+        throws Exception {
+        Referenceable referenceable = new Referenceable(HIVE_PROCESS_TYPE, 
traitNames);
+        referenceable.set("name", name);
+        referenceable.set("description", description);
+        referenceable.set("user", user);
+        referenceable.set("startTime", System.currentTimeMillis());
+        referenceable.set("endTime", System.currentTimeMillis() + 10000);
+
+        referenceable.set("inputs", inputTables);
+        referenceable.set("outputs", outputTables);
+
+        referenceable.set("queryText", queryText);
+        referenceable.set("queryPlan", queryPlan);
+        referenceable.set("queryId", queryId);
+        referenceable.set("queryGraph", queryGraph);
+
+        ClassType clsType = 
TypeSystem.getInstance().getDataType(ClassType.class, HIVE_PROCESS_TYPE);
+        return createInstance(referenceable, clsType);
+    }
+
+    Id view(String name, Id dbId, List<Id> inputTables, String... traitNames) 
throws Exception {
+        Referenceable referenceable = new Referenceable(VIEW_TYPE, traitNames);
+        referenceable.set("name", name);
+        referenceable.set("db", dbId);
+
+        referenceable.set("inputTables", inputTables);
+        ClassType clsType = 
TypeSystem.getInstance().getDataType(ClassType.class, VIEW_TYPE);
+        return createInstance(referenceable, clsType);
+    }
+
+    Id partition(List<String> values, Id table, String... traitNames) throws 
Exception {
+        Referenceable referenceable = new Referenceable(PARTITION_TYPE, 
traitNames);
+        referenceable.set("values", values);
+        referenceable.set("table", table);
+        ClassType clsType = 
TypeSystem.getInstance().getDataType(ClassType.class, PARTITION_TYPE);
+        return createInstance(referenceable, clsType);
+    }
+    private Id createInstance(Referenceable referenceable, ClassType clsType) 
throws Exception {
+//        String entityJSON = InstanceSerialization.toJson(referenceable, 
true);
+
+
+        ITypedReferenceableInstance typedInstance = 
clsType.convert(referenceable, Multiplicity.REQUIRED);
+        String guid = repository.createEntities(typedInstance)[0];
+
+        // return the reference to created instance with guid
+        return new Id(guid, 0, referenceable.getTypeName());
+    }
+}

http://git-wip-us.apache.org/repos/asf/incubator-atlas/blob/7660c9b2/repository/src/test/java/org/apache/atlas/discovery/GraphBackedDiscoveryServiceTest.java
----------------------------------------------------------------------
diff --git 
a/repository/src/test/java/org/apache/atlas/discovery/GraphBackedDiscoveryServiceTest.java
 
b/repository/src/test/java/org/apache/atlas/discovery/GraphBackedDiscoveryServiceTest.java
index c72ffb2..36b207a 100755
--- 
a/repository/src/test/java/org/apache/atlas/discovery/GraphBackedDiscoveryServiceTest.java
+++ 
b/repository/src/test/java/org/apache/atlas/discovery/GraphBackedDiscoveryServiceTest.java
@@ -20,16 +20,11 @@ package org.apache.atlas.discovery;
 
 import com.google.common.collect.ImmutableList;
 import com.thinkaurelius.titan.core.TitanGraph;
-import com.thinkaurelius.titan.core.util.TitanCleanup;
-import com.tinkerpop.blueprints.Edge;
-import com.tinkerpop.blueprints.Vertex;
+import org.apache.atlas.BaseHiveRepositoryTest;
 import org.apache.atlas.RepositoryMetadataModule;
 import org.apache.atlas.TestUtils;
 import org.apache.atlas.discovery.graph.GraphBackedDiscoveryService;
-import org.apache.atlas.query.HiveTitanSample;
-import org.apache.atlas.query.QueryTestsUtils;
 import org.apache.atlas.repository.graph.GraphBackedMetadataRepository;
-import org.apache.atlas.repository.graph.GraphHelper;
 import org.apache.atlas.repository.graph.GraphProvider;
 import org.apache.atlas.typesystem.ITypedReferenceableInstance;
 import org.apache.atlas.typesystem.Referenceable;
@@ -38,7 +33,6 @@ import org.apache.atlas.typesystem.types.DataTypes;
 import org.apache.atlas.typesystem.types.HierarchicalTypeDefinition;
 import org.apache.atlas.typesystem.types.Multiplicity;
 import org.apache.atlas.typesystem.types.TypeSystem;
-import org.apache.commons.io.FileUtils;
 import org.codehaus.jettison.json.JSONArray;
 import org.codehaus.jettison.json.JSONObject;
 import org.testng.Assert;
@@ -49,18 +43,13 @@ import org.testng.annotations.Guice;
 import org.testng.annotations.Test;
 
 import javax.inject.Inject;
-import javax.script.Bindings;
-import javax.script.ScriptEngine;
-import javax.script.ScriptEngineManager;
-import javax.script.ScriptException;
-import java.io.File;
 
 import static 
org.apache.atlas.typesystem.types.utils.TypesUtil.createClassTypeDef;
 import static 
org.apache.atlas.typesystem.types.utils.TypesUtil.createOptionalAttrDef;
 import static 
org.apache.atlas.typesystem.types.utils.TypesUtil.createRequiredAttrDef;
 
 @Guice(modules = RepositoryMetadataModule.class)
-public class GraphBackedDiscoveryServiceTest {
+public class GraphBackedDiscoveryServiceTest extends BaseHiveRepositoryTest {
 
     @Inject
     private GraphProvider<TitanGraph> graphProvider;
@@ -73,12 +62,8 @@ public class GraphBackedDiscoveryServiceTest {
 
     @BeforeClass
     public void setUp() throws Exception {
+        super.setUp();
         TypeSystem typeSystem = TypeSystem.getInstance();
-        typeSystem.reset();
-
-        QueryTestsUtils.setupTypes();
-        setupSampleData();
-
         TestUtils.defineDeptEmployeeTypes(typeSystem);
 
         Referenceable hrDept = TestUtils.createDeptEg1(typeSystem);
@@ -88,42 +73,9 @@ public class GraphBackedDiscoveryServiceTest {
         repositoryService.createEntities(hrDept2);
     }
 
-    private void setupSampleData() throws ScriptException {
-        TitanGraph titanGraph = graphProvider.get();
-
-        ScriptEngineManager manager = new ScriptEngineManager();
-        ScriptEngine engine = manager.getEngineByName("gremlin-groovy");
-        Bindings bindings = engine.createBindings();
-        bindings.put("g", titanGraph);
-
-        String hiveGraphFile = FileUtils.getTempDirectory().getPath() + 
File.separator + System.nanoTime() + ".gson";
-        System.out.println("hiveGraphFile = " + hiveGraphFile);
-        HiveTitanSample.writeGson(hiveGraphFile);
-        bindings.put("hiveGraphFile", hiveGraphFile);
-
-        engine.eval("g.loadGraphSON(hiveGraphFile)", bindings);
-        titanGraph.commit();
-
-        System.out.println("*******************Graph 
Dump****************************");
-        for (Vertex vertex : titanGraph.getVertices()) {
-            System.out.println(GraphHelper.vertexString(vertex));
-        }
-
-        for (Edge edge : titanGraph.getEdges()) {
-            System.out.println(GraphHelper.edgeString(edge));
-        }
-        System.out.println("*******************Graph 
Dump****************************");
-    }
-
     @AfterClass
     public void tearDown() throws Exception {
-        TypeSystem.getInstance().reset();
-        graphProvider.get().shutdown();
-        try {
-            TitanCleanup.clear(graphProvider.get());
-        } catch(Exception e) {
-            e.printStackTrace();
-        }
+        super.tearDown();
     }
 
     @Test
@@ -176,54 +128,81 @@ public class GraphBackedDiscoveryServiceTest {
 
     @DataProvider(name = "dslQueriesProvider")
     private Object[][] createDSLQueries() {
-        return new String[][]{
-                {"from DB"}, {"DB"}, {"DB where DB.name=\"Reporting\""}, {"DB 
DB.name = \"Reporting\""},
-                {"DB where DB.name=\"Reporting\" select name, owner"}, {"DB 
has name"}, {"DB, Table"},
-                {"DB is JdbcAccess"},
-            /*
-            {"DB, LoadProcess has name"},
-            {"DB as db1, Table where db1.name = \"Reporting\""},
-            {"DB where DB.name=\"Reporting\" and DB.createTime < " + 
System.currentTimeMillis()},
-            */
-                {"from Table"}, {"Table"}, {"Table is Dimension"}, {"Column 
where Column isa PII"},
-                {"View is Dimension"},
-            /*{"Column where Column isa PII select Column.name"},*/
-                {"Column select Column.name"}, {"Column select name"}, 
{"Column where Column.name=\"customer_id\""},
-                {"from Table select Table.name"}, {"DB where (name = 
\"Reporting\")"},
-                {"DB where (name = \"Reporting\") select name as _col_0, owner 
as _col_1"},
-                {"DB where DB is JdbcAccess"}, {"DB where DB has name"}, {"DB 
Table"}, {"DB where DB has name"},
-                {"DB as db1 Table where (db1.name = \"Reporting\")"},
-                {"DB where (name = \"Reporting\") select name as _col_0, 
(createTime + 1) as _col_1 "},
-                {"Table where (name = \"sales_fact\" and created > 
\"2014-01-01\" ) select name as _col_0, created as _col_1 "},
-                {"Table where (name = \"sales_fact\" and created > 
\"2014-12-11T02:35:58.440Z\" ) select name as _col_0, created as _col_1 "},
+        return new Object[][]{
+                {"from hive_db", 2},
+                {"hive_db", 2},
+                {"hive_db where hive_db.name=\"Reporting\"", 1},
+                {"hive_db hive_db.name = \"Reporting\"", 1},
+                {"hive_db where hive_db.name=\"Reporting\" select name, 
owner", 1},
+                {"hive_db has name", 2},
+                {"hive_db, hive_table", 6},
+                {"View is JdbcAccess", 2},
+                {"hive_db as db1, hive_table where db1.name = \"Reporting\"", 
0}, //Not working - ATLAS-145
+                // - Final working query -> 
discoveryService.searchByGremlin("L:{_var_0 = [] as 
Set;g.V().has(\"__typeName\", 
\"hive_db\").fill(_var_0);g.V().has(\"__superTypeNames\", 
\"hive_db\").fill(_var_0);_var_0._().as(\"db1\").in(\"__hive_table.db\").back(\"db1\").and(_().has(\"hive_db.name\",
 T.eq, \"Reporting\")).toList()}")
+                /*
+                {"hive_db, hive_process has name"}, //Invalid query
+                {"hive_db where hive_db.name=\"Reporting\" and 
hive_db.createTime < " + System.currentTimeMillis()}
+                */
+                {"from hive_table", 6},
+                {"hive_table", 6},
+                {"hive_table isa Dimension", 3},
+                {"hive_column where hive_column isa PII", 6},
+                {"View is Dimension" , 2},
+//                {"hive_column where hive_column isa PII select 
hive_column.name", 6}, //Not working - ATLAS-175
+                {"hive_column select hive_column.name", 27},
+                {"hive_column select name", 27},
+                {"hive_column where hive_column.name=\"customer_id\"", 4},
+                {"from hive_table select hive_table.name", 6},
+                {"hive_db where (name = \"Reporting\")", 1},
+                {"hive_db where (name = \"Reporting\") select name as _col_0, 
owner as _col_1", 1},
+                {"hive_db where hive_db is JdbcAccess", 0}, //Not supposed to 
work
+                {"hive_db hive_table", 6},
+                {"hive_db where hive_db has name", 2},
+                {"hive_db as db1 hive_table where (db1.name = \"Reporting\")", 
0}, //Not working -> ATLAS-145
+                {"hive_db where (name = \"Reporting\") select name as _col_0, 
(createTime + 1) as _col_1 ", 1},
+                {"hive_table where (name = \"sales_fact\" and createTime > 
\"2014-01-01\" ) select name as _col_0, createTime as _col_1 ", 1},
+                {"hive_table where (name = \"sales_fact\" and createTime >= 
\"2014-12-11T02:35:58.440Z\" ) select name as _col_0, createTime as _col_1 ", 
1},
+
             /*
-            todo: does not work
-            {"DB where (name = \"Reporting\") and ((createTime + 1) > 0)"},
-            {"DB as db1 Table as tab where ((db1.createTime + 1) > 0) and 
(db1.name = \"Reporting\") select db1.name
+            todo: does not work - ATLAS-146
+            {"hive_db where (name = \"Reporting\") and ((createTime + 1) > 
0)"},
+            {"hive_db as db1 hive_table as tab where ((db1.createTime + 1) > 
0) and (db1.name = \"Reporting\") select db1.name
             as dbName, tab.name as tabName"},
-            {"DB as db1 Table as tab where ((db1.createTime + 1) > 0) or 
(db1.name = \"Reporting\") select db1.name
+            {"hive_db as db1 hive_table as tab where ((db1.createTime + 1) > 
0) or (db1.name = \"Reporting\") select db1.name
             as dbName, tab.name as tabName"},
-            {"DB as db1 Table as tab where ((db1.createTime + 1) > 0) and 
(db1.name = \"Reporting\") or db1 has owner
+            {"hive_db as db1 hive_table as tab where ((db1.createTime + 1) > 
0) and (db1.name = \"Reporting\") or db1 has owner
              select db1.name as dbName, tab.name as tabName"},
-            {"DB as db1 Table as tab where ((db1.createTime + 1) > 0) and 
(db1.name = \"Reporting\") or db1 has owner
+            {"hive_db as db1 hive_table as tab where ((db1.createTime + 1) > 
0) and (db1.name = \"Reporting\") or db1 has owner
              select db1.name as dbName, tab.name as tabName"},
             */
                 // trait searches
-                {"Dimension"},
-            /*{"Fact"}, - todo: does not work*/
-                {"JdbcAccess"}, {"ETL"}, {"Metric"}, {"PII"},
-                // Lineage
-                {"Table LoadProcess outputTable"}, {"Table loop (LoadProcess 
outputTable)"},
-                {"Table as _loop0 loop (LoadProcess outputTable) withPath"},
-                {"Table as src loop (LoadProcess outputTable) as dest select 
src.name as srcTable, dest.name as "
-                        + "destTable withPath"},
-                {"Table as t, sd, Column as c where t.name=\"sales_fact\" 
select c.name as colName, c.dataType as "
-                        + "colType"},
-                {"Table where name='sales_fact', db where name='Reporting'"}};
+                {"Dimension", 5},
+                {"JdbcAccess", 2},
+                {"ETL", 2},
+                {"Metric", 5},
+                {"PII", 6},
+
+                /* Lineage queries are fired through ClosureQuery and are 
tested through HiveLineageJerseyResourceIt in webapp module.
+                   Commenting out the below queries since DSL to Gremlin 
parsing/translation fails with lineage queries when there are array types
+                   used within loop expressions which is the case with DataSet.inputs and outputs.
+                  // Lineage
+                  {"Table LoadProcess outputTable"}, {"Table loop (LoadProcess 
outputTable)"},
+                  {"Table as _loop0 loop (LoadProcess outputTable) withPath"},
+                  {"Table as src loop (LoadProcess outputTable) as dest select 
src.name as srcTable, dest.name as "
+                                        + "destTable withPath"},
+                 */
+//                {"hive_table as t, sd, hive_column as c where 
t.name=\"sales_fact\" select c.name as colName, c.dataType as "
+//                        + "colType", 0}, //Not working - ATLAS-145 and 
ATLAS-166
+
+                {"hive_table where name='sales_fact', db where name='Sales'", 
1},
+                {"hive_table where name='sales_fact', db where 
name='Reporting'", 0},
+                {"hive_partition as p where values = ['2015-01-01']", 1},
+//              {"StorageDesc select cols", 6} //Not working since loading of 
lists needs to be fixed yet
+        };
     }
 
     @Test(dataProvider = "dslQueriesProvider")
-    public void testSearchByDSLQueries(String dslQuery) throws Exception {
+    public void testSearchByDSLQueries(String dslQuery, Integer expectedNumRows) throws Exception {
         System.out.println("Executing dslQuery = " + dslQuery);
         String jsonResults = discoveryService.searchByDSL(dslQuery);
         Assert.assertNotNull(jsonResults);
@@ -242,7 +221,7 @@ public class GraphBackedDiscoveryServiceTest {
 
         JSONArray rows = results.getJSONArray("rows");
         Assert.assertNotNull(rows);
-        Assert.assertTrue(rows.length() >= 0); // some queries may not have 
any results
+        Assert.assertEquals(rows.length(), expectedNumRows.intValue()); // each query now asserts its exact expected row count
         System.out.println("query [" + dslQuery + "] returned [" + 
rows.length() + "] rows");
     }
 


Reply via email to