Repository: atlas
Updated Branches:
  refs/heads/master 5384a7427 -> 8db8b5c7c


http://git-wip-us.apache.org/repos/asf/atlas/blob/8db8b5c7/repository/src/main/java/org/apache/atlas/query/QueryProcessor.java
----------------------------------------------------------------------
diff --git a/repository/src/main/java/org/apache/atlas/query/QueryProcessor.java b/repository/src/main/java/org/apache/atlas/query/QueryProcessor.java
deleted file mode 100644
index 5e07a92..0000000
--- a/repository/src/main/java/org/apache/atlas/query/QueryProcessor.java
+++ /dev/null
@@ -1,823 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- * <p>
- * http://www.apache.org/licenses/LICENSE-2.0
- * <p>
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.atlas.query;
-
-import com.google.common.annotations.VisibleForTesting;
-import org.apache.atlas.exception.AtlasBaseException;
-import org.apache.atlas.model.TypeCategory;
-import org.apache.atlas.model.discovery.SearchParameters;
-import org.apache.atlas.model.typedef.AtlasBaseTypeDef;
-import org.apache.atlas.query.Expressions.Expression;
-import org.apache.atlas.type.AtlasArrayType;
-import org.apache.atlas.type.AtlasBuiltInTypes;
-import org.apache.atlas.type.AtlasEntityType;
-import org.apache.atlas.type.AtlasStructType;
-import org.apache.atlas.type.AtlasType;
-import org.apache.atlas.type.AtlasTypeRegistry;
-import org.apache.commons.lang.StringUtils;
-import org.joda.time.DateTime;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-import javax.inject.Inject;
-import java.util.ArrayList;
-import java.util.Arrays;
-import java.util.HashMap;
-import java.util.LinkedList;
-import java.util.List;
-import java.util.Map;
-import java.util.Objects;
-import java.util.StringJoiner;
-import java.util.stream.Stream;
-
-public class QueryProcessor {
-    private static final Logger LOG = LoggerFactory.getLogger(QueryProcessor.class);
-
-    private final int DEFAULT_QUERY_RESULT_LIMIT = 25;
-    private final int DEFAULT_QUERY_RESULT_OFFSET = 0;
-
-    private final List<String>      errorList      = new ArrayList<>();
-    private final GremlinClauseList queryClauses   = new GremlinClauseList();
-
-    private int     providedLimit  = DEFAULT_QUERY_RESULT_LIMIT;
-    private int     providedOffset = DEFAULT_QUERY_RESULT_OFFSET;
-    private boolean hasSelect      = false;
-    private boolean isSelectNoop   = false;
-    private boolean hasGrpBy       = false;
-
-    private final org.apache.atlas.query.Lookup lookup;
-    private final boolean isNestedQuery;
-    private int currentStep;
-    private Context context;
-
-    @Inject
-    public QueryProcessor(AtlasTypeRegistry typeRegistry) {
-        this.isNestedQuery = false;
-        lookup = new Lookup(errorList, typeRegistry);
-        context = new Context(errorList, lookup);
-        init();
-    }
-
-    public QueryProcessor(AtlasTypeRegistry typeRegistry, int limit, int offset) {
-        this(typeRegistry);
-        this.providedLimit = limit;
-        this.providedOffset = offset < 0 ? DEFAULT_QUERY_RESULT_OFFSET : offset;
-    }
-
-    @VisibleForTesting
-    QueryProcessor(org.apache.atlas.query.Lookup lookup, Context context) {
-        this.isNestedQuery = false;
-        this.lookup = lookup;
-        this.context = context;
-        init();
-    }
-
-    public QueryProcessor(org.apache.atlas.query.Lookup registryLookup, boolean isNestedQuery) {
-        this.isNestedQuery = isNestedQuery;
-        this.lookup = registryLookup;
-        init();
-    }
-
-    public Expression validate(Expression expression) {
-        return expression.isReady();
-    }
-
-    public void addFrom(String typeName) {
-        if (LOG.isDebugEnabled()) {
-            LOG.debug("addFrom(typeName={})", typeName);
-        }
-
-        IdentifierHelper.Advice ta = getAdvice(typeName);
-        if(context.shouldRegister(ta.get())) {
-            context.registerActive(ta.get());
-
-            IdentifierHelper.Advice ia = getAdvice(ta.get());
-            if (ia.isTrait()) {
-                add(GremlinClause.TRAIT, ia.get());
-            } else {
-                if (ia.hasSubtypes()) {
-                    add(GremlinClause.HAS_TYPE_WITHIN, ia.getSubTypes());
-                } else {
-                    add(GremlinClause.HAS_TYPE, ia.get());
-                }
-            }
-        } else {
-            IdentifierHelper.Advice ia = getAdvice(ta.get());
-            introduceType(ia);
-        }
-    }
-
-    public void addFromProperty(String typeName, String attribute) {
-        if (LOG.isDebugEnabled()) {
-            LOG.debug("addFromProperty(typeName={}, attribute={})", typeName, attribute);
-        }
-
-        addFrom(typeName);
-        add(GremlinClause.HAS_PROPERTY,
-                IdentifierHelper.getQualifiedName(lookup, context, attribute));
-    }
-
-
-    public void addFromIsA(String typeName, String traitName) {
-        addFrom(typeName);
-        add(GremlinClause.TRAIT, traitName);
-    }
-
-    public void addWhere(String lhs, String operator, String rhs) {
-        if (LOG.isDebugEnabled()) {
-            LOG.debug("addWhere(lhs={}, operator={}, rhs={})", lhs, operator, rhs);
-        }
-
-        String currentType  = context.getActiveTypeName();
-        SearchParameters.Operator op = SearchParameters.Operator.fromString(operator);
-        IdentifierHelper.Advice org = null;
-        IdentifierHelper.Advice lhsI = getAdvice(lhs);
-        if(lhsI.isPrimitive() == false) {
-            introduceType(lhsI);
-            org = lhsI;
-            lhsI = getAdvice(lhs);
-        }
-
-        if(lhsI.isDate()) {
-            rhs = parseDate(rhs);
-        }
-
-        rhs = addQuotesIfNecessary(rhs);
-        if(op == SearchParameters.Operator.LIKE) {
-            add(GremlinClause.TEXT_CONTAINS, lhsI.getQualifiedName(), rhs.replace("*", ".*").replace('?', '.'));
-        } else if(op == SearchParameters.Operator.IN) {
-            add(GremlinClause.HAS_OPERATOR, lhsI.getQualifiedName(), "within", rhs);
-        } else {
-            add(GremlinClause.HAS_OPERATOR, lhsI.getQualifiedName(), op.getSymbols()[1], rhs);
-        }
-
-        if(org != null && org.isPrimitive() == false && org.getIntroduceType()) {
-            add(GremlinClause.IN, org.getEdgeLabel());
-            context.registerActive(currentType);
-        }
-    }
-
-    private String addQuotesIfNecessary(String rhs) {
-        if(IdentifierHelper.isQuoted(rhs)) return rhs;
-        return quoted(rhs);
-    }
-
-    private static String quoted(String rhs) {
-        return IdentifierHelper.getQuoted(rhs);
-    }
-
-    private String parseDate(String rhs) {
-        String s = IdentifierHelper.isQuoted(rhs) ?
-                IdentifierHelper.removeQuotes(rhs) :
-                rhs;
-        return String.format("'%d'", DateTime.parse(s).getMillis());
-    }
-
-    public void addAndClauses(List<String> clauses) {
-        queryClauses.add(GremlinClause.AND, StringUtils.join(clauses, ','));
-    }
-
-    public void addOrClauses(List<String> clauses) {
-        queryClauses.add(GremlinClause.OR, StringUtils.join(clauses, ','));
-    }
-
-    public void addSelect(SelectExprMetadata selectExprMetadata) {
-        String[] items  = selectExprMetadata.getItems();
-        String[] labels = selectExprMetadata.getLabels();
-        if (LOG.isDebugEnabled()) {
-            LOG.debug("addSelect(items.length={})", items != null ? items.length : 0);
-        }
-
-        if (items != null) {
-            for (int i = 0; i < items.length; i++) {
-                IdentifierHelper.Advice ia = getAdvice(items[i]);
-
-                if(!labels[i].equals(items[i])) {
-                    context.aliasMap.put(labels[i], ia.getQualifiedName());
-                }
-
-                if (i == selectExprMetadata.getCountIdx()) {
-                    items[i] = GremlinClause.INLINE_COUNT.get();
-                } else if (i == selectExprMetadata.getMinIdx()) {
-                    items[i] = GremlinClause.INLINE_MIN.get(ia.getQualifiedName(), ia.getQualifiedName());
-                } else if (i == selectExprMetadata.getMaxIdx()) {
-                    items[i] = GremlinClause.INLINE_MAX.get(ia.getQualifiedName(), ia.getQualifiedName());
-                } else if (i == selectExprMetadata.getSumIdx()) {
-                    items[i] = GremlinClause.INLINE_SUM.get(ia.getQualifiedName(), ia.getQualifiedName());
-                } else {
-                    if (!ia.isPrimitive() && ia.getIntroduceType()) {
-                        add(GremlinClause.OUT, ia.getEdgeLabel());
-                        context.registerActive(ia.getTypeName());
-
-                        int dotIdx = ia.get().indexOf(".");
-                        if (dotIdx != -1) {
-                            IdentifierHelper.Advice iax = getAdvice(ia.get());
-                            items[i] = GremlinClause.INLINE_GET_PROPERTY.get(iax.getQualifiedName());
-                        } else {
-                            isSelectNoop = true;
-                        }
-                    } else {
-                        items[i] = GremlinClause.INLINE_GET_PROPERTY.get(ia.getQualifiedName());
-                    }
-                }
-            }
-
-            // If GroupBy clause exists then the query spits out a List<Map<String, List<AtlasVertex>>> otherwise the query returns List<AtlasVertex>
-            // Different transformations are needed for DSLs with groupby and w/o groupby
-            GremlinClause transformationFn;
-            if (isSelectNoop) {
-                transformationFn = GremlinClause.SELECT_EXPR_NOOP_FN;
-            } else {
-                transformationFn = hasGrpBy ? GremlinClause.SELECT_WITH_GRPBY_HELPER_FN : GremlinClause.SELECT_EXPR_HELPER_FN;
-            }
-            queryClauses.add(0, transformationFn, getJoinedQuotedStr(labels), String.join(",", items));
-            queryClauses.add(GremlinClause.INLINE_TRANSFORM_CALL);
-
-            hasSelect = true;
-        }
-    }
-
-    public QueryProcessor createNestedProcessor() {
-        QueryProcessor qp = new QueryProcessor(lookup, true);
-        qp.context = this.context;
-        return qp;
-    }
-
-    public void addFromAlias(String typeName, String alias) {
-        if (LOG.isDebugEnabled()) {
-            LOG.debug("addFromAlias(typeName={}, alias={})", typeName, alias);
-        }
-
-        addFrom(typeName);
-        addAsClause(alias);
-        context.registerAlias(alias);
-    }
-
-    public void addAsClause(String stepName) {
-        if (LOG.isDebugEnabled()) {
-            LOG.debug("addAsClause(stepName={})", stepName);
-        }
-
-        add(GremlinClause.AS, stepName);
-    }
-
-    public void addGroupBy(String item) {
-        if (LOG.isDebugEnabled()) {
-            LOG.debug("addGroupBy(item={})", item);
-        }
-
-        addGroupByClause(item);
-        hasGrpBy = true;
-    }
-
-    public void addLimit(String limit, String offset) {
-        if (LOG.isDebugEnabled()) {
-            LOG.debug("addLimit(limit={}, offset={})", limit, offset);
-        }
-
-        if (offset.equalsIgnoreCase("0")) {
-            add(GremlinClause.LIMIT, limit);
-        } else {
-            addRangeClause(offset, limit);
-        }
-    }
-
-    public void close() {
-        if (queryClauses.isEmpty()) {
-            queryClauses.clear();
-            return;
-        }
-
-        if (queryClauses.hasClause(GremlinClause.LIMIT) == -1) {
-            addLimit(Integer.toString(providedLimit), Integer.toString(providedOffset));
-        }
-
-        updatePosition(GremlinClause.LIMIT);
-        add(GremlinClause.TO_LIST);
-        updatePosition(GremlinClause.INLINE_TRANSFORM_CALL);
-    }
-
-    public String getText() {
-        String ret;
-        String[] items = new String[queryClauses.size()];
-
-        int startIdx = hasSelect ? 1 : 0;
-        int endIdx = hasSelect ? queryClauses.size() - 1 : queryClauses.size();
-        for (int i = startIdx; i < endIdx; i++) {
-            items[i] = queryClauses.getValue(i);
-        }
-
-        if (hasSelect) {
-            String body = StringUtils.join(Stream.of(items).filter(Objects::nonNull).toArray(), ".");
-            String inlineFn = queryClauses.getValue(queryClauses.size() - 1);
-            String funCall = String.format(inlineFn, body);
-            ret = queryClauses.getValue(0) + funCall;
-        } else {
-            ret = String.join(".", items);
-        }
-
-        if (LOG.isDebugEnabled()) {
-            LOG.debug("getText() => {}", ret);
-        }
-        return ret;
-    }
-
-    public boolean hasSelect() {
-        return hasSelect;
-    }
-
-    public void addOrderBy(String name, boolean isDesc) {
-        if (LOG.isDebugEnabled()) {
-            LOG.debug("addOrderBy(name={}, isDesc={})", name, isDesc);
-        }
-
-        addOrderByClause(name, isDesc);
-    }
-
-    private void updatePosition(GremlinClause clause) {
-        int index = queryClauses.hasClause(clause);
-        if(-1 == index) {
-            return;
-        }
-
-        GremlinClauseValue gcv = queryClauses.remove(index);
-        queryClauses.add(gcv);
-    }
-
-    private void init() {
-        if (!isNestedQuery) {
-            add(GremlinClause.G);
-            add(GremlinClause.V);
-        } else {
-            add(GremlinClause.NESTED_START);
-        }
-    }
-
-    private void introduceType(IdentifierHelper.Advice ia) {
-        if (!ia.isPrimitive() && ia.getIntroduceType()) {
-            add(GremlinClause.OUT, ia.getEdgeLabel());
-            context.registerActive(ia.getTypeName());
-        }
-    }
-
-    private IdentifierHelper.Advice getAdvice(String actualTypeName) {
-        return IdentifierHelper.create(context, lookup, actualTypeName);
-    }
-
-    private String getJoinedQuotedStr(String[] elements) {
-        StringJoiner joiner = new StringJoiner(",");
-        Arrays.stream(elements).map(x -> "'" + x + "'").forEach(joiner::add);
-        return joiner.toString();
-    }
-
-    private void add(GremlinClause clause, String... args) {
-        queryClauses.add(new GremlinClauseValue(clause, clause.get(args)));
-    }
-
-    private void add(int idx, GremlinClause clause, String... args) {
-        queryClauses.add(idx, new GremlinClauseValue(clause, clause.get(args)));
-    }
-
-    private void addRangeClause(String startIndex, String endIndex) {
-        if (LOG.isDebugEnabled()) {
-            LOG.debug("addRangeClause(startIndex={}, endIndex={})", startIndex, endIndex);
-        }
-
-        if (hasSelect) {
-            add(queryClauses.size() - 1, GremlinClause.RANGE, startIndex, startIndex, endIndex);
-        } else {
-            add(GremlinClause.RANGE, startIndex, startIndex, endIndex);
-        }
-    }
-
-    private void addOrderByClause(String name, boolean descr) {
-        if (LOG.isDebugEnabled()) {
-            LOG.debug("addOrderByClause(name={}, descr={})", name, descr);
-        }
-
-        IdentifierHelper.Advice ia = getAdvice(name);
-        add((!descr) ? GremlinClause.ORDER_BY : GremlinClause.ORDER_BY_DESC, ia.getQualifiedName());
-    }
-
-    private void addGroupByClause(String name) {
-        if (LOG.isDebugEnabled()) {
-            LOG.debug("addGroupByClause(name={})", name);
-        }
-
-        IdentifierHelper.Advice ia = getAdvice(name);
-        add(GremlinClause.GROUP_BY, ia.getQualifiedName());
-    }
-
-    private enum GremlinClause {
-        AS("as('%s')"),
-        DEDUP("dedup()"),
-        G("g"),
-        GROUP_BY("group().by('%s')"),
-        HAS("has('%s', %s)"),
-        HAS_OPERATOR("has('%s', %s(%s))"),
-        HAS_PROPERTY("has('%s')"),
-        HAS_NOT_PROPERTY("hasNot('%s')"),
-        HAS_TYPE("has('__typeName', '%s')"),
-        HAS_TYPE_WITHIN("has('__typeName', within(%s))"),
-        HAS_WITHIN("has('%s', within(%s))"),
-        IN("in('%s')"),
-        OR("or(%s)"),
-        AND("and(%s)"),
-        NESTED_START("__"),
-        NESTED_HAS_OPERATOR("has('%s', %s(%s))"),
-        LIMIT("limit(%s)"),
-        ORDER_BY("order().by('%s')"),
-        ORDER_BY_DESC("order().by('%s', decr)"),
-        OUT("out('%s')"),
-        RANGE("range(%s, %s + %s)"),
-        SELECT("select('%s')"),
-        TO_LIST("toList()"),
-        TEXT_CONTAINS("has('%s', org.janusgraph.core.attribute.Text.textRegex(%s))"),
-        TEXT_PREFIX("has('%s', org.janusgraph.core.attribute.Text.textPrefix(%s))"),
-        TEXT_SUFFIX("has('%s', org.janusgraph.core.attribute.Text.textRegex(\".*\" + %s))"),
-        TRAIT("has('__traitNames', within('%s'))"),
-        SELECT_EXPR_NOOP_FN("def f(r){ r }; "),
-        SELECT_EXPR_HELPER_FN("def f(r){ return [[%s]].plus(r.collect({[%s]})).unique(); }; "),
-        SELECT_WITH_GRPBY_HELPER_FN("def f(r){ return [[%s]].plus(r.collect({it.values()}).flatten().collect({[%s]})).unique(); }; "),
-        INLINE_COUNT("r.size()"),
-        INLINE_SUM("r.sum({it.value('%s')}).value('%s')"),
-        INLINE_MAX("r.max({it.value('%s')}).value('%s')"),
-        INLINE_MIN("r.min({it.value('%s')}).value('%s')"),
-        INLINE_GET_PROPERTY("it.value('%s')"),
-        INLINE_OUT_VERTEX("it.out('%s')"),
-        INLINE_OUT_VERTEX_VALUE("it.out('%s').value('%s')"), // This might require more closure introduction :(
-        INLINE_TRANSFORM_CALL("f(%s)"),
-        V("V()"),
-        VALUE_MAP("valueMap(%s)");
-
-        private final String format;
-
-        GremlinClause(String format) {
-            this.format = format;
-        }
-
-        String get(String... args) {
-            return (args == null || args.length == 0) ?
-                    format :
-                    String.format(format, args);
-        }
-    }
-
-    private static class GremlinClauseValue {
-        private final GremlinClause clause;
-        private final String value;
-
-        public GremlinClauseValue(GremlinClause clause, String value) {
-            this.clause = clause;
-            this.value = value;
-        }
-
-        public GremlinClause getClause() {
-            return clause;
-        }
-
-        public String getValue() {
-            return value;
-        }
-    }
-
-    private static class GremlinClauseList {
-        private final List<GremlinClauseValue> list;
-
-        private GremlinClauseList() {
-            this.list = new LinkedList<>();
-        }
-
-        public void add(GremlinClauseValue g) {
-            list.add(g);
-        }
-
-        public void add(int idx, GremlinClauseValue g) {
-            list.add(idx, g);
-        }
-
-        public void add(GremlinClauseValue g, AtlasEntityType t) {
-            add(g);
-        }
-
-        public void add(int idx, GremlinClauseValue g, AtlasEntityType t) {
-            add(idx, g);
-        }
-
-        public void add(GremlinClause clause, String... args) {
-            list.add(new GremlinClauseValue(clause, clause.get(args)));
-        }
-
-        public void add(int i, GremlinClause clause, String... args) {
-            list.add(i, new GremlinClauseValue(clause, clause.get(args)));
-        }
-
-        public GremlinClauseValue getAt(int i) {
-            return list.get(i);
-        }
-
-        public String getValue(int i) {
-            return list.get(i).value;
-        }
-
-        public GremlinClauseValue get(int i) {
-            return list.get(i);
-        }
-
-        public int size() {
-            return list.size();
-        }
-
-        public int hasClause(GremlinClause clause) {
-            for (int i = 0; i < list.size(); i++) {
-                if (list.get(i).getClause() == clause)
-                    return i;
-            }
-
-            return -1;
-        }
-
-        public boolean isEmpty() {
-            return list.size() == 0 || list.size() == 2;
-        }
-
-        public void clear() {
-            list.clear();
-        }
-
-        public GremlinClauseValue remove(int index) {
-            GremlinClauseValue gcv = get(index);
-            list.remove(index);
-            return gcv;
-        }
-    }
-
-    @VisibleForTesting
-    static class Context {
-        private final List<String> errorList;
-        org.apache.atlas.query.Lookup lookup;
-        Map<String, String> aliasMap = new HashMap<>();
-        private AtlasType activeType;
-
-        public Context(List<String> errorList, org.apache.atlas.query.Lookup lookup) {
-            this.lookup = lookup;
-            this.errorList = errorList;
-        }
-
-        public void registerActive(String typeName) {
-            if(shouldRegister(typeName)) {
-                activeType = lookup.getType(typeName);
-            }
-
-            aliasMap.put(typeName, typeName);
-        }
-
-        public AtlasType getActiveType() {
-            return activeType;
-        }
-
-        public AtlasEntityType getActiveEntityType() {
-            return (activeType instanceof AtlasEntityType) ?
-                    (AtlasEntityType) activeType :
-                    null;
-        }
-
-        public String getActiveTypeName() {
-            return activeType.getTypeName();
-        }
-
-        public boolean shouldRegister(String typeName) {
-            return activeType == null ||
-                    (activeType != null && !StringUtils.equals(getActiveTypeName(), typeName)) &&
-                            (activeType != null && !lookup.hasAttribute(this, typeName));
-        }
-
-        public void registerAlias(String alias) {
-            if(aliasMap.containsKey(alias)) {
-                errorList.add(String.format("Duplicate alias found: %s for type %s already present.", alias, getActiveEntityType()));
-                return;
-            }
-
-            aliasMap.put(alias, getActiveTypeName());
-        }
-
-        public boolean hasAlias(String alias) {
-            return aliasMap.containsKey(alias);
-        }
-
-        public String getTypeNameFromAlias(String alias) {
-            return aliasMap.get(alias);
-        }
-
-        public boolean isEmpty() {
-            return activeType == null;
-        }
-    }
-
-    private static class Lookup implements org.apache.atlas.query.Lookup {
-        private final List<String> errorList;
-        private final AtlasTypeRegistry typeRegistry;
-
-        public Lookup(List<String> errorList, AtlasTypeRegistry typeRegistry) {
-            this.errorList = errorList;
-            this.typeRegistry = typeRegistry;
-        }
-
-        @Override
-        public AtlasType getType(String typeName) {
-            try {
-                return typeRegistry.getType(typeName);
-            } catch (AtlasBaseException e) {
-                addError(e.getMessage());
-            }
-
-            return null;
-        }
-
-        @Override
-        public String getQualifiedName(Context context, String name) {
-            try {
-                AtlasEntityType et = context.getActiveEntityType();
-                if(et == null) {
-                    return "";
-                }
-
-                return et.getQualifiedAttributeName(name);
-            } catch (AtlasBaseException e) {
-                addError(e.getMessage());
-            }
-
-            return "";
-        }
-
-        protected void addError(String s) {
-            errorList.add(s);
-        }
-
-        @Override
-        public boolean isPrimitive(Context context, String attributeName) {
-            AtlasEntityType et = context.getActiveEntityType();
-            if(et == null) {
-                return false;
-            }
-
-            AtlasType attr = et.getAttributeType(attributeName);
-            if(attr == null) {
-                return false;
-            }
-
-            TypeCategory attrTypeCategory = attr.getTypeCategory();
-            return (attrTypeCategory != null) && (attrTypeCategory == TypeCategory.PRIMITIVE || attrTypeCategory == TypeCategory.ENUM);
-        }
-
-        @Override
-        public String getRelationshipEdgeLabel(Context context, String attributeName) {
-            AtlasEntityType et = context.getActiveEntityType();
-            if(et == null) {
-                return "";
-            }
-
-            AtlasStructType.AtlasAttribute attr = et.getAttribute(attributeName);
-            return (attr != null) ? attr.getRelationshipEdgeLabel() : "";
-        }
-
-        @Override
-        public boolean hasAttribute(Context context, String typeName) {
-            return (context.getActiveEntityType() != null) && context.getActiveEntityType().getAttribute(typeName) != null;
-        }
-
-        @Override
-        public boolean doesTypeHaveSubTypes(Context context) {
-            return (context.getActiveEntityType() != null && context.getActiveEntityType().getAllSubTypes().size() > 0);
-        }
-
-        @Override
-        public String getTypeAndSubTypes(Context context) {
-            String[] str = context.getActiveEntityType() != null ?
-                            context.getActiveEntityType().getTypeAndAllSubTypes().toArray(new String[]{}) :
-                            new String[]{};
-            if(str.length == 0) {
-                return null;
-            }
-
-            String[] quoted = new String[str.length];
-            for (int i = 0; i < str.length; i++) {
-                quoted[i] = quoted(str[i]);
-            }
-
-            return StringUtils.join(quoted, ",");
-        }
-
-        @Override
-        public boolean isTraitType(Context context) {
-            return (context.getActiveType() != null &&
-                    context.getActiveType().getTypeCategory() == TypeCategory.CLASSIFICATION);
-        }
-
-        @Override
-        public String getTypeFromEdge(Context context, String item) {
-            AtlasEntityType et = context.getActiveEntityType();
-            if(et == null) {
-                return "";
-            }
-
-            AtlasStructType.AtlasAttribute attr = et.getAttribute(item);
-            if(attr == null) {
-                return null;
-            }
-
-            AtlasType at = attr.getAttributeType();
-            if(at.getTypeCategory() == TypeCategory.ARRAY) {
-                AtlasArrayType arrType = ((AtlasArrayType)at);
-                return ((AtlasBuiltInTypes.AtlasObjectIdType) arrType.getElementType()).getObjectType();
-            }
-
-            return context.getActiveEntityType().getAttribute(item).getTypeName();
-        }
-
-        @Override
-        public boolean isDate(Context context, String attributeName) {
-            AtlasEntityType et = context.getActiveEntityType();
-            if (et == null) {
-                return false;
-            }
-
-            AtlasType attr = et.getAttributeType(attributeName);
-            return attr != null && attr.getTypeName().equals(AtlasBaseTypeDef.ATLAS_TYPE_DATE);
-
-        }
-    }
-
-    static class SelectExprMetadata {
-        private String[] items;
-        private String[] labels;
-
-        private int countIdx = -1;
-        private int sumIdx   = -1;
-        private int maxIdx   = -1;
-        private int minIdx   = -1;
-
-        public String[] getItems() {
-            return items;
-        }
-
-        public int getCountIdx() {
-            return countIdx;
-        }
-
-        public void setCountIdx(final int countIdx) {
-            this.countIdx = countIdx;
-        }
-
-        public int getSumIdx() {
-            return sumIdx;
-        }
-
-        public void setSumIdx(final int sumIdx) {
-            this.sumIdx = sumIdx;
-        }
-
-        public int getMaxIdx() {
-            return maxIdx;
-        }
-
-        public void setMaxIdx(final int maxIdx) {
-            this.maxIdx = maxIdx;
-        }
-
-        public int getMinIdx() {
-            return minIdx;
-        }
-
-        public void setMinIdx(final int minIdx) {
-            this.minIdx = minIdx;
-        }
-
-        public String[] getLabels() {
-            return labels;
-        }
-
-        public void setItems(final String[] items) {
-            this.items = items;
-        }
-
-        public void setLabels(final String[] labels) {
-            this.labels = labels;
-        }
-    }
-}
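For readers following the deleted QueryProcessor above: it assembled Gremlin text by expanding the per-clause String.format templates in the GremlinClause enum and joining the expanded pieces with '.'. Below is a minimal, self-contained Java sketch of that template expansion; the class name, the reduced enum, and main() are illustrative only and are not Atlas code.

    public class ClauseTemplateSketch {
        // Each constant mirrors the GremlinClause idea: one String.format template per traversal step.
        enum Clause {
            G("g"),
            V("V()"),
            HAS_TYPE("has('__typeName', '%s')"),
            LIMIT("limit(%s)"),
            TO_LIST("toList()");

            private final String format;

            Clause(String format) {
                this.format = format;
            }

            // No arguments means the template is literal text; otherwise fill the %s placeholders.
            String get(String... args) {
                return (args == null || args.length == 0) ? format : String.format(format, (Object[]) args);
            }
        }

        public static void main(String[] args) {
            // Joining the expanded clauses with '.' yields the traversal text,
            // e.g. g.V().has('__typeName', 'DB').limit(25).toList()
            String query = String.join(".",
                    Clause.G.get(),
                    Clause.V.get(),
                    Clause.HAS_TYPE.get("DB"),
                    Clause.LIMIT.get("25"),
                    Clause.TO_LIST.get());
            System.out.println(query);
        }
    }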

http://git-wip-us.apache.org/repos/asf/atlas/blob/8db8b5c7/repository/src/main/java/org/apache/atlas/query/antlr4/AtlasDSLLexer.java
----------------------------------------------------------------------
diff --git a/repository/src/main/java/org/apache/atlas/query/antlr4/AtlasDSLLexer.java b/repository/src/main/java/org/apache/atlas/query/antlr4/AtlasDSLLexer.java
index 576e129..77c66c9 100644
--- a/repository/src/main/java/org/apache/atlas/query/antlr4/AtlasDSLLexer.java
+++ b/repository/src/main/java/org/apache/atlas/query/antlr4/AtlasDSLLexer.java
@@ -1,13 +1,15 @@
 // Generated from repository/src/main/java/org/apache/atlas/query/antlr4/AtlasDSLLexer.g4 by ANTLR 4.7
 package org.apache.atlas.query.antlr4;
-import org.antlr.v4.runtime.Lexer;
 import org.antlr.v4.runtime.CharStream;
-import org.antlr.v4.runtime.Token;
-import org.antlr.v4.runtime.TokenStream;
-import org.antlr.v4.runtime.*;
-import org.antlr.v4.runtime.atn.*;
+import org.antlr.v4.runtime.Lexer;
+import org.antlr.v4.runtime.RuntimeMetaData;
+import org.antlr.v4.runtime.Vocabulary;
+import org.antlr.v4.runtime.VocabularyImpl;
+import org.antlr.v4.runtime.atn.ATN;
+import org.antlr.v4.runtime.atn.ATNDeserializer;
+import org.antlr.v4.runtime.atn.LexerATNSimulator;
+import org.antlr.v4.runtime.atn.PredictionContextCache;
 import org.antlr.v4.runtime.dfa.DFA;
-import org.antlr.v4.runtime.misc.*;
 
 @SuppressWarnings({"all", "warnings", "unchecked", "unused", "cast"})
 public class AtlasDSLLexer extends Lexer {

http://git-wip-us.apache.org/repos/asf/atlas/blob/8db8b5c7/repository/src/main/java/org/apache/atlas/query/antlr4/AtlasDSLLexer.tokens
----------------------------------------------------------------------
diff --git a/repository/src/main/java/org/apache/atlas/query/antlr4/AtlasDSLLexer.tokens b/repository/src/main/java/org/apache/atlas/query/antlr4/AtlasDSLLexer.tokens
deleted file mode 100644
index 8c147b4..0000000
--- a/repository/src/main/java/org/apache/atlas/query/antlr4/AtlasDSLLexer.tokens
+++ /dev/null
@@ -1,59 +0,0 @@
-SINGLE_LINE_COMMENT=1
-MULTILINE_COMMENT=2
-WS=3
-NUMBER=4
-FLOATING_NUMBER=5
-BOOL=6
-K_COMMA=7
-K_PLUS=8
-K_MINUS=9
-K_STAR=10
-K_DIV=11
-K_DOT=12
-K_LIKE=13
-K_AND=14
-K_OR=15
-K_LPAREN=16
-K_LBRACKET=17
-K_RPAREN=18
-K_RBRACKET=19
-K_LT=20
-K_LTE=21
-K_EQ=22
-K_NEQ=23
-K_GT=24
-K_GTE=25
-K_FROM=26
-K_WHERE=27
-K_ORDERBY=28
-K_GROUPBY=29
-K_LIMIT=30
-K_SELECT=31
-K_MAX=32
-K_MIN=33
-K_SUM=34
-K_COUNT=35
-K_LOOP=36
-K_OFFSET=37
-K_AS=38
-K_ISA=39
-K_IS=40
-K_HAS=41
-K_ASC=42
-K_DESC=43
-K_WITHPATH=44
-K_TRUE=45
-K_FALSE=46
-KEYWORD=47
-ID=48
-STRING=49
-','=7
-'+'=8
-'-'=9
-'*'=10
-'/'=11
-'.'=12
-'('=16
-'['=17
-')'=18
-']'=19

http://git-wip-us.apache.org/repos/asf/atlas/blob/8db8b5c7/repository/src/main/java/org/apache/atlas/query/antlr4/AtlasDSLParser.g4
----------------------------------------------------------------------
diff --git a/repository/src/main/java/org/apache/atlas/query/antlr4/AtlasDSLParser.g4 b/repository/src/main/java/org/apache/atlas/query/antlr4/AtlasDSLParser.g4
index 058a5c8..72d08e1 100644
--- a/repository/src/main/java/org/apache/atlas/query/antlr4/AtlasDSLParser.g4
+++ b/repository/src/main/java/org/apache/atlas/query/antlr4/AtlasDSLParser.g4
@@ -107,4 +107,4 @@ querySrc: commaDelimitedQueries | spaceDelimitedQueries ;
 query: querySrc groupByExpression?
                 selectClause?
                 orderByExpr?
-                limitOffset? ;
+                limitOffset? ;
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/atlas/blob/8db8b5c7/repository/src/main/java/org/apache/atlas/query/antlr4/AtlasDSLParser.tokens
----------------------------------------------------------------------
diff --git a/repository/src/main/java/org/apache/atlas/query/antlr4/AtlasDSLParser.tokens b/repository/src/main/java/org/apache/atlas/query/antlr4/AtlasDSLParser.tokens
deleted file mode 100644
index 8c147b4..0000000
--- a/repository/src/main/java/org/apache/atlas/query/antlr4/AtlasDSLParser.tokens
+++ /dev/null
@@ -1,59 +0,0 @@
-SINGLE_LINE_COMMENT=1
-MULTILINE_COMMENT=2
-WS=3
-NUMBER=4
-FLOATING_NUMBER=5
-BOOL=6
-K_COMMA=7
-K_PLUS=8
-K_MINUS=9
-K_STAR=10
-K_DIV=11
-K_DOT=12
-K_LIKE=13
-K_AND=14
-K_OR=15
-K_LPAREN=16
-K_LBRACKET=17
-K_RPAREN=18
-K_RBRACKET=19
-K_LT=20
-K_LTE=21
-K_EQ=22
-K_NEQ=23
-K_GT=24
-K_GTE=25
-K_FROM=26
-K_WHERE=27
-K_ORDERBY=28
-K_GROUPBY=29
-K_LIMIT=30
-K_SELECT=31
-K_MAX=32
-K_MIN=33
-K_SUM=34
-K_COUNT=35
-K_LOOP=36
-K_OFFSET=37
-K_AS=38
-K_ISA=39
-K_IS=40
-K_HAS=41
-K_ASC=42
-K_DESC=43
-K_WITHPATH=44
-K_TRUE=45
-K_FALSE=46
-KEYWORD=47
-ID=48
-STRING=49
-','=7
-'+'=8
-'-'=9
-'*'=10
-'/'=11
-'.'=12
-'('=16
-'['=17
-')'=18
-']'=19

http://git-wip-us.apache.org/repos/asf/atlas/blob/8db8b5c7/repository/src/main/java/org/apache/atlas/repository/store/graph/v1/AtlasAbstractDefStoreV1.java
----------------------------------------------------------------------
diff --git a/repository/src/main/java/org/apache/atlas/repository/store/graph/v1/AtlasAbstractDefStoreV1.java b/repository/src/main/java/org/apache/atlas/repository/store/graph/v1/AtlasAbstractDefStoreV1.java
index 02dad46..48973c3 100644
--- a/repository/src/main/java/org/apache/atlas/repository/store/graph/v1/AtlasAbstractDefStoreV1.java
+++ b/repository/src/main/java/org/apache/atlas/repository/store/graph/v1/AtlasAbstractDefStoreV1.java
@@ -23,7 +23,7 @@ import org.apache.atlas.AtlasException;
 import org.apache.atlas.exception.AtlasBaseException;
 import org.apache.atlas.model.typedef.AtlasBaseTypeDef;
 import org.apache.atlas.model.typedef.AtlasStructDef;
-import org.apache.atlas.query.QueryParser;
+import org.apache.atlas.query.AtlasDSL;
 import org.apache.atlas.repository.graphdb.AtlasVertex;
 import org.apache.atlas.repository.store.graph.AtlasDefStore;
 import org.apache.atlas.type.AtlasTypeRegistry;
@@ -64,7 +64,7 @@ import java.util.regex.Pattern;
             if (!allowReservedKeywords && typeDef instanceof AtlasStructDef) {
                 final List<AtlasStructDef.AtlasAttributeDef> attributeDefs = ((AtlasStructDef) typeDef).getAttributeDefs();
                 for (AtlasStructDef.AtlasAttributeDef attrDef : attributeDefs) {
-                    if (QueryParser.isKeyword(attrDef.getName())) {
+                    if (AtlasDSL.Parser.isKeyword(attrDef.getName())) {
                         throw new AtlasBaseException(AtlasErrorCode.ATTRIBUTE_NAME_INVALID, attrDef.getName(), typeDef.getCategory().name());
                     }
                 }
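The hunk above swaps QueryParser.isKeyword for AtlasDSL.Parser.isKeyword when validating attribute names against reserved DSL keywords. A hedged sketch of that guard in isolation follows: the Atlas types and the AtlasDSL.Parser.isKeyword call are taken from the diff, while the wrapper class, method shape, and the AtlasErrorCode import path are assumptions for illustration, not the store's actual code.

    import org.apache.atlas.AtlasErrorCode;
    import org.apache.atlas.exception.AtlasBaseException;
    import org.apache.atlas.model.typedef.AtlasStructDef;
    import org.apache.atlas.query.AtlasDSL;

    public class ReservedKeywordCheckSketch {
        // Rejects attribute names that collide with DSL keywords, mirroring the guard in the hunk above.
        static void validateAttributeNames(AtlasStructDef structDef, boolean allowReservedKeywords) throws AtlasBaseException {
            if (allowReservedKeywords) {
                return;
            }

            for (AtlasStructDef.AtlasAttributeDef attrDef : structDef.getAttributeDefs()) {
                // AtlasDSL.Parser.isKeyword() is the replacement for the old QueryParser.isKeyword() call.
                if (AtlasDSL.Parser.isKeyword(attrDef.getName())) {
                    throw new AtlasBaseException(AtlasErrorCode.ATTRIBUTE_NAME_INVALID,
                            attrDef.getName(), structDef.getCategory().name());
                }
            }
        }
    }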

http://git-wip-us.apache.org/repos/asf/atlas/blob/8db8b5c7/repository/src/main/java/org/apache/atlas/repository/store/graph/v1/AtlasRelationshipDefStoreV1.java
----------------------------------------------------------------------
diff --git a/repository/src/main/java/org/apache/atlas/repository/store/graph/v1/AtlasRelationshipDefStoreV1.java b/repository/src/main/java/org/apache/atlas/repository/store/graph/v1/AtlasRelationshipDefStoreV1.java
index d325300..7163e42 100644
--- a/repository/src/main/java/org/apache/atlas/repository/store/graph/v1/AtlasRelationshipDefStoreV1.java
+++ b/repository/src/main/java/org/apache/atlas/repository/store/graph/v1/AtlasRelationshipDefStoreV1.java
@@ -25,7 +25,7 @@ import org.apache.atlas.model.typedef.AtlasRelationshipDef;
 import org.apache.atlas.model.typedef.AtlasRelationshipDef.RelationshipCategory;
 import org.apache.atlas.model.typedef.AtlasRelationshipDef.PropagateTags;
 import org.apache.atlas.model.typedef.AtlasRelationshipEndDef;
-import org.apache.atlas.query.QueryParser;
+import org.apache.atlas.query.AtlasDSL;
 import org.apache.atlas.repository.Constants;
 import org.apache.atlas.repository.graphdb.AtlasEdge;
 import org.apache.atlas.repository.graphdb.AtlasVertex;
@@ -354,11 +354,11 @@ public class AtlasRelationshipDefStoreV1 extends AtlasAbstractDefStoreV1<AtlasRe
         }
 
         if (!allowReservedKeywords) {
-            if (QueryParser.isKeyword(end1.getName())) {
+            if (AtlasDSL.Parser.isKeyword(end1.getName())) {
                 throw new AtlasBaseException(AtlasErrorCode.RELATIONSHIPDEF_END1_NAME_INVALID, end1.getName());
             }
 
-            if (QueryParser.isKeyword(end2.getName())) {
+            if (AtlasDSL.Parser.isKeyword(end2.getName())) {
                 throw new AtlasBaseException(AtlasErrorCode.RELATIONSHIPDEF_END2_NAME_INVALID, end2.getName());
             }
         }

http://git-wip-us.apache.org/repos/asf/atlas/blob/8db8b5c7/repository/src/test/java/org/apache/atlas/query/DSLQueriesTest.java
----------------------------------------------------------------------
diff --git a/repository/src/test/java/org/apache/atlas/query/DSLQueriesTest.java b/repository/src/test/java/org/apache/atlas/query/DSLQueriesTest.java
index e4d4475..af4dc3e 100644
--- a/repository/src/test/java/org/apache/atlas/query/DSLQueriesTest.java
+++ b/repository/src/test/java/org/apache/atlas/query/DSLQueriesTest.java
@@ -127,8 +127,8 @@ public class DSLQueriesTest extends BasicTestSetup {
                 {"hive_db where hive_db is JdbcAccess", 0},
                 {"hive_db where hive_db has name", 3},
                 {"hive_db as db1 hive_table where (db1.name = \"Reporting\")", 0},
-                {"hive_table where (name = \"sales_fact\" and createTime > \"2014-01-01\" ) select name as _col_0, createTime as _col_1 ", 1},
-                {"hive_table where (name = \"sales_fact\" and createTime >= \"2014-12-11T02:35:58.440Z\" ) select name as _col_0, createTime as _col_1 ", 1},
+                {"hive_table where (name = \"sales_fact\" and createTime > \"2014-01-01\" ) select name as _col_0, createTime as _col_1", 1},
+                {"hive_table where (name = \"sales_fact\" and createTime >= \"2014-12-11T02:35:58.440Z\" ) select name as _col_0, createTime as _col_1", 1},
                 {"Dimension", 5},
                 {"JdbcAccess", 2},
                 {"ETL", 5},
@@ -240,12 +240,12 @@ public class DSLQueriesTest extends BasicTestSetup {
 
                 {"hive_db as db1 hive_table where (db1.name = \"Reporting\")", 0},
 
-                {"hive_table where (name = \"sales_fact\" and createTime > \"2014-01-01\" ) select name as _col_0, createTime as _col_1 ", 1},
-                {"hive_table where (name = \"sales_fact\" and createTime > \"2014-01-01\" ) select name as _col_0, createTime as _col_1 limit 10 ", 1},
+                {"hive_table where (name = \"sales_fact\" and createTime > \"2014-01-01\" ) select name as _col_0, createTime as _col_1", 1},
+                {"hive_table where (name = \"sales_fact\" and createTime > \"2014-01-01\" ) select name as _col_0, createTime as _col_1 limit 10", 1},
                 {"hive_table where (name = \"sales_fact\" and createTime > \"2014-01-01\" ) select name as _col_0, createTime as _col_1 limit 10 offset 0", 1},
                 {"hive_table where (name = \"sales_fact\" and createTime > \"2014-01-01\" ) select name as _col_0, createTime as _col_1 limit 10 offset 5", 0},
 
-                {"hive_table where (name = \"sales_fact\" and createTime >= \"2014-12-11T02:35:58.440Z\" ) select name as _col_0, createTime as _col_1 ", 1},
+                {"hive_table where (name = \"sales_fact\" and createTime >= \"2014-12-11T02:35:58.440Z\" ) select name as _col_0, createTime as _col_1", 1},
                 {"hive_table where (name = \"sales_fact\" and createTime >= \"2014-12-11T02:35:58.440Z\" ) select name as _col_0, createTime as _col_1 limit 10 offset 0", 1},
                 {"hive_table where (name = \"sales_fact\" and createTime >= \"2014-12-11T02:35:58.440Z\" ) select name as _col_0, createTime as _col_1 limit 10 offset 1", 0},
                 {"hive_table where (name = \"sales_fact\" and createTime >= \"2014-12-11T02:35:58.440Z\" ) select name as _col_0, createTime as _col_1 limit 10", 1},
@@ -285,7 +285,7 @@ public class DSLQueriesTest extends BasicTestSetup {
                 {"hive_column select hive_column.qualifiedName orderby qualifiedName desc limit 5", 5, "hive_column.qualifiedName", false},
 
                 {"from hive_db orderby hive_db.owner limit 3", 3, "owner", true},
-                {"hive_db where hive_db.name=\"Reporting\" orderby 'owner'", 1, "owner", true},
+                {"hive_db where hive_db.name=\"Reporting\" orderby owner", 1, "owner", true},
 
                 {"hive_db where hive_db.name=\"Reporting\" orderby hive_db.owner limit 10 ", 1, "owner", true},
                 {"hive_db where hive_db.name=\"Reporting\" select name, owner orderby hive_db.name ", 1, "name", true},
@@ -327,16 +327,16 @@ public class DSLQueriesTest extends BasicTestSetup {
                 {"hive_db where hive_db has name orderby hive_db.owner limit 2 offset 0", 2, "owner", true},
                 {"hive_db where hive_db has name orderby hive_db.owner limit 2 offset 1", 2, "owner", true},
 
-                {"hive_table where (name = \"sales_fact\" and createTime > \"2014-01-01\" ) select name as _col_0, createTime as _col_1 orderby '_col_1' ", 1, "_col_1", true},
-                {"hive_table where (name = \"sales_fact\" and createTime > \"2014-01-01\" ) select name as _col_0, createTime as _col_1 orderby '_col_1' limit 10 ", 1, "_col_1", true},
-                {"hive_table where (name = \"sales_fact\" and createTime > \"2014-01-01\" ) select name as _col_0, createTime as _col_1 orderby '_col_1' limit 10 offset 0", 1, "_col_1", true},
-                {"hive_table where (name = \"sales_fact\" and createTime > \"2014-01-01\" ) select name as _col_0, createTime as _col_1 orderby '_col_1' limit 10 offset 5", 0, "_col_1", true},
+                {"hive_table where (name = \"sales_fact\" and createTime > \"2014-01-01\" ) select name as _col_0, createTime as _col_1 orderby createTime ", 1, "_col_1", true},
+                {"hive_table where (name = \"sales_fact\" and createTime > \"2014-01-01\" ) select name as _col_0, createTime as _col_1 orderby createTime limit 10 ", 1, "_col_1", true},
+                {"hive_table where (name = \"sales_fact\" and createTime > \"2014-01-01\" ) select name as _col_0, createTime as _col_1 orderby createTime limit 10 offset 0", 1, "_col_1", true},
+                {"hive_table where (name = \"sales_fact\" and createTime > \"2014-01-01\" ) select name as _col_0, createTime as _col_1 orderby createTime limit 10 offset 5", 0, "_col_1", true},
 
-                {"hive_table where (name = \"sales_fact\" and createTime >= \"2014-12-11T02:35:58.440Z\" ) select name as _col_0, createTime as _col_1 orderby '_col_0' ", 1, "_col_0", true},
-                {"hive_table where (name = \"sales_fact\" and createTime >= \"2014-12-11T02:35:58.440Z\" ) select name as _col_0, createTime as _col_1 orderby '_col_0' limit 10 offset 0", 1, "_col_0", true},
-                {"hive_table where (name = \"sales_fact\" and createTime >= \"2014-12-11T02:35:58.440Z\" ) select name as _col_0, createTime as _col_1 orderby '_col_0' limit 10 offset 1", 0, "_col_0", true},
-                {"hive_table where (name = \"sales_fact\" and createTime >= \"2014-12-11T02:35:58.440Z\" ) select name as _col_0, createTime as _col_1 orderby '_col_0' limit 10", 1, "_col_0", true},
-                {"hive_table where (name = \"sales_fact\" and createTime >= \"2014-12-11T02:35:58.440Z\" ) select name as _col_0, createTime as _col_1 orderby '_col_0' limit 0 offset 1", 0, "_col_0", true},
+                {"hive_table where (name = \"sales_fact\" and createTime >= \"2014-12-11T02:35:58.440Z\" ) select name as _col_0, createTime as _col_1 orderby name ", 1, "_col_0", true},
+                {"hive_table where (name = \"sales_fact\" and createTime >= \"2014-12-11T02:35:58.440Z\" ) select name as _col_0, createTime as _col_1 orderby name limit 10 offset 0", 1, "_col_0", true},
+                {"hive_table where (name = \"sales_fact\" and createTime >= \"2014-12-11T02:35:58.440Z\" ) select name as _col_0, createTime as _col_1 orderby name limit 10 offset 1", 0, "_col_0", true},
+                {"hive_table where (name = \"sales_fact\" and createTime >= \"2014-12-11T02:35:58.440Z\" ) select name as _col_0, createTime as _col_1 orderby name limit 10", 1, "_col_0", true},
+                {"hive_table where (name = \"sales_fact\" and createTime >= \"2014-12-11T02:35:58.440Z\" ) select name as _col_0, createTime as _col_1 orderby name limit 0 offset 1", 0, "_col_0", true},
         };
     }
 

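Several of the test queries above exercise the DSL's limit/offset handling, which the query builder maps onto Gremlin limit() and range() steps (see GremlinClause.LIMIT and GremlinClause.RANGE in the deleted QueryProcessor, and the "range(2, 2 + 5)" expectation in GremlinQueryComposerTest below). A small standalone sketch of that mapping, with illustrative class and method names only:

    public class LimitOffsetSketch {
        // Mirrors the limit/offset handling in the removed QueryProcessor.addLimit():
        // a zero offset becomes a plain limit() step; a non-zero offset becomes range(offset, offset + limit).
        static String toGremlinStep(int limit, int offset) {
            if (offset == 0) {
                return String.format("limit(%d)", limit);
            }
            return String.format("range(%d, %d + %d)", offset, offset, limit);
        }

        public static void main(String[] args) {
            System.out.println(toGremlinStep(5, 0));  // limit(5)
            System.out.println(toGremlinStep(5, 2));  // range(2, 2 + 5)
        }
    }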
http://git-wip-us.apache.org/repos/asf/atlas/blob/8db8b5c7/repository/src/test/java/org/apache/atlas/query/GremlinQueryComposerTest.java
----------------------------------------------------------------------
diff --git a/repository/src/test/java/org/apache/atlas/query/GremlinQueryComposerTest.java b/repository/src/test/java/org/apache/atlas/query/GremlinQueryComposerTest.java
new file mode 100644
index 0000000..ccf1cb2
--- /dev/null
+++ b/repository/src/test/java/org/apache/atlas/query/GremlinQueryComposerTest.java
@@ -0,0 +1,394 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * <p>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.atlas.query;
+
+import org.antlr.v4.runtime.CharStreams;
+import org.antlr.v4.runtime.CommonTokenStream;
+import org.antlr.v4.runtime.TokenStream;
+import org.apache.atlas.query.antlr4.AtlasDSLLexer;
+import org.apache.atlas.query.antlr4.AtlasDSLParser;
+import org.apache.atlas.type.AtlasEntityType;
+import org.apache.atlas.type.AtlasType;
+import org.apache.atlas.type.AtlasTypeRegistry;
+import org.apache.commons.lang.StringUtils;
+import org.testng.annotations.Test;
+
+import java.io.ByteArrayInputStream;
+import java.io.IOException;
+import java.io.InputStream;
+import java.util.ArrayList;
+import java.util.List;
+
+import static org.mockito.Mockito.mock;
+import static org.mockito.Mockito.when;
+import static org.testng.Assert.assertEquals;
+import static org.testng.Assert.assertNotNull;
+import static org.testng.Assert.assertNull;
+import static org.testng.Assert.assertTrue;
+
+public class GremlinQueryComposerTest {
+    private List<String> errorList = new ArrayList<>();
+
+    @Test
+    public void classification() {
+        String expected = "g.V().has('__traitNames', within('PII')).limit(25).toList()";
+        verify("PII", expected);
+    }
+
+    @Test()
+    public void dimension() {
+        String expected = "g.V().has('__typeName', 'Table').has('__traitNames', within('Dimension')).limit(25).toList()";
+        verify("Table isa Dimension", expected);
+        verify("Table is Dimension", expected);
+        verify("Table where Table is Dimension", expected);
+        // Not supported since it requires two singleSrcQuery, one for isa clause other for where clause
+//        verify("Table isa Dimension where name = 'sales'",
+//                "g.V().has('__typeName', 'Table').has('__traitNames', within('Dimension')).has('Table.name', eq('sales')).limit(25).toList()");
+    }
+
+    @Test
+    public void fromDB() {
+        verify("from DB", "g.V().has('__typeName', 'DB').limit(25).toList()");
+        verify("from DB limit 10", "g.V().has('__typeName', 'DB').limit(10).toList()");
+        verify("DB limit 10", "g.V().has('__typeName', 'DB').limit(10).toList()");
+    }
+
+    @Test
+    public void DBHasName() {
+        String expected = "g.V().has('__typeName', 'DB').has('DB.name').limit(25).toList()";
+        verify("DB has name", expected);
+        verify("DB where DB has name", expected);
+    }
+
+    @Test
+    public void DBasD() {
+        verify("DB as d", "g.V().has('__typeName', 'DB').as('d').limit(25).toList()");
+    }
+
+    @Test
+    public void DBasDSelect() {
+        String expected = "def f(r){ return [['d.name','d.owner']].plus(r.collect({[it.value('DB.name'),it.value('DB.owner')]})).unique(); }; f(g.V().has('__typeName', 'DB').as('d')";
+        verify("DB as d select d.name, d.owner", expected + ".limit(25).toList())");
+        verify("DB as d select d.name, d.owner limit 10", expected + ".limit(10).toList())");
+    }
+
+    @Test
+    public void tableSelectColumns() {
+        String exMain = "g.V().has('__typeName', 'Table').out('__Table.columns').limit(10).toList()";
+        String exSel = "def f(r){ r }";
+        String exSel1 = "def f(r){ return [['db.name']].plus(r.collect({[it.value('DB.name')]})).unique(); }";
+        verify("Table select columns limit 10", getExpected(exSel, exMain));
+
+        String exMain2 = "g.V().has('__typeName', 'Table').out('__Table.db').limit(25).toList()";
+        verify("Table select db", getExpected(exSel, exMain2));
+
+        String exMain3 = "g.V().has('__typeName', 'Table').out('__Table.db').limit(25).toList()";
+        verify("Table select db.name", getExpected(exSel1, exMain3));
+
+    }
+
+    @Test(enabled = false)
+    public void DBTableFrom() {
+        verify("Table, db", "g.V().has('__typeName', 'Table').out('__DB.Table').limit(25).toList()");
+    }
+
+    @Test
+    public void DBAsDSelectLimit() {
+        verify("from DB limit 5", "g.V().has('__typeName', 'DB').limit(5).toList()");
+        verify("from DB limit 5 offset 2", "g.V().has('__typeName', 'DB').range(2, 2 + 5).limit(25).toList()");
+    }
+
+    @Test
+    public void DBOrderBy() {
+        String expected = "g.V().has('__typeName', 'DB').order().by('DB.name').limit(25).toList()";
+        verify("DB orderby name", expected);
+        verify("from DB orderby name", expected);
+        verify("from DB as d orderby d.owner limit 3", "g.V().has('__typeName', 'DB').as('d').order().by('DB.owner').limit(3).toList()");
+        verify("DB as d orderby d.owner limit 3", "g.V().has('__typeName', 'DB').as('d').order().by('DB.owner').limit(3).toList()");
+
+
+        String exSel = "def f(r){ return [['d.name','d.owner']].plus(r.collect({[it.value('DB.name'),it.value('DB.owner')]})).unique(); }";
+        String exMain = "g.V().has('__typeName', 'DB').as('d').order().by('DB.owner').limit(25).toList()";
+        verify("DB as d select d.name, d.owner orderby (d.owner) limit 25", getExpected(exSel, exMain));
+
+        String exMain2 = "g.V().has('__typeName', 'Table').and(__.has('Table.name', eq(\"sales_fact\")),__.has('Table.createTime', gt('1388563200000'))).order().by('Table.createTime').limit(25).toList()";
+        String exSel2 = "def f(r){ return [['_col_0','_col_1']].plus(r.collect({[it.value('Table.name'),it.value('Table.createTime')]})).unique(); }";
+        verify("Table where (name = \"sales_fact\" and createTime > \"2014-01-01\" ) select name as _col_0, createTime as _col_1 orderby _col_1",
+                getExpected(exSel2, exMain2));
+    }
+
+    @Test
+    public void fromDBOrderByNameDesc() {
+        verify("from DB orderby name DESC", "g.V().has('__typeName', 'DB').order().by('DB.name', decr).limit(25).toList()");
+    }
+
+    @Test
+    public void fromDBSelect() {
+        String expected = "def f(r){ return [['DB.name','DB.owner']].plus(r.collect({[it.value('DB.name'),it.value('DB.owner')]})).unique(); }; f(g.V().has('__typeName', 'DB').limit(25).toList())";
+        verify("from DB select DB.name, DB.owner", expected);
+    }
+
+    @Test
+    public void fromDBGroupBy() {
+        verify("from DB groupby (DB.owner)", "g.V().has('__typeName', 'DB').group().by('DB.owner').limit(25).toList()");
+    }
+
+    @Test
+    public void whereClauseTextContains() {
+        String exMain = "g.V().has('__typeName', 'DB').has('DB.name', eq(\"Reporting\")).limit(25).toList()";
+        String exSel = "def f(r){ return [['name','owner']].plus(r.collect({[it.value('DB.name'),it.value('DB.owner')]})).unique(); }";
+        verify("from DB where name = \"Reporting\" select name, owner", getExpected(exSel, exMain));
+        verify("from DB where (name = \"Reporting\") select name, owner", getExpected(exSel, exMain));
+        verify("Table where Asset.name like \"Tab*\"",
+                "g.V().has('__typeName', 'Table').has('Table.name', org.janusgraph.core.attribute.Text.textRegex(\"Tab.*\")).limit(25).toList()");
+        verify("from Table where (db.name = \"Reporting\")",
+                "g.V().has('__typeName', 'Table').out('__Table.db').has('DB.name', eq(\"Reporting\")).dedup().in('__Table.db').limit(25).toList()");
+    }
+
+    @Test
+    public void whereClauseWithAsTextContains() {
+        String exSel = "def f(r){ return [['t.name','t.owner']].plus(r.collect({[it.value('Table.name'),it.value('Table.owner')]})).unique(); }";
+        String exMain = "g.V().has('__typeName', 'Table').as('t').has('Table.name', eq(\"testtable_1\")).limit(25).toList()";
+        verify("Table as t where t.name = \"testtable_1\" select t.name, t.owner)", getExpected(exSel, exMain));
+    }
+
+    @Test
+    public void whereClauseWithDateCompare() {
+        String exSel = "def f(r){ return [['t.name','t.owner']].plus(r.collect({[it.value('Table.name'),it.value('Table.owner')]})).unique(); }";
+        String exMain = "g.V().has('__typeName', 'Table').as('t').has('Table.createdTime', eq('1513046158440')).limit(25).toList()";
+        verify("Table as t where t.createdTime = \"2017-12-12T02:35:58.440Z\" select t.name, t.owner)", getExpected(exSel, exMain));
+    }
+
+    @Test
+    public void subType() {
+        String exMain = "g.V().has('__typeName', within('Asset','Table')).limit(25).toList()";
+        String exSel = "def f(r){ return [['name','owner']].plus(r.collect({[it.value('Asset.name'),it.value('Asset.owner')]})).unique(); }";
+
+        verify("Asset select name, owner", getExpected(exSel, exMain));
+    }
+
+    @Test
+    public void TraitWithSpace() {
+        verify("`Log Data`", "g.V().has('__typeName', 'Log Data').limit(25).toList()");
+    }
+
+    @Test
+    public void nestedQueries() {
+        verify("Table where name=\"sales_fact\" or name=\"testtable_1\"",
+                "g.V().has('__typeName', 'Table').or(__.has('Table.name', eq(\"sales_fact\")),__.has('Table.name', eq(\"testtable_1\"))).limit(25).toList()");
+        verify("Table where name=\"sales_fact\" and name=\"testtable_1\"",
+                "g.V().has('__typeName', 'Table').and(__.has('Table.name', eq(\"sales_fact\")),__.has('Table.name', eq(\"testtable_1\"))).limit(25).toList()");
+        verify("Table where name=\"sales_fact\" or name=\"testtable_1\" or name=\"testtable_2\"",
+                "g.V().has('__typeName', 'Table')" +
+                        ".or(" +
+                        "__.has('Table.name', eq(\"sales_fact\"))," +
+                        "__.has('Table.name', eq(\"testtable_1\"))," +
+                        "__.has('Table.name', eq(\"testtable_2\"))" +
+                        ").limit(25).toList()");
+        verify("Table where name=\"sales_fact\" and name=\"testtable_1\" and name=\"testtable_2\"",
+                "g.V().has('__typeName', 'Table')" +
+                        ".and(" +
+                        "__.has('Table.name', eq(\"sales_fact\"))," +
+                        "__.has('Table.name', eq(\"testtable_1\"))," +
+                        "__.has('Table.name', eq(\"testtable_2\"))" +
+                        ").limit(25).toList()");
+        verify("Table where (name=\"sales_fact\" or name=\"testtable_1\") and name=\"testtable_2\"",
+                "g.V().has('__typeName', 'Table')" +
+                        ".and(" +
+                        "__.or(" +
+                        "__.has('Table.name', eq(\"sales_fact\"))," +
+                        "__.has('Table.name', eq(\"testtable_1\"))" +
+                        ")," +
+                        "__.has('Table.name', eq(\"testtable_2\")))" +
+                        ".limit(25).toList()");
+        verify("Table where name=\"sales_fact\" or (name=\"testtable_1\" and 
name=\"testtable_2\")",
+                "g.V().has('__typeName', 'Table')" +
+                        ".or(" +
+                        "__.has('Table.name', eq(\"sales_fact\"))," +
+                        "__.and(" +
+                        "__.has('Table.name', eq(\"testtable_1\"))," +
+                        "__.has('Table.name', eq(\"testtable_2\")))" +
+                        ")" +
+                        ".limit(25).toList()");
+        verify("Table where name=\"sales_fact\" or name=\"testtable_1\" and 
name=\"testtable_2\"",
+                "g.V().has('__typeName', 'Table')" +
+                        ".and(" +
+                        "__.or(" +
+                        "__.has('Table.name', eq(\"sales_fact\"))," +
+                        "__.has('Table.name', eq(\"testtable_1\"))" +
+                        ")," +
+                        "__.has('Table.name', eq(\"testtable_2\")))" +
+                        ".limit(25).toList()");
+        verify("Table where (name=\"sales_fact\" and owner=\"Joe\") OR 
(name=\"sales_fact_daily_mv\" and owner=\"Joe BI\")",
+                "g.V().has('__typeName', 'Table')" +
+                        ".or(" +
+                        "__.and(" +
+                        "__.has('Table.name', eq(\"sales_fact\"))," +
+                        "__.has('Table.owner', eq(\"Joe\"))" +
+                        ")," +
+                        "__.and(" +
+                        "__.has('Table.name', eq(\"sales_fact_daily_mv\"))," +
+                        "__.has('Table.owner', eq(\"Joe BI\"))" +
+                        "))" +
+                        ".limit(25).toList()");
+        verify("Table where owner=\"hdfs\" or ((name=\"testtable_1\" or 
name=\"testtable_2\") and createdTime < \"2017-12-12T02:35:58.440Z\")",
+                "g.V().has('__typeName', 'Table').or(__.has('Table.owner', 
eq(\"hdfs\")),__.and(__.or(__.has('Table.name', 
eq(\"testtable_1\")),__.has('Table.name', 
eq(\"testtable_2\"))),__.has('Table.createdTime', 
lt('1513046158440')))).limit(25).toList()");
+        verify("hive_db where hive_db.name='Reporting' and hive_db.createTime 
< '2017-12-12T02:35:58.440Z'",
+                "g.V().has('__typeName', 'hive_db').and(__.has('hive_db.name', 
eq('Reporting')),__.has('hive_db.createTime', 
lt('1513046158440'))).limit(25).toList()");
+        verify("Table where db.name='Sales' and db.clusterName='cl1'",
+                "g.V().has('__typeName', 
'Table').and(__.out('__Table.db').has('DB.name', 
eq('Sales')).dedup().in('__Table.db'),__.out('__Table.db').has('DB.clusterName',
 eq('cl1')).dedup().in('__Table.db')).limit(25).toList()");
+    }
+
+    private void verify(String dsl, String expectedGremlin) {
+        AtlasDSLParser.QueryContext queryContext = getParsedQuery(dsl);
+        String actualGremlin = getGremlinQuery(queryContext);
+        assertEquals(actualGremlin, expectedGremlin);
+    }
+
+    private String getExpected(String select, String main) {
+        return String.format("%s; f(%s)", select, main);
+    }
+
+    private AtlasDSLParser.QueryContext getParsedQuery(String query) {
+        AtlasDSLParser.QueryContext queryContext = null;
+        InputStream stream = new ByteArrayInputStream(query.getBytes());
+        AtlasDSLLexer lexer = null;
+
+        try {
+            lexer = new AtlasDSLLexer(CharStreams.fromStream(stream));
+        } catch (IOException e) {
+            assertTrue(false);
+        }
+
+        TokenStream inputTokenStream = new CommonTokenStream(lexer);
+        AtlasDSLParser parser = new AtlasDSLParser(inputTokenStream);
+        queryContext = parser.query();
+
+        assertNotNull(queryContext);
+        assertNull(queryContext.exception);
+
+        return queryContext;
+    }
+
+    private String getGremlinQuery(AtlasDSLParser.QueryContext queryContext) {
+        AtlasTypeRegistry             registry = mock(AtlasTypeRegistry.class);
+        org.apache.atlas.query.Lookup lookup   = new TestLookup(errorList, 
registry);
+        GremlinQueryComposer.Context  context  = new 
GremlinQueryComposer.Context(errorList, lookup);
+
+        GremlinQueryComposer gremlinQueryComposer = new 
GremlinQueryComposer(lookup, context);
+        DSLVisitor           qv                   = new 
DSLVisitor(gremlinQueryComposer);
+        qv.visit(queryContext);
+
+        String s = gremlinQueryComposer.get();
+        assertTrue(StringUtils.isNotEmpty(s));
+        return s;
+    }
+
+    private static class TestLookup implements org.apache.atlas.query.Lookup {
+
+        List<String> errorList;
+        AtlasTypeRegistry registry;
+
+        public TestLookup(List<String> errorList, AtlasTypeRegistry 
typeRegistry) {
+            this.errorList = errorList;
+            this.registry = typeRegistry;
+        }
+
+        @Override
+        public AtlasType getType(String typeName) {
+            AtlasType type = null;
+            if(typeName.equals("PII") || typeName.equals("Dimension")) {
+                type = mock(AtlasType.class);
+            } else {
+                type = mock(AtlasEntityType.class);
+            }
+
+            when(type.getTypeName()).thenReturn(typeName);
+            return type;
+        }
+
+        @Override
+        public String getQualifiedName(GremlinQueryComposer.Context context, 
String name) {
+            if(name.contains("."))
+                return name;
+
+            return String.format("%s.%s", context.getActiveTypeName(), name);
+        }
+
+        @Override
+        public boolean isPrimitive(GremlinQueryComposer.Context context, 
String attributeName) {
+            return attributeName.equals("name") ||
+                    attributeName.equals("owner") ||
+                    attributeName.equals("createdTime") ||
+                    attributeName.equals("createTime") ||
+                    attributeName.equals("clusterName");
+        }
+
+        @Override
+        public String getRelationshipEdgeLabel(GremlinQueryComposer.Context 
context, String attributeName) {
+            if (attributeName.equalsIgnoreCase("columns"))
+                return "__Table.columns";
+            if (attributeName.equalsIgnoreCase("db"))
+                return "__Table.db";
+            else
+                return "__DB.Table";
+        }
+
+        @Override
+        public boolean hasAttribute(GremlinQueryComposer.Context context, 
String typeName) {
+            return (context.getActiveTypeName().equals("Table") && 
typeName.equals("db")) ||
+                    (context.getActiveTypeName().equals("Table") && 
typeName.equals("columns"));
+        }
+
+        @Override
+        public boolean doesTypeHaveSubTypes(GremlinQueryComposer.Context 
context) {
+            return context.getActiveTypeName().equalsIgnoreCase("Asset");
+        }
+
+        @Override
+        public String getTypeAndSubTypes(GremlinQueryComposer.Context context) 
{
+            String[] str = new String[]{"'Asset'", "'Table'"};
+            return StringUtils.join(str, ",");
+        }
+
+        @Override
+        public boolean isTraitType(GremlinQueryComposer.Context context) {
+            return context.getActiveTypeName().equals("PII") || 
context.getActiveTypeName().equals("Dimension");
+        }
+
+        @Override
+        public String getTypeFromEdge(GremlinQueryComposer.Context context, 
String item) {
+            if(context.getActiveTypeName().equals("DB") && 
item.equals("Table")) {
+                return "Table";
+            } else if(context.getActiveTypeName().equals("Table") && 
item.equals("Column")) {
+                return "Column";
+            } else if(context.getActiveTypeName().equals("Table") && 
item.equals("db")) {
+                return "DB";
+            } else if(context.getActiveTypeName().equals("Table") && 
item.equals("columns")) {
+                return "Column";
+            }
+            return context.getActiveTypeName();
+        }
+
+        @Override
+        public boolean isDate(GremlinQueryComposer.Context context, String 
attributeName) {
+            return attributeName.equals("createdTime") ||
+                    attributeName.equals("createTime");
+        }
+    }
+}

http://git-wip-us.apache.org/repos/asf/atlas/blob/8db8b5c7/repository/src/test/java/org/apache/atlas/query/QueryProcessorTest.java
----------------------------------------------------------------------
diff --git 
a/repository/src/test/java/org/apache/atlas/query/QueryProcessorTest.java 
b/repository/src/test/java/org/apache/atlas/query/QueryProcessorTest.java
deleted file mode 100644
index b873141..0000000
--- a/repository/src/test/java/org/apache/atlas/query/QueryProcessorTest.java
+++ /dev/null
@@ -1,397 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- * <p>
- * http://www.apache.org/licenses/LICENSE-2.0
- * <p>
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.atlas.query;
-
-import org.antlr.v4.runtime.CharStreams;
-import org.antlr.v4.runtime.CommonTokenStream;
-import org.antlr.v4.runtime.TokenStream;
-import org.apache.atlas.query.antlr4.AtlasDSLLexer;
-import org.apache.atlas.query.antlr4.AtlasDSLParser;
-import org.apache.atlas.type.AtlasEntityType;
-import org.apache.atlas.type.AtlasType;
-import org.apache.atlas.type.AtlasTypeRegistry;
-import org.apache.commons.lang.StringUtils;
-import org.testng.annotations.Test;
-
-import java.io.ByteArrayInputStream;
-import java.io.IOException;
-import java.io.InputStream;
-import java.util.ArrayList;
-import java.util.List;
-
-import static org.mockito.Mockito.mock;
-import static org.mockito.Mockito.when;
-import static org.testng.Assert.assertEquals;
-import static org.testng.Assert.assertNotNull;
-import static org.testng.Assert.assertNull;
-import static org.testng.Assert.assertTrue;
-
-public class QueryProcessorTest {
-    private List<String> errorList = new ArrayList<>();
-
-    @Test
-    public void classification() {
-        String expected = "g.V().has('__traitNames', 
within('PII')).limit(25).toList()";
-        verify("PII", expected);
-    }
-
-    @Test()
-    public void dimension() {
-        String expected = "g.V().has('__typeName', 
'Table').has('__traitNames', within('Dimension')).limit(25).toList()";
-        verify("Table isa Dimension", expected);
-        verify("Table is Dimension", expected);
-        verify("Table where Table is Dimension", expected);
-        verify("Table isa Dimension where name = 'sales'",
-                "g.V().has('__typeName', 'Table').has('__traitNames', 
within('Dimension')).has('Table.name', eq('sales')).limit(25).toList()");
-    }
-
-    @Test
-    public void fromDB() {
-        verify("from DB", "g.V().has('__typeName', 'DB').limit(25).toList()");
-        verify("from DB limit 10", "g.V().has('__typeName', 
'DB').limit(10).toList()");
-        verify("DB limit 10", "g.V().has('__typeName', 
'DB').limit(10).toList()");
-    }
-
-    @Test
-    public void DBHasName() {
-        String expected = "g.V().has('__typeName', 
'DB').has('DB.name').limit(25).toList()";
-        verify("DB has name", expected);
-        verify("DB where DB has name", expected);
-    }
-
-    @Test
-    public void DBasD() {
-        verify("DB as d", "g.V().has('__typeName', 
'DB').as('d').limit(25).toList()");
-    }
-
-    @Test
-    public void DBasDSelect() {
-        String expected = "def f(r){ return 
[['d.name','d.owner']].plus(r.collect({[it.value('DB.name'),it.value('DB.owner')]})).unique();
 }; f(g.V().has('__typeName', 'DB').as('d')";
-        verify("DB as d select d.name, d.owner", expected + 
".limit(25).toList())");
-        verify("DB as d select d.name, d.owner limit 10", expected + 
".limit(10).toList())");
-    }
-
-    @Test
-    public void tableSelectColumns() {
-        String exMain = "g.V().has('__typeName', 
'Table').out('__Table.columns').limit(10).toList()";
-        String exSel = "def f(r){ r }";
-        verify("Table select columns limit 10", getExpected(exSel, exMain));
-
-        String exMain2 = "g.V().has('__typeName', 
'Table').out('__Table.db').limit(25).toList()";
-        verify("Table select db.name", getExpected(exSel, exMain2));
-    }
-
-    @Test(enabled = false)
-    public void DBTableFrom() {
-        verify("Table, db", "g.V().has('__typeName', 
'Table').out('__DB.Table').limit(25).toList()");
-    }
-
-    @Test
-    public void DBAsDSelectLimit() {
-        verify("from DB limit 5", "g.V().has('__typeName', 
'DB').limit(5).toList()");
-        verify("from DB limit 5 offset 2", "g.V().has('__typeName', 
'DB').range(2, 2 + 5).limit(25).toList()");
-    }
-
-    @Test
-    public void DBOrderBy() {
-        String expected = "g.V().has('__typeName', 
'DB').order().by('DB.name').limit(25).toList()";
-        verify("DB orderby name", expected);
-        verify("from DB orderby name", expected);
-        verify("from DB as d orderby d.owner limit 3", 
"g.V().has('__typeName', 
'DB').as('d').order().by('DB.owner').limit(3).toList()");
-        verify("DB as d orderby d.owner limit 3", "g.V().has('__typeName', 
'DB').as('d').order().by('DB.owner').limit(3).toList()");
-
-
-        String exSel = "def f(r){ return 
[['d.name','d.owner']].plus(r.collect({[it.value('DB.name'),it.value('DB.owner')]})).unique();
 }";
-        String exMain = "g.V().has('__typeName', 
'DB').as('d').order().by('DB.owner)').limit(25).toList()";
-        verify("DB as d select d.name, d.owner orderby (d.owner) limit 25", 
getExpected(exSel, exMain));
-
-        String exMain2 = "g.V().has('__typeName', 
'Table').and(__.has('Table.name', 
eq(\"sales_fact\")),__.has('Table.createTime', 
gt('1388563200000'))).order().by('Table.createTime').limit(25).toList()";
-        String exSel2 = "def f(r){ return 
[['_col_0','_col_1']].plus(r.collect({[it.value('Table.name'),it.value('Table.createTime')]})).unique();
 }";
-        verify("Table where (name = \"sales_fact\" and createTime > 
\"2014-01-01\" ) select name as _col_0, createTime as _col_1 orderby _col_1",
-                getExpected(exSel2, exMain2));
-    }
-
-    @Test
-    public void fromDBOrderByNameDesc() {
-        verify("from DB orderby name DESC", "g.V().has('__typeName', 
'DB').order().by('DB.name', decr).limit(25).toList()");
-    }
-
-    @Test
-    public void fromDBSelect() {
-        String expected = "def f(r){ return 
[['DB.name','DB.owner']].plus(r.collect({[it.value('DB.name'),it.value('DB.owner')]})).unique();
 }; f(g.V().has('__typeName', 'DB').limit(25).toList())";
-        verify("from DB select DB.name, DB.owner", expected);
-    }
-
-    @Test
-    public void fromDBGroupBy() {
-        verify("from DB groupby (DB.owner)", "g.V().has('__typeName', 
'DB').group().by('DB.owner').limit(25).toList()");
-    }
-
-    @Test
-    public void whereClauseTextContains() {
-        String exMain = "g.V().has('__typeName', 'DB').has('DB.name', 
eq(\"Reporting\")).limit(25).toList()";
-        String exSel = "def f(r){ return 
[['name','owner']].plus(r.collect({[it.value('DB.name'),it.value('DB.owner')]})).unique();
 }";
-        verify("from DB where name = \"Reporting\" select name, owner", 
getExpected(exSel, exMain));
-        verify("from DB where (name = \"Reporting\") select name, owner", 
getExpected(exSel, exMain));
-        verify("Table where Asset.name like \"Tab*\"",
-                "g.V().has('__typeName', 'Table').has('Table.name', 
org.janusgraph.core.attribute.Text.textRegex(\"Tab.*\")).limit(25).toList()");
-        verify("from Table where (db.name = \"Reporting\")",
-                "g.V().has('__typeName', 
'Table').out('__Table.db').has('DB.name', 
eq(\"Reporting\")).in('__Table.db').limit(25).toList()");
-    }
-
-    @Test
-    public void whereClauseWithAsTextContains() {
-        String exSel = "def f(r){ return 
[['t.name','t.owner']].plus(r.collect({[it.value('Table.name'),it.value('Table.owner')]})).unique();
 }";
-        String exMain = "g.V().has('__typeName', 
'Table').as('t').has('Table.name', eq(\"testtable_1\")).limit(25).toList()";
-        verify("Table as t where t.name = \"testtable_1\" select t.name, 
t.owner)", getExpected(exSel, exMain));
-    }
-
-    @Test
-    public void whereClauseWithDateCompare() {
-        String exSel = "def f(r){ return 
[['t.name','t.owner']].plus(r.collect({[it.value('Table.name'),it.value('Table.owner')]})).unique();
 }";
-        String exMain = "g.V().has('__typeName', 
'Table').as('t').has('Table.createdTime', 
eq('1513046158440')).limit(25).toList()";
-        verify("Table as t where t.createdTime = \"2017-12-12T02:35:58.440Z\" 
select t.name, t.owner)", getExpected(exSel, exMain));
-    }
-
-    @Test
-    public void multipleWhereClauses() {
-        String exSel = "def f(r){ return 
[['c.owner','c.name','c.dataType']].plus(r.collect({[it.value('Column.owner'),it.value('Column.name'),it.value('Column.dataType')]})).unique();
 }";
-        String exMain = "g.V().has('__typeName', 'Table').has('Table.name', 
eq(\"sales_fact\")).out('__Table.columns').as('c').limit(25).toList()";
-        verify("Table where name=\"sales_fact\", columns as c select c.owner, 
c.name, c.dataType", getExpected(exSel, exMain));
-                ;
-    }
-
-    @Test
-    public void subType() {
-        String exMain = "g.V().has('__typeName', 
within('Asset','Table')).limit(25).toList()";
-        String exSel = "def f(r){ return 
[['name','owner']].plus(r.collect({[it.value('Asset.name'),it.value('Asset.owner')]})).unique();
 }";
-
-        verify("Asset select name, owner", getExpected(exSel, exMain));
-    }
-
-    @Test
-    public void TraitWithSpace() {
-        verify("`Log Data`", "g.V().has('__typeName', 'Log 
Data').limit(25).toList()");
-    }
-
-    @Test
-    public void nestedQueries() {
-        verify("Table where name=\"sales_fact\" or name=\"testtable_1\"",
-                "g.V().has('__typeName', 'Table').or(__.has('Table.name', 
eq(\"sales_fact\")),__.has('Table.name', 
eq(\"testtable_1\"))).limit(25).toList()");
-        verify("Table where name=\"sales_fact\" and name=\"testtable_1\"",
-                "g.V().has('__typeName', 'Table').and(__.has('Table.name', 
eq(\"sales_fact\")),__.has('Table.name', 
eq(\"testtable_1\"))).limit(25).toList()");
-        verify("Table where name=\"sales_fact\" or name=\"testtable_1\" or 
name=\"testtable_2\"",
-                "g.V().has('__typeName', 'Table')" +
-                        ".or(" +
-                        "__.has('Table.name', eq(\"sales_fact\"))," +
-                        "__.has('Table.name', eq(\"testtable_1\"))," +
-                        "__.has('Table.name', eq(\"testtable_2\"))" +
-                        ").limit(25).toList()");
-        verify("Table where name=\"sales_fact\" and name=\"testtable_1\" and 
name=\"testtable_2\"",
-                "g.V().has('__typeName', 'Table')" +
-                        ".and(" +
-                        "__.has('Table.name', eq(\"sales_fact\"))," +
-                        "__.has('Table.name', eq(\"testtable_1\"))," +
-                        "__.has('Table.name', eq(\"testtable_2\"))" +
-                        ").limit(25).toList()");
-        verify("Table where (name=\"sales_fact\" or name=\"testtable_1\") and 
name=\"testtable_2\"",
-                "g.V().has('__typeName', 'Table')" +
-                        ".and(" +
-                        "__.or(" +
-                        "__.has('Table.name', eq(\"sales_fact\"))," +
-                        "__.has('Table.name', eq(\"testtable_1\"))" +
-                        ")," +
-                        "__.has('Table.name', eq(\"testtable_2\")))" +
-                        ".limit(25).toList()");
-        verify("Table where name=\"sales_fact\" or (name=\"testtable_1\" and 
name=\"testtable_2\")",
-                "g.V().has('__typeName', 'Table')" +
-                        ".or(" +
-                        "__.has('Table.name', eq(\"sales_fact\"))," +
-                        "__.and(" +
-                        "__.has('Table.name', eq(\"testtable_1\"))," +
-                        "__.has('Table.name', eq(\"testtable_2\")))" +
-                        ")" +
-                        ".limit(25).toList()");
-        verify("Table where name=\"sales_fact\" or name=\"testtable_1\" and 
name=\"testtable_2\"",
-                "g.V().has('__typeName', 'Table')" +
-                        ".and(" +
-                        "__.or(" +
-                        "__.has('Table.name', eq(\"sales_fact\"))," +
-                        "__.has('Table.name', eq(\"testtable_1\"))" +
-                        ")," +
-                        "__.has('Table.name', eq(\"testtable_2\")))" +
-                        ".limit(25).toList()");
-        verify("Table where (name=\"sales_fact\" and owner=\"Joe\") OR 
(name=\"sales_fact_daily_mv\" and owner=\"Joe BI\")",
-                "g.V().has('__typeName', 'Table')" +
-                        ".or(" +
-                        "__.and(" +
-                        "__.has('Table.name', eq(\"sales_fact\"))," +
-                        "__.has('Table.owner', eq(\"Joe\"))" +
-                        ")," +
-                        "__.and(" +
-                        "__.has('Table.name', eq(\"sales_fact_daily_mv\"))," +
-                        "__.has('Table.owner', eq(\"Joe BI\"))" +
-                        "))" +
-                        ".limit(25).toList()");
-        verify("Table where owner=\"hdfs\" or ((name=\"testtable_1\" or 
name=\"testtable_2\") and createdTime < \"2017-12-12T02:35:58.440Z\")",
-                "g.V().has('__typeName', 'Table').or(__.has('Table.owner', 
eq(\"hdfs\")),__.and(__.or(__.has('Table.name', 
eq(\"testtable_1\")),__.has('Table.name', 
eq(\"testtable_2\"))),__.has('Table.createdTime', 
lt('1513046158440')))).limit(25).toList()");
-        verify("hive_db where hive_db.name='Reporting' and hive_db.createTime 
< '2017-12-12T02:35:58.440Z'",
-                "g.V().has('__typeName', 'hive_db').and(__.has('hive_db.name', 
eq('Reporting')),__.has('hive_db.createTime', 
lt('1513046158440'))).limit(25).toList()");
-        verify("Table where db.name='Sales' and db.clusterName='cl1'",
-                "g.V().has('__typeName', 
'Table').and(__.out('__Table.db').has('DB.name', 
eq('Sales')).in('__Table.db'),__.out('__Table.db').has('DB.clusterName', 
eq('cl1')).in('__Table.db')).limit(25).toList()");
-    }
-
-    private void verify(String dsl, String expectedGremlin) {
-        AtlasDSLParser.QueryContext queryContext = getParsedQuery(dsl);
-        String actualGremlin = getGremlinQuery(queryContext);
-        assertEquals(actualGremlin, expectedGremlin);
-    }
-
-    private String getExpected(String select, String main) {
-        return String.format("%s; f(%s)", select, main);
-    }
-
-    private AtlasDSLParser.QueryContext getParsedQuery(String query) {
-        AtlasDSLParser.QueryContext queryContext = null;
-        InputStream stream = new ByteArrayInputStream(query.getBytes());
-        AtlasDSLLexer lexer = null;
-
-        try {
-            lexer = new AtlasDSLLexer(CharStreams.fromStream(stream));
-        } catch (IOException e) {
-            assertTrue(false);
-        }
-
-        TokenStream inputTokenStream = new CommonTokenStream(lexer);
-        AtlasDSLParser parser = new AtlasDSLParser(inputTokenStream);
-        queryContext = parser.query();
-
-        assertNotNull(queryContext);
-        assertNull(queryContext.exception);
-
-        return queryContext;
-    }
-
-    private String getGremlinQuery(AtlasDSLParser.QueryContext queryContext) {
-        AtlasTypeRegistry registry = mock(AtlasTypeRegistry.class);
-        org.apache.atlas.query.Lookup lookup = new TestLookup(errorList, 
registry);
-        QueryProcessor.Context context = new QueryProcessor.Context(errorList, 
lookup);
-
-        QueryProcessor queryProcessor = new QueryProcessor(lookup, context);
-        DSLVisitor qv = new DSLVisitor(queryProcessor);
-        qv.visit(queryContext);
-        queryProcessor.close();
-
-        String s = queryProcessor.getText();
-        assertTrue(StringUtils.isNotEmpty(s));
-        return s;
-    }
-
-    private static class TestLookup implements org.apache.atlas.query.Lookup {
-
-        List<String> errorList;
-        AtlasTypeRegistry registry;
-
-        public TestLookup(List<String> errorList, AtlasTypeRegistry 
typeRegistry) {
-            this.errorList = errorList;
-            this.registry = typeRegistry;
-        }
-
-        @Override
-        public AtlasType getType(String typeName) {
-            AtlasType type = null;
-            if(typeName.equals("PII") || typeName.equals("Dimension")) {
-                type = mock(AtlasType.class);
-            } else {
-                type = mock(AtlasEntityType.class);
-            }
-
-            when(type.getTypeName()).thenReturn(typeName);
-            return type;
-        }
-
-        @Override
-        public String getQualifiedName(QueryProcessor.Context context, String 
name) {
-            if(name.contains("."))
-                return name;
-
-            return String.format("%s.%s", context.getActiveTypeName(), name);
-        }
-
-        @Override
-        public boolean isPrimitive(QueryProcessor.Context context, String 
attributeName) {
-            return attributeName.equals("name") ||
-                    attributeName.equals("owner") ||
-                    attributeName.equals("createdTime") ||
-                    attributeName.equals("createTime") ||
-                    attributeName.equals("clusterName");
-        }
-
-        @Override
-        public String getRelationshipEdgeLabel(QueryProcessor.Context context, 
String attributeName) {
-            if (attributeName.equalsIgnoreCase("columns"))
-                return "__Table.columns";
-            if (attributeName.equalsIgnoreCase("db"))
-                return "__Table.db";
-            else
-                return "__DB.Table";
-        }
-
-        @Override
-        public boolean hasAttribute(QueryProcessor.Context context, String 
typeName) {
-            return (context.getActiveTypeName().equals("Table") && 
typeName.equals("db")) ||
-                    (context.getActiveTypeName().equals("Table") && 
typeName.equals("columns"));
-        }
-
-        @Override
-        public boolean doesTypeHaveSubTypes(QueryProcessor.Context context) {
-            return context.getActiveTypeName().equalsIgnoreCase("Asset");
-        }
-
-        @Override
-        public String getTypeAndSubTypes(QueryProcessor.Context context) {
-            String[] str = new String[]{"'Asset'", "'Table'"};
-            return StringUtils.join(str, ",");
-        }
-
-        @Override
-        public boolean isTraitType(QueryProcessor.Context context) {
-            return context.getActiveTypeName().equals("PII") || 
context.getActiveTypeName().equals("Dimension");
-        }
-
-        @Override
-        public String getTypeFromEdge(QueryProcessor.Context context, String 
item) {
-            if(context.getActiveTypeName().equals("DB") && 
item.equals("Table")) {
-                return "Table";
-            } else if(context.getActiveTypeName().equals("Table") && 
item.equals("Column")) {
-                return "Column";
-            } else if(context.getActiveTypeName().equals("Table") && 
item.equals("db")) {
-                return "DB";
-            } else if(context.getActiveTypeName().equals("Table") && 
item.equals("columns")) {
-                return "Column";
-            }
-            return context.getActiveTypeName();
-        }
-
-        @Override
-        public boolean isDate(QueryProcessor.Context context, String 
attributeName) {
-            return attributeName.equals("createdTime") ||
-                    attributeName.equals("createTime");
-        }
-    }
-}
